Commit b59c3b89 authored by Blais, Chris's avatar Blais, Chris
Browse files

Add a `return_all` argument to the Pareto front function

parent 6aad911e
Loading
Loading
Loading
Loading

testing/__init__.py

0 → 100644
+0 −0

Empty file added.

+77 −0
Original line number Diff line number Diff line
import os
import numpy as np
import pickle
from context import *
import logging
import argparse

# soar_integration tools
# BUG FIX: the import previously had trailing parentheses
# ("import get_pareto_front()"), which is a SyntaxError.
from tools.soar_utilities import get_pareto_front

def generate_pareto_front(
        sys_name="ccro", 
        objective="linear", 
        n_criteria=2, 
        red_deficit=True, 
        overwrite=False
        ):
    """
    Compute the Pareto front for a pickled process graph and save the result.

    Loads a previously built processGraph pickle from
    testing/process_graphs/<sys_name>_pgraph.pkl, runs get_pareto_front on it
    with return_all=True, and stores the full results dict at
    testing/pareto_front/<sys_name>_<objective>_<n_criteria>_pareto_front.pkl.

    Parameters
    ----------
    sys_name : str
        Name of the system (e.g. "ccro").
    objective : str
        Objective type, "linear" or "bilinear".
    n_criteria : int
        Number of optimization criteria.
    red_deficit : bool
        Whether to use the redundancy-deficit criterion.
    overwrite : bool
        Overwrite an existing pareto-front pickle instead of aborting.

    Raises
    ------
    Exception
        If the output file exists and overwrite is False, or if no process
        graph pickle exists for sys_name.
    """
    # check whether a pareto front pickle already exists for this configuration
    pareto_path = os.path.join(
        module_path, "testing", "pareto_front", 
        f"{sys_name}_{objective}_{n_criteria}_pareto_front.pkl"
    )
    if os.path.exists(pareto_path):
        if overwrite:
            # BUG FIX: these messages previously referenced pgraph_path, which
            # is not assigned until further down → NameError on this branch.
            logging.warning(f"WARNING: file {pareto_path} already exists, overwriting file")
        else:
            raise Exception(f"ERROR: file {pareto_path} already exists and overwrite not specified. Aborting.")

    # load the previously built process graph (built by build_pgraph)
    pgraph_path = os.path.join(module_path, "testing", "process_graphs", f"{sys_name}_pgraph.pkl")
    if os.path.exists(pgraph_path):
        with open(pgraph_path, "rb") as f:
            pgraph = pickle.load(f)
    else:
        raise Exception(f"no process graph found at {pgraph_path}")

    # return_all=True so the result dict carries criteria, solutions,
    # the process graph and the observable configurations
    results_dict = get_pareto_front(
        process_graph=pgraph, 
        plot_all=False, 
        objective=objective, 
        n_criteria=n_criteria, 
        red_deficit=red_deficit, 
        save_results=False, 
        return_all=True,
        verbose=5
    )

    with open(pareto_path, "wb") as f:
        pickle.dump(results_dict, f)


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Generates a pareto front pickle file for a soar processGraph")
    parser.add_argument('--sysname', type=str, help='Name of the system', required=True)
    parser.add_argument('--objective', type=str, help='objective (linear, bilinear)', required=True)
    parser.add_argument('--criteria', type=int, help='criteria (1=nsensors, 2=obs+nsensors, 3=obs+red+nsensors)', required=True)
    parser.add_argument('--reddeficit', action='store_true', help="redundancy deficit")
    parser.add_argument('--overwrite', action='store_true', help='overwrite existing pickle file')

    args = parser.parse_args()

    # BUG FIX: args.reddefecit was a typo — argparse stores the --reddeficit
    # flag as args.reddeficit, so the old attribute raised AttributeError.
    generate_pareto_front(
        sys_name=args.sysname,
        objective=args.objective, 
        n_criteria=args.criteria, 
        red_deficit=args.reddeficit, 
        overwrite=args.overwrite
    )
    



    

        
+120 −0
Original line number Diff line number Diff line
import numpy as np
import pickle
from context import *
import logging
import argparse

# soar_integration tools
from tools.soar_utilities import check_incidence, define_system

def build_pgraph(sys_name="ccro", overwrite=False, n_components=3):
    """
    Build a soar process graph object for a named example system and pickle it.

    Supported systems are "ccro" and "pilot"; each defines an incidence
    matrix, plotting coordinates, and arc splitter / equal-composition sets
    that are passed to define_system. The graph (including cutset info from
    add_graph_info) is saved to testing/process_graphs/<sys_name>_pgraph.pkl.

    Parameters
    ----------
    sys_name : str
        Name of the system ("ccro" or "pilot"), case-insensitive.
    overwrite : bool
        Overwrite an existing pickle file instead of aborting.
    n_components : int
        Number of components passed to define_system.

    Raises
    ------
    Exception
        If the pickle already exists and overwrite is False.
    ValueError
        If sys_name is not a supported system.
    """
    # standardize input
    sys_name = str(sys_name).lower()
    
    # check if pgraph already exists
    pgraph_path = os.path.join(module_path, "testing", "process_graphs", f"{sys_name}_pgraph.pkl")
    if os.path.exists(pgraph_path):
        if overwrite:
            logging.warning(f"WARNING: file {pgraph_path} already exists, overwriting file")
        else:
            raise Exception(f"ERROR: file {pgraph_path} already exists and overwrite not specified. Aborting.")

    if sys_name == "ccro":
        incidence = np.array(
            [   # e   P1  Mx Me1 Sp1  P2
                [-1, +1,  0,  0,  0,  0],  # 1
                [ 0, -1, +1,  0,  0,  0],  # 2
                [ 0,  0, -1, +1,  0,  0],  # 3
                [+1,  0,  0, -1,  0,  0],  # 4
                [ 0,  0,  0, -1, +1,  0],  # 5
                [+1,  0,  0,  0, -1,  0],  # 6
                [ 0,  0,  0,  0, -1, +1],  # 7
                [ 0,  0, +1,  0,  0, -1],  # 8
            ]
        )

        coordinates = np.array([
            [2, 1], #env
            [0, 0], #p1
            [0.75, 0], #mx
            [1.5, 0], #me1
            [2, -1], # sp1
            [1.5,-1]] # p2
        )

        arc_splitter =[[5 - 1, 6 - 1, 7 - 1]]
        arc_equal_composition = [[1-1, 2-1], [5 - 1, 6 - 1, 7 - 1, 8 - 1]]

    # BUG FIX: this was a separate `if`, so "ccro" also fell into the `else`
    # below and logged a spurious error before continuing.
    elif sys_name == "pilot":
        incidence = np.array(
            [   # a   b   c   d   e   f   g   h   i   j   k   l
                [+1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, -1],  # 1
                [-1, +1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0],  # 2
                [ 0, -1, +1,  0,  0,  0,  0,  0,  0,  0,  0,  0],  # 3
                [ 0,  0, -1,  0, +1,  0,  0,  0,  0,  0,  0,  0],  # 4
                [ 0,  0, -1, +1,  0,  0,  0,  0,  0,  0,  0,  0],  # 5
                [ 0,  0,  0, -1, +1,  0,  0,  0,  0,  0,  0,  0],  # 6
                [ 0,  0,  0,  0, -1,  0, +1,  0,  0,  0,  0,  0],  # 7
                [ 0,  0,  0, -1,  0, +1,  0,  0,  0,  0,  0,  0],  # 8
                [ 0,  0,  0,  0,  0, -1, +1,  0,  0,  0,  0,  0],  # 9
                [ 0,  0,  0,  0,  0,  0, -1,  0, +1,  0,  0,  0],  # 10
                [ 0,  0,  0,  0,  0, -1,  0, +1,  0,  0,  0,  0],  # 11
                [ 0,  0,  0,  0,  0,  0,  0, -1, +1,  0,  0,  0],  # 12
                [ 0,  0,  0,  0,  0,  0,  0,  0, -1,  0,  0, +1],  # 13
                [ 0,  0,  0,  0,  0,  0,  0, -1,  0, +1,  0,  0],  # 14
                [ 0,  0,  0,  0,  0,  0,  0,  0,  0, -1,  0, +1],  # 15
                [ 0,  0,  0,  0,  0,  0,  0,  0,  0, -1, +1,  0],  # 16
                [+1,  0,  0,  0,  0,  0,  0,  0,  0,  0, -1,  0],  # 17
            ]
        )
        # define coordinates for plotting process graph
        coordinates = np.array([
            [ 0, 0], # a
            [ 1, 0], # b
            [ 2, 0], # c
            [ 3, 0], # d
            [ 3,-1], # e
            [ 4, 0], # f
            [ 4,-1], # g
            [ 5, 0], # h
            [ 5,-1], # i
            [ 5, 1], # j
            [ 1, 1], # k
            [ 2, 1.25], # l
        ]
        )  
        arc_splitter =[[14 - 1, 15 - 1, 16 - 1]]
        arc_equal_composition = [[2-1, 3-1], [14 - 1, 15 - 1, 16 - 1, 17 - 1]]
    else:
        # BUG FIX: previously this branch only logged and then fell through to
        # check_incidence(incidence), raising a NameError; fail fast instead.
        raise ValueError(f"ERROR: Please specify a valid system name, got {sys_name!r}")
        
    check_incidence(incidence)

    pgraph = define_system(
        iIncidence=incidence, coordinates=coordinates, verbose=5,
        arcSplitter=arc_splitter,
        arcEqualComposition=arc_equal_composition,
        numberOfComponents=n_components,
        )
    
    # this is the longest step, generates the cutsets. 
    pgraph.add_graph_info(verbose=5)

    # save pgraph
    with open(pgraph_path, "wb") as f: 
        pickle.dump(pgraph, f)

        
if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Generates a pickle file for a soar processGraph")
    parser.add_argument('--sysname', type=str, help='Name of the system', required=True)
    parser.add_argument('--overwrite', action='store_true', help='overwrite existing pickle file')
    # expose n_components on the CLI; default matches the function default
    parser.add_argument('--ncomponents', type=int, default=3, help='number of components (default: 3)')

    args = parser.parse_args()

    build_pgraph(sys_name=args.sysname, overwrite=args.overwrite, n_components=args.ncomponents)
+46 −29
Original line number Diff line number Diff line
@@ -185,7 +185,9 @@ def get_pareto_front(
        objective="linear", 
        n_criteria = 3, 
        red_deficit=True, 
        exhaustive=True,
        save_results=False, 
        return_all=False,
        verbose=5
        ): 
    """
@@ -232,14 +234,14 @@ def get_pareto_front(
            iObj = 1
            # we want to enumerate all hydraulic layouts
            strObjective = "FlowOnly"
            xExhaustive = True
            xExhaustive = exhaustive
            feval.RedundancyDeficit = red_deficit
            iSearchMethod = 0
        case "bilinear":
            iObj = 2
            # too many layouts for flow + component, just do pareto
            strObjective = "FlowAndComponent1"
            xExhaustive = False
            xExhaustive = exhaustive
            feval.RedundancyDeficit = red_deficit
            iSearchMethod = 1
            pass
@@ -277,8 +279,8 @@ def get_pareto_front(
            pass

    X = process_graph.xSensorCandidate
    nSensor = np.sum(X).astype(int)
    nLayout = int(2**nSensor)
    nSensor = int(np.sum(X))
    nLayout = 2**nSensor
    # nRedundantMax = nSensor
    # nObservableMax=np.sum(process_graph.rWeightObservability)

@@ -288,6 +290,7 @@ def get_pareto_front(
    )[::-1]
    process_graph.nSensorCandidate = nSensor

    if xExhaustive:
        feval.criteria = np.ones([nLayout, nCriteria]) * np.nan
        feval.observableconfig = (
            np.ones(
@@ -298,12 +301,11 @@ def get_pareto_front(
            )
            * np.nan
        )
        feval.lapse = np.ones([nLayout, nCriteria]) * np.nan

    # nLayout = 2
    iLayoutBnd = [0, nLayout]

    feval.lapse = np.ones([nLayout, nCriteria]) * np.nan

    xMeasured_default = process_graph.xMeasured.copy()

    if xExhaustive:
@@ -375,8 +377,14 @@ def get_pareto_front(
        fig.canvas.flush_events()
        plt.show(block=False)

    if hasattr(feval, "criteria"):
        criteria = feval.criteria.copy()
    else: 
        criteria = None
    if hasattr(feval, "observableconfig"):
        observableconfig = feval.observableconfig.copy()
    else: 
        observableconfig = None

    # save a pickle file
    if save_results:
@@ -414,8 +422,17 @@ def get_pareto_front(
            print("Store results - done")
            pass
    
    if return_all:
        ans = {
            "criteria": criteria, 
            "solutions": Solutions, 
            "process_graph": process_graph, 
            "observableconfig": observableconfig
        }
    else:
        ans = Solutions
    
    return Solutions
    return ans

def get_one_result(        
        process_graph, 
@@ -492,7 +509,7 @@ def get_one_result(
        process_graph.xSensorCandidate.T
    )[::-1]
    process_graph.nSensorCandidate = nSensor

    if xExhaustive:
        feval.criteria = np.ones([nLayout, nCriteria]) * np.nan
        feval.observableconfig = (
            np.ones(
@@ -506,7 +523,7 @@ def get_one_result(

    iLayoutBnd = [0, nLayout]

    feval.lapse = np.ones([nLayout, nCriteria]) * np.nan
    # feval.lapse = np.ones([nLayout, nCriteria]) * np.nan

    xMeasured_default = process_graph.xMeasured.copy()