diff --git a/src/topupopt/problems/esipp/blocks/prices.py b/src/topupopt/problems/esipp/blocks/prices.py index 8fc17ca0d1f5df77e235b10251cdd385d81422b3..404c8525a21d3fba90976e7542eccb8a2be29ca8 100644 --- a/src/topupopt/problems/esipp/blocks/prices.py +++ b/src/topupopt/problems/esipp/blocks/prices.py @@ -19,76 +19,21 @@ def add_prices_block( # ***************************************************************************** # ***************************************************************************** -# TODO: try to implement it as a block + +# TODO: try to implement it as a block (might make things look cleaner) def price_block_other( model: pyo.AbstractModel, + convex_price_function: bool = True, enable_default_values: bool = True, enable_validation: bool = True, enable_initialisation: bool = True ): + # auxiliary set for pyomo model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK - - def rule_node_prices(b, g, l, q, p, k): - - # imported flow - def bounds_var_if_glqpks(m, g, l, q, p, k, s): - if (g, l, q, p, k, s) in m.param_v_max_glqpks: - # predefined finite capacity - return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)]) - else: - # infinite capacity - return (0, None) - - b.var_trans_flow_s = pyo.Var( - b.set_GLQPKS, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flow_s - ) - # imported flow cost - def rule_constr_imp_flow_cost(m, g, l, q, p, k): - return ( - sum( - m.var_if_glqpks[(g, l, q, p, k, s)] - * m.param_p_glqpks[(g, l, q, p, k, s)] - for s in m.set_S[(g, l, q, p, k)] - ) - == m.var_ifc_glqpk[(g, l, q, p, k)] - ) - - model.constr_imp_flow_cost = pyo.Constraint( - model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flow_cost - ) - - # imported flows - def rule_constr_imp_flows(m, g, l, q, p, k): - return sum( - m.var_v_glljqk[(g, l, l_star, j, q, k)] - for l_star in m.set_L[g] - if l_star not in m.set_L_imp[g] - for j in m.set_J[(g, l, l_star)] # only directed arcs - ) == sum(m.var_if_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)]) - - model.constr_imp_flows = pyo.Constraint( - model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flows - ) - - - - - # if (g,l) in b.parent_block().set_GL_imp: - # # import node - - - - # pass - # elif (g,l) in b.parent_block().set_GL_exp: - # # export node - # pass - # otherwise: do nothing - - model.node_price_block = pyo.Block(model.set_GLQPK, rule=rule_node_prices) # set of price segments - model.node_price_block.set_S = pyo.Set() + model.set_S = pyo.Set(model.set_GLQPK) # set of GLQKS tuples def init_set_GLQPKS(m): @@ -96,32 +41,14 @@ def price_block_other( (g, l, q, p, k, s) # for (g,l) in m.set_GL_exp_imp # for (q,k) in m.set_QK - for (g, l, q, p, k) in m.node_price_block.set_S - for s in m.node_price_block.set_S[(g, l, q, p, k)] + for (g, l, q, p, k) in m.set_S + for s in m.set_S[(g, l, q, p, k)] ) - model.node_price_block.set_GLQPKS = pyo.Set( + model.set_GLQPKS = pyo.Set( dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None) ) - def init_set_GLQPKS_exp(m): - return ( - glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_exp[glqpks[0]] - ) - - model.node_price_block.set_GLQPKS_exp = pyo.Set( - dimen=6, initialize=(init_set_GLQPKS_exp if enable_initialisation else None) - ) - - def init_set_GLQPKS_imp(m): - return ( - glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_imp[glqpks[0]] - ) - - model.node_price_block.set_GLQPKS_imp = pyo.Set( - dimen=6, initialize=(init_set_GLQPKS_imp if enable_initialisation else None) - ) - # 
************************************************************************* # ************************************************************************* @@ -130,6 +57,13 @@ def price_block_other( # resource prices model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals) + + # price function convexity + + model.param_price_function_is_convex = pyo.Param( + model.set_GLQPK, + within=pyo.Boolean + ) # maximum resource volumes for each prices @@ -137,6 +71,9 @@ def price_block_other( model.set_GLQPKS, within=pyo.NonNegativeReals ) + \ + # price block + model.price_block = pyo.Block(model.set_GLQPK) # ************************************************************************* # ************************************************************************* @@ -146,57 +83,129 @@ def price_block_other( # ************************************************************************* # ************************************************************************* - # exported flow - - # TODO: validate the bounds by ensuring inf. cap. only exists in last segm. - - def bounds_var_ef_glqpks(m, g, l, q, p, k, s): + # import and export flows + def bounds_var_trans_flows_glqpks(m, g, l, q, p, k, s): if (g, l, q, p, k, s) in m.param_v_max_glqpks: # predefined finite capacity return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)]) else: # infinite capacity return (0, None) - - model.var_ef_glqpks = pyo.Var( - model.set_GLQPKS_exp, within=pyo.NonNegativeReals, bounds=bounds_var_ef_glqpks + model.var_trans_flows_glqpks = pyo.Var( + model.set_GLQPKS, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flows_glqpks ) - - # ************************************************************************* # ************************************************************************* - # exported flow revenue - def rule_constr_exp_flow_revenue(m, g, l, q, p, k): - return ( - sum( - m.var_ef_glqpks[(g, l, q, p, k, s)] - * m.param_p_glqpks[(g, l, q, p, k, s)] - for s in m.set_S[(g, l, q, p, k)] + # import flow costs and export flow revenues + def rule_constr_trans_monetary_flows(m, g, l, q, p, k): + if (g,l) in m.set_GL_imp: + return ( + sum( + m.var_trans_flows_glqpks[(g, l, q, p, k, s)] + * m.param_p_glqpks[(g, l, q, p, k, s)] + for s in m.set_S[(g, l, q, p, k)] + ) + == m.var_ifc_glqpk[(g, l, q, p, k)] ) - == m.var_efr_glqpk[(g, l, q, p, k)] - ) - - model.constr_exp_flow_revenue = pyo.Constraint( - model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flow_revenue + else: + return ( + sum( + m.var_trans_flows_glqpks[(g, l, q, p, k, s)] + * m.param_p_glqpks[(g, l, q, p, k, s)] + for s in m.set_S[(g, l, q, p, k)] + ) + == m.var_efr_glqpk[(g, l, q, p, k)] + ) + model.constr_trans_monetary_flows = pyo.Constraint( + model.set_GLQPK, rule=rule_constr_trans_monetary_flows ) - + # imported and exported flows + def rule_constr_trans_flows(m, g, l, q, p, k): + if (g,l) in m.set_GL_imp: + return sum( + m.var_v_glljqk[(g, l, l_star, j, q, k)] + for l_star in m.set_L[g] + if l_star not in m.set_L_imp[g] + for j in m.set_J[(g, l, l_star)] # only directed arcs + ) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)]) + else: + return sum( + m.var_v_glljqk[(g, l_star, l, j, q, k)] + * m.param_eta_glljqk[(g, l_star, l, j, q, k)] + for l_star in m.set_L[g] + if l_star not in m.set_L_exp[g] + for j in m.set_J[(g, l_star, l)] # only directed arcs + ) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)]) - # exported flows - def rule_constr_exp_flows(m, g, l, q, p, k): - return 
sum( - m.var_v_glljqk[(g, l_star, l, j, q, k)] - * m.param_eta_glljqk[(g, l_star, l, j, q, k)] - for l_star in m.set_L[g] - if l_star not in m.set_L_exp[g] - for j in m.set_J[(g, l_star, l)] # only directed arcs - ) == sum(m.var_ef_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)]) + model.constr_trans_flows = pyo.Constraint( + model.set_GLQPK, rule=rule_constr_trans_flows + ) - model.constr_exp_flows = pyo.Constraint( - model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flows + # ************************************************************************* + # ************************************************************************* + + # non-convex price functions + + # delta variables + model.var_active_segment_glqpks = pyo.Var( + model.set_GLQPKS, within=pyo.Binary ) + + # segments must be empty if the respective delta variable is zero + def rule_constr_empty_segment_if_delta_zero(m, g, l, q, p, k, s): + if len(m.set_S[(g,l,q,p,k)]) == 1 or m.param_price_function_is_convex[(g,l,q,p,k)]: + # single segment, skip + # convex, skip + return pyo.Constraint.Skip + return ( + m.var_trans_flows_glqpks[(g,l,q,p,k,s)] <= + m.param_v_max_glqpks[(g,l,q,p,k,s)]* + m.var_active_segment_glqpks[(g,l,q,p,k,s)] + ) + model.constr_empty_segment_if_delta_zero = pyo.Constraint( + model.set_GLQPKS, rule=rule_constr_empty_segment_if_delta_zero + ) + + # if delta var is one, previous ones must be one too + # if delta var is zero, the next ones must also be zero + def rule_constr_delta_summing_logic(m, g, l, q, p, k, s): + if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]: + # last segment, skip + # convex, skip + return pyo.Constraint.Skip + return ( + m.var_active_segment_glqpks[(g,l,q,p,k,s)] >= + m.var_active_segment_glqpks[(g,l,q,p,k,s+1)] + ) + model.constr_delta_summing_logic = pyo.Constraint( + model.set_GLQPKS, rule=rule_constr_delta_summing_logic + ) + + # if a segment is not completely used, the next ones must remain empty + def rule_constr_fill_up_segment_before_next(m, g, l, q, p, k, s): + if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]: + # last segment, skip + # convex, skip + return pyo.Constraint.Skip + return ( + m.var_trans_flows_glqpks[(g,l,q,p,k,s)] >= + m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]* + m.param_v_max_glqpks[(g,l,q,p,k,s)] + ) + # return ( + # m.var_if_glqpks[(g,l,q,p,k,s)]/m.param_v_max_glqpks[(g,l,q,p,k,s)] >= + # m.var_active_segment_glqpks[(g,l,q,p,k,s+1)] + # ) + # return ( + # m.param_v_max_glqpks[(g,l,q,p,k,s)]-m.var_if_glqpks[(g,l,q,p,k,s)] <= + # m.param_v_max_glqpks[(g,l,q,p,k,s)]*(1- m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]) + # ) + model.constr_fill_up_segment_before_next = pyo.Constraint( + model.set_GLQPKS, rule=rule_constr_fill_up_segment_before_next + ) # ************************************************************************* # ************************************************************************* @@ -622,7 +631,7 @@ def price_other( # ************************************************************************* # non-convex price functions - + # TODO: remove these variables from the model if they are not needed # delta variables model.var_active_segment_glqpks = pyo.Var( model.set_GLQPKS, within=pyo.Binary diff --git a/src/topupopt/problems/esipp/network.py b/src/topupopt/problems/esipp/network.py index 0b257e6b22df89ff3bc35c07e69d469399e8c388..60fa3a8eecab944fe485868de5e52c5996cfaa4e 100644 --- a/src/topupopt/problems/esipp/network.py +++ 
b/src/topupopt/problems/esipp/network.py @@ -15,9 +15,9 @@ from math import inf # import numpy as np import networkx as nx - +from ...data.gis.identify import get_edges_involving_node from ...data.gis.identify import find_unconnected_nodes -from .resource import are_prices_time_invariant +from .resource import are_prices_time_invariant, ResourcePrice # ***************************************************************************** # ***************************************************************************** @@ -624,27 +624,33 @@ class Network(nx.MultiDiGraph): ) def __init__(self, network_type = NET_TYPE_HYBRID, **kwargs): + # run base class init routine - nx.MultiDiGraph.__init__(self, **kwargs) - - # identify node types - - self.identify_node_types() + + # initialise the node type sets + self.import_nodes = set() + self.export_nodes = set() + self.source_sink_nodes = set() + self.waypoint_nodes = set() # declare variables for the nodes without directed arc limitations if network_type not in self.NET_TYPES: raise ValueError('Unknown network type.') self.network_type = network_type - + # nodes without incoming directed arcs limitations self.nodes_w_in_dir_arc_limitations = dict() - + # nodes without outgoing directed arcs limitations self.nodes_w_out_dir_arc_limitations = dict() - # set up initial nodes if the network is not hybrid - if self.network_type != self.NET_TYPE_HYBRID: - for node_key in self.nodes(): - self._set_up_node(node_key) + # run the base class init routine + nx.MultiDiGraph.__init__(self, **kwargs) + # process the input data + for node_key in self.nodes(): + self._process_node_data(node_key, data=self.nodes[node_key]) + + # # set up initial nodes if the network is not hybrid + # if self.network_type != self.NET_TYPE_HYBRID: + # for node_key in self.nodes(): + # self._set_up_node(node_key) # ************************************************************************* @@ -679,16 +685,174 @@ class Network(nx.MultiDiGraph): # ************************************************************************* # ************************************************************************* + + def _process_node_data(self, node_key, data: dict, **kwargs): + "Check the data, determine the node type and update the structures." 
+ + # find out which type of node it is + # no data: waypoint + # prices: transshipment (imp/exp) + # flows: source/sink + if type(data) == type(None): + self.waypoint_nodes.add(node_key) + elif type(data) == dict: + # transshipment or source/sink + if self.KEY_NODE_BASE_FLOW in data and len(data) == 1: + # source/sink + self.source_sink_nodes.add(node_key) + elif self.KEY_NODE_PRICES in data and self.KEY_NODE_TYPE in data and data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP: + # import node + self.import_nodes.add(node_key) + elif self.KEY_NODE_PRICES in data and self.KEY_NODE_TYPE in data and data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP: + # export node + self.export_nodes.add(node_key) + elif self.KEY_NODE_PRICES not in data: + # waypoint node + self.waypoint_nodes.add(node_key) + else: + # error + raise TypeError('Invalid input data combination.') + else: + raise TypeError('Invalid type for node data.') + # set up the node + self._set_up_node(node_key, **kwargs) + + # TODO: automatically identify import and export nodes (without defining them explicitly) + + # ************************************************************************* + # ************************************************************************* + + # TODO: use a decorator function to prevent the original method(s) from being used inappropriately + + def add_node(self, node_key, **kwargs): + + self._handle_node(node_key, **kwargs) + + # ************************************************************************* + # ************************************************************************* + + # TODO: automatically check if node already exists and implications when "adding" one + + def add_nodes(self, node_key_data: list): + + # process the input data + for entry in node_key_data: + if type(entry) != tuple: + raise ValueError('The input must be a list of tuples.') + self._process_node_data(entry[0], entry[1]) + # add the nodes + nx.MultiDiGraph.add_nodes_from(self, node_key_data) + + # ************************************************************************* + # ************************************************************************* + + def is_export_node(self, node_key) -> bool: + "Returns True if the key matches that of an export node." + return node_key in self.export_nodes + + def is_import_node(self, node_key) -> bool: + "Returns True if the key matches that of an import node." + return node_key in self.import_nodes + + def is_waypoint_node(self, node_key) -> bool: + "Returns True if the key matches that of a waypoint node." + return node_key in self.waypoint_nodes + + def is_source_sink_node(self, node_key) -> bool: + "Returns True if the key matches that of a source or sink node." 
+ return node_key in self.source_sink_nodes + + # ************************************************************************* + # ************************************************************************* + + def _reset_node_type(self, node_key): + + if self.is_export_node(node_key): + # export node + self.export_nodes.remove(node_key) + elif self.is_import_node(node_key): + # import node + self.import_nodes.remove(node_key) + elif self.is_source_sink_node(node_key): + # source/sink node + self.source_sink_nodes.remove(node_key) + else: # self.is_waypoint_node(node_key): + # has to be a waypoint node + self.waypoint_nodes.remove(node_key) + + # ************************************************************************* + # ************************************************************************* + + def modify_node(self, node_key, **kwargs): + if not self.has_node(node_key): + raise ValueError('The node indicated does not exist.') + self._handle_node(node_key, **kwargs) + + def _handle_node(self, node_key, **kwargs): + + # node has to exist + # the changes have to be compatible with the arcs involving the node + # - has outgoing arcs: cannot be an export node + # - has incoming arcs: cannot be an import node + # - has no arcs: can be anything + # the structures have to be updated accordingly + + if self.has_node(node_key): + # node exists + if len(kwargs) != 0: + # has data, check if the node type changes + if self.KEY_NODE_TYPE in kwargs: + # node type is in the dict + _edges = get_edges_involving_node(self, node_key, include_self_loops=False) + # the node type is specified, possible change of node + if kwargs[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP and not self.is_export_node(node_key): + # change to an export node: it cannot have outgoing arcs + for _edge in _edges: + if _edge[0] == node_key: + # outgoing arc, raise error + raise ValueError( + 'A node with outgoing arcs cannot be an ' + 'export node.' + ) + # change the node type + elif kwargs[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP and not self.is_import_node(node_key): + # change to an import node: it cannot have incoming arcs + for _edge in _edges: + if _edge[1] == node_key: + # incoming arc, raise error + raise ValueError( + 'A node with incoming arcs cannot be an ' + 'import node.' 
+ ) + # else: + # raise ValueError('Unknown option.') + else: + # no data: waypoint node, clear node data + self._reset_node_type(node_key) + keys = (self.KEY_NODE_BASE_FLOW, self.KEY_NODE_PRICES) + for key in keys: + if key in self.nodes[node_key]: + self.nodes[node_key].pop(key) + # the changes seem okay + self._process_node_data(node_key, kwargs) + nx.MultiDiGraph.add_node(self, node_key, **kwargs) + + else: + # process the input data + self._process_node_data(node_key, kwargs) + # add the node + nx.MultiDiGraph.add_node(self, node_key, **kwargs) + + # ************************************************************************* + # ************************************************************************* # add a new import node def add_import_node(self, node_key, prices: dict): node_dict = { self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_IMP, - self.KEY_NODE_PRICES: prices, - self.KEY_NODE_PRICES_TIME_INVARIANT: (are_prices_time_invariant(prices)), + self.KEY_NODE_PRICES: prices } - self.add_node(node_key, **node_dict) # ************************************************************************* @@ -699,10 +863,8 @@ class Network(nx.MultiDiGraph): def add_export_node(self, node_key, prices: dict): node_dict = { self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_EXP, - self.KEY_NODE_PRICES: prices, - self.KEY_NODE_PRICES_TIME_INVARIANT: (are_prices_time_invariant(prices)), + self.KEY_NODE_PRICES: prices } - self.add_node(node_key, **node_dict) # ************************************************************************* @@ -712,169 +874,164 @@ class Network(nx.MultiDiGraph): def add_source_sink_node(self, node_key, base_flow: dict, **kwargs): node_dict = { - self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_SOURCE_SINK, self.KEY_NODE_BASE_FLOW: base_flow, } - self.add_node(node_key, **node_dict) - self._set_up_node(node_key, **kwargs) # ************************************************************************* # ************************************************************************* # add a new waypoint node - def add_waypoint_node(self, node_key, **kwargs): - node_dict = {self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_WAY} - - self.add_node(node_key, **node_dict) - self._set_up_node(node_key, **kwargs) + def add_waypoint_node(self, node_key): + self.add_node(node_key) # ************************************************************************* # ************************************************************************* - # modify an existing network node + # # modify an existing network node - def modify_network_node(self, node_key, node_data: dict): - """ - Modifies a node in the network object. + # def modify_network_node(self, node_key, node_data: dict, **kwargs): + # """ + # Modifies a node in the network object. - Parameters - ---------- - node_key : hashable-type - The key that identifies the node. - node_data : dict - A dictionary with the data that one wishes to change in the object. - - Raises - ------ - ValueError - Errors are raised if the node does not exist in the network object, - and if the node changed has arcs that are incompatible with its new - version, namely in terms of incoming and outgoing arcs. + # Parameters + # ---------- + # node_key : hashable-type + # The key that identifies the node. + # node_data : dict + # A dictionary with the data that one wishes to change in the object. - Returns - ------- - None. 
+ # Raises + # ------ + # ValueError + # Errors are raised if the node does not exist in the network object, + # and if the node changed has arcs that are incompatible with its new + # version, namely in terms of incoming and outgoing arcs. - """ + # Returns + # ------- + # None. - if self.has_node(node_key): - # check if there will be changes to the type of node + # """ - if ( - self.KEY_NODE_TYPE in node_data - and self.KEY_NODE_TYPE in self.nodes[node_key] - ): - if ( - node_data[self.KEY_NODE_TYPE] - != self.nodes[node_key][self.KEY_NODE_TYPE] - ): - # the node type changed: check if final node is imp./exp. + # if self.has_node(node_key): + # # check if there will be changes to the type of node - # to export nodes + # if ( + # self.KEY_NODE_TYPE in node_data + # and self.KEY_NODE_TYPE in self.nodes[node_key] + # ): + # if ( + # node_data[self.KEY_NODE_TYPE] + # != self.nodes[node_key][self.KEY_NODE_TYPE] + # ): + # # the node type changed: check if final node is imp./exp. - if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP: - # export nodes cannot have outgoing arcs - # check if there are outgoing arcs involving this node + # # to export nodes - number_out_arcs = len( - tuple( - arc_key - for arc_key in self.edges(keys=True) - if arc_key[0] == node_key # is source - ) - ) + # if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP: + # # export nodes cannot have outgoing arcs + # # check if there are outgoing arcs involving this node - if number_out_arcs > 0: - raise ValueError( - "A node with outgoing arcs cannot be changed" - + " into an export node, since export nodes " - + " cannot have outgoing arcs." - ) - - # to import nodes - - if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP: - # import nodes cannot have incoming arcs - # check if there are incoming arcs involving this node - - number_in_arcs = len( - tuple( - arc_key - for arc_key in self.edges(keys=True) - if arc_key[1] == node_key # is destination - ) - ) + # number_out_arcs = len( + # tuple( + # arc_key + # for arc_key in self.edges(keys=True) + # if arc_key[0] == node_key # is source + # ) + # ) - if number_in_arcs > 0: - raise ValueError( - "A node with incoming arcs cannot be changed" - + " into an import node, since import nodes " - + " cannot have incoming arcs." - ) + # if number_out_arcs > 0: + # raise ValueError( + # "A node with outgoing arcs cannot be changed" + # + " into an export node, since export nodes " + # + " cannot have outgoing arcs." + # ) - # all good + # # to import nodes - self.add_node(node_key, **node_data) + # if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP: + # # import nodes cannot have incoming arcs + # # check if there are incoming arcs involving this node - else: - raise ValueError("No such node was found.") + # number_in_arcs = len( + # tuple( + # arc_key + # for arc_key in self.edges(keys=True) + # if arc_key[1] == node_key # is destination + # ) + # ) - # ************************************************************************* - # ************************************************************************* + # if number_in_arcs > 0: + # raise ValueError( + # "A node with incoming arcs cannot be changed" + # + " into an import node, since import nodes " + # + " cannot have incoming arcs." 
+ # ) - # identify importing nodes + # # all good - def identify_import_nodes(self): - self.import_nodes = tuple( - node_key - for node_key in self.nodes - if self.KEY_NODE_TYPE in self.nodes[node_key] - if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP) - ) + # self.add_node(node_key, **node_data) + # self._set_up_node(node_key, **kwargs) + + # else: + # raise ValueError("No such node was found.") # ************************************************************************* # ************************************************************************* - # identify exporting nodes + # # identify importing nodes - def identify_export_nodes(self): - self.export_nodes = tuple( - node_key - for node_key in self.nodes - if self.KEY_NODE_TYPE in self.nodes[node_key] - if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP) - ) + # def identify_import_nodes(self): + # self.import_nodes = tuple( + # node_key + # for node_key in self.nodes + # if self.KEY_NODE_TYPE in self.nodes[node_key] + # if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP) + # ) - # ************************************************************************* - # ************************************************************************* + # # ************************************************************************* + # # ************************************************************************* - # identify waypoint nodes + # # identify exporting nodes + + # def identify_export_nodes(self): + # self.export_nodes = tuple( + # node_key + # for node_key in self.nodes + # if self.KEY_NODE_TYPE in self.nodes[node_key] + # if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP) + # ) - def identify_waypoint_nodes(self): - self.waypoint_nodes = tuple( - node_key - for node_key in self.nodes - if self.KEY_NODE_TYPE in self.nodes[node_key] - if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_WAY) - ) + # # ************************************************************************* + # # ************************************************************************* - # ************************************************************************* - # ************************************************************************* + # # identify waypoint nodes - # identify source sink nodes + # def identify_waypoint_nodes(self): + # self.waypoint_nodes = tuple( + # node_key + # for node_key in self.nodes + # if self.KEY_NODE_TYPE in self.nodes[node_key] + # if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_WAY) + # ) - def identify_source_sink_nodes(self): - self.source_sink_nodes = tuple( - node_key - for node_key in self.nodes - if self.KEY_NODE_TYPE in self.nodes[node_key] - if ( - self.nodes[node_key][self.KEY_NODE_TYPE] - == self.KEY_NODE_TYPE_SOURCE_SINK - ) - ) + # # ************************************************************************* + # # ************************************************************************* + + # # identify source sink nodes + + # def identify_source_sink_nodes(self): + # self.source_sink_nodes = tuple( + # node_key + # for node_key in self.nodes + # if self.KEY_NODE_TYPE in self.nodes[node_key] + # if ( + # self.nodes[node_key][self.KEY_NODE_TYPE] + # == self.KEY_NODE_TYPE_SOURCE_SINK + # ) + # ) # ************************************************************************* # ************************************************************************* @@ -918,52 +1075,47 @@ class Network(nx.MultiDiGraph): # 
************************************************************************* # ************************************************************************* - # identify node types + # # identify node types - def identify_node_types(self): - "Identifies the node type for each node in the network objects." + # def identify_node_types(self): + # "Identifies the node type for each node in the network objects." - # identify import nodes + # # identify import nodes - self.identify_import_nodes() + # self.identify_import_nodes() - # identify export nodes + # # identify export nodes - self.identify_export_nodes() + # self.identify_export_nodes() - # identify source/sink nodes + # # identify source/sink nodes - self.identify_source_sink_nodes() + # self.identify_source_sink_nodes() - # identify waypoint nodes + # # identify waypoint nodes - self.identify_waypoint_nodes() + # self.identify_waypoint_nodes() - # validate + # # validate - self.validate() + # self.validate() # ************************************************************************* # ************************************************************************* def add_directed_arc(self, node_key_a, node_key_b, arcs: Arcs): # check if the arc ends in an import node - if node_key_b in self.import_nodes: raise ValueError("Directed arcs cannot end in an import node.") # check if the arc starts in an export node - if node_key_a in self.export_nodes: raise ValueError("Directed arcs cannot start in an export node.") # check the arc is between import and export nodes - if node_key_a in self.import_nodes and node_key_b in self.export_nodes: # it is between import and export nodes - # check if it involves static losses - if arcs.has_static_losses(): raise ValueError( "Arcs between import and export nodes cannot have static " @@ -971,7 +1123,6 @@ class Network(nx.MultiDiGraph): ) # add a new arc - return self.add_edge( node_key_a, node_key_b, **{self.KEY_ARC_TECH: arcs, self.KEY_ARC_UND: False} ) diff --git a/src/topupopt/problems/esipp/problem.py b/src/topupopt/problems/esipp/problem.py index 3659ad67caea582d20dab8d2251aa3d023987d71..8b0dd6b743d81a144b0b7080f260c0d4f69b6e07 100644 --- a/src/topupopt/problems/esipp/problem.py +++ b/src/topupopt/problems/esipp/problem.py @@ -1883,19 +1883,6 @@ class InfrastructurePlanningProblem(EnergySystem): .number_segments() ) ) - if not self.networks[g].nodes[l][Network.KEY_NODE_PRICES_TIME_INVARIANT] - else tuple( - s - for s in range( - self.networks[g] - .nodes[l][Network.KEY_NODE_PRICES][(q, p, k)] - .number_segments() - ) - ) - # for g in self.networks.keys() - # for l in self.networks[g].nodes - # if (l in self.networks[g].import_nodes or - # l in self.networks[g].export_nodes) for (g, l) in set_GL_exp_imp for (q, p, k) in set_QPK } diff --git a/tests/examples_signal.py b/tests/examples_signal.py index dc481e4c618a094a1690431a10e6dc7f34e3fc30..867a2ad58ba42775dbbbefc292d2afec3b310523 100644 --- a/tests/examples_signal.py +++ b/tests/examples_signal.py @@ -122,18 +122,18 @@ def example_amplitude_constrained_nnr_signals(): # by providing a non-numeric nr. 
of samples without specific lower bounds - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedNNRSignal( number_samples=(number_intervals,), max_pos_amp_limit=10 ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing negative lower bounds - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedNNRSignal( number_samples=number_intervals, @@ -141,8 +141,8 @@ def example_amplitude_constrained_nnr_signals(): lower_bounds=[-1 for i in range(number_intervals)], ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -378,7 +378,7 @@ def example_amplitude_constrained_signals(): # by providing negative 'positive' amplitude limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -388,10 +388,10 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -401,12 +401,12 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing negative 'negative' amplitude limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -416,10 +416,10 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=4, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -429,12 +429,12 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=-4, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing non-numeric or not None amplitude limits (e.g. 
tuple) - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -444,10 +444,10 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -457,10 +457,10 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -470,10 +470,10 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -483,12 +483,12 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=(3,), ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing bounds incompatible with positive limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -500,12 +500,12 @@ def example_amplitude_constrained_signals(): lower_bounds=[10 for i in range(number_intervals)], ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing bounds incompatible with negative limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -517,12 +517,12 @@ def example_amplitude_constrained_signals(): lower_bounds=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing incompatible maximum and minimum positive limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -532,12 +532,12 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing incompatible maximum and minimum negative limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -547,12 +547,12 @@ def example_amplitude_constrained_signals(): min_neg_amp_limit=11, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing non-numeric or not None amplitude limits (e.g. 
tuple) - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -563,12 +563,12 @@ def example_amplitude_constrained_signals(): ) sig.set_positive_amplitude(positive_amplitude=(5,)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing non-numeric or not None amplitude limits (e.g. tuple) - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -579,12 +579,12 @@ def example_amplitude_constrained_signals(): ) sig.set_negative_amplitude(negative_amplitude=(5,)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by checking if bounds have been violated without there being samples - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -602,13 +602,13 @@ def example_amplitude_constrained_signals(): assert not sig.is_signal_fixed() # signal is not set assert not sig.violates_amplitude_limits() # since the sig is not set except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a positive amplitude when there are no positive # amplitude limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -619,13 +619,13 @@ def example_amplitude_constrained_signals(): ) sig.validate_negative_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a negative amplitude when there are no negative # amplitude limits - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -636,13 +636,13 @@ def example_amplitude_constrained_signals(): ) sig.validate_positive_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a positive amplitude that exceeds its tolerated # maximum, using the internal positive amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -654,13 +654,13 @@ def example_amplitude_constrained_signals(): sig.set_positive_amplitude(12) sig.validate_positive_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a positive amplitude that exceeds its tolerated # maximum, using an externally supplied amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -671,13 +671,13 @@ def example_amplitude_constrained_signals(): ) sig.validate_positive_amplitude(12) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a positive amplitude that is below its tolerated # minimum, using the internal positive amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( 
number_samples=number_intervals, @@ -689,13 +689,13 @@ def example_amplitude_constrained_signals(): sig.set_positive_amplitude(2) sig.validate_positive_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a positive amplitude that is below its tolerated # minimum, using an externally supplied amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -706,13 +706,13 @@ def example_amplitude_constrained_signals(): ) sig.validate_positive_amplitude(2) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a negative amplitude that exceeds its tolerated # maximum, using the internal negative amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -724,13 +724,13 @@ def example_amplitude_constrained_signals(): sig.set_negative_amplitude(12) sig.validate_negative_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a negative amplitude that exceeds its tolerated # maximum, using an externally supplied amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -741,13 +741,13 @@ def example_amplitude_constrained_signals(): ) sig.validate_negative_amplitude(12) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a negative amplitude that is below its tolerated # minimum, using the internal negative amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -759,13 +759,13 @@ def example_amplitude_constrained_signals(): sig.set_negative_amplitude(2) sig.validate_negative_amplitude() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by seeking to validate a negative amplitude that is below its tolerated # minimum, using an externally supplied amplitude - error_was_triggered = False + error_was_raised = False try: sig = signal.AmplitudeConstrainedSignal( number_samples=number_intervals, @@ -776,8 +776,8 @@ def example_amplitude_constrained_signals(): ) sig.validate_negative_amplitude(2) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -793,7 +793,7 @@ def example_peculiar_errors(): # by providing samples as something other than a list, e.g. 
tuples - error_was_triggered = False + error_was_raised = False try: _ = signal.Signal( number_samples=number_intervals, @@ -802,12 +802,12 @@ def example_peculiar_errors(): upper_bounds=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing an incorrect number of samples - error_was_triggered = False + error_was_raised = False try: _ = signal.Signal( number_samples=number_intervals, @@ -816,8 +816,8 @@ def example_peculiar_errors(): upper_bounds=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ************************************************************************** # ************************************************************************** @@ -830,7 +830,7 @@ def example_peculiar_errors(): upper_bounds = [7 for i in range(number_intervals)] - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -841,12 +841,12 @@ def example_peculiar_errors(): sig.lower_bounds = [random.random() for i in range(number_intervals + 1)] sig.has_lower_bounds() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing an incorrect number of upper bounds - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -857,12 +857,12 @@ def example_peculiar_errors(): sig.upper_bounds = [random.random() for i in range(number_intervals - 1)] sig.has_upper_bounds() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing an incorrect number of samples - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -873,12 +873,12 @@ def example_peculiar_errors(): sig.samples = [random.random() for i in range(number_intervals - 1)] sig.is_signal_fixed() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by deleting the lower bounds after creating the object - error_was_triggered = False + error_was_raised = False try: sig = signal.NonNegativeRealSignal(number_samples=number_intervals) sig.lower_bounds = None @@ -886,12 +886,12 @@ def example_peculiar_errors(): if not sig.are_bounds_nnr(): raise ValueError() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing negative upper bounds (requires even lower lower bounds) - error_was_triggered = False + error_was_raised = False try: sig = signal.NonNegativeRealSignal(number_samples=number_intervals) sig.is_upper_bounded = True @@ -899,8 +899,8 @@ def example_peculiar_errors(): if not sig.are_bounds_nnr(): raise ValueError() except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -1016,48 +1016,48 @@ def example_binary_signals(): # by specifying an integrality tolerance greater than or equal to 0.5 - error_was_triggered = False + error_was_raised = False try: sig.is_signal_binary_only(integrality_tolerance=0.5) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert 
error_was_raised # by specifying an integrality tolerance greater than or equal to 0.5 - error_was_triggered = False + error_was_raised = False try: sig.is_signal_integer_only(integrality_tolerance=0.5) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by specifying an integrality tolerance as a tuple - error_was_triggered = False + error_was_raised = False try: sig.is_signal_binary_only(integrality_tolerance=(0.5,)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by specifying an integrality tolerance as a tuple - error_was_triggered = False + error_was_raised = False try: sig.is_signal_integer_only(integrality_tolerance=(0.5,)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by specifying the number of samples as a float - error_was_triggered = False + error_was_raised = False try: sig = signal.BinarySignal(number_samples=float(number_intervals)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -1159,24 +1159,24 @@ def example_nnr_signals(): # by providing a float as the number of intervals - error_was_triggered = False + error_was_raised = False try: sig = signal.NonNegativeRealSignal(number_samples=float(number_intervals)) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing negative lower bounds - error_was_triggered = False + error_was_raised = False try: sig = signal.NonNegativeRealSignal( number_samples=number_intervals, lower_bounds=[-1 for i in range(number_intervals)], ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing samples that are not nnr @@ -1184,23 +1184,23 @@ def example_nnr_signals(): samples[-1] = -1 - error_was_triggered = False + error_was_raised = False try: sig = signal.FixedNonNegativeRealSignal(samples=samples) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing samples as tuples samples = (random.random() for i in range(number_intervals)) - error_was_triggered = False + error_was_raised = False try: sig = signal.FixedNonNegativeRealSignal(samples=samples) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -1254,21 +1254,21 @@ def example_set_signal(): # by providing an integer instead of a list - error_was_triggered = False + error_was_raised = False try: sig.set_signal(samples=3) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing an incorrectly sized list - error_was_triggered = False + error_was_raised = False try: sig.set_signal(samples=[2 for i in range(number_intervals + 1)]) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ************************************************************************** @@ -1413,7 +1413,7 @@ def example_bounded_signals(): # by 
providing upper bounds with an inconsistent number of samples - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1422,12 +1422,12 @@ def example_bounded_signals(): upper_bounds=[10 for i in range(number_intervals - 1)], # one too few ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing lower bounds with an inconsistent number of samples - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1436,12 +1436,12 @@ def example_bounded_signals(): upper_bounds=None, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing upper bounds not as a list but as a numeric type - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1450,12 +1450,12 @@ def example_bounded_signals(): upper_bounds=6, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing lower bounds not as a list but as a numeric type - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1464,8 +1464,8 @@ def example_bounded_signals(): upper_bounds=[5 for i in range(number_intervals)], ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing upper bounds lower than the lower bounds @@ -1475,7 +1475,7 @@ def example_bounded_signals(): upper_bounds[-1] = 3 - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1484,8 +1484,8 @@ def example_bounded_signals(): upper_bounds=upper_bounds, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing lower bounds higher than the uppper bounds @@ -1495,7 +1495,7 @@ def example_bounded_signals(): lower_bounds[-1] = 9 - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=number_intervals, @@ -1504,8 +1504,8 @@ def example_bounded_signals(): upper_bounds=upper_bounds, ) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -1562,7 +1562,7 @@ def example_free_signals(): # by providing a float as the number of intervals - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=float(number_intervals), @@ -1571,8 +1571,8 @@ def example_free_signals(): upper_bounds=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** @@ -1619,25 +1619,25 @@ def example_fixed_signals(): # by providing a None when creating a FixedSignal - error_was_triggered = False + error_was_raised = False try: sig = signal.FixedSignal(samples=None) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing an empty list - error_was_triggered = False + error_was_raised = False try: sig = 
signal.FixedSignal(samples=[]) except ValueError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # by providing the number of samples as a float - error_was_triggered = False + error_was_raised = False try: sig = signal.Signal( number_samples=float(number_intervals), @@ -1646,8 +1646,8 @@ def example_fixed_signals(): upper_bounds=None, ) except TypeError: - error_was_triggered = True - assert error_was_triggered + error_was_raised = True + assert error_was_raised # ****************************************************************************** diff --git a/tests/test_data_finance.py b/tests/test_data_finance.py index 051f3c28bae2631123265ccbd42e3f06c5cd3bb4..0e5b36153c4dcf9200f65b41e26243cb5b31e09e 100644 --- a/tests/test_data_finance.py +++ b/tests/test_data_finance.py @@ -651,7 +651,7 @@ class TestDataFinance: # trigger ValueError - error_triggered = False + error_raised = False investment_period = analysis_period_span + 1 try: npv_salvage = present_salvage_value_annuity( @@ -662,8 +662,8 @@ class TestDataFinance: analysis_period_span=analysis_period_span, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* @@ -1197,7 +1197,7 @@ class TestDataFinance: investment_period = analysis_period_span + 1 - error_triggered = False + error_raised = False try: residual_value = salvage_value_linear_depreciation( investment=investment, @@ -1206,8 +1206,8 @@ class TestDataFinance: analysis_period_span=analysis_period_span, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ************************************************************************* # ************************************************************************* @@ -1318,78 +1318,78 @@ class TestDataFinance: # TypeError('The discount rates must be provided as a tuple.') - error_triggered = False + error_raised = False try: my_inv = Investment(list(i_t), R_t) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # ValueError('The duration of the period under analysis must be positive.') - error_triggered = False + error_raised = False try: my_inv = Investment(tuple()) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # TypeError('The discount rate must be provided as a float.') - error_triggered = False + error_raised = False try: my_inv = Investment(None, None, 5, 10) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # ValueError('The discount rate must be in the open interval between 0 and 1.) 
- error_triggered = False + error_raised = False try: my_inv = Investment(None, None, 1.35, 10) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # TypeError('The duration of the period under consideration must be provided as an integer.') - error_triggered = False + error_raised = False try: my_inv = Investment(None, None, 0.35, 10.0) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # ValueError('The duration of the period under analysis must be positive.) - error_triggered = False + error_raised = False try: my_inv = Investment(None, None, 0.35, 0) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # TypeError('The net cash flows must be provided as a list.') - error_triggered = False + error_raised = False try: my_inv = Investment(i_t, tuple(R_t)) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* diff --git a/tests/test_data_utils.py b/tests/test_data_utils.py index d653fe46c327ec833a8bae0b3f50ceab8b7cd2a2..27781c31e05273a8b62aaa911e4e7ceabe9318b7 100644 --- a/tests/test_data_utils.py +++ b/tests/test_data_utils.py @@ -140,7 +140,7 @@ class TestDataUtils: # raise exception - error_triggered = False + error_raised = False time_interval_durations.pop(0) try: new_profile = utils.create_profile_using_time_weighted_state( @@ -151,8 +151,8 @@ class TestDataUtils: states_correlate_profile=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # ********************************************************************* @@ -333,14 +333,14 @@ class TestDataUtils: # use zero iterations to force an error - error_triggered = False + error_raised = False try: new_key = utils.generate_pseudo_unique_key( key_list=key_list, max_iterations=0 ) except Exception: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # use a seed number to trigger more iterations diff --git a/tests/test_dhn.py b/tests/test_dhn.py index 11a9382ca3ad2e1b8061f7529016262144b85e13..e44b3781f232780fd53f112fb3fb610ad78b1971 100644 --- a/tests/test_dhn.py +++ b/tests/test_dhn.py @@ -654,7 +654,7 @@ class TestDistrictHeatingNetwork: # # single pipe, external cost, offset -# error_triggered = False +# error_raised = False # try: # pipe_trench_obj = PipeTrench(name='hello', # trenches={0: trench_tech}, @@ -665,12 +665,12 @@ class TestDistrictHeatingNetwork: # minimum_cost_offset=external_cost, # validate=True) # except TypeError: -# error_triggered = True -# assert error_triggered +# error_raised = True +# assert error_raised # # use list as minimum cost offset -# error_triggered = False +# error_raised = False # try: # pipe_trench_obj = PipeTrench(name='hello', # trenches={0: trench_tech}, @@ -684,8 +684,8 @@ class TestDistrictHeatingNetwork: # ), # validate=True) # except TypeError: -# error_triggered = True -# assert error_triggered +# error_raised = True +# assert error_raised # 
#************************************************************************** # #************************************************************************** @@ -754,7 +754,7 @@ class TestDistrictHeatingNetwork: # # single pipe, external cost, offset -# error_triggered = False +# error_raised = False # try: # pipe_trench_obj = PipeTrench(name='hello', # trenches={0: trench_tech}, @@ -765,12 +765,12 @@ class TestDistrictHeatingNetwork: # minimum_cost_offset=external_cost, # validate=True) # except TypeError: -# error_triggered = True -# assert error_triggered +# error_raised = True +# assert error_raised # # use list as minimum cost offset -# error_triggered = False +# error_raised = False # try: # pipe_trench_obj = PipeTrench(name='hello', # trenches={0: trench_tech}, @@ -784,8 +784,8 @@ class TestDistrictHeatingNetwork: # ), # validate=True) # except TypeError: -# error_triggered = True -# assert error_triggered +# error_raised = True +# assert error_raised # #************************************************************************** # #************************************************************************** diff --git a/tests/test_dhn_utils.py b/tests/test_dhn_utils.py index 0b75ff08ed0a380f8a7e10b5d13f76183b224039..0ad992e353b0a7469b53ec2dfa7a87a2b95fb9a4 100644 --- a/tests/test_dhn_utils.py +++ b/tests/test_dhn_utils.py @@ -528,7 +528,7 @@ class TestDistrictHeatingNetworkUtils: # update the nodes network.add_node(0, x=55, y=12) network.add_node(2, x=55.01, y=12.01) - + # ********************************************************************* utils.summarise_network_by_pipe_technology(network, False) diff --git a/tests/test_esipp_network.py b/tests/test_esipp_network.py index d731bb963f73bd24d1cd08fac3ff5f34a065f24e..b965a3e023df307db740f2ce3d981ba0cd993e09 100644 --- a/tests/test_esipp_network.py +++ b/tests/test_esipp_network.py @@ -168,7 +168,7 @@ class TestNetwork: # TypeError: The static losses should be given as a dict or None. 
- error_triggered = False + error_raised = False try: _ = Arcs( name="any", @@ -185,13 +185,13 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The static losses should be specified for each arc # option.') - error_triggered = False + error_raised = False try: _ = Arcs( name="any", @@ -212,12 +212,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('The static losses must be specified via a list of lists.') - error_triggered = False + error_raised = False try: _ = Arcs( name="any", @@ -234,13 +234,13 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The static loss values are inconsistent with the number ' # 'of options, scenarios and intervals.') - error_triggered = False + error_raised = False try: arc_tech = Arcs( name="any", @@ -267,12 +267,12 @@ class TestNetwork: ], ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('The static losses were not provided as numbers.') - error_triggered = False + error_raised = False try: _ = Arcs( name="any", @@ -291,12 +291,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The static losses must be positive or zero.') - error_triggered = False + error_raised = False try: _ = Arcs( name="any", @@ -315,12 +315,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError: The static loss dict keys must be tuples - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -334,12 +334,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError( 'The static loss dict keys must be tuples of size 3.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -353,12 +353,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError(The staticl osses should be given as a dict or None.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -372,14 +372,14 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError( # 'No static loss values were provided. 
There should be one'+ # ' value per option, scenario and time interval.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -393,8 +393,8 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ************************************************************************* # ************************************************************************* @@ -616,7 +616,7 @@ class TestNetwork: # TypeError('The name attribute is not hashable.') - error_triggered = False + error_raised = False try: _ = Arcs( name=[1, 2, 3], @@ -630,12 +630,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError:The efficiency dict keys must be (scenario, interval) tuples - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -649,12 +649,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError( 'The efficiency dict keys must be tuples of size 2.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -668,12 +668,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError(The efficiency should be given as a dict or None.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -687,13 +687,13 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('The reverse efficiency has to match the nominal'+ # ' one when there are no proportional losses.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -707,12 +707,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError:'The reverse efficiency should be given as a dict or None.' - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -726,14 +726,14 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError( # 'No efficiency values were provided. There should be '+ # 'one value per scenario and time interval.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -747,12 +747,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError: The keys for the efficiency dicts do not match. - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -769,12 +769,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError: Efficiency values must be provided as numeric types. 
- error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -791,12 +791,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('Efficiency values must be positive.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -812,12 +812,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('The capacity should be given as a list or tuple.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -831,12 +831,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError: The minimum cost values should be given as a list or tuple - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -850,12 +850,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError: The specific capacity cost was not given as a numeric type - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -869,12 +869,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError:The number of capacity and minimum cost entries must match - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -888,13 +888,13 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError: No entries for capacity and minimum cost were provided. # At least one option should be provided. - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -908,13 +908,13 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError: No entries for efficiency were provided. There should be # one entry per time interval. 
- error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -928,8 +928,8 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The number of efficiency values must match the number of # time intervals.') @@ -950,7 +950,7 @@ class TestNetwork: validate=True, ) - error_triggered = False + error_raised = False try: arc_tech.validate_sizes( number_options=number_options, @@ -960,13 +960,13 @@ class TestNetwork: ], ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The number of efficiency values must match the number of # time intervals.') - error_triggered = False + error_raised = False try: arc_tech = Arcs( name="hey", @@ -995,8 +995,8 @@ class TestNetwork: ], ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('The number of capacity values must match the number of # options.') @@ -1013,7 +1013,7 @@ class TestNetwork: validate=True, ) - error_triggered = False + error_raised = False try: arc_tech.validate_sizes( number_options=number_options, @@ -1023,8 +1023,8 @@ class TestNetwork: ], ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError: The minimum cost values are inconsistent with the number # of options. @@ -1041,7 +1041,7 @@ class TestNetwork: validate=True, ) - error_triggered = False + error_raised = False try: arc_tech.validate_sizes( number_options=number_options, @@ -1051,12 +1051,12 @@ class TestNetwork: ], ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('Efficiency values must be provided as numeric types.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1072,12 +1072,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('Efficiency values must be positive.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1094,12 +1094,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('Capacity values must be provided as numeric types.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1113,12 +1113,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('Capacity values must be positive.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1134,12 +1134,12 @@ class TestNetwork: validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('Minimum cost values must be provided as numeric types.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1153,12 +1153,12 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ValueError('Minimum cost values must be positive or zero.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1172,13 +1172,13 @@ class TestNetwork: 
validate=True, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # TypeError('The information about capacities being instantaneous or not # should be given as a boolean variable.') - error_triggered = False + error_raised = False try: _ = Arcs( name="hey", @@ -1192,8 +1192,8 @@ class TestNetwork: validate=True, ) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # ********************************************************************* @@ -1278,7 +1278,7 @@ class TestNetwork: ], ) - net.add_import_node(node_key="G", prices={(0, 0, 0): imp_resource_price}) + net.add_import_node("G", prices={(0, 0, 0): imp_resource_price}) # add export node @@ -1290,21 +1290,17 @@ class TestNetwork: ], ) - net.add_export_node(node_key="H", prices={(0, 0, 0): exp_resource_price}) + net.add_export_node("H", prices={(0, 0, 0): exp_resource_price}) - net.add_waypoint_node(node_key="Z") + net.add_waypoint_node("Z") base_flow = {(i, j): random.random() for i in range(3) for j in range(4)} - net.add_source_sink_node(node_key="Y", base_flow=base_flow) + net.add_source_sink_node("Y", base_flow=base_flow) base_flow[(2, 3)] = random.random() - net.modify_network_node( - node_key="Y", node_data={net.KEY_NODE_BASE_FLOW: base_flow} - ) - - net.identify_node_types() + net.modify_node("Y", **{net.KEY_NODE_BASE_FLOW: base_flow}) assert "Z" in net.waypoint_nodes @@ -1415,35 +1411,35 @@ class TestNetwork: # add isolated import node - net.add_import_node(node_key="I_iso", prices={(0, 0, 0): resource_price}) + net.add_import_node("I_iso", prices={(0, 0, 0): resource_price}) # add import node with outgoing arcs - net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) + net.add_import_node("I", prices={(0, 0, 0): resource_price}) # add isolated export node - net.add_import_node(node_key="E_iso", prices={(0, 0, 0): resource_price}) + net.add_import_node("E_iso", prices={(0, 0, 0): resource_price}) # add export node with incoming arcs - net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) + net.add_export_node("E", prices={(0, 0, 0): resource_price}) # add isolated normal node - net.add_source_sink_node(node_key="A_iso", base_flow=base_flow) + net.add_source_sink_node("A_iso", base_flow=base_flow) # add normal node with incoming arcs - net.add_source_sink_node(node_key="A_in", base_flow=base_flow) + net.add_source_sink_node("A_in", base_flow=base_flow) # add normal node with outgoing arcs - net.add_source_sink_node(node_key="A_out", base_flow=base_flow) + net.add_source_sink_node("A_out", base_flow=base_flow) # add normal node with incoming and outgoing arcs - net.add_source_sink_node(node_key="A", base_flow=base_flow) + net.add_source_sink_node("A", base_flow=base_flow) # ********************************************************************* @@ -1461,19 +1457,18 @@ class TestNetwork: # change I_iso to regular: okay - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "I_iso", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # reverse: okay - net.modify_network_node( - node_key="I_iso", - node_data={ + net.modify_node( + "I_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1481,9 +1476,9 @@ class TestNetwork: # change I_iso to export: okay - net.modify_network_node( - node_key="I_iso", 
- node_data={ + net.modify_node( + "I_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1491,9 +1486,9 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="I_iso", - node_data={ + net.modify_node( + "I_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1501,15 +1496,15 @@ class TestNetwork: # change I_iso to waypoint: okay - net.modify_network_node( - node_key="I_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "I_iso" ) # reverse: okay - net.modify_network_node( - node_key="I_iso", - node_data={ + net.modify_node( + "I_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1519,19 +1514,18 @@ class TestNetwork: # change E_iso to regular: okay - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "E_iso", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # reverse: okay - net.modify_network_node( - node_key="E_iso", - node_data={ + net.modify_node( + "E_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1539,9 +1533,9 @@ class TestNetwork: # change E_iso to import: okay - net.modify_network_node( - node_key="E_iso", - node_data={ + net.modify_node( + "E_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1549,9 +1543,9 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="E_iso", - node_data={ + net.modify_node( + "E_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1559,15 +1553,15 @@ class TestNetwork: # change E_iso to waypoint: okay - net.modify_network_node( - node_key="E_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "E_iso" ) # reverse: okay - net.modify_network_node( - node_key="E_iso", - node_data={ + net.modify_node( + "E_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1577,9 +1571,9 @@ class TestNetwork: # change A_iso to export: okay - net.modify_network_node( - node_key="A_iso", - node_data={ + net.modify_node( + "A_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1587,19 +1581,18 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_iso", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # change A_iso to import: okay - net.modify_network_node( - node_key="A_iso", - node_data={ + net.modify_node( + "A_iso", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1607,26 +1600,24 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_iso", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # change A_iso to waypoint: okay - net.modify_network_node( - node_key="A_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "A_iso" ) # reverse: okay - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_iso", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) @@ -1635,19 +1626,18 @@ class TestNetwork: # change I to regular: okay - net.modify_network_node( - 
node_key="I", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "I", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # reverse: okay - net.modify_network_node( - node_key="I", - node_data={ + net.modify_node( + "I", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1655,15 +1645,15 @@ class TestNetwork: # change I to waypoint: okay - net.modify_network_node( - node_key="I", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "I" ) # reverse: okay - net.modify_network_node( - node_key="I", - node_data={ + net.modify_node( + "I", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1673,19 +1663,18 @@ class TestNetwork: # change E to regular: okay - net.modify_network_node( - node_key="E", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "E", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # reverse: okay - net.modify_network_node( - node_key="E", - node_data={ + net.modify_node( + "E", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1693,15 +1682,15 @@ class TestNetwork: # change E to waypoint: okay - net.modify_network_node( - node_key="E", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "E" ) # reverse: okay - net.modify_network_node( - node_key="E", - node_data={ + net.modify_node( + "E", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1711,9 +1700,9 @@ class TestNetwork: # change A_in to export: okay - net.modify_network_node( - node_key="A_in", - node_data={ + net.modify_node( + "A_in", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, @@ -1721,26 +1710,24 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_in", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # change A_in to waypoint: okay - net.modify_network_node( - node_key="A_in", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "A_in", **{net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} ) # reverse: okay - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_in", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) @@ -1749,9 +1736,9 @@ class TestNetwork: # change A_out to import: okay - net.modify_network_node( - node_key="A_out", - node_data={ + net.modify_node( + "A_out", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, @@ -1759,26 +1746,24 @@ class TestNetwork: # reverse: okay - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_out", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) # change A_out to waypoint: okay - net.modify_network_node( - node_key="A_out", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} + net.modify_node( + "A_out", **{net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} ) # reverse: okay - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, + net.modify_node( + "A_out", + **{ net.KEY_NODE_BASE_FLOW: base_flow, }, ) @@ -1787,106 +1772,104 @@ class TestNetwork: # change I to export: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="I", - node_data={ + 
net.modify_node( + "I", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # change E to import: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="E", - node_data={ + net.modify_node( + "E", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # change A_out to export: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="A_out", - node_data={ + net.modify_node( + "A_out", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # change A_in to import: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="A_in", - node_data={ + net.modify_node( + "A_in", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # change A to export: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="A", - node_data={ + net.modify_node( + "A", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # change A to import: fail - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="A", - node_data={ + net.modify_node( + "A", + **{ net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_PRICES: resource_price, }, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # try to modify a non-existent node - error_triggered = False + error_raised = False try: - net.modify_network_node( - node_key="ABCD", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) + net.modify_node("ABCD") except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* @@ -1925,28 +1908,24 @@ class TestNetwork: # add import node I - net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) + net.add_import_node("I", prices={(0, 0, 0): resource_price}) # add export node E - net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) + net.add_export_node("E", prices={(0, 0, 0): resource_price}) # add regular node A - net.add_source_sink_node(node_key="A", base_flow=base_flow) + net.add_source_sink_node("A", base_flow=base_flow) # add regular node B - net.add_source_sink_node(node_key="B", base_flow=base_flow) + net.add_source_sink_node("B", base_flow=base_flow) # add a valid import-export arc net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossless_arcs) - # identify the nodes and validate - - net.identify_node_types() - # ********************************************************************* # ********************************************************************* @@ -1954,57 +1933,57 @@ class TestNetwork: # directed 
arcs cannot start in an export node: E -> B - error_triggered = False + error_raised = False try: net.add_directed_arc(node_key_a="E", node_key_b="B", arcs=lossless_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # directed arcs cannot end on an import node: A -> I - error_triggered = False + error_raised = False try: net.add_directed_arc(node_key_a="A", node_key_b="I", arcs=lossless_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # import-export nodes cannot have static losses - error_triggered = False + error_raised = False try: net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # undirected arcs cannot involve import nor export nodes - error_triggered = False + error_raised = False try: net.add_undirected_arc(node_key_a="I", node_key_b="A", arcs=lossless_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # undirected arcs cannot involve import nor export nodes - error_triggered = False + error_raised = False try: net.add_undirected_arc(node_key_a="B", node_key_b="E", arcs=lossless_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # undirected arcs cannot involve import nor export nodes - error_triggered = False + error_raised = False try: net.add_undirected_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* @@ -2014,46 +1993,41 @@ class TestNetwork: # create a new export node - net.add_export_node(node_key="E1", prices={(0, 0, 0): resource_price}) + net.add_export_node("E1", prices={(0, 0, 0): resource_price}) # create an arc starting in that export node - error_triggered = False + error_raised = False try: net.add_directed_arc(node_key_a="E1", node_key_b="B", arcs=lossless_arcs) - net.identify_node_types() except ValueError: - error_triggered = True - assert error_triggered - - # remove the troublesome arc + error_raised = True + assert error_raised - net.remove_edge(u="E1", v="B") + # # remove the troublesome arc + # net.remove_edge(u="E1", v="B") # ********************************************************************* # create a new import node - net.add_import_node(node_key="I1", prices={(0, 0, 0): resource_price}) + net.add_import_node("I1", prices={(0, 0, 0): resource_price}) # create an arc ending in that import node - error_triggered = False + error_raised = False try: net.add_directed_arc(node_key_a="A", node_key_b="I1", arcs=lossless_arcs) - net.identify_node_types() except ValueError: - error_triggered = True - assert error_triggered - - # remove the troublesome arc + error_raised = True + assert error_raised - net.remove_edge(u="A", v="I1") + # # remove the troublesome arc + # net.remove_edge(u="A", v="I1") # ********************************************************************* # check non-existent arc - net.arc_is_undirected(("X", "Y", 1)) # ************************************************************************* @@ -2067,7 +2041,7 @@ class TestNetwork: # import node imp_node_key = generate_pseudo_unique_key(mynet.nodes()) mynet.add_import_node( - node_key=imp_node_key, + imp_node_key, 
prices={ (0, 0, 0): ResourcePrice(prices=1+0.05, volumes=None) }, @@ -2076,13 +2050,13 @@ class TestNetwork: # other nodes node_A = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node( - node_key=node_A, + node_A, # base_flow=[1, -1, 0.5, -0.5] base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, ) node_B = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node( - node_key=node_B, + node_B, # base_flow=[-1, 1, -0.5, 0.5] base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, ) @@ -2102,14 +2076,13 @@ class TestNetwork: static_loss=None, validate=False, ) - mynet.add_undirected_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA - ) - + error_raised = False try: - # identify node types - mynet.identify_node_types() + # ValueError: Undirected arcs cannot involve import or export nodes. + mynet.add_undirected_arc( + node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + ) except ValueError: error_raised = True assert error_raised @@ -2128,7 +2101,7 @@ class TestNetwork: # export node exp_node_key = generate_pseudo_unique_key(mynet.nodes()) mynet.add_export_node( - node_key=exp_node_key, + exp_node_key, prices={ (0, 0, 0): ResourcePrice(prices=0.1+0.05, volumes=None) }, @@ -2137,7 +2110,7 @@ class TestNetwork: # other nodes node_B = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node( - node_key=node_B, + node_B, # base_flow=[-1, 1, -0.5, 0.5] base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, ) @@ -2154,14 +2127,13 @@ class TestNetwork: static_loss=None, validate=False, ) - mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE - ) error_raised = False try: - # identify node types - mynet.identify_node_types() + # ValueError: Undirected arcs cannot involve import or export nodes. 
+ mynet.add_undirected_arc( + node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE + ) except ValueError: error_raised = True assert error_raised @@ -2173,7 +2145,7 @@ class TestNetwork: # create a network object with a tree topology tree_network = binomial_tree(3, create_using=MultiDiGraph) - network = Network(incoming_graph_data=tree_network) + network = Network(network_type=Network.NET_TYPE_TREE, incoming_graph_data=tree_network) for edge_key in network.edges(keys=True): arc = ArcsWithoutLosses( name=str(edge_key), @@ -2184,6 +2156,9 @@ class TestNetwork: ) network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc}) + # assert that it should have a tree topology + assert network.should_be_tree_network() + # assert that it does not have a tree topology assert not network.has_tree_topology() @@ -2193,6 +2168,8 @@ class TestNetwork: # assert that it has a tree topology assert network.has_tree_topology() + + # ************************************************************************* # ************************************************************************* @@ -2203,13 +2180,10 @@ class TestNetwork: network = Network() # add node A - network.add_waypoint_node(node_key="A") + network.add_waypoint_node("A") # add node B - network.add_waypoint_node(node_key="B") - - # identify nodes - network.identify_node_types() + network.add_waypoint_node("B") # add arcs key_list = [ @@ -2236,14 +2210,14 @@ class TestNetwork: rand.seed(360) uuid.uuid4 = lambda: uuid.UUID(int=rand.getrandbits(128), version=4) - error_triggered = False + error_raised = False try: _ = network.get_pseudo_unique_arc_key( node_key_start="A", node_key_end="B", max_iterations=len(key_list) - 1 ) except Exception: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ************************************************************************* # ************************************************************************* @@ -2265,7 +2239,7 @@ class TestNetwork: for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)] } mynet.add_import_node( - node_key=imp_node_key, + imp_node_key, prices=imp_prices ) @@ -2279,7 +2253,7 @@ class TestNetwork: for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)] } mynet.add_export_node( - node_key=exp_node_key, + exp_node_key, prices=exp_prices, ) @@ -2308,15 +2282,13 @@ class TestNetwork: (2, q, 1): 0.25, }, ) - - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix - ) error_raised = False try: - # identify node types - mynet.identify_node_types() + # ValueError: Arcs between import and export nodes cannot have static losses. 
+ mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix + ) except ValueError: error_raised = True assert error_raised @@ -2349,8 +2321,6 @@ class TestNetwork: capacity=1, capacity_is_instantaneous=False ) - # identify the node types - net.identify_node_types() # assert that it can detected the selected antiparallel arcs assert net.has_selected_antiparallel_arcs() diff --git a/tests/test_esipp_prices.py b/tests/test_esipp_prices.py index 783304755c4d764ac00cd70e0ac0da3deb620f71..0bccfd498681c2eb8172fc8cc272617eb9f41817 100644 --- a/tests/test_esipp_prices.py +++ b/tests/test_esipp_prices.py @@ -287,9 +287,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -392,9 +389,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -496,9 +490,6 @@ class TestESIPPProblem: validate=False, ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - - # identify node types - mynet.identify_node_types() # trigger the error error_raised = False @@ -571,9 +562,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -680,9 +668,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -788,9 +773,6 @@ class TestESIPPProblem: validate=False, ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) - - # identify node types - mynet.identify_node_types() # trigger the error error_raised = False @@ -890,9 +872,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1042,9 +1021,6 @@ class TestESIPPProblem: node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, diff --git a/tests/test_esipp_problem.py b/tests/test_esipp_problem.py index 3f69ee09d398608ebd37a34d7d1801926353ecf8..511b874aa72c49202f2e425d9e52c927bfe55374 100644 --- a/tests/test_esipp_problem.py +++ b/tests/test_esipp_problem.py @@ -291,9 +291,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -434,9 +431,6 @@ class TestESIPPProblem: validate=False, ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE) - - # identify node types - mynet.identify_node_types() # no sos, regular time intervals ipp = self.build_solve_ipp( @@ -554,9 +548,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, 
arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -660,10 +651,6 @@ class TestESIPPProblem: mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( @@ -785,9 +772,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -871,9 +855,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -997,9 +978,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1090,9 +1068,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1183,9 +1158,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1287,9 +1259,6 @@ class TestESIPPProblem: capacity_is_instantaneous=capacity_is_instantaneous, ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1357,9 +1326,6 @@ class TestESIPPProblem: static_loss=None, ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1485,9 +1451,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1655,9 +1618,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1820,9 +1780,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -1961,9 +1918,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={},solver='scip', @@ -2187,9 +2141,6 @@ class TestESIPPProblem: ) } - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals solver_options = {} solver_options["relative_mip_gap"] = 0 @@ -2524,10 +2475,6 @@ class TestESIPPProblem: # do not use arc groups arc_groups_dict = {} - # identify node types - - mynet.identify_node_types() - # no sos, regular time intervals solver_options = {} solver_options["relative_mip_gap"] = 0 @@ -2796,9 +2743,6 @@ class 
TestESIPPProblem: ) } - # identify node types - mynet.identify_node_types() - # solver settings solver_options = {} solver_options["relative_mip_gap"] = 0 @@ -3107,9 +3051,6 @@ class TestESIPPProblem: # arc groups arc_groups_dict = {} - # identify node types - mynet.identify_node_types() - # solver settings solver_options = {} solver_options["relative_mip_gap"] = 0 @@ -3299,9 +3240,6 @@ class TestESIPPProblem: node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -3462,11 +3400,6 @@ class TestESIPPProblem: arc_key_AB_und = mynet.add_undirected_arc( node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab ) - - - # identify node types - - mynet.identify_node_types() # no sos, regular time intervals @@ -3855,10 +3788,6 @@ class TestESIPPProblem: capacity_is_instantaneous=False, ) - # identify node types - - mynet.identify_node_types() - # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -4270,9 +4199,6 @@ class TestESIPPProblem: node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -4895,9 +4821,6 @@ class TestESIPPProblem: capacity_is_instantaneous=False, ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -5457,9 +5380,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals for static_losses_mode in [ InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, @@ -5615,9 +5535,6 @@ class TestESIPPProblem: # arc_tech_AB.options_selected[0] = True # mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals for static_losses_mode in [ InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, @@ -5761,9 +5678,6 @@ class TestESIPPProblem: arc_key_AB_und = mynet.add_undirected_arc( node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB ) - - # identify node types - mynet.identify_node_types() # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -6038,9 +5952,6 @@ class TestESIPPProblem: capacity=1.0, capacity_is_instantaneous=False, ) - - # identify node types - mynet.identify_node_types() # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -6321,9 +6232,6 @@ class TestESIPPProblem: arc_key_AB_und = mynet.add_undirected_arc( node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB ) - - # identify node types - mynet.identify_node_types() # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -6599,9 +6507,6 @@ class TestESIPPProblem: capacity=1.0, capacity_is_instantaneous=False, ) - - # identify node types - mynet.identify_node_types() # no sos, regular time intervals for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -6917,9 +6822,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab) - # identify node types - 
mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( @@ -7085,9 +6987,6 @@ class TestESIPPProblem: capacity_is_instantaneous=False, ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( @@ -7229,9 +7128,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia2) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver='cbc', # TODO: make this work with other solvers @@ -7359,9 +7255,6 @@ class TestESIPPProblem: capacity_is_instantaneous=False, ) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver='cbc', # TODO: make this work with other solvers @@ -7687,9 +7580,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(*node_pair, arcs=new_arc_tech) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, @@ -7833,9 +7723,6 @@ class TestESIPPProblem: ) mynet.add_directed_arc(*node_pair, arcs=new_arc_tech) - # identify node types - mynet.identify_node_types() - # no sos, regular time intervals ipp = self.build_solve_ipp( solver_options={}, diff --git a/tests/test_esipp_resource.py b/tests/test_esipp_resource.py index 0fc4a96bbdfb4984c05adfba2b83bb617251a8dc..b58976257b1b71db2c34455bfbcb84ed487df50e 100644 --- a/tests/test_esipp_resource.py +++ b/tests/test_esipp_resource.py @@ -598,145 +598,145 @@ class TestResourcePrice: # create object without prices - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=None, volumes=volumes) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with negative prices in lists - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, -3, 2], volumes=[3, 4, 5]) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object where an intermediate segment has no volume limit - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 4, 2], volumes=[3, None, 5]) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with negative volumes in lists - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, 2], volumes=[4, -1, 2]) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with non-numeric prices in lists - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, "4", 2], volumes=[3, 4, 5]) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with non-numeric volumes in lists - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, 2], volumes=[4, "3", 2]) except TypeError: - error_triggered = True - assert error_triggered + 
error_raised = True + assert error_raised # ********************************************************************* # create object with mismatched price and volume lists - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, 2], volumes=[5, 7]) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with a price list as an input and an unsupported type - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, 2], volumes="hello") except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with negative prices in lists (no volumes are provided) - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, -2], volumes=None) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with non-numeric prices in lists (no volumes are provided) - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=[7, 3, "a"], volumes=None) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with non-numeric prices in lists (no volumes are provided) - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=5, volumes=[7, 3, 4]) except TypeError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # create object with negative prices - error_triggered = False + error_raised = False try: _ = ResourcePrice(prices=-3, volumes=None) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* diff --git a/tests/test_gis_calculate.py b/tests/test_gis_calculate.py index 5c2ee6339686a5c5648c2d3b51c6ab39f92ce2ee..fda713bc26c9c9561956a481a5ac474bb77390bf 100644 --- a/tests/test_gis_calculate.py +++ b/tests/test_gis_calculate.py @@ -321,12 +321,12 @@ class TestGisCalculate: true_length_3_points = true_length_2_points_a + true_length_2_points_b # make sure the function fails with a single point (sanity check) - error_triggered = False + error_raised = False try: line = LineString(list_1_points) except Exception: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # make sure it works with 2 points diff --git a/tests/test_gis_identify.py b/tests/test_gis_identify.py index 9eef616d0d27d2dcb993ba370002286719448a6e..383bb75ed4d6c7dbb5151da9d7c21287029458fc 100644 --- a/tests/test_gis_identify.py +++ b/tests/test_gis_identify.py @@ -4215,7 +4215,7 @@ class TestGisIdentify: # not allowed - error_triggered = False + error_raised = False try: # inconsistent edge key format gis_iden.is_edge_path( @@ -4224,8 +4224,8 @@ class TestGisIdentify: allow_multiple_formats=False, ) except ValueError: - error_triggered = True - assert error_triggered + error_raised = True + assert error_raised # ********************************************************************* # 
         # *********************************************************************
@@ -4294,15 +4294,15 @@ class TestGisIdentify:
 
         # not allowed
 
-        error_triggered = False
+        error_raised = False
         try:
             # inconsistent edge key format
             gis_iden.is_edge_path(
                 network, path=[(10, 8, 0), (8, 9)], allow_multiple_formats=False
             )
         except ValueError:
-            error_triggered = True
-        assert error_triggered
+            error_raised = True
+        assert error_raised
 
         # inconsistent edge key format
 
@@ -4314,15 +4314,15 @@ class TestGisIdentify:
 
         # not allowed
 
-        error_triggered = False
+        error_raised = False
         try:
             # inconsistent edge key format
             gis_iden.is_edge_path(
                 network, path=[(6, 5), (5, 4, 0), (4, 3)], allow_multiple_formats=False
             )
         except ValueError:
-            error_triggered = True
-        assert error_triggered
+            error_raised = True
+        assert error_raised
 
         # *********************************************************************
         # *********************************************************************
diff --git a/tests/test_gis_utils.py b/tests/test_gis_utils.py
index b5615465388dd1259f6bb56ba2083dcd74ae4b3d..3a17ecbdf14887231163a8c1c0ec59c59e79764d 100644
--- a/tests/test_gis_utils.py
+++ b/tests/test_gis_utils.py
@@ -1,7 +1,7 @@
 # imports
 
 # standard
-
+import sys
 from ast import literal_eval
 
 import random
@@ -1121,7 +1121,7 @@ class TestGisUtils:
 
         # trigger the error
 
-        error_triggered = False
+        error_raised = False
         try:
             (
                 node_keys,
@@ -1129,8 +1129,8 @@
                 _,
             ) = gis_utils.prepare_node_data_from_geodataframe(gdf=gdf)
         except ValueError:
-            error_triggered = True
-        assert error_triggered
+            error_raised = True
+        assert error_raised
 
 # *****************************************************************************
 # *****************************************************************************
@@ -1322,18 +1322,18 @@ class TestGisUtils:
 
         # mismatched longitudes and latitudes
 
-        error_triggered = False
+        error_raised = False
         try:
             _ = gis_utils.create_node_geodataframe(
                 longitudes=(_longitude, 528), latitudes=(_latitude,)
             )
         except ValueError:
-            error_triggered = True
-        assert error_triggered
+            error_raised = True
+        assert error_raised
 
         # mismatched longitudes/latitudes and osmids
 
-        error_triggered = False
+        error_raised = False
         try:
             _ = gis_utils.create_node_geodataframe(
                 longitudes=(_longitude, 528),
@@ -1341,8 +1341,8 @@
                 latitudes=(_latitude,),
                 osmids=(59, 482, 135),
             )
         except ValueError:
-            error_triggered = True
-        assert error_triggered
+            error_raised = True
+        assert error_raised
         # *************************************************************************
         # *************************************************************************
@@ -1881,12 +1881,12 @@ class TestGisUtils:
             gdf.to_file(filename_gpkg)
         else:
             # incompatible: errors are expected
-            error_triggered = False
+            error_raised = False
             try:
                 gdf.to_file(filename_gpkg)
             except Exception:
-                error_triggered = True
-            assert error_triggered
+                error_raised = True
+            assert error_raised
 
         # *********************************************************************
         # *********************************************************************
@@ -2603,6 +2603,21 @@ class TestGisUtils:
     # *************************************************************************
 
     def test_simplify_network_osmnx(self):
+
+        # BUG: this test fails intermittently, so we print the seed number to make failures reproducible
+        # it seems to occur if all the nodes around 1106295281 are simplified
+        # seed: 5785034948163332129
+# for edge_key in network.edges(keys=True):
+# >   assert len(tuple(gis_iden.get_edges_between_two_nodes(network, *edge_key[0:2]))) == 1
+# E   assert 2 == 1
+# E   + where 2 = len(((1106295281, 1106295315, 0), (1106295315, 1106295281, 0)))
+# E   + where ((1106295281, 1106295315, 0), (1106295315, 1106295281, 0)) = tuple([(1106295281, 1106295315, 0), (1106295315, 1106295281, 0)])
+# E   + where [(1106295281, 1106295315, 0), (1106295315, 1106295281, 0)] = <function get_edges_between_two_nodes at 0x7f8d07bc9ee0>(<networkx.classes.multidigraph.MultiDiGraph object at 0x7f8d00d17410>, *(1106295281, 1106295315))
+# E   + where <function get_edges_between_two_nodes at 0x7f8d07bc9ee0> = gis_iden.get_edges_between_two_nodes
+        seed = random.randrange(sys.maxsize)
+        random.seed(seed)
+        print("Seed was:", seed)
+
         # get a network
         network = ox.graph_from_point(
             (55.71654, 9.11728),
@@ -2618,6 +2633,11 @@ class TestGisUtils:
             node_keys[random.randint(0, len(node_keys) - 1)]
             for i in range(number_nodes_protected)
         ]
+        # protected_nodes.append(1106295281)
+        # assert 1 == 0
+        # E   + where 1 = len([[317212013, 1106295281, 1106295315]])
+        # E   + where [[317212013, 1106295281, 1106295315]] = <function find_simplifiable_paths at 0x7f9ac0cf22a0>(<networkx.classes.multidigraph.MultiDiGraph object at 0x7f9ab2c7b350>, [115838, 317195115, 5031764839, 317811652, 5076232388, 615539272, ...])
+        # E   + where <function find_simplifiable_paths at 0x7f9ac0cf22a0> = gis_iden.find_simplifiable_paths
         # try simplifying it
         gis_utils.simplify_network(network, protected_nodes=protected_nodes)
         # protected nodes must still exist