diff --git a/src/topupopt/data/buildings/dk/heat.py b/src/topupopt/data/buildings/dk/heat.py index c492c29b4736fa3b38fdb318217d1a4f26c8dd2a..66d60eea9004bd3a371a9c0719fe39af818d1b98 100644 --- a/src/topupopt/data/buildings/dk/heat.py +++ b/src/topupopt/data/buildings/dk/heat.py @@ -106,7 +106,7 @@ def heat_demand_dict_by_building_entrance( discrete_sinusoid_matching_integral( bdg_specific_demand[building_index] * area, time_interval_durations=time_interval_durations, - bdg_ratio_min_max=bdg_ratio_min_max[building_index], + min_to_max_ratio=bdg_ratio_min_max[building_index], phase_shift_radians=( bdg_demand_phase_shift[building_index] # bdg_demand_phase_shift_amplitude*np.random.random() @@ -128,7 +128,7 @@ def heat_demand_dict_by_building_entrance( ), avg_state=avg_state, time_interval_durations=time_interval_durations, - bdg_ratio_min_max=bdg_ratio_min_max[building_index], + min_to_max_ratio=bdg_ratio_min_max[building_index], state_correlates_with_output=state_correlates_with_output, ) ) diff --git a/src/topupopt/problems/esipp/problem.py b/src/topupopt/problems/esipp/problem.py index dfc9f4d1ff26de615ebc44cb47c1e4805ea6673a..006e1d7eebdcee930df20da72371f36b65968dd3 100644 --- a/src/topupopt/problems/esipp/problem.py +++ b/src/topupopt/problems/esipp/problem.py @@ -123,13 +123,13 @@ class InfrastructurePlanningProblem(EnergySystem): # self.time_intervals = dict(time_intervals) self.average_time_interval = { - q: mean(self.time_frame.time_intervals[q]) for q in self.time_frame.assessments + q: mean(self.time_frame.time_interval_durations[q]) for q in self.time_frame.assessments } self.normalised_time_interval_duration = { (q, k): duration / self.average_time_interval[q] for q in self.time_frame.assessments - for k, duration in enumerate(self.time_frame.time_intervals[q]) + for k, duration in enumerate(self.time_frame.time_interval_durations[q]) } # relation between reporting periods and time intervals @@ -1774,18 +1774,18 @@ class 
InfrastructurePlanningProblem(EnergySystem): # set of representative periods - set_Q = self.assessment_keys # tuple(self.assessment_keys) + set_Q = tuple(self.time_frame.assessments) # tuple(self.assessment_keys) # set of representative periods - set_P_q = {q: tuple(p for p in self.reporting_periods[q]) for q in set_Q} + set_P_q = {q: tuple(p for p in self.time_frame.reporting_periods[q]) for q in set_Q} # set of time intervals set_K_q = { - q: tuple(k for k in range(self.number_time_intervals[q])) for q in set_Q + q: tuple(k for k in self.time_frame.time_intervals[q]) for q in set_Q } - + # set of (q,p) tuples set_QP = [(q, p) for q in set_Q for p in set_P_q[q]] @@ -2927,7 +2927,7 @@ class InfrastructurePlanningProblem(EnergySystem): param_f_amp_v_glljqk.update( { (g, u, v, j, q, k): ( - self.time_intervals[q][k] # instantaneous + self.time_frame.time_interval_durations[q][k] # instantaneous if self.networks[g] .edges[(u, v, j)][Network.KEY_ARC_TECH] .capacity_is_instantaneous @@ -3624,6 +3624,8 @@ def simplify_peak_total_problem( # ************************************************************************* # ************************************************************************* + # TODO: make this compatible with multiple assessments + # check if the simplification is feasible if not is_peak_total_problem(problem): # it is not possible to simplify the problem @@ -3754,12 +3756,12 @@ def simplify_peak_total_problem( (q_total, p, k_total): ( net.nodes[node_key][Network.KEY_NODE_PRICES][(q_ref, p, k_ref)] ) - for p in problem.reporting_periods[q_ref] + for p in problem.time_frame.reporting_periods[q_ref] } net.nodes[node_key][Network.KEY_NODE_PRICES].update( { (q_peak, p, k_peak): ResourcePrice(prices=0) - for p in problem.reporting_periods[q_ref] + for p in problem.time_frame.reporting_periods[q_ref] } ) else: # other nodes @@ -3782,7 +3784,9 @@ def simplify_peak_total_problem( total_flow = { (q_total, k_total): sum( 
net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q_ref, k)] - for k in range(problem.number_time_intervals[q_ref]) + for k in range( + problem.time_frame.number_time_intervals(q_ref) + ) ) } # 3.2) insert the prices and base flows for the peak scenario @@ -3804,31 +3808,41 @@ def simplify_peak_total_problem( # ************************************************************************* # update the assessments, reporting periods, intervals and segments - - # assessments - problem.assessment_keys = (q_peak, q_total) - problem.number_assessments = len(problem.assessment_keys) - - # reporting periods - problem.reporting_periods = { - q: tuple(problem.reporting_periods[q_ref]) for q in problem.assessment_keys - } - problem.number_reporting_periods = { - q: len(problem.reporting_periods[q]) for q in problem.assessment_keys - } - - # time intervals - problem.time_intervals = { - q_peak: [problem.time_intervals[q_ref][k_ref]], - q_total: [sum(problem.time_intervals[q_ref])], - } - problem.number_time_intervals = { - q: len(problem.time_intervals[q]) for q in problem.assessment_keys - } + + # assessments: q_peak and q_total + # reporting_periods: same as before, applied to q_total only (P_q[q=q_peak]=empty set) + # reporting period durations: same as before + # intervals: q_peak has 1, q_total has 1 + # interval duration: q_peak, q_total should be the sum of all intervals + + problem.time_frame = EconomicTimeFrame( + discount_rates_q={ + q_peak: [], + q_total: [problem.time_frame.discount_rate(q_ref, p) + for p in problem.time_frame.reporting_periods[q_ref]] + }, + reporting_periods={ + q_peak: [], + q_total: problem.time_frame.reporting_periods[q_ref], + }, + reporting_period_durations={ + q_peak: [], + q_total: problem.time_frame.reporting_period_durations[q_ref], + }, + time_intervals={ + q_peak: [k_peak], + q_total: [k_total], + }, + time_interval_durations={ + q_peak: [problem.time_frame.time_interval_durations[q_ref][k_ref]], + q_total: 
[sum(problem.time_frame.time_interval_durations[q_ref])], + } + ) # average time interval problem.average_time_interval = { - q: mean(problem.time_intervals[q]) for q in problem.assessment_keys + q: mean(problem.time_frame.time_interval_durations[q]) + for q in problem.time_frame.assessments } # normalised time interval duration # problem.normalised_time_interval_duration = { @@ -3839,18 +3853,11 @@ def simplify_peak_total_problem( problem.normalised_time_interval_duration = { (q_peak, k_peak): 1, (q_total, k_total): ( - sum(problem.time_intervals[q_total]) - / problem.time_intervals[q_peak][k_total] + sum(problem.time_frame.time_interval_durations[q_total]) + / problem.time_frame.time_interval_durations[q_peak][k_total] ), } - # discount factors (use the reference assessment) - problem.discount_rates[q_peak] = tuple(problem.discount_rates[q_ref]) - problem.discount_rates[q_total] = tuple(problem.discount_rates[q_ref]) - problem.discount_rates.pop(q_ref) - - # f coefficients - # ************************************************************************* # ************************************************************************* @@ -3868,15 +3875,14 @@ def is_peak_total_problem(problem: InfrastructurePlanningProblem) -> bool: # conditions: # 1) maximum congestion occurs simultaneously across the network # - corollary: dynamic behaviours do not change the peaks - # - corollary: arc efficiencies are constant + # - corollary: arc efficiencies are constant? 
# - simplifying assumption: no proportional losses in the network + # - simplifying assumption: no converters or only stateless ones + # - simplifying assumption: only source or sink nodes, no hybrid ones # 2) the time during which maximum congestion occurs can be determined - # 3) energy prices are constant in time and volume - - # check #1 + # 3) energy prices are constant in time and volume (per assessment) - # check #2 - # check #3: energy prices are constant in time and volume + # check: energy prices are constant in time and volume for key, net in problem.networks.items(): # check import nodes for imp_node_key in net.import_nodes: @@ -3906,14 +3912,18 @@ def is_peak_total_problem(problem: InfrastructurePlanningProblem) -> bool: return False # if the entries are time invariant, checking one will do break + # check: no converters + if len(problem.converters) != 0: + # there are converters = cannot be simplified (might be relaxed later) + return False - # # check #4: none of the arcs can have proportional losses - # for key, net in problem.networks.items(): - # # check each edge - # for edge_key in net.edges(keys=True): - # if net.edges[edge_key][ - # Network.KEY_ARC_TECH].has_proportional_losses(): - # return False # has proportional losses, return False + # check: arc efficiencies must be constant in time + for key, net in problem.networks.items(): + # check each edge + for edge_key in net.edges(keys=True): + if not net.edges[edge_key][ + Network.KEY_ARC_TECH].has_constant_efficiency(): + return False # does not have constant efficiency, return False return True # all conditions are true diff --git a/src/topupopt/problems/esipp/time.py b/src/topupopt/problems/esipp/time.py index 1f5ba75dd39904457433b418a34760a8024ec72a..308f7d363814f653d68b1f57576abc467a0dc637 100644 --- a/src/topupopt/problems/esipp/time.py +++ b/src/topupopt/problems/esipp/time.py @@ -230,7 +230,7 @@ class TimeFrame: def qpk(self, redo: bool = False): "Return all valid (q,p,k) tuples." 
if redo: - self._qpk = ( + self._qpk = tuple( (q, p, k) for q, _p in self.reporting_periods.items() for p in _p @@ -251,7 +251,7 @@ class EconomicTimeFrame(TimeFrame): self, discount_rate=None, # real: same value for all q and p discount_rates_p=None, # list: same values for all q (1 per p) - discount_rates_qp=None, # dict: 1 value per p and q + discount_rates_q=None, # dict: key=q, value= list for each p **kwargs ): @@ -274,14 +274,10 @@ class EconomicTimeFrame(TimeFrame): for p in self.reporting_periods[q]) for q in self.assessments } - elif (type(discount_rates_qp) == dict and - self.complete_qp(discount_rates_qp)): + elif (type(discount_rates_q) == dict and + self.complete_q(discount_rates_q)): # dict: 1 value per p and q - self._discount_rates = { - q: tuple(discount_rates_qp[(q,p)] - for p in self.reporting_periods[q]) - for q in self.assessments - } + self._discount_rates = dict(discount_rates_q) else: raise ValueError('Unrecognised inputs.') diff --git a/tests/examples_esipp_problem.py b/tests/examples_esipp_problem.py index 8c480f9dcd905187c685c9f4aceabb79eb7f2c53..1fd6782b1dc71597629278843596be61228b853c 100644 --- a/tests/examples_esipp_problem.py +++ b/tests/examples_esipp_problem.py @@ -30,8 +30,8 @@ from src.topupopt.problems.esipp.resource import ResourcePrice from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics -# ****************************************************************************** -# ****************************************************************************** +# ***************************************************************************** +# ***************************************************************************** def examples(solver: str, solver_options: dict = None, init_aux_sets: bool = False): @@ -65,44 +65,6 @@ def examples(solver: str, solver_options: dict = None, init_aux_sets: bool = Fal # ************************************************************************** - # problem with two scenarios - - 
example_single_network_single_arc_problem_two_scenarios( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # problem with one import node, one regular node and one arc - - example_single_network_single_arc_problem( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - # problem with two symmetrical nodes and one undirected arc example_isolated_undirected_network( @@ -120,23 +82,6 @@ def examples(solver: str, solver_options: dict = None, init_aux_sets: bool = Fal init_aux_sets=init_aux_sets, ) - # problem with symmetrical nodes and one undirected arc with diff. tech. 
- - example_isolated_undirected_network_diff_tech( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - # problem with symmetrical nodes and one undirected arc, irregular steps example_isolated_undirected_network( @@ -1198,22 +1143,6 @@ def examples(solver: str, solver_options: dict = None, init_aux_sets: bool = Fal # test using groups of arcs - example_arc_groups_individual( - solver=solver, - solver_options=solver_options, - use_arc_groups=False, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, - init_aux_sets=init_aux_sets, - ) - - example_arc_groups_individual( - solver=solver, - solver_options=solver_options, - use_arc_groups=True, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, - init_aux_sets=init_aux_sets, - ) - # TODO: perform additional tests involving groups of arcs for mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: @@ -1839,196 +1768,6 @@ def example_single_network_single_arc_problem( # ****************************************************************************** -def example_single_network_single_arc_problem_two_scenarios( - solver, - solver_options, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - print_model, - init_aux_sets, -): - # number_intraperiod_time_intervals = 4 - - nominal_discount_rate = 0.035 - - assessment_weights = {0: 0.7, 1: 0.3} - - number_reporting_periods = 3 # total - - reporting_periods = {0: (0, 1), 1: (0, 1, 2)} # 2 and 3 - - number_time_intervals = {0: 3, 1: 2} - - discount_rates = { - q: tuple(nominal_discount_rate for p in 
range(number_reporting_periods)) - for q in assessment_weights - } - - time_intervals = {0: (1, 1, 1), 1: (1, 1)} - - # 2 nodes: one import, one regular - - mynet = Network() - - # import node - - # res_price = { - # q: ResourcePrice(prices=[1.0 for i in range(number_time_intervals[q])], - # volumes=None) - # for q in assessment_weights} - - node_IMP = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=node_IMP, - prices={ - (q, p, k): ResourcePrice(prices=1.0, volumes=None) - for q in assessment_weights - for p in range(number_reporting_periods) - for k in range(number_time_intervals[q]) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - base_flow={ - (0, 0): 0.50, - (0, 1): 0.00, - (0, 2): 1.00, - (1, 0): 1.25, - (1, 1): 0.30, - }, - ) - - # arc IA - - arc_tech_IA = Arcs( - name="any", - # efficiency=[0.5, 0.5, 0.5], - efficiency={(0, 0): 0.5, (0, 1): 0.5, (0, 2): 0.5, (1, 0): 0.5, (1, 1): 0.5}, - efficiency_reverse=None, - static_loss=None, - capacity=[3], - minimum_cost=[2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - validate=False, - ) - - mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=None, - 
static_losses_mode=True, # just to reach a line, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - discount_rates=discount_rates, - reporting_periods=reporting_periods, - time_intervals=time_intervals, - assessment_weights=assessment_weights, - ) - - # ************************************************************************** - - # validation - - # the arc should be installed since it is the only feasible solution - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the flows should be 1.0, 0.0 and 2.0 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), - 1.0, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 1)]), - 0.0, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 2)]), - 2.0, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 0)]), - 2.5, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 1)]), - 0.6, - abs_tol=1e-6, - ) - - # arc amplitude should be two - - assert math.isclose( - pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), - 2.5, - abs_tol=0.01, - ) - - # capex should be four - - assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) - - # sdncf_q[0] should be -5.7 - - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) - - # the objective function should be -9.7 - - assert math.isclose(pyo.value(ipp.instance.obj_f), -11.096, abs_tol=3e-3) - - -# ****************************************************************************** -# ****************************************************************************** - - def example_isolated_undirected_network( solver, 
solver_options, @@ -2163,14 +1902,14 @@ def example_isolated_undirected_network( # ************************************************************************** - # ****************************************************************************** # ****************************************************************************** -def example_isolated_undirected_network_diff_tech( +def example_nonisolated_undirected_network( solver, solver_options, + different_technologies, irregular_time_intervals, use_sos_arcs, sos_weight_key, @@ -2179,6 +1918,8 @@ def example_isolated_undirected_network_diff_tech( sense_sos_weight_key, sense_use_real_variables_if_possible, use_arc_interfaces, + undirected_arc_imports, + undirected_arc_exports, print_model, init_aux_sets, ): @@ -2188,160 +1929,35 @@ def example_isolated_undirected_network_diff_tech( number_intervals = 4 + number_periods = 2 + # 4 nodes: one import, one export, two supply/demand nodes mynet = Network() - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) + # import node - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5] - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, - ) + # imp_prices = ResourcePrice( + # prices=[1+random.random() for i in range(number_intervals)], + # volumes=None + # ) - node_B = generate_pseudo_unique_key(mynet.nodes()) + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1.25, 1, -0.625, 0.5] - base_flow={(0, 0): -1.25, (0, 1): 1.0, (0, 2): -0.625, (0, 3): 0.5}, + mynet.add_import_node( + node_key=imp_node_key, + prices={ + (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, ) - # add arcs + # export node - # undirected arc - - arc_tech_AB = ArcsWithoutStaticLosses( - name="any", - # efficiency=[0.8, 1.0, 0.8, 1.0], - efficiency={(0, 0): 0.8, (0, 1): 1.0, (0, 2): 0.8, (0, 3): 1.0}, 
- efficiency_reverse=None, - capacity=[1.25, 2.5], - minimum_cost=[10, 15], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - # the arc should be installed since it is the only feasible solution - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # there should be no opex (imports or exports), only capex from arcs - - assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 - - assert pyo.value(ipp.instance.var_capex) > 0 - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[("mynet", node_A, node_B, arc_key_AB_und)] - ) - > 0 - ) - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def 
example_nonisolated_undirected_network( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - undirected_arc_imports, - undirected_arc_exports, - print_model, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None - # ) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) + # exp_prices = ResourcePrice( + # prices=[random.random() for i in range(number_intervals)], + # volumes=None) exp_node_key = generate_pseudo_unique_key(mynet.nodes()) @@ -8749,516 +8365,8 @@ def example_undirected_arc_static_downstream( abs_tol=abs_tol, ) - -# ****************************************************************************** -# ****************************************************************************** - - -def example_arc_groups_individual( - solver, solver_options, use_arc_groups, static_losses_mode, init_aux_sets -): - q = 0 - - # time - - number_intervals = 2 - - number_periods = 2 - - # 4 nodes: two import nodes, two supply/demand nodes - - mynet = Network() - - # ************************************************************************** - - # import nodes - - # imp1_prices = ResourcePrice( - # prices=[1, - # 2], - # volumes=None) - - imp1_node_key = 
generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp1_node_key, - prices={ - (q, p, k): ResourcePrice(prices=k + 1, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # imp2_prices = ResourcePrice( - # prices=[3, - # 2], - # volumes=None) - - imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp2_node_key, - prices={ - (q, p, k): ResourcePrice(prices=3 - k, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # imp3_prices = ResourcePrice( - # prices=[1, - # 3], - # volumes=None) - - imp3_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp3_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + 2 * k, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # ************************************************************************** - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0, (q, 1): 0.5}) - - # add arcs - - efficiency_IA1 = { - (q, 0): 1.00, - (q, 1): 0.85, - } - - efficiency_IA2 = { - (q, 0): 0.95, - (q, 1): 0.90, - } - - efficiency_IA3 = { - (q, 0): 0.90, - (q, 1): 0.95, - } - - # I1A - - static_loss_IA1 = { - (0, q, 0): 0.10, - (0, q, 1): 0.10, - (1, q, 0): 0.15, - (1, q, 1): 0.15, - (2, q, 0): 0.20, - (2, q, 1): 0.20, - } - - # I2A - - static_loss_IA2 = { - (0, q, 0): 0.20, - (0, q, 1): 0.20, - (1, q, 0): 0.05, - (1, q, 1): 0.05, - (2, q, 0): 0.10, - (2, q, 1): 0.10, - } - - # I3A - - static_loss_IA3 = { - (0, q, 0): 0.15, - (0, q, 1): 0.15, - (1, q, 0): 0.10, - (1, q, 1): 0.10, - (2, q, 0): 0.05, - (2, q, 1): 0.05, - } - - arcs_I1A = Arcs( - name="IA1", - efficiency=efficiency_IA1, - efficiency_reverse=None, - static_loss=static_loss_IA1, - capacity=( - 0.5, - 0.75, - 1.2, - ), - minimum_cost=( - 0.2, - 0.5, - 0.75, 
- ), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_I1A = mynet.add_directed_arc( - node_key_a=imp1_node_key, node_key_b=node_A, arcs=arcs_I1A - ) - - arcs_I2A = Arcs( - name="IA2", - efficiency=efficiency_IA2, - efficiency_reverse=None, - static_loss=static_loss_IA2, - capacity=( - 0.5, - 0.75, - 1.2, - ), - minimum_cost=( - 0.2, - 0.5, - 0.75, - ), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_I2A = mynet.add_directed_arc( - node_key_a=imp2_node_key, node_key_b=node_A, arcs=arcs_I2A - ) - - arcs_I3A = Arcs( - name="IA3", - efficiency=efficiency_IA3, - efficiency_reverse=None, - static_loss=static_loss_IA3, - capacity=( - 0.5, - 0.75, - 1.2, - ), - minimum_cost=( - 0.2, - 0.5, - 0.75, - ), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_I3A = mynet.add_directed_arc( - node_key_a=imp3_node_key, node_key_b=node_A, arcs=arcs_I3A - ) - - if use_arc_groups: - arc_groups_dict = { - 0: ( - ("mynet", imp1_node_key, node_A, arc_key_I1A), - ("mynet", imp2_node_key, node_A, arc_key_I2A), - ("mynet", imp3_node_key, node_A, arc_key_I3A), - ) - } - - else: - arc_groups_dict = {} - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - solver_options["relative_mip_gap"] = 0 - solver_options["absolute_mip_gap"] = 1e-4 - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - 
max_number_parallel_arcs={}, - arc_groups_dict=arc_groups_dict, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - # print('**********(((((((((((((((())))))))))))))))))))))') - # if use_arc_groups: - # print('hohohoho') - # # ipp.instance.param_w_new_glljhk.pprint() - # for arc_key in ipp.networks['mynet'].edges(keys=True): - # print(arc_key) - # print(ipp.networks['mynet'].edges[arc_key]['technology'].static_loss) - # #print() - # ipp.instance.param_w_new_glljhk.pprint() - # # assert False - # print('flow in') - # print(flow_in) - # print('flow out') - # print(flow_out) - # print('var_capex') - # print(pyo.value(ipp.instance.var_capex)) - # print('var_sdncf') - # print(pyo.value(ipp.instance.var_sdncf)) - # print('var_sdext') - # print(pyo.value(ipp.instance.var_sdext)) - q = 0 - capex_ind = 0.75 - capex_group = 1.5 - imp_ind = 1.9882352941176473 - imp_group = 2.2 # {'mynet': 2.2245012886626414} - sdncf_group = -6.184560251632995 # -6.2386008960367345 - sdncf_ind = -5.274445281858455 - sdext_group = 0 - sdext_ind = 0 - losses_ind = 0 - losses_group = 0 - obj_ind = sdncf_ind + sdext_ind - capex_ind - obj_group = sdncf_group + sdext_group - capex_group - - assert capex_ind < capex_group - assert imp_ind < imp_group - assert obj_ind > obj_group - - if use_arc_groups: - # all arcs have to be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the same option has to be selected in 
all arcs - - h1 = ( - ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH] - .options_selected.index(True) - ) - - h2 = ( - ipp.networks["mynet"] - .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH] - .options_selected.index(True) - ) - - h3 = ( - ipp.networks["mynet"] - .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH] - .options_selected.index(True) - ) - - assert h1 == h2 - - assert h1 == h3 - - # the capex have to be higher than those of the best individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol - ) - - # there should be no exports - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # the imports should be higher than with individual arcs - - abs_tol = 1e-3 - - assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol) - - # the operating results should be lower than with an individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol - ) - - # the externalities should be zero - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol) - - # the objective function should be -6.3639758220728595-1.5 - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol) - - # the imports should be greater than or equal to the losses for all arx - - losses_model = sum( - pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp1_node_key, node_A, arc_key_I1A, q, k) - ] - ) - + pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp2_node_key, node_A, arc_key_I2A, q, k) - ] - ) - + pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp3_node_key, node_A, arc_key_I3A, q, k) - ] - ) - for k in range(number_intervals) - ) - - losses_data = sum( - static_loss_IA1[(h1, q, k)] - + static_loss_IA2[(h2, q, k)] - + static_loss_IA3[(h3, q, k)] - for k in 
range(number_intervals) - ) - - assert math.isclose(losses_model, losses_data, abs_tol=abs_tol) - - assert flow_in[("mynet", 0, 0)] >= losses_model - - else: - # at least one arc has to be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH] - .options_selected - or True - in ipp.networks["mynet"] - .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH] - .options_selected - or True - in ipp.networks["mynet"] - .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the capex have to be lower than with a group of arcs - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_capex), capex_ind, abs_tol=abs_tol - ) - - # there should be no exports - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # the imports should be lower than with a group of arcs - - abs_tol = 1e-3 - - assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol) - - # the operating results should be lower than with an individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol - ) - - # the externalities should be zero - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdext_q[q]), sdext_ind, abs_tol=abs_tol - ) - - # the objective function should be -6.3639758220728595-1.5 - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol) - - # the imports should be greater than or equal to the losses for all arx - - losses_model = sum( - pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp1_node_key, node_A, arc_key_I1A, q, k) - ] - ) - + pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp2_node_key, node_A, arc_key_I2A, q, k) - ] - ) - + pyo.value( - ipp.instance.var_w_glljqk[ - ("mynet", imp3_node_key, node_A, arc_key_I3A, q, k) - ] - ) - for k in range(number_intervals) - ) - - # 
losses_data = sum( - # static_loss_IA1[(h1,0,k)]+ - # static_loss_IA2[(h2,0,k)]+ - # static_loss_IA3[(h3,0,k)] - # for k in range(number_intervals) - # ) - - # assert math.isclose( - # losses_model, - # losses_data, - # abs_tol=abs_tol) - - assert flow_in[("mynet", 0, 0)] >= losses_model - - -# ****************************************************************************** -# ****************************************************************************** - +# ***************************************************************************** +# ***************************************************************************** def example_arc_groups_individual_undirected( solver, solver_options, use_arc_groups, static_losses_mode, init_aux_sets diff --git a/tests/test_esipp_network.py b/tests/test_esipp_network.py index 414ddbe2ce1f8b63f2a59fa40382cb165630ed54..b30b1f95afeb8832358df7dfe8bdf6bb06a72d25 100644 --- a/tests/test_esipp_network.py +++ b/tests/test_esipp_network.py @@ -18,14 +18,13 @@ from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses from src.topupopt.problems.esipp.resource import ResourcePrice +from src.topupopt.data.misc.utils import generate_pseudo_unique_key + # ***************************************************************************** # ***************************************************************************** # TODO: add test for directed arcs between import and export nodes with static losses -# TODO: add test for undirected arcs involving import and export nodes - - class TestNetwork: # ************************************************************************* # ************************************************************************* @@ -2086,6 +2085,116 @@ class TestNetwork: # check non-existent arc net.arc_is_undirected(("X", "Y", 1)) + + # ************************************************************************* + # ************************************************************************* + + def 
test_undirected_arc_import_error(self): + + # network + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + (0, 0, 0): ResourcePrice(prices=1+0.05, volumes=None) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5] + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_undirected_arc( + node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + ) + + error_raised = False + try: + # identify node types + mynet.identify_node_types() + except ValueError: + error_raised = True + assert error_raised + + # ********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_undirected_arc_export_error(self): + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=exp_node_key, + prices={ + (0, 0, 0): ResourcePrice(prices=0.1+0.05, volumes=None) + }, + ) + + # other nodes + 
node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5] + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + # export arc + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_undirected_arc( + node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE + ) + + error_raised = False + try: + # identify node types + mynet.identify_node_types() + except ValueError: + error_raised = True + assert error_raised # ************************************************************************* # ************************************************************************* @@ -2142,6 +2251,5 @@ class TestNetwork: error_triggered = True assert error_triggered - # ***************************************************************************** # ***************************************************************************** diff --git a/tests/test_esipp_problem.py b/tests/test_esipp_problem.py index 79f77549351c8058bc712e5dbaaefacc9541b696..d695e5b00505b7f5681f213acb244f8f19a6de7e 100644 --- a/tests/test_esipp_problem.py +++ b/tests/test_esipp_problem.py @@ -13,10 +13,12 @@ import pyomo.environ as pyo from src.topupopt.data.misc.utils import generate_pseudo_unique_key from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem from src.topupopt.problems.esipp.network import Arcs, Network +from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses from src.topupopt.problems.esipp.resource import ResourcePrice from src.topupopt.problems.esipp.problem import simplify_peak_total_problem from src.topupopt.problems.esipp.problem import is_peak_total_problem -from 
src.topupopt.problems.esipp.time import TimeFrame +from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics +from src.topupopt.problems.esipp.time import EconomicTimeFrame # ***************************************************************************** # ***************************************************************************** @@ -38,7 +40,7 @@ class TestESIPPProblem: perform_analysis: bool = False, plot_results: bool = False, print_solver_output: bool = False, - time_frame: TimeFrame = None, + time_frame: EconomicTimeFrame = None, networks: dict = None, converters: dict = None, static_losses_mode=None, @@ -46,7 +48,7 @@ class TestESIPPProblem: max_number_parallel_arcs: dict = None, arc_groups_dict: dict = None, init_aux_sets: bool = False, - discount_rates: dict = None, + # discount_rates: dict = None, assessment_weights: dict = None, simplify_problem: bool = False, ): @@ -76,7 +78,7 @@ class TestESIPPProblem: # create problem object ipp = InfrastructurePlanningProblem( - discount_rates=discount_rates, + # discount_rates=discount_rates, time_frame=time_frame, # reporting_periods=time_frame.reporting_periods, # time_intervals=time_frame.time_interval_durations, @@ -212,13 +214,13 @@ class TestESIPPProblem: ipp = simplify_peak_total_problem(ipp) # ********************************************************************* - + # instantiate (disable the default case v-a-v fixed losses) # ipp.instantiate(place_fixed_losses_upstream_if_possible=False) ipp.instantiate(initialise_ancillary_sets=init_aux_sets) - + # optimise ipp.optimise( @@ -239,12 +241,12 @@ class TestESIPPProblem: # ************************************************************************* def test_single_network_single_arc_problem(self): - # scenario + + # assessment q = 0 - # time - number_intervals = 3 - tf = TimeFrame( + tf = EconomicTimeFrame( + discount_rate=3.5/100, reporting_periods={q: (0, 1)}, reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, time_intervals={q: 
(0, 1, 2)}, @@ -318,7 +320,7 @@ class TestESIPPProblem: static_losses_mode=True, # just to reach a line, mandatory_arcs=[], max_number_parallel_arcs={}, - discount_rates={0: tuple([0.035, 0.035])}, + # discount_rates={0: tuple([0.035, 0.035])}, # init_aux_sets=init_aux_sets, simplify_problem=False, ) @@ -373,17 +375,183 @@ class TestESIPPProblem: # the objective function should be -9.7 assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) + + # ************************************************************************* + # ************************************************************************* + + def test_single_network_two_arcs_problem(self): + + # TODO: test simplifying this problem + + # assessment + q = 0 + + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0, 1, 2)}, + time_interval_durations={q: (1, 1, 1)}, + ) + + # 2 nodes: one import, one regular + mynet = Network() + + # import node + node_IMP = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=node_IMP, + prices={ + qpk: ResourcePrice(prices=1.0, volumes=None) + for qpk in tf.qpk() + } + ) + + # export node + node_EXP = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=node_EXP, + prices={ + qpk: ResourcePrice(prices=0.5, volumes=None) + for qpk in tf.qpk() + } + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + base_flow={(q, 0): 0.50, (q, 1): -1.50, (q, 2): 1.00}, + ) + + # arc IA + arc_tech_IA = Arcs( + name="any", + efficiency={qk: 0.5 for qk in tf.qk()}, + efficiency_reverse=None, + static_loss=None, + capacity=[3], + minimum_cost=[2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + validate=False, + ) + mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # arc AE + arc_tech_AE = 
Arcs( + name="any", + efficiency={qk: 0.8 for qk in tf.qk()}, + efficiency_reverse=None, + static_loss=None, + capacity=[3], + minimum_cost=[2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + validate=False, + ) + mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=True, # just to reach a line, + mandatory_arcs=[], + max_number_parallel_arcs={}, + simplify_problem=False, + ) + + assert is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 42 + assert ipp.results["Problem"][0]["Number of variables"] == 40 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 95 + + # ********************************************************************* + # ********************************************************************* + + # validation + + # the arc should be installed since it is required for feasibility + assert ( + True + in ipp.networks["mynet"] + .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the flows should be 1.0, 0.0 and 2.0 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), + 1.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 1)]), + 0.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 2)]), + 2.0, + abs_tol=1e-6, + ) + + # the flows should be 1.0, 0.0 and 2.0 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_EXP, 0, q, 0)]), + 0.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_EXP, 
0, q, 1)]), + 1.5, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_EXP, 0, q, 2)]), + 0.0, + abs_tol=1e-6, + ) + + # arc amplitude should be two + assert math.isclose( + pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), + 2.0, + abs_tol=0.01, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_A, node_EXP, 0)]), + 1.5, + abs_tol=0.01, + ) + + # capex should be four + assert math.isclose(pyo.value(ipp.instance.var_capex), 7.5, abs_tol=1e-3) + + # sdncf should be -5.7+0.6*(0.966+0.934) + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -5.7+0.6*(0.966+0.934), abs_tol=1e-3) + + # the objective function should be -9.7+0.6*(0.966+0.934) + assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7+0.6*(0.966+0.934)-3.5, abs_tol=1e-3) # ************************************************************************* # ************************************************************************* def test_single_network_single_arc_problem_simpler(self): - # scenario + + # assessment q = 0 - # time - number_intervals = 3 - tf = TimeFrame( + tf = EconomicTimeFrame( + discount_rate=3.5/100, reporting_periods={q: (0, 1)}, reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, time_intervals={q: (0, 1, 2)}, @@ -458,15 +626,15 @@ class TestESIPPProblem: static_losses_mode=True, # just to reach a line, mandatory_arcs=[], max_number_parallel_arcs={}, - discount_rates={0: tuple([0.035, 0.035])}, + # discount_rates={0: tuple([0.035, 0.035])}, # init_aux_sets=init_aux_sets, simplify_problem=True, ) assert is_peak_total_problem(ipp) - assert ipp.results["Problem"][0]["Number of constraints"] == 20 - assert ipp.results["Problem"][0]["Number of variables"] == 19 - assert ipp.results["Problem"][0]["Number of nonzeros"] == 36 + assert ipp.results["Problem"][0]["Number of constraints"] == 16 # 20 + assert ipp.results["Problem"][0]["Number of variables"] == 15 # 19 + assert 
ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 36 # ********************************************************************* # ********************************************************************* @@ -494,7 +662,7 @@ class TestESIPPProblem: # print('wow wow wow') # ipp.instance.constr_imp_flow_cost.pprint() - expected_string = """constr_imp_flow_cost : Size=4, Index=constr_imp_flow_cost_index, Active=True\n Key : Lower : Body : Upper : Active\n ('mynet', 'thatimpnode', 'peak', 0, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'peak', 1, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,1,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 0, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 1, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,1,0] : 0.0 : True\n""" + expected_string = """constr_imp_flow_cost : Size=2, Index=constr_imp_flow_cost_index, Active=True\n Key : Lower : Body : Upper : Active\n ('mynet', 'thatimpnode', 'total', 0, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 1, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,1,0] : 0.0 : True\n""" cmd_output = io.StringIO() sys.stdout = cmd_output @@ -549,14 +717,12 @@ class TestESIPPProblem: # ************************************************************************* def test_problem_increasing_imp_prices(self): - # scenario + + # assessment q = 0 - # time - number_intervals = 1 - # # periods - # number_periods = 1 - tf = TimeFrame( + tf = EconomicTimeFrame( + discount_rate=0.0, reporting_periods={q: (0,)}, reporting_period_durations={q: (365 * 24 * 3600,)}, 
time_intervals={q: (0,)}, @@ -622,7 +788,7 @@ class TestESIPPProblem: # init_aux_sets=init_aux_sets, simplify_problem=False, # reporting_periods={0: (0,)}, - discount_rates={0: (0.0,)}, + # discount_rates={0: (0.0,)}, ) assert not is_peak_total_problem(ipp) @@ -670,14 +836,15 @@ class TestESIPPProblem: # ************************************************************************* def test_problem_decreasing_exp_prices(self): - # scenario + # assessment q = 0 # time number_intervals = 1 # periods number_periods = 1 - tf = TimeFrame( + tf = EconomicTimeFrame( + discount_rate=0.0, reporting_periods={q: (0,)}, reporting_period_durations={q: (365 * 24 * 3600,)}, time_intervals={q: (0,)}, @@ -741,7 +908,7 @@ class TestESIPPProblem: # init_aux_sets=init_aux_sets, simplify_problem=False, # reporting_periods={0: (0,)}, - discount_rates={0: (0.0,)}, + # discount_rates={0: (0.0,)}, ) assert not is_peak_total_problem(ipp) @@ -796,7 +963,8 @@ class TestESIPPProblem: # periods number_periods = 1 - tf = TimeFrame( + tf = EconomicTimeFrame( + discount_rate=0.0, reporting_periods={q: (0,)}, reporting_period_durations={q: (365 * 24 * 3600,)}, time_intervals={q: (0,1)}, @@ -877,7 +1045,7 @@ class TestESIPPProblem: mandatory_arcs=[], max_number_parallel_arcs={}, simplify_problem=False, - discount_rates={0: (0.0,)}, + # discount_rates={0: (0.0,)}, ) assert not is_peak_total_problem(ipp) @@ -952,33 +1120,34 @@ class TestESIPPProblem: # ************************************************************************* # ************************************************************************* + + def test_problem_two_scenarios(self): + + # number_intraperiod_time_intervals = 4 - def test_problem_converter_sink(self): - # scenario - q = 0 - # time - number_intervals = 3 - # periods - number_periods = 1 + nominal_discount_rate = 0.035 - tf = TimeFrame( - reporting_periods={q: (0,)}, - reporting_period_durations={q: (365 * 24 * 3600,)}, - time_intervals={q: (0,1,2)}, - time_interval_durations={q: 
(1,1,1)}, - ) + assessment_weights = {0: 0.7, 1: 0.3} + + tf = EconomicTimeFrame( + discount_rate=nominal_discount_rate, + reporting_periods={0: (0, 1), 1: (0, 1, 2)}, + reporting_period_durations={0: (1, 1), 1: (1, 1, 1)}, # does not matter + time_intervals={0: (0, 1, 2), 1: (0, 1)}, + time_interval_durations={0: (1, 1, 1), 1: (1, 1)}, + ) # 2 nodes: one import, one regular + mynet = Network() - # import node node_IMP = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( node_key=node_IMP, prices={ - (q, p, k): ResourcePrice(prices=1.0, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) + qpk: ResourcePrice(prices=1.0, volumes=None) + for qpk in tf.qpk() }, ) @@ -988,8 +1157,13 @@ class TestESIPPProblem: mynet.add_source_sink_node( node_key=node_A, - # base_flow=[0.5, 0.0, 1.0], - base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, + base_flow={ + (0, 0): 0.50, + (0, 1): 0.00, + (0, 2): 1.00, + (1, 0): 1.25, + (1, 1): 0.30, + }, ) # arc IA @@ -997,7 +1171,7 @@ class TestESIPPProblem: arc_tech_IA = Arcs( name="any", # efficiency=[0.5, 0.5, 0.5], - efficiency={(q, 0): 0.5, (q, 1): 0.5, (q, 2): 0.5}, + efficiency={(0, 0): 0.5, (0, 1): 0.5, (0, 2): 0.5, (1, 0): 0.5, (1, 1): 0.5}, efficiency_reverse=None, static_loss=None, capacity=[3], @@ -1013,89 +1187,34 @@ class TestESIPPProblem: mynet.identify_node_types() - # converters - - # number of samples - time_step_durations = [1, 1, 1] - number_time_steps = len(time_step_durations) - - # get the coefficients - import numpy as np - - # a_innk - a_innk = { - ("cvt1", 0, 0, 0): 0.95, - ("cvt1", 0, 0, 1): 0.95, - ("cvt1", 0, 0, 2): 0.95, - } - - # b_inmk - b_inmk = {("cvt1", 0, 0, 0): 3, ("cvt1", 0, 0, 1): 3, ("cvt1", 0, 0, 2): 3} - - # c_irnk - c_irnk = {} - # d_irmk - d_irmk = {} - # e_x_ink: depends on fixed signals - e_x_ink = {} - # e_y_irk: depends on fixed signals - e_y_irk = {} - - # get the signals - inputs, states, outputs = 
get_two_node_model_signals(number_time_steps) - - # create a dynamic system - ds = dynsys.DynamicSystem( - time_interval_durations=time_step_durations, A=a, B=b, C=c, D=d - ) - - # create a converter - cvn1 = cvn.Converter( - "cvn1", - sys=ds, - initial_states=x0, - turn_key_cost=3, - inputs=inputs, - states=states, - outputs=outputs, - ) - # no sos, regular time intervals ipp = self.build_solve_ipp( # solver=solver, solver_options={}, - # use_sos_arcs=use_sos_arcs, - # arc_sos_weight_key=sos_weight_key, - # arc_use_real_variables_if_possible=use_real_variables_if_possible, - # use_sos_sense=use_sos_sense, - # sense_sos_weight_key=sense_sos_weight_key, - # sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - # sense_use_arc_interfaces=use_arc_interfaces, perform_analysis=False, plot_results=False, # True, print_solver_output=False, - time_frame=tf, networks={"mynet": mynet}, - converters={"mycvt": cvt}, + converters={}, + time_frame=tf, static_losses_mode=True, # just to reach a line, mandatory_arcs=[], max_number_parallel_arcs={}, - # init_aux_sets=init_aux_sets, - simplify_problem=False, + assessment_weights=assessment_weights, ) - + assert is_peak_total_problem(ipp) - assert ipp.results["Problem"][0]["Number of constraints"] == 24 - assert ipp.results["Problem"][0]["Number of variables"] == 22 - assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 + assert ipp.results["Problem"][0]["Number of constraints"] == 42 + assert ipp.results["Problem"][0]["Number of variables"] == 38 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 87 - # ********************************************************************* # ********************************************************************* # validation - # the arc should be installed since it is required for feasibility + # the arc should be installed since it is the only feasible solution + assert ( True in ipp.networks["mynet"] @@ -1104,38 +1223,2613 @@ class TestESIPPProblem: ) # the 
flows should be 1.0, 0.0 and 2.0 + assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), 1.0, abs_tol=1e-6, ) + assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 1)]), + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 1)]), 0.0, abs_tol=1e-6, ) + assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 2)]), + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 2)]), 2.0, abs_tol=1e-6, ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 0)]), + 2.5, + abs_tol=1e-6, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 1)]), + 0.6, + abs_tol=1e-6, + ) + # arc amplitude should be two + assert math.isclose( pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), - 2.0, + 2.5, abs_tol=0.01, ) # capex should be four - assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) - # sdncf should be -5.7 - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -5.7, abs_tol=1e-3) + assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) + + # sdncf_q[0] should be -5.7 + + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) # the objective function should be -9.7 - assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) + assert math.isclose(pyo.value(ipp.instance.obj_f), -11.096, abs_tol=3e-3) + + + # ************************************************************************* + # ************************************************************************* + + def test_problem_two_scenarios_two_discount_rates(self): + + # two discount rates + + assessment_weights = {0: 0.7, 1: 0.3} + + tf = EconomicTimeFrame( + discount_rates_q={0: (0.035, 0.035), 1: (0.1, 
0.1, 0.1)}, + reporting_periods={0: (0, 1), 1: (0, 1, 2)}, + reporting_period_durations={0: (1, 1), 1: (1, 1, 1)}, # does not matter + time_intervals={0: (0, 1, 2), 1: (0, 1)}, + time_interval_durations={0: (1, 1, 1), 1: (1, 1)}, + ) + + # 2 nodes: one import, one regular + + mynet = Network() + + node_IMP = generate_pseudo_unique_key(mynet.nodes()) + + mynet.add_import_node( + node_key=node_IMP, + prices={ + qpk: ResourcePrice(prices=1.0, volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + + node_A = generate_pseudo_unique_key(mynet.nodes()) + + mynet.add_source_sink_node( + node_key=node_A, + base_flow={ + (0, 0): 0.50, + (0, 1): 0.00, + (0, 2): 1.00, + (1, 0): 1.25, + (1, 1): 0.30, + }, + ) + + # arc IA + + arc_tech_IA = Arcs( + name="any", + # efficiency=[0.5, 0.5, 0.5], + efficiency={(0, 0): 0.5, (0, 1): 0.5, (0, 2): 0.5, (1, 0): 0.5, (1, 1): 0.5}, + efficiency_reverse=None, + static_loss=None, + capacity=[3], + minimum_cost=[2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + validate=False, + ) + + mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # identify node types + + mynet.identify_node_types() + + # no sos, regular time intervals + + ipp = self.build_solve_ipp( + # solver=solver, + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + converters={}, + time_frame=tf, + static_losses_mode=True, # just to reach a line, + mandatory_arcs=[], + max_number_parallel_arcs={}, + assessment_weights=assessment_weights, + ) + + assert is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 42 + assert ipp.results["Problem"][0]["Number of variables"] == 38 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 87 + + # ********************************************************************* + + # validation + + # the arc should be installed since it is the only feasible solution + 
assert ( + True + in ipp.networks["mynet"] + .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the flows should be 1.0, 0.0 and 2.0 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), + 1.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 1)]), + 0.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 2)]), + 2.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 0)]), + 2.5, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 1)]), + 0.6, + abs_tol=1e-6, + ) + + # arc amplitude should be two + assert math.isclose( + pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), + 2.5, + abs_tol=0.01, + ) + + # capex should be 4.5 + assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) + + # sdncf_q[0] should be -5.7 + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) + + # the objective function should be -10.80213032963115 + assert math.isclose(pyo.value(ipp.instance.obj_f), -10.80213032963115, abs_tol=3e-3) + + # ************************************************************************* + # ************************************************************************* + + def test_problem_two_scenarios_two_discount_rates_simpler(self): + + # two discount rates + assessment_weights = {0: 0.7, 1: 0.3} + tf = EconomicTimeFrame( + discount_rates_q={0: (0.035, 0.035), 1: (0.1, 0.1, 0.1)}, + reporting_periods={0: (0, 1), 1: (0, 1, 2)}, + reporting_period_durations={0: (1, 1), 1: (1, 1, 1)}, # does not matter + time_intervals={0: (0, 1, 2), 1: (0, 1)}, + time_interval_durations={0: (1, 1, 1), 1: (1, 1)}, + ) + + # 2 nodes: one import, one regular + mynet = Network() + node_IMP = 
generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=node_IMP, + prices={ + qpk: ResourcePrice(prices=1.0, volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + base_flow={ + (0, 0): 0.50, + (0, 1): 0.00, + (0, 2): 1.00, + (1, 0): 1.25, + (1, 1): 0.30, + }, + ) + + # arc IA + arc_tech_IA = Arcs( + name="any", + # efficiency=[0.5, 0.5, 0.5], + efficiency={(0, 0): 0.5, (0, 1): 0.5, (0, 2): 0.5, (1, 0): 0.5, (1, 1): 0.5}, + efficiency_reverse=None, + static_loss=None, + capacity=[3], + minimum_cost=[2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + validate=False, + ) + mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + # solver=solver, + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + converters={}, + time_frame=tf, + static_losses_mode=True, # just to reach a line, + mandatory_arcs=[], + max_number_parallel_arcs={}, + assessment_weights=assessment_weights, + simplify_problem=True + ) + + assert is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 42 + assert ipp.results["Problem"][0]["Number of variables"] == 38 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 87 + + # ********************************************************************* + + # validation + + # the arc should be installed since it is the only feasible solution + assert ( + True + in ipp.networks["mynet"] + .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the flows should be 1.0, 0.0 and 2.0 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), + 1.0, + abs_tol=1e-6, + ) + assert 
math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 1)]), + 0.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 2)]), + 2.0, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 0)]), + 2.5, + abs_tol=1e-6, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 1)]), + 0.6, + abs_tol=1e-6, + ) + + # arc amplitude should be two + assert math.isclose( + pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), + 2.5, + abs_tol=0.01, + ) + + # capex should be four + assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) + + # sdncf_q[0] should be -5.7 + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) + + # the objective function should be -10.80213032963115 (or -10.8027723516153) + assert math.isclose(pyo.value(ipp.instance.obj_f), -10.80213032963115, abs_tol=3e-3) + + # ************************************************************************* + # ************************************************************************* + + # problem with two symmetrical nodes and one undirected arc + # problem with symmetrical nodes and one undirected arc with diff. tech. 
+ # problem with symmetrical nodes and one undirected arc, irregular steps + # same problem as the previous one, except with interface variables + # problem with two symmetrical nodes and one undirected arc, w/ simple sos1 + + def test_isolated_undirected_network(self): + + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,)}, + reporting_period_durations={q: (365 * 24 * 3600,)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5], + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5], + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + # undirected arc + arc_tech_AB = ArcsWithoutStaticLosses( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=True, # just to reach a line, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + assert is_peak_total_problem(ipp) # TODO: make sure this is true + assert ipp.results["Problem"][0]["Number of constraints"] == 34 + 
assert ipp.results["Problem"][0]["Number of variables"] == 28 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 105 + + # ********************************************************************* + # ********************************************************************* + + # validation + + # the arc should be installed since it is the only feasible solution + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # there should be no opex (imports or exports), only capex from arcs + assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 + assert pyo.value(ipp.instance.var_capex) > 0 + assert ( + pyo.value( + ipp.instance.var_capex_arc_gllj[("mynet", node_A, node_B, arc_key_AB_und)] + ) + > 0 + ) + + # ************************************************************************* + # ************************************************************************* + + def test_isolated_undirected_network_diff_tech(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,)}, + reporting_period_durations={q: (365 * 24 * 3600,)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1.25, 1, -0.625, 0.5] + base_flow={(0, 0): -1.25, (0, 1): 1.0, (0, 2): -0.625, (0, 3): 0.5}, + ) + + # add arcs + # undirected arc + arc_tech_AB = ArcsWithoutStaticLosses( + name="any", + # efficiency=[0.8, 1.0, 0.8, 1.0], + efficiency={(0, 0): 0.8, (0, 1): 1.0, (0, 2): 0.8, (0, 3): 1.0}, + efficiency_reverse=None, + 
capacity=[1.25, 2.5], + minimum_cost=[10, 15], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + assert not is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 34 + assert ipp.results["Problem"][0]["Number of variables"] == 24 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 77 + + # ********************************************************************* + # ********************************************************************* + + # validation + + # the arc should be installed since it is the only feasible solution + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # there should be no opex (imports or exports), only capex from arcs + assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 + assert pyo.value(ipp.instance.var_capex) > 0 + assert ( + pyo.value( + ipp.instance.var_capex_arc_gllj[("mynet", node_A, node_B, arc_key_AB_und)] + ) + > 0 + ) + + # ********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_nonisolated_undirected_network(self): + + # scenario + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + 
reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=exp_node_key, + prices={ + qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5] + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + ) + + # export arc + + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + 
mynet.add_directed_arc( + node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE + ) + + # undirected arc + arc_tech_AB = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10.0, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + assert not is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 80 + assert ipp.results["Problem"][0]["Number of variables"] == 84 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 + + # ************************************************************************** + + # validation + # network is still isolated + # the import arc was not installed + + assert ( + True + not in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the export arc was not installed + + assert ( + True + not in ipp.networks["mynet"] + .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the undirected arc was installed + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the opex should be zero + + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), 0, abs_tol=1e-6) + + # the capex should be 
positive + + assert pyo.value(ipp.instance.var_capex) > 0 + + + # ********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_nonisolated_undirected_network_diff_tech(self): + + # scenario + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=exp_node_key, + prices={ + qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5] + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + 
efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + ) + + # export arc + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_directed_arc( + node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE + ) + + # undirected arc + arc_tech_AB = Arcs( + name="any", + # efficiency=[0.95, 0.95, 0.95, 0.95], + efficiency={(0, 0): 0.95, (0, 1): 0.95, (0, 2): 0.95, (0, 3): 0.95}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + # efficiency_reverse=[0.85, 0.85, 0.85, 0.85], + efficiency_reverse={(0, 0): 0.85, (0, 1): 0.85, (0, 2): 0.85, (0, 3): 0.85}, + static_loss=None, + validate=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + assert not is_peak_total_problem(ipp) + assert ipp.results["Problem"][0]["Number of constraints"] == 80 + assert ipp.results["Problem"][0]["Number of variables"] == 84 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 + + # ********************************************************************* + # 
********************************************************************* + + # validation + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the directed arc from the import should also be installed since node + # B cannot fullfil all the demand since it has an efficiency of 0.85<1 + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # there should be no opex (imports or exports), only capex from arcs + assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0 + assert pyo.value(ipp.instance.var_capex) > 0 + assert ( + pyo.value( + ipp.instance.var_capex_arc_gllj[ + ("mynet", node_A, node_B, arc_key_AB_und) + ] + ) + > 0 + ) + assert ( + pyo.value( + ipp.instance.var_capex_arc_gllj[("mynet", imp_node_key, node_A, 0)] + ) + > 0 + ) + + # ********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + # def test_problem_converter_sink(self): + # # scenario + # q = 0 + # # time + # number_intervals = 3 + # # periods + # number_periods = 1 + + # tf = EconomicTimeFrame( + # discount_rate=3.5/100, + # reporting_periods={q: (0,)}, + # reporting_period_durations={q: (365 * 24 * 3600,)}, + # time_intervals={q: (0,1,2)}, + # time_interval_durations={q: (1,1,1)}, + # ) + + # # 2 nodes: one import, one regular + # mynet = Network() + + # # import node + # node_IMP = generate_pseudo_unique_key(mynet.nodes()) + # mynet.add_import_node( + # node_key=node_IMP, + # prices={ + # (q, p, k): ResourcePrice(prices=1.0, volumes=None) + # for p in range(number_periods) + # for k in range(number_intervals) + # }, + # ) + + # # other nodes + + # node_A = generate_pseudo_unique_key(mynet.nodes()) + + 
# mynet.add_source_sink_node( + # node_key=node_A, + # # base_flow=[0.5, 0.0, 1.0], + # base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, + # ) + + # # arc IA + + # arc_tech_IA = Arcs( + # name="any", + # # efficiency=[0.5, 0.5, 0.5], + # efficiency={(q, 0): 0.5, (q, 1): 0.5, (q, 2): 0.5}, + # efficiency_reverse=None, + # static_loss=None, + # capacity=[3], + # minimum_cost=[2], + # specific_capacity_cost=1, + # capacity_is_instantaneous=False, + # validate=False, + # ) + + # mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # # identify node types + + # mynet.identify_node_types() + + # # converters + + # # number of samples + # time_step_durations = [1, 1, 1] + # number_time_steps = len(time_step_durations) + + # # get the coefficients + # import numpy as np + + # # a_innk + # a_innk = { + # ("cvt1", 0, 0, 0): 0.95, + # ("cvt1", 0, 0, 1): 0.95, + # ("cvt1", 0, 0, 2): 0.95, + # } + + # # b_inmk + # b_inmk = {("cvt1", 0, 0, 0): 3, ("cvt1", 0, 0, 1): 3, ("cvt1", 0, 0, 2): 3} + + # # c_irnk + # c_irnk = {} + # # d_irmk + # d_irmk = {} + # # e_x_ink: depends on fixed signals + # e_x_ink = {} + # # e_y_irk: depends on fixed signals + # e_y_irk = {} + + # # get the signals + # inputs, states, outputs = get_two_node_model_signals(number_time_steps) + + # # create a dynamic system + # ds = dynsys.DynamicSystem( + # time_interval_durations=time_step_durations, A=a, B=b, C=c, D=d + # ) + + # # create a converter + # cvn1 = cvn.Converter( + # "cvn1", + # sys=ds, + # initial_states=x0, + # turn_key_cost=3, + # inputs=inputs, + # states=states, + # outputs=outputs, + # ) + + # # no sos, regular time intervals + + # ipp = self.build_solve_ipp( + # # solver=solver, + # solver_options={}, + # # use_sos_arcs=use_sos_arcs, + # # arc_sos_weight_key=sos_weight_key, + # # arc_use_real_variables_if_possible=use_real_variables_if_possible, + # # use_sos_sense=use_sos_sense, + # # sense_sos_weight_key=sense_sos_weight_key, + # # 
sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, + # # sense_use_arc_interfaces=use_arc_interfaces, + # perform_analysis=False, + # plot_results=False, # True, + # print_solver_output=False, + # time_frame=tf, + # networks={"mynet": mynet}, + # converters={"mycvt": cvt}, + # static_losses_mode=True, # just to reach a line, + # mandatory_arcs=[], + # max_number_parallel_arcs={}, + # # init_aux_sets=init_aux_sets, + # simplify_problem=False, + # ) + + # assert is_peak_total_problem(ipp) + # assert ipp.results["Problem"][0]["Number of constraints"] == 24 + # assert ipp.results["Problem"][0]["Number of variables"] == 22 + # assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 + + # # ********************************************************************* + # # ********************************************************************* + + # # validation + + # # the arc should be installed since it is required for feasibility + # assert ( + # True + # in ipp.networks["mynet"] + # .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] + # .options_selected + # ) + + # # the flows should be 1.0, 0.0 and 2.0 + # assert math.isclose( + # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), + # 1.0, + # abs_tol=1e-6, + # ) + # assert math.isclose( + # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 1)]), + # 0.0, + # abs_tol=1e-6, + # ) + # assert math.isclose( + # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 2)]), + # 2.0, + # abs_tol=1e-6, + # ) + + # # arc amplitude should be two + # assert math.isclose( + # pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), + # 2.0, + # abs_tol=0.01, + # ) + + # # capex should be four + # assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) + + # # sdncf should be -5.7 + # assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -5.7, abs_tol=1e-3) + + # # the objective function should be -9.7 + # 
assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) + + # ************************************************************************* + # ************************************************************************* + + def test_nonisolated_network_preexisting_directed_arcs(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=exp_node_key, + prices={ + qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5], + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5], + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_tech_IA.options_selected[0] = True + 
mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) + + # export arc + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_tech_BE.options_selected[0] = True + mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) + + # undirected arc + # isotropic arc + arc_tech_AB = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10.0, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={}, + ) + + # ********************************************************************* + + # validation + # network is still isolated + # the undirected arc was installed + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the opex should be zero + assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), 0, abs_tol=1e-3) + + # the capex should be positive + assert pyo.value(ipp.instance.var_capex) > 0 + + # 
********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_nonisolated_network_preexisting_directed_arcs_diff_tech(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_export_node( + node_key=exp_node_key, + prices={ + qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5], + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5], + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + 
arc_tech_IA.options_selected[0] = True + mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) + + # export arc + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_tech_BE.options_selected[0] = True + mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) + + # undirected arc + # anisotropic arc + arc_tech_AB = Arcs( + name="any", + # efficiency=[0.95, 0.95, 0.95, 0.95], + efficiency={(0, 0): 0.95, (0, 1): 0.95, (0, 2): 0.95, (0, 3): 0.95}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + # efficiency_reverse=[0.85, 0.85, 0.85, 0.85], + efficiency_reverse={(0, 0): 0.85, (0, 1): 0.85, (0, 2): 0.85, (0, 3): 0.85}, + static_loss=None, + validate=False, + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={}, + ) + + # ************************************************************************** + + # validation + # the undirected arc should be installed since it is cheaper tham imp. 
+ assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the directed arc from the import should also be installed since node + # B cannot fullfil all the demand since it has an efficiency of 0.85<1 + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # there should be no opex (imports or exports), only capex from arcs + + assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0 + + assert pyo.value(ipp.instance.var_capex) > 0 + + assert ( + pyo.value( + ipp.instance.var_capex_arc_gllj[ + ("mynet", node_A, node_B, arc_key_AB_und) + ] + ) + > 0 + ) + + # ********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_arc_groups_individual(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + mynet = Network() + + # ************************************************************************** + + # import nodes + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + qpk: ResourcePrice(prices=qpk[2] + 1, volumes=None) + for qpk in tf.qpk() + }, + ) + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + qpk: ResourcePrice(prices=3 - qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + imp3_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + 
node_key=imp3_node_key, + prices={ + qpk: ResourcePrice(prices=1 + 2 * qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # ************************************************************************** + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0, (q, 1): 0.5}) + + # add arcs + efficiency_IA1 = { + (q, 0): 1.00, + (q, 1): 0.85, + } + efficiency_IA2 = { + (q, 0): 0.95, + (q, 1): 0.90, + } + efficiency_IA3 = { + (q, 0): 0.90, + (q, 1): 0.95, + } + + # I1A + static_loss_IA1 = { + (0, q, 0): 0.10, + (0, q, 1): 0.10, + (1, q, 0): 0.15, + (1, q, 1): 0.15, + (2, q, 0): 0.20, + (2, q, 1): 0.20, + } + + # I2A + static_loss_IA2 = { + (0, q, 0): 0.20, + (0, q, 1): 0.20, + (1, q, 0): 0.05, + (1, q, 1): 0.05, + (2, q, 0): 0.10, + (2, q, 1): 0.10, + } + + # I3A + static_loss_IA3 = { + (0, q, 0): 0.15, + (0, q, 1): 0.15, + (1, q, 0): 0.10, + (1, q, 1): 0.10, + (2, q, 0): 0.05, + (2, q, 1): 0.05, + } + arcs_I1A = Arcs( + name="IA1", + efficiency=efficiency_IA1, + efficiency_reverse=None, + static_loss=static_loss_IA1, + capacity=( + 0.5, + 0.75, + 1.2, + ), + minimum_cost=( + 0.2, + 0.5, + 0.75, + ), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + arc_key_I1A = mynet.add_directed_arc( + node_key_a=imp1_node_key, node_key_b=node_A, arcs=arcs_I1A + ) + + arcs_I2A = Arcs( + name="IA2", + efficiency=efficiency_IA2, + efficiency_reverse=None, + static_loss=static_loss_IA2, + capacity=( + 0.5, + 0.75, + 1.2, + ), + minimum_cost=( + 0.2, + 0.5, + 0.75, + ), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + + arc_key_I2A = mynet.add_directed_arc( + node_key_a=imp2_node_key, node_key_b=node_A, arcs=arcs_I2A + ) + + arcs_I3A = Arcs( + name="IA3", + efficiency=efficiency_IA3, + efficiency_reverse=None, + static_loss=static_loss_IA3, + capacity=( + 0.5, + 0.75, + 1.2, + ), + minimum_cost=( + 0.2, + 0.5, + 0.75, + ), + 
specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + + arc_key_I3A = mynet.add_directed_arc( + node_key_a=imp3_node_key, node_key_b=node_A, arcs=arcs_I3A + ) + + arc_groups_dict = { + 0: ( + ("mynet", imp1_node_key, node_A, arc_key_I1A), + ("mynet", imp2_node_key, node_A, arc_key_I2A), + ("mynet", imp3_node_key, node_A, arc_key_I3A), + ) + } + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + solver_options = {} + solver_options["relative_mip_gap"] = 0 + solver_options["absolute_mip_gap"] = 1e-4 + + ipp = self.build_solve_ipp( + solver_options=solver_options, + use_sos_arcs=False, + arc_sos_weight_key=None, + arc_use_real_variables_if_possible=False, + use_sos_sense=False, + sense_sos_weight_key=None, + sense_use_real_variables_if_possible=False, + sense_use_arc_interfaces=False, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, + time_frame=tf, + networks={"mynet": mynet}, + mandatory_arcs=[], + max_number_parallel_arcs={}, + arc_groups_dict=arc_groups_dict + ) + + # ********************************************************************* + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + q = 0 + capex_ind = 0.75 + capex_group = 1.5 + imp_ind = 1.9882352941176473 + imp_group = 2.2 # {'mynet': 2.2245012886626414} + sdncf_group = -6.184560251632995 # -6.2386008960367345 + sdncf_ind = -5.274445281858455 + sdext_group = 0 + sdext_ind = 0 + # losses_ind = 0 + # losses_group = 0 + obj_ind = sdncf_ind + sdext_ind - capex_ind + obj_group = sdncf_group + sdext_group - capex_group + + assert capex_ind < capex_group + assert imp_ind < imp_group + assert obj_ind > obj_group + + # all arcs have to be installed + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 
arc_key_I1A)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the same option has to be selected in all arcs + + h1 = ( + ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH] + .options_selected.index(True) + ) + + h2 = ( + ipp.networks["mynet"] + .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH] + .options_selected.index(True) + ) + + h3 = ( + ipp.networks["mynet"] + .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH] + .options_selected.index(True) + ) + + assert h1 == h2 + + assert h1 == h3 + + # the capex have to be higher than those of the best individual arc + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol + ) + + # there should be no exports + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # the imports should be higher than with individual arcs + + abs_tol = 1e-3 + + assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol) + + # the operating results should be lower than with an individual arc + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol + ) + + # the externalities should be zero + + abs_tol = 1e-3 + + assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol) + + # the objective function should be -6.3639758220728595-1.5 + + abs_tol = 1e-3 + + assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol) + + # the imports should be greater than or equal to the losses for all arx + + losses_model = sum( + pyo.value( + ipp.instance.var_w_glljqk[ + ("mynet", imp1_node_key, node_A, arc_key_I1A, q, k) + ] 
    def test_arc_groups_individual_ref(self):
        """Reference case for the arc-group test: identical network, but the
        three parallel import arcs are NOT grouped, so each arc's capacity
        option is selected independently.

        Expected outcome: individual selection is cheaper overall (lower
        capex and imports, higher objective) than the grouped variant.
        """

        # time frame
        q = 0
        tf = EconomicTimeFrame(
            discount_rate=3.5/100,
            reporting_periods={q: (0,1)},
            reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)},
            time_intervals={q: (0,1)},
            time_interval_durations={q: (1,1)},
        )

        # 4 nodes: three import nodes, one supply/demand node
        mynet = Network()

        # *********************************************************************

        # import nodes
        # prices rise with the time interval k for imp1, fall for imp2,
        # and rise twice as fast for imp3 — so the cheapest source changes
        # over time and the arc choice matters
        imp1_node_key = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_import_node(
            node_key=imp1_node_key,
            prices={
                qpk: ResourcePrice(prices=qpk[2] + 1, volumes=None)
                for qpk in tf.qpk()
            },
        )
        imp2_node_key = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_import_node(
            node_key=imp2_node_key,
            prices={
                qpk: ResourcePrice(prices=3 - qpk[2], volumes=None)
                for qpk in tf.qpk()
            },
        )
        imp3_node_key = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_import_node(
            node_key=imp3_node_key,
            prices={
                qpk: ResourcePrice(prices=1 + 2 * qpk[2], volumes=None)
                for qpk in tf.qpk()
            },
        )

        # *********************************************************************

        # other nodes
        node_A = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0, (q, 1): 0.5})

        # add arcs
        efficiency_IA1 = {
            (q, 0): 1.00,
            (q, 1): 0.85,
        }
        efficiency_IA2 = {
            (q, 0): 0.95,
            (q, 1): 0.90,
        }
        efficiency_IA3 = {
            (q, 0): 0.90,
            (q, 1): 0.95,
        }

        # I1A: static losses per (capacity option h, assessment q, interval k)
        static_loss_IA1 = {
            (0, q, 0): 0.10,
            (0, q, 1): 0.10,
            (1, q, 0): 0.15,
            (1, q, 1): 0.15,
            (2, q, 0): 0.20,
            (2, q, 1): 0.20,
        }

        # I2A
        static_loss_IA2 = {
            (0, q, 0): 0.20,
            (0, q, 1): 0.20,
            (1, q, 0): 0.05,
            (1, q, 1): 0.05,
            (2, q, 0): 0.10,
            (2, q, 1): 0.10,
        }

        # I3A
        static_loss_IA3 = {
            (0, q, 0): 0.15,
            (0, q, 1): 0.15,
            (1, q, 0): 0.10,
            (1, q, 1): 0.10,
            (2, q, 0): 0.05,
            (2, q, 1): 0.05,
        }
        arcs_I1A = Arcs(
            name="IA1",
            efficiency=efficiency_IA1,
            efficiency_reverse=None,
            static_loss=static_loss_IA1,
            capacity=(
                0.5,
                0.75,
                1.2,
            ),
            minimum_cost=(
                0.2,
                0.5,
                0.75,
            ),
            specific_capacity_cost=0,
            capacity_is_instantaneous=False,
            validate=True,
        )
        arc_key_I1A = mynet.add_directed_arc(
            node_key_a=imp1_node_key, node_key_b=node_A, arcs=arcs_I1A
        )

        arcs_I2A = Arcs(
            name="IA2",
            efficiency=efficiency_IA2,
            efficiency_reverse=None,
            static_loss=static_loss_IA2,
            capacity=(
                0.5,
                0.75,
                1.2,
            ),
            minimum_cost=(
                0.2,
                0.5,
                0.75,
            ),
            specific_capacity_cost=0,
            capacity_is_instantaneous=False,
            validate=True,
        )

        arc_key_I2A = mynet.add_directed_arc(
            node_key_a=imp2_node_key, node_key_b=node_A, arcs=arcs_I2A
        )

        arcs_I3A = Arcs(
            name="IA3",
            efficiency=efficiency_IA3,
            efficiency_reverse=None,
            static_loss=static_loss_IA3,
            capacity=(
                0.5,
                0.75,
                1.2,
            ),
            minimum_cost=(
                0.2,
                0.5,
                0.75,
            ),
            specific_capacity_cost=0,
            capacity_is_instantaneous=False,
            validate=True,
        )

        arc_key_I3A = mynet.add_directed_arc(
            node_key_a=imp3_node_key, node_key_b=node_A, arcs=arcs_I3A
        )

        # do not use arc groups: this is what distinguishes this test from
        # the grouped variant above
        arc_groups_dict = {}

        # identify node types

        mynet.identify_node_types()

        # no sos, regular time intervals
        solver_options = {}
        solver_options["relative_mip_gap"] = 0
        solver_options["absolute_mip_gap"] = 1e-4

        ipp = self.build_solve_ipp(
            solver_options=solver_options,
            use_sos_arcs=False,
            arc_sos_weight_key=None,
            arc_use_real_variables_if_possible=False,
            use_sos_sense=False,
            sense_sos_weight_key=None,
            sense_use_real_variables_if_possible=False,
            sense_use_arc_interfaces=False,
            perform_analysis=False,
            plot_results=False,  # True,
            print_solver_output=False,
            time_frame=tf,
            static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR,
            networks={"mynet": mynet},
            mandatory_arcs=[],
            max_number_parallel_arcs={},
            arc_groups_dict=arc_groups_dict
        )

        # *********************************************************************

        # overview

        (
            flow_in,
            flow_in_k,
            flow_out,
            flow_in_cost,
            flow_out_revenue,
        ) = compute_cost_volume_metrics(ipp.instance, True)

        # expected values: *_ind are for this (individual) case, *_group are
        # the grouped-arc counterparts, kept here for the cross-comparisons
        q = 0
        capex_ind = 0.75
        capex_group = 1.5
        imp_ind = 1.9882352941176473
        imp_group = 2.2  # {'mynet': 2.2245012886626414}
        sdncf_group = -6.184560251632995  # -6.2386008960367345
        sdncf_ind = -5.274445281858455
        sdext_group = 0
        sdext_ind = 0
        # losses_ind = 0
        # losses_group = 0
        obj_ind = sdncf_ind + sdext_ind - capex_ind
        obj_group = sdncf_group + sdext_group - capex_group

        assert capex_ind < capex_group
        assert imp_ind < imp_group
        assert obj_ind > obj_group

        # at least one arc has to be installed

        assert (
            True
            in ipp.networks["mynet"]
            .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH]
            .options_selected
            or True
            in ipp.networks["mynet"]
            .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH]
            .options_selected
            or True
            in ipp.networks["mynet"]
            .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH]
            .options_selected
        )

        # the capex have to be lower than with a group of arcs

        abs_tol = 1e-3

        assert math.isclose(
            pyo.value(ipp.instance.var_capex), capex_ind, abs_tol=abs_tol
        )

        # there should be no exports

        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)

        # the imports should be lower than with a group of arcs

        abs_tol = 1e-3

        assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol)

        # the operating results should match the individual-arc expectation

        abs_tol = 1e-3

        assert math.isclose(
            pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol
        )

        # the externalities should be zero

        abs_tol = 1e-3

        assert math.isclose(
            pyo.value(ipp.instance.var_sdext_q[q]), sdext_ind, abs_tol=abs_tol
        )

        # the objective function should equal sdncf + sdext - capex (individual)

        abs_tol = 1e-3

        assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol)

        # the imports should be greater than or equal to the losses for all arcs

        losses_model = sum(
            pyo.value(
                ipp.instance.var_w_glljqk[
                    ("mynet", imp1_node_key, node_A, arc_key_I1A, q, k)
                ]
            )
            + pyo.value(
                ipp.instance.var_w_glljqk[
                    ("mynet", imp2_node_key, node_A, arc_key_I2A, q, k)
                ]
            )
            + pyo.value(
                ipp.instance.var_w_glljqk[
                    ("mynet", imp3_node_key, node_A, arc_key_I3A, q, k)
                ]
            )
            for k in range(tf.number_time_intervals(q))
        )

        assert flow_in[("mynet", 0, 0)] >= losses_model
    def test_arc_groups_individual_undirected(self):
        """Arc-group test with UNDIRECTED arcs: AB and CD form one group, so
        the same capacity option must be selected for both.

        Runs once per static-loss mode; expected outcome: grouping forces a
        larger (more expensive, lossier) option than individual selection.
        """

        # time frame
        q = 0
        tf = EconomicTimeFrame(
            discount_rate=3.5/100,
            reporting_periods={q: (0,1)},
            reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)},
            time_intervals={q: (0,1)},
            time_interval_durations={q: (1,1)},
        )

        # 5 nodes: one import node, four regular (source/sink) nodes
        mynet = Network()

        # import nodes
        imp_node_key = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_import_node(
            node_key=imp_node_key,
            prices={
                qpk: ResourcePrice(prices=1 + qpk[2], volumes=None)
                for qpk in tf.qpk()
            },
        )

        # A
        node_A = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 1.0})

        # B: negative base flow in k=1 means B becomes a source then,
        # so flow direction on AB reverses between intervals
        node_B = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 1.0, (q, 1): -0.5})

        # C
        node_C = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_source_sink_node(node_key=node_C, base_flow={(q, 0): 0.0, (q, 1): 0.5})

        # D
        node_D = generate_pseudo_unique_key(mynet.nodes())
        mynet.add_source_sink_node(node_key=node_D, base_flow={(q, 0): 0.5, (q, 1): -0.25})

        # *********************************************************************

        # add arcs

        # IA: preexisting, lossless supply arc into A
        mynet.add_preexisting_directed_arc(
            node_key_a=imp_node_key,
            node_key_b=node_A,
            efficiency=None,
            static_loss=None,
            capacity=1.5,
            capacity_is_instantaneous=False,
        )

        # IC: preexisting, lossless supply arc into C
        mynet.add_preexisting_directed_arc(
            node_key_a=imp_node_key,
            node_key_b=node_C,
            efficiency=None,
            static_loss=None,
            capacity=1.5,
            capacity_is_instantaneous=False,
        )

        # AB: undirected, with direction-dependent efficiencies
        efficiency_AB = {
            (q, 0): 1.00,
            (q, 1): 0.85,
        }
        efficiency_BA = {
            (q, 0): 0.95,
            (q, 1): 0.80,
        }
        # static losses per (capacity option h, assessment q, interval k)
        static_loss_AB = {
            (0, q, 0): 0.20,
            (0, q, 1): 0.25,
            (1, q, 0): 0.25,
            (1, q, 1): 0.30,
            (2, q, 0): 0.30,
            (2, q, 1): 0.35,
        }

        arcs_AB = Arcs(
            name="AB",
            efficiency=efficiency_AB,
            efficiency_reverse=efficiency_BA,
            static_loss=static_loss_AB,
            capacity=(0.85, 1.5, 2.5),
            minimum_cost=(1, 2, 3),
            specific_capacity_cost=0,
            capacity_is_instantaneous=False,
            validate=True,
        )

        arc_key_AB = mynet.add_undirected_arc(
            node_key_a=node_A, node_key_b=node_B, arcs=arcs_AB
        )

        # CD
        efficiency_CD = {
            (q, 0): 1.00,
            (q, 1): 0.85,
        }
        efficiency_DC = {(q, 0): 0.95, (q, 1): 0.80}
        static_loss_CD = {
            (0, q, 0): 0.010,
            (0, q, 1): 0.015,
            (1, q, 0): 0.015,
            (1, q, 1): 0.020,
            (2, q, 0): 0.020,
            (2, q, 1): 0.025,
        }
        arcs_CD = Arcs(
            name="CD",
            efficiency=efficiency_CD,
            efficiency_reverse=efficiency_DC,
            static_loss=static_loss_CD,
            capacity=(0.85, 1.5, 2.5),
            minimum_cost=(1, 2, 3),
            specific_capacity_cost=0,
            capacity_is_instantaneous=False,
            validate=True,
        )
        arc_key_CD = mynet.add_undirected_arc(
            node_key_a=node_C, node_key_b=node_D, arcs=arcs_CD
        )

        # arc groups: AB and CD must take the same capacity option
        arc_groups_dict = {
            0: (
                ("mynet", node_A, node_B, arc_key_AB),
                ("mynet", node_C, node_D, arc_key_CD),
            )
        }

        # identify node types
        mynet.identify_node_types()

        # solver settings
        solver_options = {}
        solver_options["relative_mip_gap"] = 0
        solver_options["absolute_mip_gap"] = 1e-4

        # no sos, regular time intervals

        for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
            # reset decisions if necessary: the network object is reused
            # across iterations, so clear any option selected last time
            if True in mynet.edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH].options_selected:
                mynet.edges[(node_A, node_B, arc_key_AB)][
                    Network.KEY_ARC_TECH].options_selected[
                        mynet.edges[(node_A, node_B, arc_key_AB)][
                            Network.KEY_ARC_TECH].options_selected.index(True)
                        ] = False
            if True in mynet.edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH].options_selected:
                mynet.edges[(node_C, node_D, arc_key_CD)][
                    Network.KEY_ARC_TECH].options_selected[
                        mynet.edges[(node_C, node_D, arc_key_CD)][
                            Network.KEY_ARC_TECH].options_selected.index(True)
                        ] = False

            ipp = self.build_solve_ipp(
                solver_options=solver_options,
                use_sos_arcs=False,
                arc_sos_weight_key=None,
                arc_use_real_variables_if_possible=False,
                use_sos_sense=False,
                sense_sos_weight_key=None,
                sense_use_real_variables_if_possible=False,
                sense_use_arc_interfaces=False,
                perform_analysis=False,
                plot_results=False,  # True,
                print_solver_output=False,
                networks={"mynet": mynet},
                time_frame=tf,
                static_losses_mode=static_losses_mode,
                mandatory_arcs=[],
                max_number_parallel_arcs={},
                arc_groups_dict=arc_groups_dict
            )

            # overview

            (
                flow_in,
                flow_in_k,
                flow_out,
                flow_in_cost,
                flow_out_revenue,
            ) = compute_cost_volume_metrics(ipp.instance, True)

            # expected values (group = this test; ind = ungrouped reference)
            capex_ind = 3
            capex_group = 4

            imp_ind = 2.912
            imp_group = 2.9210000000000003

            sdncf_group = -7.745053560176434

            sdnext_group = 0

            obj_group = sdnext_group + sdncf_group - capex_group

            losses_ind = sum(
                static_loss_AB[(1, q, k)] + static_loss_CD[(0, q, k)]
                for k in range(tf.number_time_intervals(q))
            )
            losses_group = sum(
                static_loss_AB[(1, q, k)] + static_loss_CD[(1, q, k)]
                for k in range(tf.number_time_intervals(q))
            )

            losses_model = sum(
                pyo.value(
                    ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)]
                )
                + pyo.value(
                    ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)]
                )
                for k in range(tf.number_time_intervals(q))
            )

            assert capex_group > capex_ind
            # # assert math.isclose(losses_group, losses_ind, abs_tol=1e-3)
            assert losses_group > losses_ind
            assert imp_group > imp_ind

            # all arcs have to be installed

            assert (
                True
                in ipp.networks["mynet"]
                .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH]
                .options_selected
            )

            assert (
                True
                in ipp.networks["mynet"]
                .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH]
                .options_selected
            )

            # the same option has to be selected in all arcs

            h1 = (
                ipp.networks["mynet"]
                .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH]
                .options_selected.index(True)
            )

            h2 = (
                ipp.networks["mynet"]
                .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH]
                .options_selected.index(True)
            )

            assert h1 == h2

            # the capex have to be higher than those of the best individual arc

            abs_tol = 1e-3

            assert math.isclose(
                pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol
            )

            # there should be no exports

            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)

            # the imports should be higher than with individual arcs

            abs_tol = 1e-3

            assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol)

            assert imp_group > imp_ind

            # the operating results should be lower than with an individual arc

            abs_tol = 1e-3

            assert math.isclose(
                pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol
            )

            # the externalities should be zero

            abs_tol = 1e-3

            assert math.isclose(
                pyo.value(ipp.instance.var_sdext_q[q]), sdnext_group, abs_tol=abs_tol
            )

            # the objective function should equal sdext + sdncf - capex (group)

            abs_tol = 1e-3

            assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol)

            # the imports should be greater than or equal to the losses for all arcs

            losses_model = sum(
                pyo.value(
                    ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)]
                )
                + pyo.value(
                    ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)]
                )
                for k in range(tf.number_time_intervals(q))
            )

            losses_data = sum(
                static_loss_AB[(h1, q, k)] + static_loss_CD[(h2, q, k)]
                for k in range(tf.number_time_intervals(q))
            )

            assert math.isclose(losses_model, losses_data, abs_tol=abs_tol)

            assert math.isclose(losses_data, losses_group, abs_tol=abs_tol)
the imports should be higher than with individual arcs + + abs_tol = 1e-3 + + assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol) + + assert imp_group > imp_ind + + # the operating results should be lower than with an individual arc + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol + ) + + # the externalities should be zero + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value(ipp.instance.var_sdext_q[q]), sdnext_group, abs_tol=abs_tol + ) + + # the objective function should be -6.3639758220728595-1.5 + + abs_tol = 1e-3 + + assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol) + + # the imports should be greater than or equal to the losses for all arx + + losses_model = sum( + pyo.value( + ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)] + ) + + pyo.value( + ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)] + ) + for k in range(tf.number_time_intervals(q)) + ) + + losses_data = sum( + static_loss_AB[(h1, q, k)] + static_loss_CD[(h2, q, k)] + for k in range(tf.number_time_intervals(q)) + ) + + assert math.isclose(losses_model, losses_data, abs_tol=abs_tol) + + assert math.isclose(losses_data, losses_group, abs_tol=abs_tol) + + # ************************************************************************* + # ************************************************************************* + + def test_arc_groups_individual_undirected_ref(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 4 nodes: one import node, four regular nodes + mynet = Network() + + # import nodes + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 
+ qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # A + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 1.0}) + + # B + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 1.0, (q, 1): -0.5}) + + # C + node_C = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_C, base_flow={(q, 0): 0.0, (q, 1): 0.5}) + + # D + node_D = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_D, base_flow={(q, 0): 0.5, (q, 1): -0.25}) + + # ************************************************************************** + + # add arcs + # IA + mynet.add_preexisting_directed_arc( + node_key_a=imp_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.5, + capacity_is_instantaneous=False, + ) + + # IC + mynet.add_preexisting_directed_arc( + node_key_a=imp_node_key, + node_key_b=node_C, + efficiency=None, + static_loss=None, + capacity=1.5, + capacity_is_instantaneous=False, + ) + + # AB + efficiency_AB = { + (q, 0): 1.00, + (q, 1): 0.85, + } + efficiency_BA = { + (q, 0): 0.95, + (q, 1): 0.80, + } + static_loss_AB = { + (0, q, 0): 0.20, + (0, q, 1): 0.25, + (1, q, 0): 0.25, + (1, q, 1): 0.30, + (2, q, 0): 0.30, + (2, q, 1): 0.35, + } + + arcs_AB = Arcs( + name="AB", + efficiency=efficiency_AB, + efficiency_reverse=efficiency_BA, + static_loss=static_loss_AB, + capacity=(0.85, 1.5, 2.5), + minimum_cost=(1, 2, 3), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + + arc_key_AB = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arcs_AB + ) + + # CD + efficiency_CD = { + (q, 0): 1.00, + (q, 1): 0.85, + } + efficiency_DC = {(q, 0): 0.95, (q, 1): 0.80} + static_loss_CD = { + (0, q, 0): 0.010, + (0, q, 1): 0.015, + (1, q, 0): 0.015, + (1, q, 1): 0.020, + (2, q, 0): 0.020, + (2, q, 1): 0.025, + } + 
arcs_CD = Arcs( + name="CD", + efficiency=efficiency_CD, + efficiency_reverse=efficiency_DC, + static_loss=static_loss_CD, + capacity=(0.85, 1.5, 2.5), + minimum_cost=(1, 2, 3), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + arc_key_CD = mynet.add_undirected_arc( + node_key_a=node_C, node_key_b=node_D, arcs=arcs_CD + ) + + # arc groups + arc_groups_dict = {} + + # identify node types + mynet.identify_node_types() + + # solver settings + solver_options = {} + solver_options["relative_mip_gap"] = 0 + solver_options["absolute_mip_gap"] = 1e-4 + + # no sos, regular time intervals + + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + # reset decisions if necessary + if True in mynet.edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, arc_key_AB)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, arc_key_AB)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + if True in mynet.edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_C, node_D, arc_key_CD)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_C, node_D, arc_key_CD)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options=solver_options, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=static_losses_mode, + arc_groups_dict=arc_groups_dict, + max_number_parallel_arcs={} + ) + + # overview + (flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue + ) = compute_cost_volume_metrics(ipp.instance, True) + + capex_ind = 3 + capex_group = 4 + + imp_ind = 2.912 + imp_group = 2.9210000000000003 + + sdncf_ind = -7.72035753459824 + + sdnext_ind = 0 + + obj_ind = sdnext_ind + sdncf_ind - capex_ind + + losses_ind = sum( + static_loss_AB[(1, 
q, k)] + static_loss_CD[(0, q, k)] + for k in range(tf.number_time_intervals(q)) + ) + losses_group = sum( + static_loss_AB[(1, q, k)] + static_loss_CD[(1, q, k)] + for k in range(tf.number_time_intervals(q)) + ) + print('hey') + # print(static_losses_mode) + print(ipp.networks['mynet'].edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH].options_selected) + print(ipp.networks['mynet'].edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH].options_selected) + print(ipp.instance.set_GLLJ_static_pre.pprint()) + print(ipp.instance.set_GLLJ_static_new.pprint()) + # print(ipp.static_losses_departure_node) + # print(ipp.static_losses_arrival_node) + # print(ipp.static_losses_upstream) + # print(ipp.static_losses_downstream) + losses_model = sum( + pyo.value( + ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)] + ) + + pyo.value( + ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)] + ) + for k in range(tf.number_time_intervals(q)) + ) + + assert capex_group > capex_ind + # # assert math.isclose(losses_group, losses_ind, abs_tol=1e-3) + assert losses_group > losses_ind + assert imp_group > imp_ind + + # at least one arc has to be installed + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH] + .options_selected + or True + in ipp.networks["mynet"] + .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH] + .options_selected + ) + + # the capex have to be lower than with a group of arcs + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value(ipp.instance.var_capex), capex_ind, abs_tol=abs_tol + ) + + # there should be no exports + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # the imports should be lower than with a group of arcs + + abs_tol = 1e-3 + + assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol) + + # the operating results should be lower than with an individual arc + + abs_tol = 1e-3 + + assert math.isclose( + 
pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol + ) + + # the externalities should be zero + + abs_tol = 1e-3 + + assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol) + + # the objective function should be -6.3639758220728595-1.5 + + abs_tol = 1e-3 + + assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol) + + # the imports should be greater than or equal to the losses for all arx + + assert math.isclose(losses_model, losses_ind, abs_tol=abs_tol) # ***************************************************************************** -# ***************************************************************************** +# ***************************************************************************** \ No newline at end of file diff --git a/tests/test_esipp_time.py b/tests/test_esipp_time.py index 6bdf4a9684c0ac011add335a3ba01e2e517da1f1..46bfdb2086d89b66174467e79e620d4fe53cc135 100644 --- a/tests/test_esipp_time.py +++ b/tests/test_esipp_time.py @@ -981,16 +981,12 @@ class TestTimeFrame: time_interval_durations = {0: [1, 1], 1: [1, 1]} discount_rates = { - (0,0): 3.5/100, - (0,1): 3.6/100, - (0,2): 3.7/100, - (1,0): 1.5/100, - (1,1): 1.6/100, - (1,2): 1.7/100, + 0: [3.5/100, 3.6/100, 3.7/100], + 1: [1.5/100, 1.6/100, 1.7/100], } etf = EconomicTimeFrame( - discount_rates_qp=discount_rates, + discount_rates_q=discount_rates, reporting_periods=reporting_periods, reporting_period_durations=reporting_period_durations, time_intervals=time_intervals, @@ -1050,6 +1046,40 @@ class TestTimeFrame: for df, true_df in zip(factors, true_factors): assert isclose(df, true_df, abs_tol=0.001) + # ************************************************************************* + # ************************************************************************* + + def test_discount_factor_single_period(self): + + discount_rate = 0 + + etf = EconomicTimeFrame( + discount_rate=discount_rate, + reporting_periods={ + 0: [0] + }, + 
reporting_period_durations={ + 0: [1] + }, + time_intervals={ + 0: [0] + }, + time_interval_durations={ + 0: [1] + }, + ) + + true_factors = [1] + + assessment = 0 + factors = [ + etf.discount_factor(assessment, p) + for p in etf.reporting_periods[assessment] + ] + + for df, true_df in zip(factors, true_factors): + assert isclose(df, true_df, abs_tol=0.001) + # ************************************************************************* # *************************************************************************