diff --git a/src/topupopt/problems/esipp/converter.py b/src/topupopt/problems/esipp/converter.py index fbb972d2fba798e77c3a61f0209c53c0b77a1c19..c3fb987b0bf2746fcac02fab9d50801120ebf79d 100644 --- a/src/topupopt/problems/esipp/converter.py +++ b/src/topupopt/problems/esipp/converter.py @@ -6,6 +6,7 @@ import numpy as np # local libraries, internal from .dynsys import DynamicSystem from .signal import Signal, FixedSignal +from .time import TimeFrame # ***************************************************************************** # ***************************************************************************** @@ -15,11 +16,17 @@ from .signal import Signal, FixedSignal class Converter: """A class for modular dynamic systems in an integrated energy system.""" + + # input options: + # 1) DynamicSystem object + # 2) Matrices def __init__( self, # system information sys: DynamicSystem, + # time frame + time_frame: TimeFrame, # initial conditions initial_states: np.array, # optimisation-relevant parameters diff --git a/src/topupopt/problems/esipp/dynsys.py b/src/topupopt/problems/esipp/dynsys.py index 006ae9d8cdb30fcb2fac1c4652ebb3abf46d35da..5d8a005866620ee90ba0accd4adef91fdfd26889 100644 --- a/src/topupopt/problems/esipp/dynsys.py +++ b/src/topupopt/problems/esipp/dynsys.py @@ -1,16 +1,7 @@ -# -*- coding: utf-8 -*- -""" -Created on Wed Nov 17 13:04:53 2021 - -@author: pmede -""" - # standard libraries # local libraries, external - import numpy as np - from scipy.linalg import expm, inv # local libraries, internal @@ -22,11 +13,14 @@ from scipy.linalg import expm, inv # TODO: ensure it is compatible with stateless systems - class DynamicSystem: """A class for dynamic systems described using A, B, C and D matrices.""" - + # use cases: + # 1) simulate + # 2) optimise + + # input options: # 1) the A, B, C and D matrices are provided: inputs, states and outputs (general case) # 2) the A and B matrices are provided: inputs and states (no outputs) # 3) the D matrix is provided: inputs and outputs (no states) @@ -1192,6 +1186,25 @@ class OutputlessSystem(DynamicSystem): return DynamicSystem.simulate(self, U=U, X0=X0) +# ***************************************************************************** +# ***************************************************************************** + +class DiscretisedDynamicSystem(DynamicSystem): + + # workflow: + # 1) define the system + # 2) discretise + # 3) simulate response + + # what should it do? 
+ # 1) discretise based on (q,k) tuples + # 2) simulate response + + # + + def __init__(self): + + pass # ***************************************************************************** # ***************************************************************************** diff --git a/src/topupopt/problems/esipp/network.py b/src/topupopt/problems/esipp/network.py index 78c816069aa098241e6c54e799afe5a9285bbd33..abb30f5dc4c544f7ed77d7f5684e94b92f1e4446 100644 --- a/src/topupopt/problems/esipp/network.py +++ b/src/topupopt/problems/esipp/network.py @@ -22,7 +22,6 @@ from .resource import are_prices_time_invariant # ***************************************************************************** # ***************************************************************************** - class Arcs: """A class for arc technologies in a network.""" @@ -134,7 +133,7 @@ class Arcs: """Returns True if the arc has a constant efficiency.""" if self.has_proportional_losses(): - # proportional losses + # has proportional losses if self.is_isotropic(): # is isotropic if len(set(self.efficiency.values())) == 1: diff --git a/src/topupopt/problems/esipp/problem.py b/src/topupopt/problems/esipp/problem.py index 006e1d7eebdcee930df20da72371f36b65968dd3..28ae2faa7d1d8fbd5c757e01c913acd6c71a69ae 100644 --- a/src/topupopt/problems/esipp/problem.py +++ b/src/topupopt/problems/esipp/problem.py @@ -11,11 +11,11 @@ import pyomo.environ as pyo # local libraries, internal from .model import create_model from ...solvers.interface import SolverInterface -from ...data.finance.invest import discount_factor from .network import Network, Arcs from .system import EnergySystem from .resource import ResourcePrice from .time import EconomicTimeFrame +from .time import entries_are_invariant # ***************************************************************************** # ***************************************************************************** @@ -23,6 +23,7 @@ from .time import EconomicTimeFrame # TODO: allow users to define how fixed components in the objective function # are handled (using a variable equal to one or by excluding them altogether) +# TODO: create a log class InfrastructurePlanningProblem(EnergySystem): """A class for optimisation of infrastructure planning problems.""" @@ -3526,6 +3527,8 @@ class InfrastructurePlanningProblem(EnergySystem): # ************************************************************************* # ************************************************************************* + + # TODO: do it for converters too def import_results( self, @@ -3612,320 +3615,510 @@ class InfrastructurePlanningProblem(EnergySystem): # ************************************************************************* # ************************************************************************* - - -# ***************************************************************************** -# ***************************************************************************** - - -def simplify_peak_total_problem( - problem: InfrastructurePlanningProblem, -) -> InfrastructurePlanningProblem: - # ************************************************************************* - # ************************************************************************* - - # TODO: make this compatible with multiple assessments - # check if the simplification is feasible - if not is_peak_total_problem(problem): - # it is not possible to simplify the problem - return problem - - # ************************************************************************* - # 
************************************************************************* + def peak_total_assessments(self) -> dict: + """Returns the peak+total compatible assessments and the respective intervals.""" + + # output: a dict with q keys and k values + + # conditions: + # 1) prices within each period have to be invariant in time and volume + # - how to quickly determine this? + # 2) there can only be import or export nodes, not both + # 3) the range of potential flows through each arc can be represented + # using a subset of the intervals, for at least one assessment + # - arc efficiency is constant in time, for each arc and assessment + # - the variation of static arc losses over time correlates with the + # variation of the maximum potential flow through the arc over time + # - there are no converters under consideration - # identify the peak assessment - ref_found = False - for key, net in problem.networks.items(): - for node_key in net.source_sink_nodes: - q_ref, k_ref = sorted( - ( - (value, key) - for key, value in net.nodes[node_key][ - Network.KEY_NODE_BASE_FLOW - ].items() - ), - reverse=True, - )[0][1] - ref_found = True - break - if ref_found: - break + # ********************************************************************* + + # initialise the set of peak total assessments + set_q = set(self.time_frame.assessments) + # initialise the output dictionary + out_dict = {} + + # ********************************************************************* + + # there can only be import or export nodes, not both + + # check each network + for net_key, net in self.networks.items(): + if len(set(net.import_nodes).intersection(net.export_nodes)) >= 1: + # return an empty dict + return out_dict + + # ********************************************************************* + + # there can be no converters (might be relaxed) + if len(self.converters) != 0: + # there are converters + return out_dict + + # ********************************************************************* + + # prices have to be invariant per period and assessment ((q,p) tuple) + + # for each assessment + for q in self.time_frame.assessments: + # assume it is compatible + q_meets_criteria = True + # build the (q,p,k) tuples for this assessment and respective periods + qpk_qp_dict = { + (q,p): tuple( + (q,p,k) for k in self.time_frame.time_intervals[q] + ) + for p in self.time_frame.reporting_periods[q] + } + # check each network + for net_key, net in self.networks.items(): + # check import node prices + for imp_node_key in net.import_nodes: + # check if the node prices are invariant within each (q,p) + for qp, qpk_tuples in qpk_qp_dict.items(): + if not entries_are_invariant( + net.nodes[imp_node_key][Network.KEY_NODE_PRICES], + qpk_tuples + ): + # entries are not time invariant + set_q.remove(q) + q_meets_criteria = False + break # out of qpk_qp_dict.items() for loop + # prices are time invariant, check if they are volume + # invariant as well: checking one entry will be enough + if not net.nodes[imp_node_key][Network.KEY_NODE_PRICES][ + qpk_tuples[0] + ].is_volume_invariant(): + # it is not volume invariant + set_q.remove(q) + q_meets_criteria = False + break # out of qpk_qp_dict.items() for loop + if not q_meets_criteria: + break # out of net.import_nodes for loop + if not q_meets_criteria: + break # out of networks.items() for loop + + # check export node prices + for exp_node_key in net.export_nodes: + # check if the node prices are invariant within each (q,p) + for qp, qpk_tuples in qpk_qp_dict.items(): + if not 
entries_are_invariant( + net.nodes[exp_node_key][Network.KEY_NODE_PRICES], + qpk_tuples + ): + # entries are not time invariant + set_q.remove(q) + q_meets_criteria = False + break # out of qpk_qp_dict.items() for loop + # prices are time invariant, check if they are volume + # invariant as well: checking one entry will be enough + if not net.nodes[exp_node_key][Network.KEY_NODE_PRICES][ + qpk_tuples[0] + ].is_volume_invariant(): + # it is not volume invariant + set_q.remove(q) + q_meets_criteria = False + break # out of qpk_qp_dict.items() for loop + if not q_meets_criteria: + break # out of net.export_nodes for loop + if not q_meets_criteria: + break # out of networks.items() for loop + + # ********************************************************************* + + # for each arc and assessment, the efficiency must be time invariant + + # arc efficiencies have to be time invariant per assessment + # for each assessment + for q in self.time_frame.assessments: + # if it is still under consideration + if q not in set_q: + continue + # assume it is compatible + q_meets_criteria = True + # build the (q,k) tuples for this assessment + qk_tuples = tuple((q,k) for k in self.time_frame.time_intervals[q]) + for key, net in self.networks.items(): + # check each edge + for edge_key in net.edges(keys=True): + if not entries_are_invariant( + net.edges[edge_key][Network.KEY_ARC_TECH].efficiency, + qk_tuples + ): + # entries are not time invariant, remove q from consideration + set_q.remove(q) + q_meets_criteria = False + break # break out of net.edges() for loop + if not q_meets_criteria: + break # break out of self.networks.items(): + + # ********************************************************************* + + # find the critical intervals for each compatible assessment + + # TODO: revise this method to consider static losses + + # for each assessment still under consideration + for q in set_q: + # assume it is compatible + q_meets_criteria = True + # build the (q,k) tuples for this assessment + qk_tuples = tuple((q,k) for k in self.time_frame.time_intervals[q]) + # initialise the intervals for this assessment + set_k = set() + # for each network + for key, net in self.networks.items(): + # for each source/sink node + for node_key in net.source_sink_nodes: + # identify the nonzero extremes (minima first) + static_flow_sorted = sorted( + ((v, k) + for (_, k), v in net.nodes[node_key][Network.KEY_NODE_BASE_FLOW].items() + if v != 0 # there should be at least one non-zero entry + ) + ) + if static_flow_sorted[0][0] < 0: + # negative minima: add the interval to the set + set_k.add(static_flow_sorted[0][1]) + # add the other extreme to the list if it is positive + if static_flow_sorted[-1][0] > 0: + # positive maxima: add it to the set too + set_k.add(static_flow_sorted[-1][1]) + # if not positive: do not add it to the set + else: + # minima is positive: add the maxima to the set + set_k.add(static_flow_sorted[-1][1]) + # # for each edge + # for edge_key in net.edges(keys=True): + # # check for static losses + # if net.edges[edge_key][Network.KEY_ARC_TECH].has_static_losses(): + # # it has static losses, check each option + # for h in range( + # net.edges[edge_key][ + # Network.KEY_ARC_TECH + # ].number_options() + # ): + # # sort the static losses to identify the extremes + # # the maximum appears last + # static_loss_sorted = sorted( + # ((net.edges[edge_key][Network.KEY_ARC_TECH].static_loss[(h, q, k)], k) + # for q, k in qk_tuples + # ) + # ) + # # add the interval for the maxima to the set + # 
set_k.add(static_loss_sorted[-1][1]) + # ************************************************************* + # all checks passed so far: move on to the next + out_dict[q] = list(set_k) + + # ********************************************************************* + + # return statement + return out_dict # ************************************************************************* # ************************************************************************* - # define the peak assessment and the peak interval - q_peak = "peak" - k_peak = 0 - # define the total assessment and the total interval - q_total = "total" - k_total = 0 - + def has_peak_total_assessments(self) -> bool: + """Returns True if there are peak+total assessments in the problem.""" + return len(self.peak_total_assessments()) >= 1 + # ************************************************************************* # ************************************************************************* - - # create one peak scenario per polarity within each network - # create one total scenario per polarity within each network - for key, net in problem.networks.items(): - # 1) losses in arcs: - # 1.1) sum the static losses for all time intervals (total assessment) - # 1.2) insert the static losses for the peak assessment - # 1.3) remove all static losses but for the peak assessment - # 1.4) insert the static losses for the total assessment - for arc_key in net.edges(keys=True): - # check if the arc has static losses - if net.edges[arc_key][Network.KEY_ARC_TECH].has_static_losses(): - # 1.1) sum the static losses for all time intervals - loss_sum = { - (h, q_total, k_total): sum( - net.edges[arc_key][Network.KEY_ARC_TECH].static_loss[ - (h, q_ref, k) - ] - for k in range(problem.number_time_intervals[q_ref]) - ) - for h in range( - net.edges[arc_key][Network.KEY_ARC_TECH].number_options() - ) - } - # 1.2) insert the static losses for the peak assessment - net.edges[arc_key][Network.KEY_ARC_TECH].static_loss.update( - { - (h, q_peak, k_peak): ( + + def simplify_peak_total_assessments(self, qk_dict: dict=None): + """Simplifies the problem in the presence of peak+total assessments.""" + + # ********************************************************************* + # ********************************************************************* + + # check if the qk dict was defined + if type(qk_dict) == type(None): + # no tuples were defined, identify them automatically + qk_dict = self.peak_total_assessments() + + # ********************************************************************* + # ********************************************************************* + + # critical assessments + q_ref_tuples = tuple(qk_dict.keys()) + qk_ref_tuples = tuple( + (q, k) for q in q_ref_tuples for k in qk_dict[q] + ) + # total assessments: one assessment per reference assessment + qk_total_tuples = tuple( + ('total-'+str(q), 0) for q in q_ref_tuples + ) + q_total_tuples = tuple(qk[0] for qk in qk_total_tuples) + # peak assessments: one assessment only, uses all ref intervals + q_peak = 'peak' + qk_peak_tuples = tuple( + (q_peak, i) + for i, qk in enumerate(qk_ref_tuples) + ) + + # ********************************************************************* + # ********************************************************************* + + # prepare the data + + # for each network + for key, net in self.networks.items(): + # for each arc + for arc_key in net.edges(keys=True): + + # static losses + if net.edges[arc_key][Network.KEY_ARC_TECH].has_static_losses(): + # compute the total static losses + 
new_data = { + (h, *qk_total): sum( net.edges[arc_key][Network.KEY_ARC_TECH].static_loss[ - (h, q_ref, k_ref) + (h, q_ref, k) ] + for k in self.time_frame.time_intervals[q_ref] ) + for q_ref, qk_total in zip(q_ref_tuples, qk_total_tuples) for h in range( - net.edges[arc_key][Network.KEY_ARC_TECH].number_options() + net.edges[arc_key][ + Network.KEY_ARC_TECH + ].number_options() ) } - ) - # 1.3) remove all static losses but for the peak assessment - for hqk in tuple(net.edges[arc_key][Network.KEY_ARC_TECH].static_loss): - if hqk[1:] != (q_peak, k_peak): - net.edges[arc_key][Network.KEY_ARC_TECH].static_loss.pop(hqk) - # 1.4) insert the static losses for the total assessment - net.edges[arc_key][Network.KEY_ARC_TECH].static_loss.update(loss_sum) - - # efficiency - # 1.2) insert the efficiencies for the peak and total assessments - # 1.3) remove all efficiencies but for the peak and total assessm. - - if net.edges[arc_key][Network.KEY_ARC_TECH].has_proportional_losses(): - # peak assessment efficiency - net.edges[arc_key][Network.KEY_ARC_TECH].efficiency.update( - { - (q_peak, k_peak): ( - net.edges[arc_key][Network.KEY_ARC_TECH].efficiency[ - (q_ref, k_ref) - ] + # add data for the peak assessment + new_data.update({ + (h, *qk_peak): net.edges[arc_key][Network.KEY_ARC_TECH].static_loss[ + (h, *qk_ref)] + for qk_peak, qk_ref in zip(qk_peak_tuples, qk_ref_tuples) + for h in range( + net.edges[arc_key][ + Network.KEY_ARC_TECH + ].number_options() ) - } - ) - # total assessment efficiency - net.edges[arc_key][Network.KEY_ARC_TECH].efficiency.update( - { - (q_total, k_total): ( + }) + # remove dict entries associated with the reference assessments + for qk in qk_ref_tuples: + for h in range( + net.edges[arc_key][ + Network.KEY_ARC_TECH + ].number_options() + ): + net.edges[arc_key][Network.KEY_ARC_TECH].static_loss.pop((h, *qk)) + # update the static loss dict with the new data + net.edges[arc_key][Network.KEY_ARC_TECH].static_loss.update(new_data) + # the arc has no static losses, continue + + # proportional losses + if net.edges[arc_key][Network.KEY_ARC_TECH].has_proportional_losses(): + # get the efficiency values for the total assessments + # the efficiency should be constant so one interval will do + efficiency_dict = { + qk_total: ( net.edges[arc_key][Network.KEY_ARC_TECH].efficiency[ - (q_ref, k_ref) + (q_ref, qk_dict[q_ref][0]) ] ) + for qk_total, q_ref in zip(qk_total_tuples, q_ref_tuples) } - ) - for qk in tuple(net.edges[arc_key][Network.KEY_ARC_TECH].efficiency): - if qk != (q_peak, k_peak) and qk != (q_total, k_total): + # add data for the peak assessment + efficiency_dict.update({ + qk_peak: net.edges[arc_key][Network.KEY_ARC_TECH].efficiency[ + qk_ref_tuples[0] + ] + for qk_peak in qk_peak_tuples + }) + # remove dict entries associated with the reference assessments + for qk in qk_ref_tuples: net.edges[arc_key][Network.KEY_ARC_TECH].efficiency.pop(qk) - - # 2) prices in import/export nodes: - # 2.1) determine the price for the total assessment - # 2.2) insert the prices for the peak assessment - # 2.3) remove all prices but those for the peak assessment - # 2.4) insert the prices for the total assessment - for node_key in net.nodes(): - # 2.1) determine the price for the total assessment - # 2.2) insert the prices for the peak assessment - if node_key in net.import_nodes or node_key in net.export_nodes: - # import node: - # - get the current price - # - insert the peak price = 0 - # export node: - # - get the current price - # - insert the peak price = 0 + # update the efficiency 
dict to include the total assessme. + net.edges[arc_key][Network.KEY_ARC_TECH].efficiency.update( + efficiency_dict + ) + + # handle import/export nodes + for node_key in set(net.import_nodes).union(net.export_nodes): + # import/export node: determine price (invariant) total_price = { - (q_total, p, k_total): ( - net.nodes[node_key][Network.KEY_NODE_PRICES][(q_ref, p, k_ref)] + (qk_total[0], p, qk_total[1]): ( + net.nodes[node_key][Network.KEY_NODE_PRICES][(q_ref, p, qk_dict[q_ref][0])] ) - for p in problem.time_frame.reporting_periods[q_ref] + for q_ref, qk_total in zip(q_ref_tuples, qk_total_tuples) + for p in self.time_frame.reporting_periods[q_ref] } - net.nodes[node_key][Network.KEY_NODE_PRICES].update( - { - (q_peak, p, k_peak): ResourcePrice(prices=0) - for p in problem.time_frame.reporting_periods[q_ref] - } - ) - else: # other nodes - continue - # 2.3) remove all prices but those for the peak assessment - for qpk in tuple(net.nodes[node_key][Network.KEY_NODE_PRICES].keys()): - if qpk[0] != q_peak and qpk[2] != k_peak: - net.nodes[node_key][Network.KEY_NODE_PRICES].pop(qpk) - # 2.4) insert the prices for the total assessment - net.nodes[node_key][Network.KEY_NODE_PRICES].update(total_price) - - # 3) flows in other nodes: - # 3.1) determine the flow volume for the total assessment - # 3.2) insert the prices and base flows for the peak scenario - # 3.3) remove all but the peak scenario and intervals - # 3.4) insert the flow volume for the total assessment - - for node_key in net.source_sink_nodes: - # 3.1) determine the flow volume for the total assessment - total_flow = { - (q_total, k_total): sum( - net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q_ref, k)] - for k in range( - problem.time_frame.number_time_intervals(q_ref) - ) - ) - } - # 3.2) insert the prices and base flows for the peak scenario - net.nodes[node_key][Network.KEY_NODE_BASE_FLOW].update( - { - (q_peak, k_peak): net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][ - (q_ref, k_ref) - ] + # no data for the peak assessment + # remove dict entries associated with the reference assessments + for qk in qk_ref_tuples: + for p in self.time_frame.reporting_periods[qk[0]]: + net.nodes[node_key][Network.KEY_NODE_PRICES].pop((qk[0],p,qk[1])) + # update the price dict + net.nodes[node_key][Network.KEY_NODE_PRICES].update(total_price) + + # handle source/sink nodes + for node_key in net.source_sink_nodes: + # determine the flow volume for the total assessment + new_data = { + qk_total: sum( + net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q_ref, k)] + for k in range( + self.time_frame.number_time_intervals(q_ref) + ) + ) + for q_ref, qk_total in zip(q_ref_tuples, qk_total_tuples) } - ) - # 3.3) remove all but the peak scenario and intervals - for qk in tuple(net.nodes[node_key][Network.KEY_NODE_BASE_FLOW]): - if qk != (q_peak, k_peak): + # add data for the peak assessment + new_data.update({ + qk_peak: net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][qk_ref] + for qk_peak, qk_ref in zip(qk_peak_tuples, qk_ref_tuples) + }) + # remove dict entries associated with the reference assessments + for qk in qk_ref_tuples: net.nodes[node_key][Network.KEY_NODE_BASE_FLOW].pop(qk) - # 3.4) insert the flow volume for the total assessment - net.nodes[node_key][Network.KEY_NODE_BASE_FLOW].update(total_flow) - - # ************************************************************************* - # ************************************************************************* + # update the dict for the base flow needs + 
net.nodes[node_key][Network.KEY_NODE_BASE_FLOW].update(new_data) - # update the assessments, reporting periods, intervals and segments - - # assessments: q_peak and q_total - # reporting_periods: same as before, applied to q_total only (P_q[q=q_peak]=empty set) - # reporting period durations: same as before - # intervals: q_peak has 1, q_total has 1 - # interval duration: q_peak, q_total should be the sum of all intervals - - problem.time_frame = EconomicTimeFrame( - discount_rates_q={ - q_peak: [], - q_total: [problem.time_frame.discount_rate(q_ref, p) - for p in problem.time_frame.reporting_periods[q_ref]] - }, - reporting_periods={ - q_peak: [], - q_total: problem.time_frame.reporting_periods[q_ref], - }, - reporting_period_durations={ - q_peak: [], - q_total: problem.time_frame.reporting_period_durations[q_ref], - }, - time_intervals={ - q_peak: [k_peak], - q_total: [k_total], - }, - time_interval_durations={ - q_peak: [problem.time_frame.time_interval_durations[q_ref][k_ref]], - q_total: [sum(problem.time_frame.time_interval_durations[q_ref])], + # ********************************************************************* + # ********************************************************************* + + # define the new time frame + # - 1 total assessment per P&T assessment (1 interval) + # - 1 peak assessment (1 or more intervals) + # - non P&T assessments (stay the same) + + # discount rates: + # - non P&T assessments stay the same + # - total assessments use the discount rates from P&T assessments + # - peak assessments have no discount rates + discount_rates_q = { + q: [self.time_frame.discount_rate(q, p) + for p in self.time_frame.reporting_periods[q]] + for q in self.time_frame.assessments + if q not in q_ref_tuples } - ) + discount_rates_q.update({ + q_total: [self.time_frame.discount_rate(q_ref, p) + for p in self.time_frame.reporting_periods[q_ref]] + for q_total, q_ref in zip(q_total_tuples, q_ref_tuples) + }) + discount_rates_q.update({q_peak: []}) + + # time intervals: + # - intervals for non-P&T assessments stay the same + # - total assessments have a single interval numbered 0 + # - peak intervals are numbered consecutively + time_intervals = { + q: self.time_frame.time_intervals[q] + for q in self.time_frame.assessments + if q not in q_ref_tuples + } + time_intervals.update({ + q_total: [k_total] + for q_total, k_total in qk_total_tuples + }) + time_intervals.update({ + q_peak: [k for _, k in qk_peak_tuples] + }) + + # time interval durations: + # - intervals for non-P&T assessments stay the same + # - total assessments represent the entire duration of a P&T assessment + # - peak intervals stay the same as in the respective P&T assessments + time_interval_durations = { + q: self.time_frame.time_interval_durations[q] + for q in self.time_frame.assessments + if q not in q_ref_tuples + } + time_interval_durations.update({ + q_total: [sum(self.time_frame.time_interval_durations[q_ref])] + for q_ref, q_total in zip(q_ref_tuples, q_total_tuples) + }) + time_interval_durations.update({ + q_peak: [self.time_frame.time_interval_durations[q_ref][k_ref] + for q_ref, k_ref in qk_ref_tuples + ] + }) + + # reporting periods: + # - non P&T assessments stay the same + # - total assessments cover the same periods as the respective P&T assessment + # - peak assessments do not (need to) cover any period + reporting_periods = { + q: self.time_frame.reporting_periods[q] + for q in self.time_frame.assessments + if q not in q_ref_tuples + } + reporting_periods.update({ + q_total: 
self.time_frame.reporting_periods[q_ref] + for q_ref, q_total in zip(q_ref_tuples, q_total_tuples) + }) + reporting_periods.update({q_peak: []}) + + # reporting period durations: + reporting_period_durations = { + q: self.time_frame.reporting_period_durations[q] + for q in self.time_frame.assessments + if q not in q_ref_tuples + } + reporting_period_durations.update({ + q_total: self.time_frame.reporting_period_durations[q_ref] + for q_ref, q_total in zip(q_ref_tuples, q_total_tuples) + }) + reporting_period_durations.update({q_peak: []}) + + # - non-P&T assessments are not affected + # - peak assessments get a weight of 1 (possibly through omission) + # - total assessments receive the weight of the respective reference assessment + + # self.param_c_wgt_q + new_assessment_weights = { + q: self.assessment_weights[q] if q in self.assessment_weights else 1 + for q in self.time_frame.assessments # original assessments + if q not in q_ref_tuples # non-P&T + #if q != q_peak + } + new_assessment_weights.update({ + q_total: self.assessment_weights[q_ref] if q_ref in self.assessment_weights else 1 + for q_total, q_ref in zip(q_total_tuples, q_ref_tuples) + }) + new_assessment_weights[q_peak] = 1 + self.assessment_weights = new_assessment_weights + + # create the object + self.time_frame = EconomicTimeFrame( + discount_rates_q=discount_rates_q, + reporting_periods=reporting_periods, + reporting_period_durations=reporting_period_durations, + time_intervals=time_intervals, + time_interval_durations=time_interval_durations + ) + + # TODO: include regular time steps as a condition - # average time interval - problem.average_time_interval = { - q: mean(problem.time_frame.time_interval_durations[q]) - for q in problem.time_frame.assessments - } - # normalised time interval duration - # problem.normalised_time_interval_duration = { - # (q,k): duration/problem.average_time_interval[q] - # for q in problem.assessment_keys - # for k, duration in enumerate(problem.time_intervals[q]) - # } - problem.normalised_time_interval_duration = { - (q_peak, k_peak): 1, - (q_total, k_total): ( - sum(problem.time_frame.time_interval_durations[q_total]) - / problem.time_frame.time_interval_durations[q_peak][k_total] - ), - } - + # average time interval + self.average_time_interval = { + q: mean(self.time_frame.time_interval_durations[q]) + for q in self.time_frame.assessments + } + + # normalised time interval durations + # - for non P&T assessments, factors are calculated using the average + # - for peak assessments, factors are calculated using the average + # - for total assessments, factors are calculated using the total dura- + # tion of the intervals composing the assessment. Why? 
By adding up the + # flow needs of multiple intervals, amplitudes also need to be adjusted + self.normalised_time_interval_duration = { + (q,k): dt/self.average_time_interval[q] if q not in q_total_tuples else dt/self.average_time_interval[q_peak] + for q in self.time_frame.assessments + for k, dt in zip( + self.time_frame.time_intervals[q], + self.time_frame.time_interval_durations[q] + ) + } + + # ********************************************************************* + # ********************************************************************* + # ************************************************************************* # ************************************************************************* - # return the modified problem - return problem - - -# ***************************************************************************** -# ***************************************************************************** - - -def is_peak_total_problem(problem: InfrastructurePlanningProblem) -> bool: - """Returns True if the problem only concerns peak capacity and volume.""" - - # conditions: - # 1) maximum congestion occurs simultaneously across the network - # - corollary: dynamic behaviours do not change the peaks - # - corollary: arc efficiencies are constant? - # - simplifying assumption: no proportional losses in the network - # - simplifying assumption: no converters or only stateless ones - # - simplifying assumption: only source or sink nodes, no hybrid ones - # 2) the time during which maximum congestion occurs can be determined - # 3) energy prices are constant in time and volume (per assessment) - - # check: energy prices are constant in time and volume - for key, net in problem.networks.items(): - # check import nodes - for imp_node_key in net.import_nodes: - # is an import node, check if it is time invariant - if not net.nodes[imp_node_key][Network.KEY_NODE_PRICES_TIME_INVARIANT]: - return False # is not time invariant - # it is time invariant, but is it volume invariant? check any qpk - for qpk in net.nodes[imp_node_key][Network.KEY_NODE_PRICES]: - if not net.nodes[imp_node_key][Network.KEY_NODE_PRICES][ - qpk - ].is_volume_invariant(): - # it is not volume invariant - return False - # if the entries are time invariant, checking one will do - break - # check export nodes - for exp_node_key in net.export_nodes: - # is an import node, check if it is time invariant - if not net.nodes[exp_node_key][Network.KEY_NODE_PRICES_TIME_INVARIANT]: - return False # is not time invariant - # it is time invariant, but is it volume invariant? 
check any qpk - for qpk in net.nodes[exp_node_key][Network.KEY_NODE_PRICES]: - if not net.nodes[exp_node_key][Network.KEY_NODE_PRICES][ - qpk - ].is_volume_invariant(): - # it is not volume invariant - return False - # if the entries are time invariant, checking one will do - break - # check: no converters - if len(problem.converters) != 0: - # there are converters = cannot be simplified (might be relaxed later) - return False - - # check: arc efficiencies must be constant in time - for key, net in problem.networks.items(): - # check each edge - for edge_key in net.edges(keys=True): - if not net.edges[edge_key][ - Network.KEY_ARC_TECH].has_constant_efficiency(): - return False # does not have constant efficiency, return False - return True # all conditions are true - - # ***************************************************************************** # ***************************************************************************** diff --git a/src/topupopt/problems/esipp/resource.py b/src/topupopt/problems/esipp/resource.py index a6ce9d37cdf76de402f8ed2f6b5ef09faaa4042c..bcd49ad2be59073b9c76ca653b54861dc82f1fa3 100644 --- a/src/topupopt/problems/esipp/resource.py +++ b/src/topupopt/problems/esipp/resource.py @@ -226,12 +226,27 @@ class ResourcePrice: # ************************************************************************* # ************************************************************************* + + def __eq__(self, o) -> bool: + """Returns True if a given ResourcePrice is equivalent to another.""" + return self.is_equivalent(o) + + def __hash__(self): + return hash( + tuple(( + self.number_segments(), + tuple(self.prices), + tuple(self.volumes) + )) + ) + # ************************************************************************* + # ************************************************************************* # ***************************************************************************** # ***************************************************************************** - +# TODO: method to determine if qpk-keyed dict is time-invariant per q and p def are_prices_time_invariant(resource_prices_qpk: dict) -> bool: """Returns True if all prices are identical per time interval.""" # check if there is only one or no (q,p,k) entry diff --git a/src/topupopt/problems/esipp/time.py b/src/topupopt/problems/esipp/time.py index 308f7d363814f653d68b1f57576abc467a0dc637..7db047cca5783b790e3de5484a4ea56032d1f39a 100644 --- a/src/topupopt/problems/esipp/time.py +++ b/src/topupopt/problems/esipp/time.py @@ -307,3 +307,14 @@ class EconomicTimeFrame(TimeFrame): # ***************************************************************************** # ***************************************************************************** + +def entries_are_invariant(data: dict, keys: tuple = None) -> bool: + "Returns True if all (identified) entries in the dict are identical." 
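+    # used by InfrastructurePlanningProblem.peak_total_assessments() to check
+    # whether price and efficiency entries are constant across a set of
+    # (q,p,k) or (q,k) keys:
+    # - keys is None: every entry in data is considered
+    # - keys is a tuple: only entries whose key appears in keys are considered
+    # example call (hypothetical values):
+    # entries_are_invariant({('q0', 0): 0.9, ('q0', 1): 0.9}, (('q0', 0), ('q0', 1)))
+    # note: callers evaluate the result in a boolean context, and the set
+    # built below is only falsy when no entries are selected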
+ return set( + value for key, value in data.items() + ) if type(keys) == type(None) else set( + value for key, value in data.items() if key in keys + ) + +# ***************************************************************************** +# ***************************************************************************** diff --git a/tests/examples_esipp.py b/tests/examples_esipp.py deleted file mode 100644 index b70f4ff76b067831f765d9a0c16fbdd0a93853a5..0000000000000000000000000000000000000000 --- a/tests/examples_esipp.py +++ /dev/null @@ -1,1240 +0,0 @@ -# imports - -# standard - -import random as rand - -from statistics import mean - -import math - -# local - -import numpy as np - -import networkx as nx - -# imprt src.topupopt.problems.esipp as ipp - -import src.topupopt.problems.esipp.utils as utils - -from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem - -from src.topupopt.problems.esipp.network import Arcs, Network - -from src.topupopt.problems.esipp.resource import ResourcePrice - -# TODO: replace this set of examples with more deterministic ones - -# ****************************************************************************** -# ****************************************************************************** - - -def examples( - solver: str, - solver_options: dict = None, - seed_number: int = None, - init_aux_sets: bool = False, -): - # test a generic mvesipp problem using the original classes - - # termination criteria - - solver_timelimit = 60 - - solver_abs_mip_gap = 0.001 - - solver_rel_mip_gap = 0.01 - - if type(solver_options) == dict: - solver_options.update( - { - "time_limit": solver_timelimit, - "relative_mip_gap": solver_rel_mip_gap, - "absolute_mip_gap": solver_abs_mip_gap, - } - ) - - else: - solver_options = { - "time_limit": solver_timelimit, - "relative_mip_gap": solver_rel_mip_gap, - "absolute_mip_gap": solver_abs_mip_gap, - } - - # ************************************************************************** - - # no sos, regular time intervals - - example_generic_problem( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - sos_weight_key=InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE, - seed_number=seed_number, - perform_analysis=True, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - init_aux_sets=init_aux_sets, - ) - - # sos, cost as weight, regular time intervals - - example_generic_problem( - solver=solver, - solver_options=solver_options, - use_sos_arcs=True, - sos_weight_key=InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST, - seed_number=seed_number, - perform_analysis=False, - plot_results=False, - print_solver_output=False, - irregular_time_intervals=False, - init_aux_sets=init_aux_sets, - ) - - # sos, capacity as weight, regular time intervals - - example_generic_problem( - solver=solver, - solver_options=solver_options, - use_sos_arcs=True, - sos_weight_key=InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP, - seed_number=seed_number, - perform_analysis=False, - plot_results=False, - print_solver_output=False, - irregular_time_intervals=False, - init_aux_sets=init_aux_sets, - ) - - # sos, specific minimum cost as weight, irregular time intervals - - example_generic_problem( - solver=solver, - solver_options=solver_options, - use_sos_arcs=True, - sos_weight_key=InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST, - seed_number=seed_number, - perform_analysis=False, - plot_results=False, - print_solver_output=False, - irregular_time_intervals=True, - 
init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** -# ****************************************************************************** -# ****************************************************************************** - - -def example_generic_problem( - solver: str = "glpk", - solver_options: dict = None, - use_sos_arcs: bool = False, - sos_weight_key: str = "cost", - seed_number: int = None, - perform_analysis: bool = False, - plot_results: bool = False, - print_solver_output: bool = False, - irregular_time_intervals: bool = False, - init_aux_sets: bool = False, -): - number_periods = 2 - - number_intraperiod_time_intervals = 5 - - discount_rates = tuple([0.035 for p in range(number_periods)]) - - planning_horizon = [ - 365 * 24 * 3600 for p in range(number_periods) - ] # intra-period, of course - - if irregular_time_intervals: - # TODO: adjust demand/supply levels - - time_step_max_relative_variation = 0.25 - - intraperiod_time_interval_duration = [ - (planning_horizon[0] / number_intraperiod_time_intervals) - * ( - 1 - + (k / (number_intraperiod_time_intervals - 1) - 0.5) - * time_step_max_relative_variation - ) - for k in range(number_intraperiod_time_intervals) - ] - - else: - intraperiod_time_interval_duration = [ - planning_horizon[0] / number_intraperiod_time_intervals - for k in range(number_intraperiod_time_intervals) - ] - - # time weights - - # average time interval duration - - average_time_interval_duration = round(mean(intraperiod_time_interval_duration)) - - # relative weight of time period - - # one interval twice as long as the average is worth twice - # one interval half as long as the average is worth half - - # time_weights = [ - # [time_period_duration/average_time_interval_duration - # for time_period_duration in intraperiod_time_interval_duration] - # for p in range(number_periods)] - - time_weights = None - - # create problem object - - ipp = InfrastructurePlanningProblem( - name="problem", - discount_rates={0: discount_rates}, - reporting_periods={0: tuple(i for i in range(number_periods))}, - time_intervals={0: tuple(dt for dt in intraperiod_time_interval_duration)}, - time_weights=time_weights, - ) - - # add networks and systems - - ipp = create_generic_networks(ipp, seed_number) - - # set up the use of sos, if necessary - - if use_sos_arcs: - for network_key in ipp.networks: - for arc_key in ipp.networks[network_key].edges(keys=True): - if ( - ipp.networks[network_key] - .edges[arc_key][Network.KEY_ARC_TECH] - .has_been_selected() - ): - continue - - ipp.use_sos1_for_arc_selection( - network_key, - arc_key, - use_real_variables_if_possible=False, - sos1_weight_method=sos_weight_key, - ) - - # instantiate - - ipp.instantiate(initialise_ancillary_sets=init_aux_sets) - - # optimise - - if print_solver_output: - ipp.instance.pprint() - - out = ipp.optimise( - solver_name=solver, - solver_options=solver_options, - output_options={}, - print_solver_output=print_solver_output, - ) - - if out: - print("The optimisation was successful. Running post-optimisation analysis.") - - # run tests - - utils.run_mvesipp_analysis( - ipp, - ipp.instance, - analyse_problem=perform_analysis, - analyse_results=perform_analysis, - ) - - else: - print("The optimisation failed. 
Skipping results analysis.") - - # run tests - - utils.run_mvesipp_analysis( - ipp, ipp.instance, analyse_problem=perform_analysis, analyse_results=False - ) - - # ************************************************************************** - # ************************************************************************** - - # print results - - if plot_results: - utils.plot_mves(ipp, filepath="/another_folder/", filename_radical="network_") - - # ************************************************************************** - # ************************************************************************** - - # return something - - return True - - -# ****************************************************************************** -# ****************************************************************************** - - -def generic_problem_get_arc_techs( - number_time_intervals, - network_order, - network_name, - arc_tech_efficiencies, - number_arc_technologies, - peak_flow, - n1, - n2, - distance, -): - min_efficiency = min(arc_tech_efficiencies.values()) - - # note: the network order needs to be accurate - - capacity = [ - peak_flow - * (1 / (min_efficiency**network_order)) - * (k + 1) - / number_arc_technologies - for k in range(number_arc_technologies) - ] - - min_cost = [ - (k + 1) * distance * 1e3 * (1 + rand.random()) - for k in range(number_arc_technologies) - ] - - new_arc_tech = Arcs( - name=(network_name + "_arc_tech_n" + str(n1) + "_n" + str(n2)), - efficiency=arc_tech_efficiencies, - efficiency_reverse=None, - static_loss=None, - capacity=capacity, - minimum_cost=min_cost, - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=False, - ) - - # return - - return new_arc_tech - - -# ****************************************************************************** -# ****************************************************************************** - - -def add_arc_this_way( - network, - network_order, - ipp, - order_boost, - network_names, - g, - node_start, - node_end, - arc_number_key, - arc_tech_efficiency, - number_arc_technologies, - peak_flow, - distance_matrix, -): - arc_tech = generic_problem_get_arc_techs( - ipp.time_intervals[ipp.assessment_keys[0]], - network_order[g] + order_boost, - network_names[g], - arc_tech_efficiency[g], - number_arc_technologies, - peak_flow[g], - node_start, - node_end, - distance_matrix[g][node_start][node_end], - ) - - # add it to the network - - if arc_number_key == None: - network.add_directed_arc( - node_key_a=node_start, node_key_b=node_end, arcs=arc_tech - ) - - else: - network.modify_network_arc( - node_key_a=node_start, - node_key_b=node_end, - arc_key_ab=arc_number_key, - data_dict={Network.KEY_ARC_TECH: arc_tech, Network.KEY_ARC_UND: False}, - ) - - # ************************************************************************** - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def create_generic_networks( - ipp: InfrastructurePlanningProblem, seed_number: int = None -): - # ************************************************************************** - # ************************************************************************** - - if seed_number == None: - seed_number = rand.randint(1, int(1e5)) - - print("Seed number: " + str(seed_number)) - - # initialise random number generators - - rand.seed(a=seed_number) - - np.random.seed(seed=seed_number) 
- - # ************************************************************************** - # ************************************************************************** - - # problem specification - - # networks - - min_number_networks = 2 - - max_number_networks = 3 - - number_networks = rand.randint(min_number_networks, max_number_networks) - - # network type: supply (nodes only), demand (nodes only), hybrid (both) - - NET_TYPE_SUPPLY = "supply" - NET_TYPE_DEMAND = "demand" - NET_TYPE_HYBRID = "hybrid" - - NET_TYPES = [NET_TYPE_SUPPLY, NET_TYPE_DEMAND, NET_TYPE_HYBRID] - - network_type = [ - NET_TYPES[rand.randint(0, len(NET_TYPES) - 1)] for g in range(number_networks) - ] - print(network_type) - # TODO: delete print above - # order of network - - min_network_order = 2 # has to be at least 2 for hybrid mode - - max_network_order = 4 - - network_order = [ - rand.randint(min_network_order, max_network_order) - for g in range(number_networks) - ] - - # import and export nodes - - # import nodes are needed with insuf. supply - - min_number_import_nodes = [ - ( - 1 - if ( - network_type[g] == NET_TYPE_DEMAND or network_type[g] == NET_TYPE_HYBRID - ) - else 0 - ) - for g in range(number_networks) - ] - - max_number_import_nodes = [ - min_number_import_nodes[g] + rand.randint(0, 1) for g in range(number_networks) - ] - - # export nodes are needed with insuf. demand - - min_number_export_nodes = [ - ( - 1 - if ( - network_type[g] == NET_TYPE_SUPPLY or network_type[g] == NET_TYPE_HYBRID - ) - else 0 - ) - for g in range(number_networks) - ] - - max_number_export_nodes = [ - min_number_export_nodes[g] + rand.randint(0, 1) for g in range(number_networks) - ] - - min_number_other_nodes = 3 - - max_number_other_nodes = 6 - - number_import_nodes = [ - rand.randint(min_number_import_nodes[g], max_number_import_nodes[g]) - for g in range(number_networks) - ] - - number_export_nodes = [ - rand.randint(min_number_export_nodes[g], max_number_export_nodes[g]) - for g in range(number_networks) - ] - - number_other_nodes = [ - rand.randint(min_number_other_nodes, max_number_other_nodes) - for g in range(number_networks) - ] - - number_nodes = [ - 2 ** network_order[g] - + number_import_nodes[g] - + number_export_nodes[g] - + number_other_nodes[g] - for g in range(number_networks) - ] - - # arc technologies - - min_number_arc_technologies = [1 for g in range(number_networks)] - - max_number_arc_technologies = [6 for g in range(number_networks)] - - number_arc_technologies = [ - rand.randint(min_number_arc_technologies[g], max_number_arc_technologies[g]) - for g in range(number_networks) - ] - - # ************************************************************************** - # ************************************************************************** - - # generate data - - network_names = ["grid_" + str(g) for g in range(number_networks)] - - # import prices (could be an empty dict) - - import_prices = { - (g, n): [rand.random() for k in ipp.time_intervals[ipp.assessment_keys[0]]] - for g in range(number_networks) - for n in range(number_import_nodes[g]) - } - - # export prices (lower than import ones; random if no imports prices exist) - - export_prices = { - (g, n): [ - min(import_prices[(g, n_imp)][k] for n_imp in range(number_import_nodes[g])) - * rand.random() - if number_import_nodes[g] != 0 - else rand.random() - for k in range(len(ipp.time_intervals[ipp.assessment_keys[0]])) - ] - for g in range(number_networks) - for n in range(number_export_nodes[g]) - } - - # static supply (negative) or demand (positive) - - 
base_flow = { - (g, n): [ - rand.random() - if network_type[g] == NET_TYPE_DEMAND - else -rand.random() - if network_type[g] == NET_TYPE_SUPPLY - else -1 + 2 * rand.random() - for k in ipp.time_intervals[ipp.assessment_keys[0]] - ] - for g in range(number_networks) - for n in range(number_other_nodes[g]) - } - - # positions - - position_nodes = [ - [(rand.random(), rand.random()) for n in range(number_nodes[g])] - for g in range(number_networks) - ] - - # distance - - def distance_function(x1, x2, y1, y2): - return np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2) - - distance_matrix = [ - [ - [ - distance_function( - position_nodes[g][n1][0], - position_nodes[g][n2][0], - position_nodes[g][n1][1], - position_nodes[g][n2][1], - ) - for n2 in range(number_nodes[g]) - ] - for n1 in range(number_nodes[g]) - ] - for g in range(number_networks) - ] - - # determine peak demand - - peak_flow = [ - sum( - max( - [ - abs(base_flow[(g, n)][k]) - for k in range(len(ipp.time_intervals[ipp.assessment_keys[0]])) - ] - ) - for n in range(number_other_nodes[g]) - ) - for g in range(number_networks) - ] - - # arc tech efficiency per arc tech and grid - - # arc_tech_efficiency = [ - # [1-rand.random()*rand.random()*rand.random() - # for k in range(ipp.number_intraperiod_time_intervals)] - # for g in range(number_networks)] - - arc_tech_efficiency = [ - { - (q, k): 1 - rand.random() * rand.random() * rand.random() - for q in ipp.assessment_keys - for k in range(ipp.number_time_intervals[q]) - } - for g in range(number_networks) - ] - - # ************************************************************************** - # ************************************************************************** - - # for each network: - # 1) create network using networkx's graph creators - # 2) add random data to the existing nodes and edges - # 3) add import, export and other nodes (including the relevant data) - # 4) add arcs from these new nodes to the other nodes in the network - - # list of Network objects - - for g in range(number_networks): - # ********************************************************************** - - order_boost = ( - 2 - if number_import_nodes[g] and number_export_nodes[g] - else 1 - if number_import_nodes[g] or number_export_nodes[g] - else 0 - ) - - # ********************************************************************** - - # 1) create network using networkx's graph creators - - if network_type[g] == NET_TYPE_DEMAND: - # consumer network (positive SB_glk) - - new_network = Network( - nx.binomial_tree(network_order[g], create_using=nx.MultiDiGraph) - ) - - elif network_type[g] == NET_TYPE_SUPPLY: - # producer network (negative SB_glk) - - new_network = Network( - nx.binomial_tree(network_order[g], create_using=nx.MultiDiGraph) - ) - - # reverse arc directions - - arc_list = [] - for arc in new_network.edges(): - arc_list.append(arc) - for arc in arc_list: - new_network.remove_edge(arc[0], arc[1]) - new_network.add_edge(arc[1], arc[0]) - - else: # hybrid - # join one supply grid with one demand grid - - G1 = nx.binomial_tree(network_order[g] - 1, create_using=nx.MultiDiGraph) - - G2 = nx.binomial_tree(network_order[g] - 1, create_using=nx.MultiDiGraph) - - nn_g1 = G1.number_of_nodes() - arc_list = [arc for arc in G2.edges()] - node_list = [node_key for node_key in G2.nodes()] - for arc in arc_list: - G2.remove_edge(arc[0], arc[1]) - G2.add_node(arc[0] + nn_g1) - G2.add_node(arc[1] + nn_g1) - G2.add_edge(arc[1] + nn_g1, arc[0] + nn_g1) - for node in node_list: - G2.remove_node(node) - - G = nx.union(G1, G2) - 
G.add_edge(nn_g1, 0) # G2 is the supply network, G1 is the demand 1 - new_network = Network(G) - - # define the nodes as not being import, nor export nor other nodes - - for n in new_network.nodes: - new_network.add_waypoint_node(node_key=n) - - # add arc data - - for edge in new_network.edges(keys=True): - # add arc - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - edge[0], - edge[1], - edge[2], - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # ********************************************************************** - - # compute the number of outgoing arcs per node - - dict_number_outgoing_arcs = { - node: len(nx.edges(new_network, node)) for node in new_network.nodes() - } - - # list the nodes ordered by descending number of outgoing arcs - - list_nodes_descending_number_outgoing_arcs = sorted( - dict_number_outgoing_arcs, key=dict_number_outgoing_arcs.get, reverse=True - ) - - # list the nodes ordered by ascending number of outgoing arcs - - list_nodes_ascending_number_outgoing_arcs = sorted( - dict_number_outgoing_arcs, key=dict_number_outgoing_arcs.get - ) - - # compute the number of incoming arcs per node - - dict_number_incoming_arcs = { - node: len([node_source for node_source in new_network.predecessors(node)]) - for node in new_network.nodes() - } - - # list of nodes ordered by descending number of incoming arcs - - list_nodes_descending_number_incoming_arcs = sorted( - dict_number_incoming_arcs, key=dict_number_incoming_arcs.get, reverse=True - ) - - # list of nodes ordered by ascending number of incoming arcs - - list_nodes_ascending_number_incoming_arcs = sorted( - dict_number_incoming_arcs, key=dict_number_incoming_arcs.get - ) - - # ********************************************************************** - - # add import nodes - - for n in range(number_import_nodes[g]): - # define key - - node_key = new_network.number_of_nodes() - - # res_pri = ResourcePrice(prices=import_prices[(g,n)], - # volumes=None) - - new_network.add_import_node( - node_key=node_key, - prices={ - (q, p, k): ResourcePrice( - prices=import_prices[(g, n)][k], volumes=None - ) - for q in range(ipp.number_assessments) - for p in range(ipp.number_reporting_periods[q]) - for k in range(ipp.number_time_intervals[q]) - }, - ) - - # add arc from import node to a node with many outgoing arcs - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - node_key, - list_nodes_descending_number_outgoing_arcs[n], - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # ********************************************************************** - - # add export nodes - - for n in range(number_export_nodes[g]): - # define key - - node_key = new_network.number_of_nodes() - - # res_pri = ResourcePrice(prices=export_prices[(g,n)], - # volumes=None) - - new_network.add_export_node( - node_key=node_key, - prices={ - (q, p, k): ResourcePrice( - prices=export_prices[(g, n)][k], volumes=None - ) - for q in range(ipp.number_assessments) - for p in range(ipp.number_reporting_periods[q]) - for k in range(ipp.number_time_intervals[q]) - }, - ) - - # add arc from node with many incoming arcs to the export node - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - list_nodes_descending_number_incoming_arcs[n], - node_key, - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) 
- - # ********************************************************************** - - # identify import and export nodes - - new_network.identify_node_types() - - # ********************************************************************** - - # demand/supply nodes: create them and add arcs to random nodes - - demand_node_counter = 0 - - supply_node_counter = 0 - - for n in range(number_other_nodes[g]): - # add demand/supply node - - node_key = new_network.number_of_nodes() - - new_network.add_source_sink_node( - node_key=node_key, - # base_flow=base_flow[(g,n)], - base_flow={ - (q, k): base_flow[(g, n)][k] - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - ) - - # differentiate node placement based on the static flow needs - - # this will tend to ensure feasibility - - if min(base_flow[(g, n)]) >= 0: - # demand node: - # from nodes with zero/few outgoing arcs to the demand node - - node_key_start = list_nodes_ascending_number_outgoing_arcs[ - demand_node_counter - ] - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - node_key_start, - node_key, - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # increment counter - - demand_node_counter = demand_node_counter + 1 - - elif max(base_flow[(g, n)]) <= 0: - # supply node: - # from the supply node to nodes with zero/few incoming arcs - - node_key_end = list_nodes_ascending_number_incoming_arcs[ - supply_node_counter - ] - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - node_key, - node_key_end, - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # increment counter - - supply_node_counter = supply_node_counter + 1 - - else: - # demand/supply node - - # add two arcs: - # arc 1) from an import node or nodes directly or indirectly - # connected to an import node (from which imports are possible) - # arc 2) to an export node or nodes directly or indirectly co- - # nnected to an export node (from which exports are possible) - - # ************************************************************** - - # arc 1 - - # randomly select a starting node - - # for each import node - - for import_node in new_network.import_nodes: - # select a node with few outgoing arcs - - node_key_start = list_nodes_ascending_number_outgoing_arcs[ - supply_node_counter - ] - - # check if there is a path between them - - if nx.has_path(new_network, import_node, node_key_start): - # call random for comparison purposes - - rand.randint(0, 1) - - # update the counter - - supply_node_counter = supply_node_counter + 1 - - # if there is, break - - break - - # if not, continue - - # TODO: while loop to try more times with each import node - - else: - # randomly select an import node - - node_key_start = new_network.import_nodes[ - rand.randint(0, len(new_network.import_nodes) - 1) - ] - - # add arc - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - node_key_start, - node_key, - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # ************************************************************** - - # arc 2 - - # randomly select an end node - - # for each export node - - for export_node in new_network.export_nodes: - # select a node with few incoming arcs - - node_key_end = list_nodes_ascending_number_incoming_arcs[ - demand_node_counter - ] - - # check if there is a path between 
them - - if nx.has_path(new_network, node_key_end, export_node): - # call random for comparison purposes - - rand.randint(0, 1) - - # update the counter - - demand_node_counter = demand_node_counter + 1 - - # if there is, break - - break - - # if not, continue - - # TODO: while loop to try more times with each export node - - else: - # randomly select an export node - - node_key_end = new_network.export_nodes[ - rand.randint(0, len(new_network.export_nodes) - 1) - ] - - # add arc - - add_arc_this_way( - new_network, - network_order, - ipp, - order_boost, - network_names, - g, - node_key, - node_key_end, - None, - arc_tech_efficiency, - number_arc_technologies[g], - peak_flow, - distance_matrix, - ) - - # ************************************************************** - - # ****************************************************************** - - # restart counters - - if demand_node_counter >= 2 ** (network_order[g]) - 1: - demand_node_counter = 0 - - if supply_node_counter >= 2 ** (network_order[g]) - 1: - supply_node_counter = 0 - - # ****************************************************************** - - # # print(new_network.nodes()) - # print('here now') - # print(new_network.edges(keys=True,data=True)) - # assert False - - # ********************************************************************** - - # test preselected arcs with finite capacity - - # from import node to new node - - if len(new_network.import_nodes) != 0: - # an import node is required for this - - node_key = "test_node_1" - - new_network.add_source_sink_node( - node_key=node_key, - # base_flow=[ - # rand.random() - # for k in range(ipp.number_intraperiod_time_intervals)] - base_flow={ - (q, k): rand.random() - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - ) - - new_network.add_preexisting_directed_arc( - node_key_a=new_network.import_nodes[0], - node_key_b=node_key, - # efficiency=[ - # 1 for k in range(ipp.number_intraperiod_time_intervals)], - efficiency={ - (q, k): 1 - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - static_loss=None, - capacity=1, - capacity_is_instantaneous=False, - ) - - # from new node to export node - - if len(new_network.export_nodes) != 0: - node_key = "test_node_2" - - new_network.add_source_sink_node( - node_key=node_key, - # base_flow=[ - # -rand.random() - # for k in range(ipp.number_intraperiod_time_intervals)], - base_flow={ - (q, k): -rand.random() - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - ) - - # add preselected arc from export node - - new_network.add_preexisting_directed_arc( - node_key_a=node_key, - node_key_b=new_network.export_nodes[0], - # efficiency=[ - # 1 for k in range(ipp.number_intraperiod_time_intervals)], - efficiency={ - (q, k): 1 - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - static_loss=None, - capacity=1, - capacity_is_instantaneous=False, - ) - - # add preselected infinite capacity arcs - - # add infinite capacity arc from import node to new node - - if len(new_network.import_nodes) != 0: - # an import node is required for this - - node_key = "test_node_3" - - new_network.add_source_sink_node( - node_key=node_key, - # base_flow=[ - # 1e3*rand.random() - # for k in range(ipp.number_intraperiod_time_intervals)] - base_flow={ - (q, k): 1e3 * rand.random() - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - ) - - new_network.add_preexisting_directed_arc( - node_key_a=new_network.import_nodes[0], - 
node_key_b="test_node_3", - # efficiency=[ - # 1 for k in range(ipp.number_intraperiod_time_intervals)], - efficiency={ - (q, k): 1 - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - static_loss=None, - capacity=math.inf, - capacity_is_instantaneous=False, - ) - - # add infinite capacity from new node to export node - - if len(new_network.export_nodes) != 0: - node_key = "test_node_4" - - new_network.add_source_sink_node( - node_key=node_key, - # base_flow=[ - # -1e3*rand.random() - # for k in range(ipp.number_intraperiod_time_intervals)] - base_flow={ - (q, k): -1e3 * rand.random() - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - ) - - # add preselected arc from export node - - new_network.add_preexisting_directed_arc( - node_key_a=node_key, - node_key_b=new_network.export_nodes[0], - # efficiency=[ - # 1 for k in range(ipp.number_intraperiod_time_intervals)], - efficiency={ - (q, k): 1 - for q in ipp.assessment_keys - for k in range(len(ipp.time_intervals[q])) - }, - static_loss=None, - capacity=math.inf, - capacity_is_instantaneous=False, - ) - - # ********************************************************************** - - # prepare network object - - new_network.identify_node_types() - - # add network to mves object - - ipp.add_network(network_key=g, network=new_network) - - # ********************************************************************** - - # feasibility checks - - # ********************************************************************** - - # ************************************************************************** - # ************************************************************************** - - return ipp - - # ************************************************************************** - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** diff --git a/tests/examples_esipp_network.py b/tests/examples_esipp_network.py deleted file mode 100644 index d5f92900935527c23e6363b3560768db8fe0dabc..0000000000000000000000000000000000000000 --- a/tests/examples_esipp_network.py +++ /dev/null @@ -1,2136 +0,0 @@ -# imports - -# standard - -import random - -from networkx import binomial_tree, MultiDiGraph - -# local - -from src.topupopt.problems.esipp.network import Arcs, Network - -from src.topupopt.problems.esipp.network import ArcsWithoutLosses - -from src.topupopt.problems.esipp.network import ArcsWithoutProportionalLosses - -from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses - -from src.topupopt.problems.esipp.resource import ResourcePrice - -# ****************************************************************************** -# ****************************************************************************** - - -def examples(): - # ************************************************************************** - # ************************************************************************** - - # test creating arc technology objects - - examples_arc_technologies() - - # test creating arc technology objects for technologies with static losses - - examples_arc_technologies_static_losses() - - # test peculiar subclasses - - example_arcs_without_losses() - - # test modifying nodes - - examples_modifying_nodes() - - # test to trigger special errors - - examples_network_disallowed_cases() - - # test key generation - - 
examples_pseudo_unique_key_generation() - - # test creating a network with a tree topology - - examples_tree_topology() - - # ************************************************************************** - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_tree_topology(): - # create a network object with a tree topology - - tree_network = binomial_tree(3, create_using=MultiDiGraph) - - network = Network(tree_network) - - for edge_key in network.edges(keys=True): - arc = ArcsWithoutLosses( - name=str(edge_key), - capacity=[5, 10], - minimum_cost=[3, 6], - specific_capacity_cost=0, - capacity_is_instantaneous=False, - ) - - network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc}) - - # assert that it does not have a tree topology - - assert not network.has_tree_topology() - - # select all the nodes - - for edge_key in network.edges(keys=True): - network.edges[edge_key][Network.KEY_ARC_TECH].options_selected[0] = True - - # assert that it has a tree topology - - assert network.has_tree_topology() - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_arc_technologies_static_losses(): - # ************************************************************************** - - number_time_intervals = 3 - number_scenarios = 2 - number_options = 4 - - efficiency_dict = { - (q, k): 0.95 - for q in range(number_scenarios) - for k in range(number_time_intervals) - } - - static_loss_dict = { - (h, q, k): 1 - for h in range(number_options) - for q in range(number_scenarios) - for k in range(number_time_intervals) - } - - for capacity_is_instantaneous in (True, False): - arc_tech = Arcs( - name="any", - efficiency=efficiency_dict, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss=static_loss_dict, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # isotropic - - arc_tech = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss=static_loss_dict, - validate=True, - ) - - assert not arc_tech.has_proportional_losses() - - assert arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology with only one option - - arc_tech = Arcs( - name="any", - efficiency=efficiency_dict, - efficiency_reverse=None, - capacity=(1,), - minimum_cost=(1,), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - (0, q, k): 1 - # for h in 
range(number_options) - for q in range(number_scenarios) - for k in range(number_time_intervals) - }, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology for one time interval - - arc_tech = Arcs( - name="any", - efficiency={ - (q, 0): 0.5 - for q in range(number_scenarios) - # for k in range(number_time_intervals) - }, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - (h, q, 0): 1 - for h in range(number_options) - for q in range(number_scenarios) - # for k in range(number_time_intervals) - }, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # ********************************************************************** - - # TypeError: The static losses should be given as a dict or None. - - error_triggered = False - try: - _ = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss=tuple( - [k for k in range(number_time_intervals)] - for o in range(number_options) - ), - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('The static losses should be specified for each arc - # option.') - - error_triggered = False - try: - _ = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - ( - h, - q, - ): 1 - for h in range(number_options) - for q in range(number_scenarios) - }, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('The static losses must be specified via a list of lists.') - - error_triggered = False - try: - _ = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss=[ - tuple(k for k in range(number_time_intervals)) - for o in range(number_options) - ], - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('The static loss values are inconsistent with the number ' - # 'of options, scenarios and intervals.') - - error_triggered = False - try: - arc_tech = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - 
capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - (h, q, k): 1 - for h in range(number_options) - for q in range(number_scenarios) - for k in range(number_time_intervals - 1) - }, - validate=True, - ) - - arc_tech.validate_sizes( - number_options=number_options, - number_scenarios=number_scenarios, - number_intervals=[ - number_time_intervals for _ in range(number_scenarios) - ], - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('The static losses were not provided as numbers.') - - error_triggered = False - try: - _ = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - (h, q, k): str(3.54) - for h in range(number_options) - for q in range(number_scenarios) - for k in range(number_time_intervals) - }, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('The static losses must be positive or zero.') - - error_triggered = False - try: - _ = Arcs( - name="any", - efficiency=None, - efficiency_reverse=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - static_loss={ - (h, q, k): -random.randint(0, 1) * random.random() - for h in range(number_options) - for q in range(number_scenarios) - for k in range(number_time_intervals) - }, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError: The static loss dict keys must be tuples - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=None, - efficiency_reverse=None, - static_loss={k: 1 for k in range(number_time_intervals)}, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError( 'The static loss dict keys must be tuples of size 3.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=None, - efficiency_reverse=None, - static_loss={(k, 3): 1 for k in range(number_time_intervals)}, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError(The staticl osses should be given as a dict or None.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=None, - efficiency_reverse=None, - static_loss=[1 for k in range(number_time_intervals)], - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError( - # 'No static loss values were provided. 
There should be one'+ - # ' value per option, scenario and time interval.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=None, - efficiency_reverse=None, - static_loss={}, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_arc_technologies(): - # ************************************************************************** - - # create arc technology using instantaneous capacities - - number_scenarios = 2 - number_options = 4 - number_time_intervals = 3 - - efficiency_dict = { - (q, k): 0.85 - for q in range(number_scenarios) - for k in range(number_time_intervals) - } - - for capacity_is_instantaneous in (True, False): - arc_tech = Arcs( - name="any", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology with only one option - - arc_tech = Arcs( - name="any", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=(1,), - minimum_cost=(1,), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology for one time interval - - arc_tech = Arcs( - name="any", - efficiency={(0, 0): 0.95}, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology for one time interval and isotropic - - arc_tech = Arcs( - name="any", - efficiency={(0, 0): 0.95}, - efficiency_reverse={(0, 0): 0.95}, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - 
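        # the (scenario, interval) dict comprehensions used throughout these
        # examples all follow the same shape; a small hypothetical helper
        # (not part of the Arcs API, and it could equally live at module
        # level) makes that shape explicit:
        def constant_time_dict(value, n_scenarios, n_intervals):
            # one entry per (scenario, interval) pair, all set to the same value
            return {
                (q, k): value
                for q in range(n_scenarios)
                for k in range(n_intervals)
            }
        # constant_time_dict(0.85, number_scenarios, number_time_intervals)
        # reproduces the efficiency_dict defined above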
assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology for one time interval and anisotropic - - arc_tech = Arcs( - name="any", - efficiency={(0, 0): 0.95}, - efficiency_reverse={(0, 0): 1}, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # create arc technology for one time interval and anisotropic - - arc_tech = Arcs( - name="any", - efficiency={(0, 0): 1}, - efficiency_reverse={(0, 0): 0.95}, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - assert not arc_tech.has_been_selected() - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=True) - - assert not arc_tech.is_isotropic(reverse_none_means_isotropic=False) - - # ********************************************************************** - - # trigger errors - - # TypeError('The name attribute is not hashable.') - - error_triggered = False - try: - _ = Arcs( - name=[1, 2, 3], - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # TypeError:The efficiency dict keys must be (scenario, interval) tuples - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency={k: 1 for k in range(number_time_intervals)}, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError( 'The efficiency dict keys must be tuples of size 2.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency={(k, 3, 4): 1 for k in range(number_time_intervals)}, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError(The efficiency should be given as a dict or None.') - - error_triggered = False - try: - _ = Arcs( - 
name="hey", - efficiency=[1 for k in range(number_time_intervals)], - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # TypeError('The reverse efficiency has to match the nominal'+ - # ' one when there are no proportional losses.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=None, - efficiency_reverse={}, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # TypeError:'The reverse efficiency should be given as a dict or None.' - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=[1 for k in range(number_time_intervals)], - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError( - # 'No efficiency values were provided. There should be '+ - # 'one value per scenario and time interval.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse={}, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError: The keys for the efficiency dicts do not match. - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse={ - (key[1], key[0]): value for key, value in efficiency_dict.items() - }, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError: Efficiency values must be provided as numeric types. 
- - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse={ - (key[0], key[1]): str(value) - for key, value in efficiency_dict.items() - }, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('Efficiency values must be positive.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse={ - (key[0], key[1]): -1 for key, value in efficiency_dict.items() - }, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('The capacity should be given as a list or tuple.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity={o: 1 + o for o in range(number_options)}, - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # TypeError: The minimum cost values should be given as a list or tuple - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost={o: 1 + o for o in range(number_options)}, - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # TypeError: The specific capacity cost was not given as a numeric type - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=[1], - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError:The number of capacity and minimum cost entries must match - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options + 1)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError: No entries for capacity and minimum cost were provided. - # At least one option should be provided. 
- - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(), - minimum_cost=tuple(), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError: No entries for efficiency were provided. There should be - # one entry per time interval. - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency={}, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError('The number of efficiency values must match the number of - # time intervals.') - - arc_tech = Arcs( - name="hey", - efficiency={ - (q, k): 0.85 - for q in range(number_scenarios) - for k in range(number_time_intervals + 1) - }, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - error_triggered = False - try: - arc_tech.validate_sizes( - number_options=number_options, - number_scenarios=number_scenarios, - number_intervals=[ - number_time_intervals for _ in range(number_scenarios) - ], - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError('The number of efficiency values must match the number of - # time intervals.') - - error_triggered = False - try: - arc_tech = Arcs( - name="hey", - efficiency={ - (q, k): 0.85 - for q in range(number_scenarios) - for k in range(number_time_intervals) - }, - efficiency_reverse={ - (q, k): 0.85 - for q in range(number_scenarios) - for k in range(number_time_intervals - 1) - }, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - arc_tech.validate_sizes( - number_options=number_options, - number_scenarios=number_scenarios, - number_intervals=[ - number_time_intervals for _ in range(number_scenarios) - ], - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError('The number of capacity values must match the number of - # options.') - - arc_tech = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options + 1)), - minimum_cost=tuple(1 + o for o in range(number_options + 1)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - error_triggered = False - try: - arc_tech.validate_sizes( - number_options=number_options, - number_scenarios=number_scenarios, - number_intervals=[ - number_time_intervals for _ in range(number_scenarios) - ], - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ValueError: The minimum cost values are inconsistent with the number - # of options. 
- - arc_tech = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options + 1)), - minimum_cost=tuple(1 + o for o in range(number_options + 1)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - - error_triggered = False - try: - arc_tech.validate_sizes( - number_options=number_options, - number_scenarios=number_scenarios, - number_intervals=[ - number_time_intervals for _ in range(number_scenarios) - ], - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('Efficiency values must be provided as numeric types.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency={key: str(value) for key, value in efficiency_dict.items()}, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('Efficiency values must be positive.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency={ - key: -value * random.randint(0, 1) - for key, value in efficiency_dict.items() - }, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('Capacity values must be provided as numeric types.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(str(1 + o) for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('Capacity values must be positive.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(-random.randint(0, 1) for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('Minimum cost values must be provided as numeric types.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(str(1 + o) for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ValueError('Minimum cost values must be positive or zero.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(-1 for o in 
range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - validate=True, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # TypeError('The information about capacities being instantaneous or not - # should be given as a boolean variable.') - - error_triggered = False - try: - _ = Arcs( - name="hey", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=1, - validate=True, - ) - except TypeError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - # ************************************************************************** - - # Network - - arc_tech_AB = Arcs( - name="AB", - efficiency=efficiency_dict, - efficiency_reverse=None, - static_loss=None, - capacity=tuple(1 + o for o in range(number_options)), - minimum_cost=tuple(1 + o for o in range(number_options)), - specific_capacity_cost=1, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_tech_AB.options_selected[0] = True - - assert arc_tech_AB.number_options() == number_options - - net = Network() - - # add undirected arc - - net.add_undirected_arc(node_key_a="A", node_key_b="B", arcs=arc_tech_AB) - - # add directed arc - - net.add_directed_arc(node_key_a="A", node_key_b="B", arcs=arc_tech_AB) - - # add infinite capacity arc - - net.add_infinite_capacity_arc( - node_key_a="C", - node_key_b="D", - efficiency={(i, j): 1 for i in range(3) for j in range(4)}, - static_loss=None, - ) - - # add pre-existing directed arc - - net.add_preexisting_directed_arc( - node_key_a="E", - node_key_b="F", - efficiency=efficiency_dict, - static_loss=None, - capacity=3, - capacity_is_instantaneous=True, - ) - - # add pre-existing undirected arc - - net.add_preexisting_undirected_arc( - node_key_a="A", - node_key_b="C", - efficiency=efficiency_dict, - efficiency_reverse=efficiency_dict, - static_loss=None, - capacity=3, - capacity_is_instantaneous=True, - ) - - net.modify_network_arc( - node_key_a="A", - node_key_b="C", - arc_key_ab="AC", - data_dict={net.KEY_ARC_TECH: arc_tech_AB, net.KEY_ARC_UND: False}, - ) - - # ************************************************************************** - - # add import node - - imp_resource_price = ResourcePrice( - prices=[random.random() for k in range(number_time_intervals)], - volumes=[*[random.random() for k in range(number_time_intervals - 1)], None], - ) - - net.add_import_node(node_key="G", prices={(0, 0, 0): imp_resource_price}) - - # add export node - - exp_resource_price = ResourcePrice( - prices=[random.random() for k in range(number_time_intervals)], - volumes=[*[random.random() for k in range(number_time_intervals - 1)], None], - ) - - net.add_export_node(node_key="H", prices={(0, 0, 0): exp_resource_price}) - - net.add_waypoint_node(node_key="Z") - - base_flow = {(i, j): random.random() for i in range(3) for j in range(4)} - - net.add_source_sink_node(node_key="Y", base_flow=base_flow) - - base_flow[(2, 3)] = random.random() - - net.modify_network_node(node_key="Y", node_data={net.KEY_NODE_BASE_FLOW: base_flow}) - - net.identify_node_types() - - assert "Z" in net.waypoint_nodes - - assert "G" in net.import_nodes - - assert "H" in net.export_nodes - - assert "Y" in net.source_sink_nodes - - # 
************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_arcs_without_losses(): - # test arc without (static and proportional) losses - - arc_tech = ArcsWithoutLosses( - name="AB", - capacity=(1, 2, 3), - minimum_cost=(4, 5, 6), - specific_capacity_cost=6, - capacity_is_instantaneous=False, - validate=True, - ) - - assert not arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - # test arc without static losses - - arc_tech = ArcsWithoutStaticLosses( - name="AB", - efficiency={(0, 0): 1, (0, 1): 0.9, (0, 2): 0.8}, - efficiency_reverse=None, - capacity=(1, 2, 3), - minimum_cost=(4, 5, 6), - specific_capacity_cost=6, - capacity_is_instantaneous=False, - validate=True, - ) - - assert arc_tech.has_proportional_losses() - - assert not arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - # test arc without proportional losses - - arc_tech = ArcsWithoutProportionalLosses( - name="AB", - static_loss={ - (0, 0, 0): 0.1, - (0, 0, 1): 0.2, - (0, 0, 2): 0.3, - (1, 0, 0): 0.15, - (1, 0, 1): 0.25, - (1, 0, 2): 0.35, - (2, 0, 0): 0.16, - (2, 0, 1): 0.26, - (2, 0, 2): 0.36, - }, - capacity=(1, 2, 3), - minimum_cost=(4, 5, 6), - specific_capacity_cost=6, - capacity_is_instantaneous=False, - validate=True, - ) - - assert not arc_tech.has_proportional_losses() - - assert arc_tech.has_static_losses() - - assert not arc_tech.is_infinite_capacity() - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_modifying_nodes(): - # ************************************************************************** - - net = Network() - - number_intervals = 3 - - resource_price = ResourcePrice( - prices=[random.random() for k in range(number_intervals)], - volumes=[*[random.random() for k in range(number_intervals - 1)], None], - ) - - base_flow = {(0, k): random.random() for k in range(number_intervals)} - - arc_tech = ArcsWithoutLosses( - name="hello", - capacity=[5], - minimum_cost=[3], - specific_capacity_cost=3, - capacity_is_instantaneous=False, - ) - - # add isolated import node - - net.add_import_node(node_key="I_iso", prices={(0, 0, 0): resource_price}) - - # add import node with outgoing arcs - - net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) - - # add isolated export node - - net.add_import_node(node_key="E_iso", prices={(0, 0, 0): resource_price}) - - # add export node with incoming arcs - - net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) - - # add isolated normal node - - net.add_source_sink_node(node_key="A_iso", base_flow=base_flow) - - # add normal node with incoming arcs - - net.add_source_sink_node(node_key="A_in", base_flow=base_flow) - - # add normal node with outgoing arcs - - net.add_source_sink_node(node_key="A_out", base_flow=base_flow) - - # add normal node with incoming and outgoing arcs - - net.add_source_sink_node(node_key="A", base_flow=base_flow) - - # ************************************************************************** - - # arcs - - net.add_directed_arc(node_key_a="I", node_key_b="A_in", arcs=arc_tech) - - net.add_directed_arc(node_key_a="I", node_key_b="A", arcs=arc_tech) - - net.add_directed_arc(node_key_a="A_out", 
node_key_b="E", arcs=arc_tech) - - net.add_directed_arc(node_key_a="A", node_key_b="E", arcs=arc_tech) - - # ************************************************************************** - - # change I_iso to regular: okay - - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change I_iso to export: okay - - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change I_iso to waypoint: okay - - net.modify_network_node( - node_key="I_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="I_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # ************************************************************************** - - # change E_iso to regular: okay - - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change E_iso to import: okay - - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change E_iso to waypoint: okay - - net.modify_network_node( - node_key="E_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="E_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # ************************************************************************** - - # change A_iso to export: okay - - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # change A_iso to import: okay - - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # change A_iso to waypoint: okay - - net.modify_network_node( - node_key="A_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_iso", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # 
************************************************************************** - - # change I to regular: okay - - net.modify_network_node( - node_key="I", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="I", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change I to waypoint: okay - - net.modify_network_node( - node_key="I", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="I", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # ************************************************************************** - - # change E to regular: okay - - net.modify_network_node( - node_key="E", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="E", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # change E to waypoint: okay - - net.modify_network_node( - node_key="E", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="E", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # ************************************************************************** - - # change A_in to export: okay - - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # change A_in to waypoint: okay - - net.modify_network_node( - node_key="A_in", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # ************************************************************************** - - # change A_out to import: okay - - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # change A_out to waypoint: okay - - net.modify_network_node( - node_key="A_out", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - - # reverse: okay - - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK, - net.KEY_NODE_BASE_FLOW: base_flow, - }, - ) - - # ************************************************************************** - - # change I to export: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="I", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # change E to import: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="E", - node_data={ - net.KEY_NODE_TYPE: 
net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # change A_out to export: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="A_out", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # change A_in to import: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="A_in", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # change A to export: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="A", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # change A to import: fail - - error_triggered = False - try: - net.modify_network_node( - node_key="A", - node_data={ - net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, - net.KEY_NODE_PRICES: resource_price, - }, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - - # try to modify a non-existent node - - error_triggered = False - try: - net.modify_network_node( - node_key="ABCD", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_network_disallowed_cases(): - # ************************************************************************** - - net = Network() - - number_intervals = 3 - - resource_price = ResourcePrice( - prices=[random.random() for k in range(number_intervals)], - volumes=[*[random.random() for k in range(number_intervals - 1)], None], - ) - - base_flow = {(0, k): random.random() for k in range(number_intervals)} - - lossless_arcs = ArcsWithoutLosses( - name="hello", - capacity=[5], - minimum_cost=[3], - specific_capacity_cost=3, - capacity_is_instantaneous=False, - ) - - lossy_arcs = ArcsWithoutProportionalLosses( - name="hello back", - static_loss={(0, 0, k): random.random() for k in range(number_intervals)}, - capacity=(1,), - minimum_cost=(5,), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - ) - - # add import node I - - net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) - - # add export node E - - net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) - - # add regular node A - - net.add_source_sink_node(node_key="A", base_flow=base_flow) - - # add regular node B - - net.add_source_sink_node(node_key="B", base_flow=base_flow) - - # add a valid import-export arc - - net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossless_arcs) - - # identify the nodes and validate - - net.identify_node_types() - - # ************************************************************************** - # ************************************************************************** - - # trigger errors using pre-identified nodes - - # directed arcs cannot start in an export node: E -> B - - error_triggered = 
False - try: - net.add_directed_arc(node_key_a="E", node_key_b="B", arcs=lossless_arcs) - except ValueError: - error_triggered = True - assert error_triggered - - # directed arcs cannot end on an import node: A -> I - - error_triggered = False - try: - net.add_directed_arc(node_key_a="A", node_key_b="I", arcs=lossless_arcs) - except ValueError: - error_triggered = True - assert error_triggered - - # import-export nodes cannot have static losses - - error_triggered = False - try: - net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs) - except ValueError: - error_triggered = True - assert error_triggered - - # undirected arcs cannot involve import nor export nodes - - error_triggered = False - try: - net.add_undirected_arc(node_key_a="I", node_key_b="A", arcs=lossless_arcs) - except ValueError: - error_triggered = True - assert error_triggered - - # undirected arcs cannot involve import nor export nodes - - error_triggered = False - try: - net.add_undirected_arc(node_key_a="B", node_key_b="E", arcs=lossless_arcs) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - # ************************************************************************** - - # trigger errors using non-identified nodes - - # ************************************************************************** - - # create a new export node - - net.add_export_node(node_key="E1", prices={(0, 0, 0): resource_price}) - - # create an arc starting in that export node - - error_triggered = False - try: - net.add_directed_arc(node_key_a="E1", node_key_b="B", arcs=lossless_arcs) - net.identify_node_types() - except ValueError: - error_triggered = True - assert error_triggered - - # remove the troublesome arc - - net.remove_edge(u="E1", v="B") - - # ************************************************************************** - - # create a new import node - - net.add_import_node(node_key="I1", prices={(0, 0, 0): resource_price}) - - # create an arc ending in that import node - - error_triggered = False - try: - net.add_directed_arc(node_key_a="A", node_key_b="I1", arcs=lossless_arcs) - net.identify_node_types() - except ValueError: - error_triggered = True - assert error_triggered - - # remove the troublesome arc - - net.remove_edge(u="A", v="I1") - - # ************************************************************************** - - # check non-existent arc - - net.arc_is_undirected(("X", "Y", 1)) - - -# ****************************************************************************** -# ****************************************************************************** - - -def examples_pseudo_unique_key_generation(): - # create network - - network = Network() - - # add node A - - network.add_waypoint_node(node_key="A") - - # add node B - - network.add_waypoint_node(node_key="B") - - # identify nodes - - network.identify_node_types() - - # add arcs - - key_list = [ - "3e225573-4e78-48c8-bb08-efbeeb795c22", - "f6d30428-15d1-41e9-a952-0742eaaa5a31", - "8c29b906-2518-41c5-ada8-07b83508b5b8", - "f9a72a39-1422-4a02-af97-906ce79c32a3", - "b6941a48-10cc-465d-bf53-178bd2939bd1", - ] - - for key in key_list: - network.add_edge( - u_for_edge="A", - v_for_edge="B", - key=key, - **{network.KEY_ARC_UND: False, network.KEY_ARC_TECH: None} - ) - - # use a seed number to trigger more iterations - - import uuid - - rand = random.Random() - rand.seed(360) - uuid.uuid4 = lambda: uuid.UUID(int=rand.getrandbits(128), version=4) - - error_triggered = False - 
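    # the lambda above replaces uuid.uuid4 module-wide with a deterministic,
    # seeded generator, which appears to be what forces
    # get_pseudo_unique_arc_key below to keep colliding with the edge keys
    # added earlier and to exhaust max_iterations; a possible hygiene step
    # (not in the original example) would be to capture the real generator
    # before the override and assign it back once the check has run, so the
    # patch does not leak into other tests, e.g.
    #
    #   _original_uuid4 = uuid.uuid4   # hypothetical name, saved before patching
    #   ... run the check ...
    #   uuid.uuid4 = _original_uuid4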
try: - _ = network.get_pseudo_unique_arc_key( - node_key_start="A", node_key_end="B", max_iterations=len(key_list) - 1 - ) - except Exception: - error_triggered = True - assert error_triggered - - # ************************************************************************** - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** diff --git a/tests/examples_esipp_problem.py b/tests/examples_esipp_problem.py deleted file mode 100644 index 1fd6782b1dc71597629278843596be61228b853c..0000000000000000000000000000000000000000 --- a/tests/examples_esipp_problem.py +++ /dev/null @@ -1,8791 +0,0 @@ -# imports - -# standard - -import math - -from statistics import mean - -import random - -# local - -# import numpy as np - -# import networkx as nx - -import pyomo.environ as pyo - -# import src.topupopt.problems.esipp.utils as utils - -from src.topupopt.data.misc.utils import generate_pseudo_unique_key - -from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem - -from src.topupopt.problems.esipp.network import Arcs, Network - -from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses - -from src.topupopt.problems.esipp.resource import ResourcePrice - -from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics - -# ***************************************************************************** -# ***************************************************************************** - - -def examples(solver: str, solver_options: dict = None, init_aux_sets: bool = False): - # ************************************************************************** - - # solver details - - # termination criteria - - solver_timelimit = 60 - - solver_abs_mip_gap = 0.001 - - solver_rel_mip_gap = 0.01 - - if type(solver_options) == dict: - solver_options.update( - { - "time_limit": solver_timelimit, - "relative_mip_gap": solver_rel_mip_gap, - "absolute_mip_gap": solver_abs_mip_gap, - } - ) - - else: - solver_options = { - "time_limit": solver_timelimit, - "relative_mip_gap": solver_rel_mip_gap, - "absolute_mip_gap": solver_abs_mip_gap, - } - - # ************************************************************************** - - # problem with two symmetrical nodes and one undirected arc - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # problem with symmetrical nodes and one undirected arc, irregular steps - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=True, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # same problem as the previous one, except with interface variables - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=True, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, 
- use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # problem with two symmetrical nodes and one undirected arc, w/ simple sos1 - - sos_weight_key = None - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # use sos1 for flow sense determination, nominal weights - - sos_weight_key = InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=True, - 
sense_sos_weight_key=sos_weight_key, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # use sos1 for flow sense determination, reverse weights - - sos_weight_key = InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_REVERSE_HIGHER - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=True, - sense_sos_weight_key=sos_weight_key, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # use sos1 for flow sense determination, use real variables - - sos_weight_key = InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=True, - sense_sos_weight_key=sos_weight_key, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # use sos1 for flow sense determination, use real variables and inter. var. - - sos_weight_key = InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER - - example_isolated_undirected_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=True, - sense_sos_weight_key=sos_weight_key, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # sos1 for flow sense determination involving directed arcs as well - - sos_weight_key = InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER - - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=True, - sense_sos_weight_key=sos_weight_key, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - undirected_arc_imports=False, - undirected_arc_exports=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # preexisting, reference - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=False, - use_specific_method=False, - init_aux_sets=init_aux_sets, - ) - - # capacity is instantaneous - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - 
use_arc_interfaces=False, - capacity_is_instantaneous=True, - use_specific_method=False, - init_aux_sets=init_aux_sets, - ) - - # use dedicated method for preexisting arcs - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=False, - use_specific_method=True, - init_aux_sets=init_aux_sets, - ) - - # capacity is instantaneous, using dedicated method - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=True, - use_specific_method=True, - init_aux_sets=init_aux_sets, - ) - - # use different technologies for the undirected arc - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=False, - use_specific_method=False, - init_aux_sets=init_aux_sets, - ) - - # use different technologies for the undirected arc, capacity is instant. - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=True, - use_specific_method=False, - init_aux_sets=init_aux_sets, - ) - - # use different technologies for the undirected arc, using specific method - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=False, - use_specific_method=True, - init_aux_sets=init_aux_sets, - ) - - # same as before but assuming the capacity is instantaneous - - example_isolated_preexisting_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - capacity_is_instantaneous=True, - use_specific_method=True, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # problem with two symmetrical nodes, one undirected arc, imports and exp. 
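# --- illustrative sketch, not part of the original file ---------------------
# The block above calls example_isolated_preexisting_undirected_network once
# per combination of three boolean flags. A compact way to express that sweep
# is sketched below; it only illustrates the pattern and assumes the example
# function and the solver, solver_options and init_aux_sets names from the
# enclosing scope.
import itertools

_swept_flags = (
    "different_technologies",
    "capacity_is_instantaneous",
    "use_specific_method",
)
for _combo in itertools.product((False, True), repeat=len(_swept_flags)):
    example_isolated_preexisting_undirected_network(
        solver=solver,
        solver_options=solver_options,
        irregular_time_intervals=False,
        use_sos_arcs=False,
        sos_weight_key=None,
        use_real_variables_if_possible=False,
        use_sos_sense=False,
        sense_sos_weight_key=None,
        sense_use_real_variables_if_possible=False,
        use_arc_interfaces=False,
        init_aux_sets=init_aux_sets,
        **dict(zip(_swept_flags, _combo)),
    )
# -----------------------------------------------------------------------------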
- - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - undirected_arc_imports=False, - undirected_arc_exports=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # same problem as before buth with interface variables - - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=True, - undirected_arc_imports=False, - undirected_arc_exports=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # same problem as before buth with different technologies for the und. arc - - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - undirected_arc_imports=False, - undirected_arc_exports=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # preexisting directed arcs, undirected with same tech. in both directions - - example_nonisolated_network_preexisting_directed_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - init_aux_sets=init_aux_sets, - ) - - # preexisting directed arcs - - example_nonisolated_network_preexisting_directed_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - init_aux_sets=init_aux_sets, - ) - - # same as before but with sos for arc selection and interfaces - - sos_weight_key = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP - - example_nonisolated_network_preexisting_directed_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=True, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # test using preexisting infinite capacity arcs - - example_preexisting_infinite_capacity_directed_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=False, - irregular_time_intervals=False, - use_sos_arcs=True, - 
sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - init_aux_sets=init_aux_sets, - ) - - example_preexisting_infinite_capacity_directed_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # test using mandatory arcs with directed arcs - - example_network_mandatory_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - use_undirected_arcs=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # test using mandatory arcs with directed arcs and sos1 for arc selection - - example_network_mandatory_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - use_undirected_arcs=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # test using mandatory arcs with undirected arcs - - example_network_mandatory_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - use_undirected_arcs=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # test using mandatory arcs with undirected arcs and sos1 for arc selection - - example_network_mandatory_arcs( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=sos_weight_key, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - use_undirected_arcs=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # test using static losses with directed arcs - - # using only arc technologies with fixed losses (upstream, if possible) - - example_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_arc_techs_with_fixed_losses=True, - use_arc_techs_without_fixed_losses=False, - 
static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # using only arc technologies without fixed losses - - example_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_arc_techs_with_fixed_losses=False, - use_arc_techs_without_fixed_losses=True, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # using arc technologies with and without fixed losses simultaneously - - example_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_arc_techs_with_fixed_losses=True, - use_arc_techs_without_fixed_losses=True, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # using only arc technologies without fixed losses (yet downstream?) - - example_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_arc_techs_with_fixed_losses=False, - use_arc_techs_without_fixed_losses=True, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # using only arc technologies with fixed losses (downstream, if possible) - - example_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_arc_techs_with_fixed_losses=True, - use_arc_techs_without_fixed_losses=False, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # example from the report: new directed arc with losses in the source - - example_report_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # example from the report: new directed arc with losses in the source - - example_report_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # example from the report: pre-existing directed arc with losses in the end - - example_report_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - 
static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # example from the report: pre-existing directed arc with losses in the source - - example_report_directed_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # pre-existing directed arcs with losses downstream - - example_directed_arc_static_downstream_pre( - solver=solver, solver_options=solver_options, init_aux_sets=init_aux_sets - ) - - # new directed arcs with losses downstream - - example_directed_arc_static_downstream_new( - solver=solver, solver_options=solver_options, init_aux_sets=init_aux_sets - ) - - # new directed arcs with losses upstream - - example_directed_arc_static_upstream( - solver=solver, - solver_options=solver_options, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # pre-existing directed arcs with losses upstream - - example_directed_arc_static_upstream( - solver=solver, - solver_options=solver_options, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # static losses on undirected arcs (example from the report) - - for mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: - # pre-existing arcs, original arc direction - - example_report_undirected_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=False, - invert_original_direction=False, - init_aux_sets=init_aux_sets, - ) - - # new arcs, original arc direction - - example_report_undirected_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - invert_original_direction=False, - init_aux_sets=init_aux_sets, - ) - - # pre-existing arcs, inverted arc direction - - example_report_undirected_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=False, - invert_original_direction=True, - init_aux_sets=init_aux_sets, - ) - - # new arcs, inverted arc direction - - example_report_undirected_network_static_losses( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - invert_original_direction=True, - init_aux_sets=init_aux_sets, - ) - - # capacity reduction - - # pre-existing arcs, original arc direction - - example_undirected_arc_static_upstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # new arcs, original arc direction - - example_undirected_arc_static_upstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # pre-existing arcs, inverted arc direction - - example_undirected_arc_static_upstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # new arcs, inverted arc direction - - example_undirected_arc_static_upstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # minimum flow - - # pre-existing arcs, original arc direction - - example_undirected_arc_static_downstream( - solver=solver, - solver_options=solver_options, - 
static_losses_mode=mode, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # new arcs, original arc direction - - example_undirected_arc_static_downstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # pre-existing arcs, inverted arc direction - - example_undirected_arc_static_downstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=False, - init_aux_sets=init_aux_sets, - ) - - # new arcs, inverted arc direction - - example_undirected_arc_static_downstream( - solver=solver, - solver_options=solver_options, - static_losses_mode=mode, - use_new_arcs=True, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # try network with a direct import-export arc, with higher import prices - - example_direct_imp_exp_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_static_losses=False, - use_higher_export_prices=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # try network with a direct import-export arc, with higher export prices - - example_direct_imp_exp_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_static_losses=False, - use_higher_export_prices=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # try network with a direct import-export arc (with static losses) - - error_triggered = False - try: - example_direct_imp_exp_network( - solver=solver, - solver_options=solver_options, - irregular_time_intervals=False, - use_sos_arcs=True, - sos_weight_key=None, - use_real_variables_if_possible=True, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=True, - use_arc_interfaces=False, - make_all_arcs_mandatory=False, - use_static_losses=True, - use_higher_export_prices=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - - # test using undirected arcs involving import and export nodes - - # import nodes - - error_triggered = False - try: - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - undirected_arc_imports=True, - undirected_arc_exports=False, - print_model=False, - init_aux_sets=init_aux_sets, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # export nodes - - error_triggered = False - try: - example_nonisolated_undirected_network( - solver=solver, - solver_options=solver_options, - different_technologies=True, - 
irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - undirected_arc_imports=False, - undirected_arc_exports=True, - print_model=False, - init_aux_sets=init_aux_sets, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - # ************************************************************************** - - # test using groups of arcs - - # TODO: perform additional tests involving groups of arcs - - for mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: - example_arc_groups_individual_undirected( - solver=solver, - solver_options=solver_options, - use_arc_groups=False, - static_losses_mode=mode, - init_aux_sets=init_aux_sets, - ) - - example_arc_groups_individual_undirected( - solver=solver, - solver_options=solver_options, - use_arc_groups=True, - static_losses_mode=mode, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - # ************************************************************************** - - # test using a maximum number of parallel arcs - - # TODO: test using the restriction in different directions - - # there are 3 possible outcomes: - # 1) the number of preexisting and mandatory arcs is above the limit - # >> the problem is infeasible - # 2) maximum number of arcs lower than or equal to the limit - # >> the constraint is skipped - # 3) maximum number of arcs above the limit, the number of preexisting and - # mandatory arcs is below the limit >> the constraint is used - - # ************************************************************************** - - # TODO: test using the constraint - - # how to test case 3: - # a) use only preexisting directed arcs - # b) use only preexisting undirected arcs - # c) use preexisting directed and undirected arcs - # d) use only mandatory directed arcs - # e) use only mandatory undirected arcs - # f) use mandatory directed and undirected arcs - # g) use preexisting directed arcs and mandatory directed arcs - # h) use preexisting undirected arcs and mandatory undirected arcs - # i) use preexisting undirected arcs and mandatory directed arcs - # j) use preexisting directed arcs and mandatory undirected arcs - # k) use preexi. directed and undirected arcs and mandatory directed arcs - # l) use preexi. directed and undirected arcs and mandatory undirected arcs - # m) use preexi. directed arcs and mandatory directed and undirected arcs - # n) use preexi. undirected arcs and mandatory directed and undirected arcs - # o) use preselelected and mandatory directed and undirected arcs - - # ************************************************************************** - - # case 2: skip constraint - - # how to test case 2: - # a) use only preexisting directed arcs - # b) use only preexisting undirected arcs - # c) use preexisting directed and undirected arcs - # d) use only mandatory directed arcs - # e) use only mandatory undirected arcs - # f) use mandatory directed and undirected arcs - # g) use preexisting directed arcs and mandatory directed arcs - # h) use preexisting undirected arcs and mandatory undirected arcs - # i) use preexisting undirected arcs and mandatory directed arcs - # j) use preexisting directed arcs and mandatory undirected arcs - # k) use preexi. 
directed and undirected arcs and mandatory directed arcs - # l) use preexi. directed and undirected arcs and mandatory undirected arcs - # m) use preexi. directed arcs and mandatory directed and undirected arcs - # n) use preexi. undirected arcs and mandatory directed and undirected arcs - # o) use preselelected and mandatory directed and undirected arcs - # p) TODO: use pre-existing undirected arcs in both directions - # q) TODO: use mandatory undirected arcs in both directions - # r) TODO: use pre-existing and mandatory arcs in both directions - - # TODO: use groups of arcs - - skip_test_cases = ( - "2_a", - "2_b", - "2_c", - "2_d", - "2_e", - "2_f", - "2_g", - "2_h", - "2_i", - "2_j", - "2_k", - "2_l", - "2_m", - "2_n", - "2_o", - ) - - for test_case in skip_test_cases: - example_problem_max_arc_limits_skip( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - case=test_case, - print_model=False, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # case 1: infeasible (too many mandatory or pre-existing arcs) - - # how to test case 1: - # a) use only preexisting directed arcs - # b) use only preexisting undirected arcs - # c) use preexisting directed and undirected arcs - # d) use only mandatory directed arcs - # e) use only mandatory undirected arcs - # f) use mandatory directed and undirected arcs - # g) use preexisting directed arcs and mandatory directed arcs - # h) use preexisting undirected arcs and mandatory undirected arcs - # i) use preexisting undirected arcs and mandatory directed arcs - # j) use preexisting directed arcs and mandatory undirected arcs - # k) use preexi. directed and undirected arcs and mandatory directed arcs - # l) use preexi. directed and undirected arcs and mandatory undirected arcs - # m) use preexi. directed arcs and mandatory directed and undirected arcs - # n) use preexi. 
undirected arcs and mandatory directed and undirected arcs - # o) use preselelected and mandatory directed and undirected arcs - # p) use pre-existing undirected arcs in both directions - # q) use mandatory undirected arcs in both directions - # r) use mandatory and pre-existing undirected arcs in both directions - # s) TODO: use groups of arcs that include mandatory arcs - # t) TODO: use mandatory groups of arcs - - infeasible_test_cases = ( - "1_a", - "1_b", - "1_c", - "1_d", - "1_e", - "1_f", - "1_g", - "1_h", - "1_i", - "1_j", - "1_k", - "1_l", - "1_m", - "1_n", - "1_o", - "1_p", - "1_q", - "1_r", - "1_s", - "1_t", - ) - - for test_case in infeasible_test_cases: - error_triggered = False - try: - example_problem_max_arc_limits_infeasible( - solver=solver, - solver_options=solver_options, - different_technologies=True, - irregular_time_intervals=False, - use_sos_arcs=False, - sos_weight_key=None, - use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - use_arc_interfaces=False, - case=test_case, - print_model=False, - init_aux_sets=init_aux_sets, - ) - except ValueError: - error_triggered = True - assert error_triggered - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def build_solve_ipp( - solver: str = "glpk", - solver_options: dict = None, - use_sos_arcs: bool = False, - arc_sos_weight_key: str = (InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE), - arc_use_real_variables_if_possible: bool = False, - use_sos_sense: bool = False, - sense_sos_weight_key: int = ( - InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER - ), - sense_use_real_variables_if_possible: bool = False, - sense_use_arc_interfaces: bool = False, - perform_analysis: bool = False, - plot_results: bool = False, - print_solver_output: bool = False, - irregular_time_intervals: bool = False, - networks: dict = None, - number_intraperiod_time_intervals: int = 4, - static_losses_mode=None, - mandatory_arcs: list = None, - max_number_parallel_arcs: dict = None, - arc_groups_dict: dict = None, - init_aux_sets: bool = False, - discount_rates: dict = None, - reporting_periods: dict = None, - time_intervals: dict = None, - assessment_weights: dict = None, -): - reporting_period_duration = 365 * 24 * 3600 - - if type(discount_rates) != dict: - discount_rates = {0: tuple([0.035, 0.035])} - - if type(assessment_weights) != dict: - assessment_weights = {} # default - - if type(reporting_periods) != dict: - reporting_periods = {0: (0, 1)} - - # time intervals - - if type(time_intervals) != dict: - if irregular_time_intervals: - time_step_max_relative_variation = 0.25 - - intraperiod_time_interval_duration = [ - (reporting_period_duration / number_intraperiod_time_intervals) - * ( - 1 - + (k / (number_intraperiod_time_intervals - 1) - 0.5) - * time_step_max_relative_variation - ) - for k in range(number_intraperiod_time_intervals) - ] - - else: - intraperiod_time_interval_duration = [ - reporting_period_duration / number_intraperiod_time_intervals - for k in range(number_intraperiod_time_intervals) - ] - - # average time interval duration - - average_time_interval_duration = round(mean(intraperiod_time_interval_duration)) - - time_intervals = {0: tuple(dt for dt in intraperiod_time_interval_duration)} - - # time weights - - # relative 
weight of time period - - # one interval twice as long as the average is worth twice - # one interval half as long as the average is worth half - - # time_weights = [ - # [time_period_duration/average_time_interval_duration - # for time_period_duration in intraperiod_time_interval_duration] - # for p in range(number_periods)] - - time_weights = None # nothing yet - - normalised_time_interval_duration = None # nothing yet - - # create problem object - - ipp = InfrastructurePlanningProblem( - name="problem", - discount_rates=discount_rates, - reporting_periods=reporting_periods, - time_intervals=time_intervals, - time_weights=time_weights, - normalised_time_interval_duration=normalised_time_interval_duration, - assessment_weights=assessment_weights, - ) - - # add networks and systems - - for netkey, net in networks.items(): - ipp.add_network(network_key=netkey, network=net) - - # define arcs as mandatory - - if type(mandatory_arcs) == list: - for full_arc_key in mandatory_arcs: - ipp.make_arc_mandatory(full_arc_key[0], full_arc_key[1:]) - - # if make_all_arcs_mandatory: - - # for network_key in ipp.networks: - - # for arc_key in ipp.networks[network_key].edges(keys=True): - - # # preexisting arcs are no good - - # if ipp.networks[network_key].edges[arc_key][ - # Network.KEY_ARC_TECH].has_been_selected(): - - # continue - - # ipp.make_arc_mandatory(network_key, arc_key) - - # set up the use of sos for arc selection - - if use_sos_arcs: - for network_key in ipp.networks: - for arc_key in ipp.networks[network_key].edges(keys=True): - if ( - ipp.networks[network_key] - .edges[arc_key][Network.KEY_ARC_TECH] - .has_been_selected() - ): - continue - - ipp.use_sos1_for_arc_selection( - network_key, - arc_key, - use_real_variables_if_possible=(arc_use_real_variables_if_possible), - sos1_weight_method=arc_sos_weight_key, - ) - - # set up the use of sos for flow sense determination - - if use_sos_sense: - for network_key in ipp.networks: - for arc_key in ipp.networks[network_key].edges(keys=True): - if not ipp.networks[network_key].edges[arc_key][Network.KEY_ARC_UND]: - continue - - ipp.use_sos1_for_flow_senses( - network_key, - arc_key, - use_real_variables_if_possible=( - sense_use_real_variables_if_possible - ), - use_interface_variables=sense_use_arc_interfaces, - sos1_weight_method=sense_sos_weight_key, - ) - - elif sense_use_arc_interfaces: # set up the use of arc interfaces w/o sos1 - for network_key in ipp.networks: - for arc_key in ipp.networks[network_key].edges(keys=True): - if ( - ipp.networks[network_key] - .edges[arc_key][Network.KEY_ARC_TECH] - .has_been_selected() - ): - continue - - ipp.use_interface_variables_for_arc_selection(network_key, arc_key) - - # static losses - - if static_losses_mode == ipp.STATIC_LOSS_MODE_ARR: - ipp.place_static_losses_arrival_node() - - elif static_losses_mode == ipp.STATIC_LOSS_MODE_DEP: - ipp.place_static_losses_departure_node() - - elif static_losses_mode == ipp.STATIC_LOSS_MODE_US: - ipp.place_static_losses_upstream() - - elif static_losses_mode == ipp.STATIC_LOSS_MODE_DS: - ipp.place_static_losses_downstream() - - else: - raise ValueError("Unknown static loss modelling mode.") - - # ************************************************************************** - - # groups - - if type(arc_groups_dict) != type(None): - for key in arc_groups_dict: - ipp.create_arc_group(arc_groups_dict[key]) - - # ************************************************************************** - - # maximum number of parallel arcs - - for key in max_number_parallel_arcs: - 
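        # descriptive note (added): judging by the call below, each key is
        # expected to be a (network_key, node_a, node_b) tuple and each value
        # the maximum number of parallel arcs allowed between those two nodes,
        # e.g. max_number_parallel_arcs = {("mynet", node_IMP, node_A): 2}
        # would cap that corridor at two parallel arcs; an empty dict, as used
        # in the examples below, skips the restriction altogether.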
ipp.set_maximum_number_parallel_arcs( - network_key=key[0], - node_a=key[1], - node_b=key[2], - limit=max_number_parallel_arcs[key], - ) - - # ************************************************************************** - - # instantiate (disable the default case v-a-v fixed losses) - - # ipp.instantiate(place_fixed_losses_upstream_if_possible=False) - - ipp.instantiate(initialise_ancillary_sets=init_aux_sets) - - # optimise - - ipp.optimise( - solver_name=solver, - solver_options=solver_options, - output_options={}, - print_solver_output=print_solver_output, - ) - - # return the problem object - - return ipp - - # ************************************************************************** - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_single_network_single_arc_problem( - solver, - solver_options, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - print_model, - init_aux_sets, -): - # scenario - - q = 0 - - # number_periods = 2 - - # # number_intraperiod_time_intervals = 4 - - # discount_rates = tuple([0.035 for p in range(number_periods)]) - - # period_duration = [365*24*3600 for p in range(number_periods)] - - # if irregular_time_intervals: - - # time_step_max_relative_variation = 0.25 - - # intraperiod_time_interval_duration = [ - # (planning_horizon/number_intraperiod_time_intervals)* - # (1+(k/(number_intraperiod_time_intervals-1)-0.5)* - # time_step_max_relative_variation) - # for k in range(number_intraperiod_time_intervals)] - - # else: - - # intraperiod_time_interval_duration = [ - # planning_horizon/number_intraperiod_time_intervals - # for k in range(number_intraperiod_time_intervals)] - - # # create problem object - - # ipp = InfrastructurePlanningProblem( - # name='problem', - # discount_rates=discount_rates, - # intraperiod_time_interval_duration=intraperiod_time_interval_duration, - # period_duration=planning_horizon) - - # time - - number_intervals = 3 - - # 2 nodes: one import, one regular - - mynet = Network() - - # import node - - # res_price = ResourcePrice( - # prices=[1.0 for i in range(number_intervals)], - # volumes=None - # ) - - number_periods = 2 - - node_IMP = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=node_IMP, - prices={ - (q, p, k): ResourcePrice(prices=1.0, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[0.5, 0.0, 1.0], - base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, - ) - - # arc IA - - arc_tech_IA = Arcs( - name="any", - # efficiency=[0.5, 0.5, 0.5], - efficiency={(q, 0): 0.5, (q, 1): 0.5, (q, 2): 0.5}, - efficiency_reverse=None, - static_loss=None, - capacity=[3], - minimum_cost=[2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - validate=False, - ) - - mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - 
arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=True, # just to reach a line, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - # the arc should be installed since it is the only feasible solution - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the flows should be 1.0, 0.0 and 2.0 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), - 1.0, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 1)]), - 0.0, - abs_tol=1e-6, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 2)]), - 2.0, - abs_tol=1e-6, - ) - - # arc amplitude should be two - - assert math.isclose( - pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), - 2.0, - abs_tol=0.01, - ) - - # capex should be four - - assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) - - # sdncf should be -5.7 - - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -5.7, abs_tol=1e-3) - - # the objective function should be -9.7 - - assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_isolated_undirected_network( - solver, - solver_options, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - print_model, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5], - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5], - base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, - ) - - # add arcs - - # undirected arc - - arc_tech_AB = ArcsWithoutStaticLosses( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no 
sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=True, # just to reach a line, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # # - - # if print_model: - - # ipp.instance.pprint() - - # ************************************************************************** - - # validation - - # the arc should be installed since it is the only feasible solution - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # there should be no opex (imports or exports), only capex from arcs - - assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 - - assert pyo.value(ipp.instance.var_capex) > 0 - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[("mynet", node_A, node_B, arc_key_AB_und)] - ) - > 0 - ) - - # the return amplitude should be the same as the the forward one - - # assert math.isclose( - # pyo.value( - # ipp.instance.var_v_amp_gllj[('mynet', node_A, node_B, 0)] - # ), - # pyo.value( - # ipp.instance.var_v_amp_gllj[('mynet', node_B, node_A, 0)] - # ), - # abs_tol=0.01) - - # ************************************************************************** - -# ****************************************************************************** -# ****************************************************************************** - - -def example_nonisolated_undirected_network( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - undirected_arc_imports, - undirected_arc_exports, - print_model, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None - # ) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5] - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): 
-0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5] - base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - efficiency_reverse=None, - static_loss=None, - validate=False, - ) - - if undirected_arc_imports: - mynet.add_undirected_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA - ) - - else: - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA - ) - - # export arc - - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - efficiency_reverse=None, - static_loss=None, - validate=False, - ) - - if undirected_arc_exports: - mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE - ) - - else: - mynet.add_directed_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE - ) - - # undirected arc - - if different_technologies: - arc_tech_AB = Arcs( - name="any", - # efficiency=[0.95, 0.95, 0.95, 0.95], - efficiency={(0, 0): 0.95, (0, 1): 0.95, (0, 2): 0.95, (0, 3): 0.95}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - # efficiency_reverse=[0.85, 0.85, 0.85, 0.85], - efficiency_reverse={(0, 0): 0.85, (0, 1): 0.85, (0, 2): 0.85, (0, 3): 0.85}, - static_loss=None, - validate=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10.0, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - efficiency_reverse=None, - static_loss=None, - validate=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - if 
different_technologies: - # the undirected arc should be installed since it is cheaper tham imp. - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the directed arc from the import should also be installed since node - # B cannot fullfil all the demand since it has an efficiency of 0.85<1 - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # there should be no opex (imports or exports), only capex from arcs - - assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0 - - assert pyo.value(ipp.instance.var_capex) > 0 - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[ - ("mynet", node_A, node_B, arc_key_AB_und) - ] - ) - > 0 - ) - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[("mynet", imp_node_key, node_A, 0)] - ) - > 0 - ) - - else: # same efficiency (and = 1) - # network is still isolated - - # the import arc was not installed - - assert ( - True - not in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the export arc was not installed - - assert ( - True - not in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the undirected arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the opex should be zero - - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), 0, abs_tol=1e-6) - - # the capex should be positive - - assert pyo.value(ipp.instance.var_capex) > 0 - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_isolated_preexisting_undirected_network( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - capacity_is_instantaneous, - use_specific_method, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5] - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5], - base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, - ) - - # add arcs - - if different_technologies: - # anisotropic - - if use_specific_method: - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[0.9, 1, 0.9, 1], - efficiency={(0, 0): 0.9, (0, 1): 1, (0, 2): 0.9, (0, 3): 1}, - capacity=1.0, - capacity_is_instantaneous=capacity_is_instantaneous, - # efficiency_reverse=[1, 0.9, 1, 0.9], - efficiency_reverse={(0, 0): 1, (0, 1): 0.9, (0, 2): 1, (0, 3): 0.9}, - static_loss=None, - ) - - else: - # undirected arc: - - arc_tech_AB = Arcs( - name="any", - # efficiency=[0.9, 1, 0.9, 1], - efficiency={(0, 0): 0.9, (0, 1): 1, (0, 2): 0.9, (0, 3): 1}, - capacity=[1.0], - 
minimum_cost=[0], - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - # efficiency_reverse=[1, 0.9, 1, 0.9], - efficiency_reverse={(0, 0): 1, (0, 1): 0.9, (0, 2): 1, (0, 3): 0.9}, - static_loss=None, - validate=False, - ) - - arc_tech_AB.options_selected[0] = True - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: # isotropic - if use_specific_method: - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - capacity=1.0, - capacity_is_instantaneous=capacity_is_instantaneous, - ) - - else: - # undirected arc - - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=capacity_is_instantaneous, - efficiency_reverse=None, - static_loss=None, - validate=False, - ) - - arc_tech_AB.options_selected[2] = True - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - if different_technologies: - # there should be no opex (imports or exports) and no capex - - assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 - - assert pyo.value(ipp.instance.var_capex) == 0 - - else: - # there should be no opex (imports or exports) and no capex - - assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 - - assert pyo.value(ipp.instance.var_capex) == 0 - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_nonisolated_network_preexisting_directed_arcs( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = 
generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5], - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5], - base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_tech_IA.options_selected[0] = True - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) - - # export arc - - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_tech_BE.options_selected[0] = True - - mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) - - # undirected arc - - if different_technologies: - # anisotropic arc - - arc_tech_AB = Arcs( - name="any", - # efficiency=[0.95, 0.95, 0.95, 0.95], - efficiency={(0, 0): 0.95, (0, 1): 0.95, (0, 2): 0.95, (0, 3): 0.95}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - # efficiency_reverse=[0.85, 0.85, 0.85, 0.85], - efficiency_reverse={(0, 0): 0.85, (0, 1): 0.85, (0, 2): 0.85, (0, 3): 0.85}, - static_loss=None, - validate=False, - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: # isotropic arc - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10.0, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - 
arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - if different_technologies: - # the undirected arc should be installed since it is cheaper tham imp. - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the directed arc from the import should also be installed since node - # B cannot fullfil all the demand since it has an efficiency of 0.85<1 - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # there should be no opex (imports or exports), only capex from arcs - - assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0 - - assert pyo.value(ipp.instance.var_capex) > 0 - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[ - ("mynet", node_A, node_B, arc_key_AB_und) - ] - ) - > 0 - ) - - else: # same efficiency (and = 1) - # network is still isolated - - # the undirected arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the opex should be zero - - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), 0, abs_tol=1e-3) - - # the capex should be positive - - assert pyo.value(ipp.instance.var_capex) > 0 - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_preexisting_infinite_capacity_directed_arcs( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other 
nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1, -1, 0.5, -0.5], - base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5], - base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[math.inf, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_tech_IA.options_selected[0] = True - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) - - # export arc - - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[math.inf, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_tech_BE.options_selected[0] = True - - mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) - - # undirected arc - - if different_technologies: - arc_tech_AB = Arcs( - name="any", - # efficiency=[0.95, 0.95, 0.95, 0.95], - efficiency={(0, 0): 0.95, (0, 1): 0.95, (0, 2): 0.95, (0, 3): 0.95}, - # efficiency_reverse=[0.85, 0.85, 0.85, 0.85], - efficiency_reverse={(0, 0): 0.85, (0, 1): 0.85, (0, 2): 0.85, (0, 3): 0.85}, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10.0, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - if 
different_technologies: - # the undirected arc should be installed since it is cheaper tham imp. - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the directed arc from the import should also be installed since node - # B cannot fullfil all the demand since it has an efficiency of 0.85<1 - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # there should be no opex (imports or exports), only capex from arcs - - assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0 - - assert pyo.value(ipp.instance.var_capex) > 0 - - assert ( - pyo.value( - ipp.instance.var_capex_arc_gllj[ - ("mynet", node_A, node_B, arc_key_AB_und) - ] - ) - > 0 - ) - - else: # same efficiency (and = 1) - # network is still isolated - - # the undirected arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the opex should be zero - - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), 0, abs_tol=0.001) - - # the capex should be positive - - assert pyo.value(ipp.instance.var_capex) > 0 - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_directed_network_static_losses( - solver, - solver_options, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - make_all_arcs_mandatory, - use_arc_techs_with_fixed_losses, - use_arc_techs_without_fixed_losses, - static_losses_mode, - print_model, - init_aux_sets, -): - # if (not use_arc_techs_with_fixed_losses and - # not use_arc_techs_without_fixed_losses): - - # return - - # case 1: - # if two arc technologies for a given arc are available, and one is with - # and the other is without fixed losses, ceteris paribus, the one without - # fixed losses will be selected, since it is less onerous - # how to check? via the arcs installed - - # case 2: - # if only technologies with fixed losses are available, then the demand - # will be higher and the supply lower than if there were only technologies - # without fixed losses, since the losses must offset the results - # how to check? 
via the imports and exports for both situations - - # case 3: - # placing arcs with fixed losses downstream or upstream should have no - # impact on the imports and exports - - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - imp_prices = [ - ResourcePrice(prices=1 + random.random(), volumes=None) - for i in range(number_intervals) - ] - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): imp_prices[k] - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - exp_prices = [ - ResourcePrice(prices=random.random(), volumes=None) - for i in range(number_intervals) - ] - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): exp_prices[k] - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[0.5, 0.6, 0.7, 0.8], - base_flow={(0, 0): 0.5, (0, 1): 0.6, (0, 2): 0.7, (0, 3): 0.8}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[0.8, -0.7, -0.6, 0.5], - base_flow={(0, 0): 0.8, (0, 1): -0.7, (0, 2): -0.6, (0, 3): 0.5}, - ) - - # add arcs - - # import arc - - if use_arc_techs_without_fixed_losses: - arc_tech_IA = Arcs( - name="IA", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 0.9, (0, 3): 1}, # (0,2):1, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 1.0, 2.0], - minimum_cost=[10, 10.1, 10.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA - ) - - if use_arc_techs_with_fixed_losses: - arc_tech_IA_fix = Arcs( - name="IA_fix", - # efficiency=[1, 1, 1, 1], - efficiency={(q, 0): 1, (q, 1): 1, (q, 2): 0.9, (q, 3): 1}, # (0,2):1, - efficiency_reverse=None, - validate=False, - capacity=[0.5, 1.0, 2.0], - minimum_cost=[10, 10.1, 10.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - # static_losses=[ - # [0.10, 0.15, 0.20, 0.25], - # [0.15, 0.20, 0.25, 0.30], - # [0.20, 0.25, 0.30, 0.35]] - static_loss={ - (0, q, 0): 0.10, - (0, q, 1): 0.15, - (0, q, 2): 0.20, - (0, q, 3): 0.25, - (1, q, 0): 0.15, - (1, q, 1): 0.20, - (1, q, 2): 0.25, - (1, q, 3): 0.30, - (2, q, 0): 0.20, - (2, q, 1): 0.25, - (2, q, 2): 0.30, - (2, q, 3): 0.35, - }, - ) - - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA_fix - ) - - # export arc - - if use_arc_techs_without_fixed_losses: - arc_tech_BE = Arcs( - name="BE", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 0.9, (0, 3): 1}, # (0,2):1, - validate=False, - efficiency_reverse=None, - static_loss=None, - capacity=[0.5, 1.0, 2.0], - minimum_cost=[10, 10.1, 10.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE - ) - - if use_arc_techs_with_fixed_losses: - arc_tech_BE_fix = Arcs( - name="BE_fix", - # efficiency=[1, 1, 1, 1], - efficiency={(q, 0): 1, (q, 1): 1, (q, 2): 0.9, (q, 3): 1}, # (0,2):1, - validate=False, - efficiency_reverse=None, - capacity=[0.5, 1.0, 2.0], - 
minimum_cost=[10, 10.1, 10.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - # static_losses=[ - # [0.10, 0.15, 0.20, 0.25], - # [0.15, 0.20, 0.25, 0.30], - # [0.20, 0.25, 0.30, 0.35]] - static_loss={ - (0, q, 0): 0.10, - (0, q, 1): 0.15, - (0, q, 2): 0.20, - (0, q, 3): 0.25, - (1, q, 0): 0.15, - (1, q, 1): 0.20, - (1, q, 2): 0.25, - (1, q, 3): 0.30, - (2, q, 0): 0.20, - (2, q, 1): 0.25, - (2, q, 2): 0.30, - (2, q, 3): 0.35, - }, - ) - - mynet.add_directed_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE_fix - ) - - # directed arc between A and B - - if use_arc_techs_without_fixed_losses: - arc_tech_AB = Arcs( - name="AB_BA", - # efficiency=[1, 0.7, 1, 1], - efficiency={(0, 0): 1, (0, 1): 0.7, (0, 2): 1, (0, 3): 1}, - validate=False, - efficiency_reverse=None, - static_loss=None, - capacity=[1.5, 2.0, 2.5, 3.0], - minimum_cost=[10.0, 10.1, 10.2, 10.3], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - - if use_arc_techs_with_fixed_losses: - arc_tech_AB_fix = Arcs( - name="AB_BA_fix", - # efficiency=[1, 0.7, 1, 1], - efficiency={(q, 0): 1, (q, 1): 0.7, (q, 2): 1, (q, 3): 1}, - validate=False, - efficiency_reverse=None, - capacity=[1.5, 2.0, 2.5, 3.0], - minimum_cost=[10, 10.1, 10.2, 10.3], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - # static_losses=[ - # [0.01, 0.02, 0.03, 0.04], - # [0.02, 0.03, 0.04, 0.05], - # [0.03, 0.04, 0.05, 0.06], - # [0.04, 0.05, 0.06, 0.07] - # ] - static_loss={ - (0, q, 0): 0.01, - (0, q, 1): 0.02, - (0, q, 2): 0.03, - (0, q, 3): 0.04, - (1, q, 0): 0.02, - (1, q, 1): 0.03, - (1, q, 2): 0.04, - (1, q, 3): 0.05, - (2, q, 0): 0.03, - (2, q, 1): 0.04, - (2, q, 2): 0.05, - (2, q, 3): 0.06, - (3, q, 0): 0.04, - (3, q, 1): 0.05, - (3, q, 2): 0.06, - (3, q, 3): 0.07, - }, - ) - - mynet.add_directed_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB_fix - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # ipp.instance.pprint() - - # validation - - # only arc techs with fixed losses - - if use_arc_techs_with_fixed_losses and not use_arc_techs_without_fixed_losses: - # all arcs should be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - 
flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - # print('hey') - # print(flow_in[('mynet',0,0)]) - # print(flow_in) - # ipp.instance.pprint() - assert math.isclose( - flow_in[("mynet", 0, 0)], 5.631111111111111, abs_tol=abs_tol - ) - # assert math.isclose(flow_in[('mynet',0,0)], 5.55, abs_tol=abs_tol) - - # there should be exports - - abs_tol = 1e-2 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0.815, abs_tol=abs_tol) - # assert math.isclose(flow_out[('mynet',0,0)], 0.85, abs_tol=abs_tol) - - # the opex should be negative (costs outweigh the revenue) - - abs_tol = 1e-6 - - assert ( - flow_in_cost[("mynet", 0, 0)] > flow_out_revenue[("mynet", 0, 0)] - abs_tol - ) - - # there should be capex - - abs_tol = 1e-6 - - assert pyo.value(ipp.instance.var_capex) > 0 - abs_tol - - # only arc techs without fixed losses - - if not use_arc_techs_with_fixed_losses and use_arc_techs_without_fixed_losses: - # all arcs should be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports (lower than with fixed losses, cet. parib.) - - abs_tol = 1e-6 - - assert math.isclose( - flow_in[("mynet", 0, 0)], 3.977777777777778, abs_tol=abs_tol - ) - # assert math.isclose(flow_in[('mynet',0,0)], - # 3.8999999999999995, - # abs_tol=abs_tol) - - # there should be exports (higher than with fixed losses, cet. parib.) 
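# The expectation above can be illustrated with a toy calculation: a fixed (static)
# loss on an arc means extra flow has to be imported just to cover that loss, which
# also leaves less surplus available for export. This is only a sketch of the idea;
# the exact accounting in build_solve_ipp depends on static_losses_mode and on where
# the loss is attributed, and _toy_import_needed below is a hypothetical helper.
def _toy_import_needed(demand: float, efficiency: float, static_loss: float = 0.0) -> float:
    # flow that must enter the arc so that `demand` still leaves it after the
    # proportional loss (efficiency < 1) and the fixed loss (static_loss)
    return (demand + static_loss) / efficiency

# without a fixed loss:  _toy_import_needed(0.8, 0.9)      ~= 0.889
# with a 0.2 fixed loss: _toy_import_needed(0.8, 0.9, 0.2) ~= 1.111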
- - abs_tol = 1e-2 - - assert math.isclose( - flow_out[("mynet", 0, 0)], 1.2400000000000002, abs_tol=abs_tol - ) - # assert math.isclose(flow_out[('mynet',0,0)], - # 1.2999999999999998, - # abs_tol=abs_tol) - - # the opex should be negative (costs outweigh the revenue) - - abs_tol = 1e-6 - - assert ( - flow_in_cost[("mynet", 0, 0)] > flow_out_revenue[("mynet", 0, 0)] - abs_tol - ) - - # there should be capex - - abs_tol = 1e-6 - - assert pyo.value(ipp.instance.var_capex) > 0 - abs_tol - - # arc techs with and without fixed losses - - # (verifies that arcs without losses take precedence, due to fewer costs) - - if use_arc_techs_with_fixed_losses and use_arc_techs_without_fixed_losses: - # the arcs without losses should be installed (those get index 0) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the arcs with losses should not be installed - - assert ( - True - not in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 1)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - not in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 1)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - not in ipp.networks["mynet"] - .edges[(node_A, node_B, 1)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose( - flow_in[("mynet", 0, 0)], 3.977777777777778, abs_tol=abs_tol - ) - # assert math.isclose(flow_in[('mynet',0,0)], - # 3.8999999999999995, - # abs_tol=abs_tol) - - # there should be exports - - abs_tol = 1e-2 - - assert math.isclose( - flow_out[("mynet", 0, 0)], 1.2400000000000002, abs_tol=abs_tol - ) - # assert math.isclose(flow_out[('mynet',0,0)], - # 1.2999999999999998, - # abs_tol=abs_tol) - - # the opex should be negative (costs outweigh the revenue) - - abs_tol = 1e-6 - - assert ( - flow_in_cost[("mynet", 0, 0)] > flow_out_revenue[("mynet", 0, 0)] - abs_tol - ) - - # there should be capex - - abs_tol = 1e-6 - - assert pyo.value(ipp.instance.var_capex) > 0 - abs_tol - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_direct_imp_exp_network( - solver, - solver_options, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - make_all_arcs_mandatory, - use_static_losses, - use_higher_export_prices, - print_model, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # prices - - # if use_higher_export_prices: - - # imp_prices = ResourcePrice( - # prices=[0+random.random() for i in range(number_intervals)], - # volumes=None) - - # exp_prices = ResourcePrice( - # prices=[1.5+random.random() for i in range(number_intervals)], - # volumes=None) - - # else: - - # imp_prices = ResourcePrice( - # prices=[1.5+random.random() for i 
in range(number_intervals)], - # volumes=None) - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) - - # import node - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice( - prices=( - 0 + random.random() - if use_higher_export_prices - else 1.5 + random.random() - ), - volumes=None, - ) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice( - prices=( - 1.5 + random.random() - if use_higher_export_prices - else 0 + random.random() - ), - volumes=None, - ) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - if use_static_losses: - # add arc with fixed losses from import node to export - - arc_tech_IE_fix = Arcs( - name="IE_fix", - # efficiency=[1, 1, 1, 1], - efficiency={(q, 0): 1, (q, 1): 1, (q, 2): 1, (q, 3): 1}, - efficiency_reverse=None, - validate=False, - capacity=[0.5, 1.0, 2.0], - minimum_cost=[5, 5.1, 5.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - # static_losses=[ - # [0.10, 0.15, 0.20, 0.25], - # [0.15, 0.20, 0.25, 0.30], - # [0.20, 0.25, 0.30, 0.35]] - static_loss={ - (0, q, 0): 0.10, - (0, q, 1): 0.15, - (0, q, 2): 0.20, - (0, q, 3): 0.25, - (1, q, 0): 0.15, - (1, q, 1): 0.20, - (1, q, 2): 0.25, - (1, q, 3): 0.30, - (2, q, 0): 0.20, - (2, q, 1): 0.25, - (2, q, 2): 0.30, - (2, q, 3): 0.35, - }, - ) - - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix - ) - - else: - # add arc without fixed losses from import node to export - - arc_tech_IE = Arcs( - name="IE", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 1.0, 2.0], - minimum_cost=[5, 5.1, 5.2], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - if use_higher_export_prices: - # export prices are higher: it makes sense to install the arc since the - # revenue (@ max. cap.) 
exceeds the cost of installing the arc - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be no imports - - abs_tol = 1e-6 - - assert flow_in[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_in_cost[("mynet", 0, 0)] > 0.0 - abs_tol - - # there should be no exports - - abs_tol = 1e-2 - - assert flow_out[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_out_revenue[("mynet", 0, 0)] > 0.0 - abs_tol - - # the revenue should exceed the costs - - abs_tol = 1e-2 - - assert ( - flow_out_revenue[("mynet", 0, 0)] > flow_in_cost[("mynet", 0, 0)] - abs_tol - ) - - # the capex should be positive - - abs_tol = 1e-6 - - assert pyo.value(ipp.instance.var_capex) > 0 - abs_tol - - else: # import prices are higher: it makes no sense to install the arc - # the arc should not be installed (unless prices allow for it) - - assert ( - True - not in ipp.networks["mynet"] - .edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be no imports - - abs_tol = 1e-6 - - assert math.isclose(flow_in[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) - - assert math.isclose(flow_in_cost[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) - - # there should be no exports - - abs_tol = 1e-2 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) - - assert math.isclose(flow_out_revenue[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) - - # there should be no capex - - abs_tol = 1e-6 - - assert math.isclose(pyo.value(ipp.instance.var_capex), 0.0, abs_tol=abs_tol) - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_network_mandatory_arcs( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - use_undirected_arcs, - print_model, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 4 - number_periods = 2 - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[random.random() for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[1.00, 1.25, 0.75, 0.5], - base_flow={(0, 0): 1.0, (0, 1): 
1.25, (0, 2): 0.75, (0, 3): 0.5}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[0.50, 0.25, 0.35, 0.45], - base_flow={(0, 0): 0.50, (0, 1): 0.25, (0, 2): 0.35, (0, 3): 0.45}, - ) - - node_C = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_C, - # base_flow=[-1, -0.1, -1.5, -0.25], - base_flow={(0, 0): -1, (0, 1): -0.1, (0, 2): -1.5, (0, 3): -0.25}, - ) - - node_D = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_D, - # base_flow=[-1, -1.25, -0.25, -0.5], - base_flow={(0, 0): -1, (0, 1): -1.25, (0, 2): -0.25, (0, 3): -0.5}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="IA", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) - - arc_tech_AB = Arcs( - name="AB", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - if use_undirected_arcs: - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - - arc_key_AB_und = 0 - - # export arc - - arc_tech_CD = Arcs( - name="CD", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_tech_DE = Arcs( - name="DE", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - if use_undirected_arcs: - arc_key_CD_und = mynet.add_undirected_arc( - node_key_a=node_C, node_key_b=node_D, arcs=arc_tech_CD - ) - - else: - mynet.add_directed_arc(node_key_a=node_C, node_key_b=node_D, arcs=arc_tech_CD) - - arc_key_CD_und = 0 - - mynet.add_directed_arc(node_key_a=node_D, node_key_b=exp_node_key, arcs=arc_tech_DE) - - mandatory_arcs = [ - ("mynet", imp_node_key, node_A, 0), - ("mynet", node_D, exp_node_key, 0), - ("mynet", node_A, node_B, arc_key_AB_und), - ("mynet", node_C, node_D, arc_key_CD_und), - ] - - # add two nodes and one preexisting arc to cover a special case - - node_G = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_waypoint_node(node_key=node_G) - - node_H = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_waypoint_node(node_key=node_H) - - mynet.add_preexisting_directed_arc( - node_key_a=node_G, - node_key_b=node_H, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=1, - capacity_is_instantaneous=False, - ) - - # identify node 
types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=mandatory_arcs, - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - # IA - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # AB - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # CD - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_C, node_D, arc_key_CD_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # DE - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_D, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the undirected arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert flow_in[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_in_cost[("mynet", 0, 0)] > 0.0 - abs_tol - - # there should be exports - - abs_tol = 1e-2 - - assert flow_out[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_out_revenue[("mynet", 0, 0)] > 0.0 - abs_tol - - # the capex should be positive - - assert pyo.value(ipp.instance.var_capex) > 0 - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_problem_max_arc_limits_infeasible( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - case, - print_model, - init_aux_sets, -): - # ************************************************************************** - # ************************************************************************** - - # there are 3 possible outcomes: - # 1) the number of preexisting and mandatory arcs is above the limit - # >> the problem is infeasible - # 2) maximum number of arcs lower than or equal to the limit - # >> the constraint is skipped - # 3) maximum number of arcs above the limit, the number of preexisting and - # mandatory arcs is below the limit >> the constraint is used - - # various ways to test the cases: - # 1) preexisting vs selectable arcs - # 2) mandatory vs optional arcs - # 3) directed vs undirected arcs - - 
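# The three possible outcomes listed above can be summarised with a small sketch.
# The helper below is hypothetical and only illustrates the counting logic; it does
# not reproduce the constraint that InfrastructurePlanningProblem actually builds
# from max_number_parallel_arcs.
def _toy_max_arc_limit_outcome(n_preexisting: int, n_mandatory: int,
                               n_selectable: int, limit: int) -> str:
    fixed = n_preexisting + n_mandatory  # arcs that exist regardless of the optimisation
    if fixed > limit:
        return "infeasible"          # outcome 1: the limit is already exceeded
    if fixed + n_selectable <= limit:
        return "constraint skipped"  # outcome 2: the limit can never be reached
    return "constraint used"         # outcome 3: the limit may bind on selectable arcs

# e.g. the 1_* cases below set up more preexisting plus mandatory arcs between A and
# B than max_number_arcs_AB allows, so the expected outcome is "infeasible".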
# how to test case 1: - # a) use only preexisting directed arcs - # b) use only preexisting undirected arcs - # c) use preexisting directed and undirected arcs - # d) use only mandatory directed arcs - # e) use only mandatory undirected arcs - # f) use mandatory directed and undirected arcs - # g) use preexisting directed arcs and mandatory directed arcs - # h) use preexisting undirected arcs and mandatory undirected arcs - # i) use preexisting undirected arcs and mandatory directed arcs - # j) use preexisting directed arcs and mandatory undirected arcs - # k) use preexi. directed and undirected arcs and mandatory directed arcs - # l) use preexi. directed and undirected arcs and mandatory undirected arcs - # m) use preexi. directed arcs and mandatory directed and undirected arcs - # n) use preexi. undirected arcs and mandatory directed and undirected arcs - # o) use preselelected and mandatory directed and undirected arcs - - # ************************************************************************** - # ************************************************************************** - - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[0+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=0 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[0.1, 0.2, 0.3, 0.4], - base_flow={(0, 0): 0.1, (0, 1): 0.2, (0, 2): 0.3, (0, 3): 0.4}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[0.4, 0.3, 0.2, 0.1], - base_flow={(0, 0): 0.4, (0, 1): 0.3, (0, 2): 0.2, (0, 3): 0.1}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) - - # export arc - - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[1, 1.1, 1.2, 1.3, 1.4, 1.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) - - # ************************************************************************** - # 
************************************************************************** - - # arcs between A and B - - max_number_arcs_AB = 1 - - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB = mynet.add_directed_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - if case == "1_a": - # a) use only preexisting directed arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - static_loss=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - arc_groups_dict = {} - - elif case == "1_b": - # b) use only preexisting undirected arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - arc_groups_dict = {} - - elif case == "1_c": - # c) use preexisting directed and undirected arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - arc_groups_dict = {} - - elif case == "1_d": - # d) use only mandatory directed arcs - - max_number_arcs_AB = 1 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", 
node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_e": - # e) use only mandatory undirected arcs - - max_number_arcs_AB = 1 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_f": - # f) use mandatory directed and undirected arcs - - max_number_arcs_AB = 1 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_g": - # g) use preexisting directed arcs and mandatory directed arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_h": - # h) use preexisting undirected arcs and mandatory undirected arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 
2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_i": - # i) use preexisting undirected arcs and mandatory directed arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_j": - # j) use preexisting directed arcs and mandatory undirected arcs - - max_number_arcs_AB = 1 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_k": - # k) use preexi. directed and undirected arcs and mandat. directed arcs - - max_number_arcs_AB = 2 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_l": - # l) use preexi. directed and undir. 
arcs and mandatory undirected arcs - - max_number_arcs_AB = 2 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1)] - - arc_groups_dict = {} - - elif case == "1_m": - # m) use preexi. directed arcs and mandat. directed and undirected arcs - - max_number_arcs_AB = 2 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_n": - # n) use preexi. undirected arcs and man. 
directed and undirected arcs - - max_number_arcs_AB = 2 - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - validate=False, - static_loss=None, - efficiency_reverse=None, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_o": - # o) use preselelected and mandatory directed and undirected arcs - - max_number_arcs_AB = 3 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [("mynet", node_A, node_B, 1), ("mynet", node_A, node_B, 2)] - - arc_groups_dict = {} - - elif case == "1_p": - # p) use pre-existing undirected arcs in both directions - - max_number_arcs_AB = 2 - - arc_key_AB1 = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_key_AB2 = mynet.add_preexisting_undirected_arc( - node_key_a=node_B, - node_key_b=node_A, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - arc_groups_dict = {} - - elif case == 
"1_q": - # q) use mandatory undirected arcs in both directions - - max_number_arcs_AB = 4 - - arc_key_AB1 = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - capacity=10, - capacity_is_instantaneous=False, - ) - - arc_key_AB2 = mynet.add_preexisting_undirected_arc( - node_key_a=node_B, - node_key_b=node_A, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - arc_tech_AB3 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB3 = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB3 - ) - - arc_tech_AB4 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB4 = mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB4 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB3), - ("mynet", node_B, node_A, arc_key_AB4), - ] - - arc_groups_dict = {} - - elif case == "1_r": - # r) use mandatory and pre-existing undirected arcs in both directions - - max_number_arcs_AB = 2 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1 = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2 = mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB1), - ("mynet", node_B, node_A, arc_key_AB2), - ] - - arc_groups_dict = {} - - elif case == "1_s": - # s) TODO: use groups of arcs with mandatory arcs - - max_number_arcs_AB = 2 - - arc_tech_AB1 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1 = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - arc_tech_AB2 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) 
- - arc_key_AB2 = mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB1), - ("mynet", node_B, node_A, arc_key_AB2), - ] - - arc_groups_dict = { - 0: ( - ("mynet", node_A, node_B, arc_key_AB1), - ("mynet", node_B, node_A, arc_key_AB2), - ("mynet", node_A, node_B, arc_key_AB), - ) - } - - elif case == "1_t": - # t) TODO: use mandatory groups of arcs - - max_number_arcs_AB = 2 - - arc_tech_AB1 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1 = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - arc_tech_AB2 = Arcs( - name="any", - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2 = mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB1), - ("mynet", node_B, node_A, arc_key_AB2), - ] - - arc_groups_dict = { - 0: ( - ("mynet", node_A, node_B, arc_key_AB1), - ("mynet", node_B, node_A, arc_key_AB2), - ) - } - - # elif case == '1_u': - - # pass - - # else: - - # mandatory_arcs = [] - - # ************************************************************************** - # ************************************************************************** - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - _ = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=mandatory_arcs, - max_number_parallel_arcs={("mynet", node_A, node_B): max_number_arcs_AB}, - arc_groups_dict=arc_groups_dict, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # # validation - - # # the import arc is installed - - # assert True in ipp.networks['mynet'].edges[(imp_node_key, node_A, 0)][ - # Network.KEY_ARC_TECH].options_selected - - # # the intermediate arc was installed - - # assert True in ipp.networks['mynet'].edges[(node_A, node_B, 0)][ - # Network.KEY_ARC_TECH].options_selected - - # # the export arc was installed - - # assert True in ipp.networks['mynet'].edges[(node_B, exp_node_key, 0)][ - # Network.KEY_ARC_TECH].options_selected - - # # overview - - # (flow_in, - # flow_out, - # flow_in_cost, - # flow_out_revenue) = compute_cost_volume_metrics(ipp.instance, True) - - # # there should be imports - - # abs_tol = 1e-6 - - # assert flow_in[('mynet',0,0)] > 0.0 - abs_tol - - # assert 
flow_in_cost[('mynet',0,0)] > 0.0 - abs_tol - - # # there should be exports - - # abs_tol = 1e-2 - - # assert flow_out[('mynet',0,0)] > 0.0 - abs_tol - - # assert flow_out_revenue[('mynet',0,0)] > 0.0 - abs_tol - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_problem_max_arc_limits_skip( - solver, - solver_options, - different_technologies, - irregular_time_intervals, - use_sos_arcs, - sos_weight_key, - use_real_variables_if_possible, - use_sos_sense, - sense_sos_weight_key, - sense_use_real_variables_if_possible, - use_arc_interfaces, - case, - print_model, - init_aux_sets, -): - # ************************************************************************** - # ************************************************************************** - - # there are 3 possible outcomes: - # 1) the number of preexisting and mandatory arcs is above the limit - # >> the problem is infeasible - # 2) maximum number of arcs lower than or equal to the limit - # >> the constraint is skipped - # 3) maximum number of arcs above the limit, the number of preexisting and - # mandatory arcs is below the limit >> the constraint is used - - # various ways to test the cases: - # 1) preexisting vs selectable arcs - # 2) mandatory vs optional arcs - # 3) directed vs undirected arcs - - # how to test case 1: - # a) use only preexisting directed arcs - # b) use only preexisting undirected arcs - # c) use preexisting directed and undirected arcs - # d) use only mandatory directed arcs - # e) use only mandatory undirected arcs - # f) use mandatory directed and undirected arcs - # g) use preexisting directed arcs and mandatory directed arcs - # h) use preexisting undirected arcs and mandatory undirected arcs - # i) use preexisting undirected arcs and mandatory directed arcs - # j) use preexisting directed arcs and mandatory undirected arcs - # k) use preexi. directed and undirected arcs and mandatory directed arcs - # l) use preexi. directed and undirected arcs and mandatory undirected arcs - # m) use preexi. directed arcs and mandatory directed and undirected arcs - # n) use preexi. 
undirected arcs and mandatory directed and undirected arcs - # o) use preselelected and mandatory directed and undirected arcs - - # ************************************************************************** - # ************************************************************************** - - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[0+(i+1)/number_intervals for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice( - prices=0 + (k + 1) / number_intervals, volumes=None - ) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # export node - - # exp_prices = ResourcePrice( - # prices=[10+(i+1)/number_intervals for i in range(number_intervals)], - # volumes=None) - - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_export_node( - node_key=exp_node_key, - prices={ - (q, p, k): ResourcePrice( - prices=10 + (k + 1) / number_intervals, volumes=None - ) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - # base_flow=[0.1, 0.2, 0.3, 0.4], - base_flow={(0, 0): 0.1, (0, 1): 0.2, (0, 2): 0.3, (0, 3): 0.4}, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - # base_flow=[0.4, 0.3, 0.2, 0.1], - base_flow={(0, 0): 0.4, (0, 1): 0.3, (0, 2): 0.2, (0, 3): 0.1}, - ) - - # add arcs - - # import arc - - arc_tech_IA = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA) - - # export arc - - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[1, 1.1, 1.2, 1.3, 1.4, 1.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE) - - # ************************************************************************** - # ************************************************************************** - - # arcs between A and B - - max_number_arcs_AB = 1 - - arc_tech_AB = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - - if case == "2_a": - # a) use only preexisting directed arcs - - max_number_arcs_AB = 2 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - 
static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - elif case == "2_b": - # b) use only preexisting undirected arcs - - max_number_arcs_AB = 2 - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - elif case == "2_c": - # c) use preexisting directed and undirected arcs - - max_number_arcs_AB = 3 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - static_loss=None, - efficiency_reverse=None, - capacity=0.05, - capacity_is_instantaneous=False, - ) - - mandatory_arcs = [] - - elif case == "2_d": - # d) use only mandatory directed arcs - - max_number_arcs_AB = 3 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2) - - mandatory_arcs = [ - ("mynet", node_A, node_B, 1), - ("mynet", node_A, node_B, 2), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_e": - # e) use only mandatory undirected arcs - - max_number_arcs_AB = 3 - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB1_und), - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_f": - # f) use mandatory directed and undirected arcs - - max_number_arcs_AB = 3 - - arc_tech_AB1 = Arcs( - name="any", - # 
efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, 1), - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_g": - # g) use preexisting directed arcs and mandatory directed arcs - - max_number_arcs_AB = 3 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 2), ("mynet", node_A, node_B, 0)] - - elif case == "2_h": - # h) use preexisting undirected arcs and mandatory undirected arcs - - max_number_arcs_AB = 3 - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, 0), - ("mynet", node_A, node_B, arc_key_AB2_und), - ] - - elif case == "2_i": - # i) use preexisting undirected arcs and mandatory directed arcs - - max_number_arcs_AB = 3 - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - 
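# note (annotation): the directed arc added next is the third arc between
# node_A and node_B, after the selectable arc created before the case branches
# (key 0) and the preexisting undirected arc; the mandatory arc list below
# therefore refers to it by key 2, together with key 0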
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 2), ("mynet", node_A, node_B, 0)] - - elif case == "2_j": - # j) use preexisting directed arcs and mandatory undirected arcs - - max_number_arcs_AB = 3 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB1_und), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_k": - # k) use preexi. directed and undirected arcs and mandat. directed arcs - - max_number_arcs_AB = 4 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=0.1, - static_loss=None, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=5, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - mandatory_arcs = [("mynet", node_A, node_B, 3), ("mynet", node_A, node_B, 0)] - - elif case == "2_l": - # l) use preexi. directed and undir. arcs and mandatory undirected arcs - - max_number_arcs_AB = 4 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=0.1, - static_loss=None, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.05, - capacity_is_instantaneous=False, - ) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_m": - # m) use preexi. directed arcs and mandat. 
directed and undirected arcs - - max_number_arcs_AB = 4 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=0.1, - static_loss=None, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 2), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_n": - # n) use preexi. undirected arcs and man. directed and undirected arcs - - max_number_arcs_AB = 4 - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.05, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, 2), - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 0), - ] - - elif case == "2_o": - # o) use pre-existing and mandatory directed and undirected arcs - - max_number_arcs_AB = 5 - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB1_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - static_loss=None, - efficiency_reverse=None, - capacity=0.05, - capacity_is_instantaneous=False, - ) - - arc_tech_AB1 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - 
efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1, 2, 3, 4, 5], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB1) - - arc_tech_AB2 = Arcs( - name="any", - # efficiency=[0.8, 0.8, 0.8, 0.8], - efficiency={(0, 0): 0.8, (0, 1): 0.8, (0, 2): 0.8, (0, 3): 0.8}, - efficiency_reverse=None, - static_loss=None, - validate=False, - capacity=[1.1, 2.1, 3.1, 4.1, 5.1], - minimum_cost=[1, 2, 3, 4, 5], - specific_capacity_cost=1.1, - capacity_is_instantaneous=False, - ) - - arc_key_AB2_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB2 - ) - - mandatory_arcs = [ - ("mynet", node_A, node_B, 3), - ("mynet", node_A, node_B, arc_key_AB2_und), - ("mynet", node_A, node_B, 0), - ] - - # else: - - # mandatory_arcs = [] - - # ************************************************************************** - # ************************************************************************** - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=use_sos_arcs, - arc_sos_weight_key=sos_weight_key, - arc_use_real_variables_if_possible=use_real_variables_if_possible, - use_sos_sense=use_sos_sense, - sense_sos_weight_key=sense_sos_weight_key, - sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - sense_use_arc_interfaces=use_arc_interfaces, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=irregular_time_intervals, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=mandatory_arcs, - max_number_parallel_arcs={("mynet", node_A, node_B): max_number_arcs_AB}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # validation - - # assert that the constraint was not needed - - # ipp.instance.constr_limited_parallel_arcs_per_direction.pprint() - - assert ( - "mynet", - node_A, - node_B, - ) not in ipp.instance.constr_limited_parallel_arcs_per_direction - - # the import arc is installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the intermediate arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the export arc was installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_B, exp_node_key, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert flow_in[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_in_cost[("mynet", 0, 0)] > 0.0 - abs_tol - - # there should be exports - - abs_tol = 1e-2 - - assert flow_out[("mynet", 0, 0)] > 0.0 - abs_tol - - assert flow_out_revenue[("mynet", 0, 0)] > 0.0 - abs_tol - - # ************************************************************************** - - -# ****************************************************************************** -# ****************************************************************************** - - 
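The header comment of example_problem_max_arc_limits_skip above lists three possible outcomes for the parallel arc limit (infeasible, constraint skipped, constraint enforced). The following is a minimal, self-contained sketch of that decision logic only; the helper name and signature are illustrative and not part of the package.

def parallel_arc_limit_outcome(
    preexisting_or_mandatory: int,
    selectable_optional: int,
    limit: int,
) -> str:
    """Classify one (node pair, direction) against its parallel arc limit."""
    if preexisting_or_mandatory > limit:
        # outcome 1: the arcs that must exist already exceed the limit
        return "infeasible"
    if preexisting_or_mandatory + selectable_optional <= limit:
        # outcome 2: the limit can never be violated, so the constraint is skipped
        return "skip"
    # outcome 3: the limit can still be violated, so the constraint is enforced
    return "enforce"

# case "2_a" above: one selectable arc plus one preexisting arc against a limit
# of two, so the constraint is expected to be skipped
assert parallel_arc_limit_outcome(1, 1, 2) == "skip"

Every "2_*" case is built so that the second outcome applies, which is what the later assertion on the absence of ("mynet", node_A, node_B) from constr_limited_parallel_arcs_per_direction verifies.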
-def example_report_directed_network_static_losses( - solver, solver_options, static_losses_mode, use_new_arcs, init_aux_sets -): - q = 0 - - # time - - number_intervals = 1 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_waypoint_node(node_key=node_A) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2}) - - # add arcs - - # IA arc - - mynet.add_infinite_capacity_arc( - node_key_a=imp_node_key, - node_key_b=node_A, - efficiency={(q, 0): 1}, - static_loss=None, - ) - - if use_new_arcs: - # AB arc - - arc_tech_AB = Arcs( - name="AB", - # efficiency=[1, 1, 1, 1], - efficiency={(q, 0): 0.8}, - efficiency_reverse=None, - validate=False, - capacity=[1.0], - minimum_cost=[0], - specific_capacity_cost=0, - capacity_is_instantaneous=False, - static_loss={(0, q, 0): 0.10}, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) - - else: - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency={(q, 0): 0.8}, - static_loss={(0, q, 0): 0.10}, - capacity=1.0, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose(flow_in[("mynet", 0, 0)], 0.35, abs_tol=abs_tol) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # flow through IA must be 0.35 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), - 0.35, - abs_tol=abs_tol, - ) - - # validation - - if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: - # losses are downstream - - # flow 
through AB must be 0.35 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), - 0.35, - abs_tol=abs_tol, - ) - - else: - # losses are upstream - - # flow through AB must be 0.25 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), - 0.25, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - -# test with pre-existing arcs: -# one import node, one regular node, and two directed arcs for imports -# one among the arcs has static losses while the other does not -# the latter must not compensate for the former's static losses -# the arc without losses should be capable of doing the job -# this way, the arc with losses does not have to be used but still has losses -# that requires that its efficiency be lower than the arc without losses - - -def example_directed_arc_static_downstream_pre(solver, solver_options, init_aux_sets): - q = 0 - - # time - - number_intervals = 1 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0}) - - # add arcs - - # IA1 - - mynet.add_preexisting_directed_arc( - node_key_a=imp_node_key, - node_key_b=node_A, - efficiency={(q, 0): 0.9}, - static_loss={(q, 0, 0): 0.1}, - capacity=0.5, - capacity_is_instantaneous=False, - ) - - # IA2 - - mynet.add_preexisting_directed_arc( - node_key_a=imp_node_key, - node_key_b=node_A, - efficiency=None, - static_loss=None, - capacity=1.2, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=True, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 1)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert 
math.isclose(flow_in[("mynet", 0, 0)], (1.0 + 0.1), abs_tol=abs_tol) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # flow through IA1 must be 0.1 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), - 0.1, - abs_tol=abs_tol, - ) - - # flow through IA2 must be 1.0 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 0)]), - 1.0, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - -# test with new arcs: -# two steps must be used, one to force the investments, another to demonstrate -# their order does not matter, except for verification purposes -# both arcs needs to be installed -# the arc without losses should be capable of doing everything during one int. -# the arc with losses should not be used during one of the time steps -# during the other, the conditions must be such that the lossy arc is necessary - - -def example_directed_arc_static_downstream_new(solver, solver_options, init_aux_sets): - q = 0 - - # time - - number_intervals = 2 - - number_periods = 2 - - # 4 nodes: one import, one export, two supply/demand nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=0 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0, (q, 1): 1.3}) - - # add arcs - - # IA1 - - arcs_ia1 = Arcs( - name="IA1", - efficiency={(q, 0): 0.9, (q, 1): 0.9}, - efficiency_reverse=None, - static_loss={(0, q, 0): 0.0, (0, q, 1): 0.1}, - capacity=tuple([0.5 / 0.9]), - minimum_cost=tuple([0.1]), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia1) - - # IA2 - - arcs_ia2 = Arcs( - name="IA2", - efficiency=None, - efficiency_reverse=None, - static_loss=None, - capacity=tuple([1.2]), - minimum_cost=tuple([0.1]), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia2) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=True, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be 
installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 1)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose( - flow_in[("mynet", 0, 0)], (1.2 + 0.1 / 0.9 + 1.0 + 0.1), abs_tol=abs_tol - ) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # interval 0: flow through IA1 must be 0 - # interval 1: flow through IA1 must be 0.1+0.1/0.9 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 1)]), - 0.1 + 0.1 / 0.9, - abs_tol=abs_tol, - ) - - # interval 0: flow through IA2 must be 1.0 - # interval 1: flow through IA2 must be 1.2 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 0)]), - 1.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 1)]), - 1.2, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - -# test the capacity reduction when losses are upstream - - -def example_directed_arc_static_upstream( - solver, solver_options, use_new_arcs, init_aux_sets -): - q = 0 - - # time - - number_intervals = 1 - - number_periods = 2 - - # 4 nodes: two import nodes, two supply/demand nodes - - mynet = Network() - - # import nodes - - # imp1_prices = ResourcePrice( - # prices=[1 for i in range(number_intervals)], - # volumes=None) - - imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp1_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # imp2_prices = ResourcePrice( - # prices=[2 for i in range(number_intervals)], - # volumes=None) - - imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp2_node_key, - prices={ - (q, p, k): ResourcePrice(prices=2, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_waypoint_node(node_key=node_A) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - base_flow={ - (q, 0): 1.0, - }, - ) - - # add arcs - - # I1A - - mynet.add_preexisting_directed_arc( - node_key_a=imp1_node_key, - node_key_b=node_A, - efficiency=None, - static_loss=None, - capacity=1, - capacity_is_instantaneous=False, - ) - - if use_new_arcs: - # I2B - - arcs_i2b = Arcs( - name="I2B", - efficiency=None, - efficiency_reverse=None, - static_loss=None, - capacity=(0.1,), - minimum_cost=(0.025,), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - mynet.add_directed_arc( - node_key_a=imp2_node_key, node_key_b=node_B, arcs=arcs_i2b - ) - - # AB - - arcs_ab = Arcs( - name="IA1", - 
efficiency=None, - efficiency_reverse=None, - static_loss={(0, q, 0): 0.1}, - capacity=(1,), - minimum_cost=(0.05,), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab) - - else: - # I2B - - mynet.add_preexisting_directed_arc( - node_key_a=imp2_node_key, - node_key_b=node_B, - efficiency=None, - static_loss=None, - capacity=0.1, - capacity_is_instantaneous=False, - ) - - # AB - - mynet.add_preexisting_directed_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency=None, - static_loss={(0, q, 0): 0.1}, - capacity=1, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose(flow_in[("mynet", 0, 0)], 1.1, abs_tol=abs_tol) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # interval 0: flow through IA1 must be 1 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), - 1, - abs_tol=abs_tol, - ) - - # interval 0: flow through AB must be 0.9 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), - 0.9, - abs_tol=abs_tol, - ) - - # interval 0: flow through IB2 must be 0.1 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), - 0.1, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - - -def example_report_undirected_network_static_losses( - solver, - solver_options, - static_losses_mode, - use_new_arcs, - invert_original_direction, - init_aux_sets, -): - q = 0 - - # time - - number_intervals = 2 - - number_periods = 2 - - # 3 nodes: one import, two regular nodes - - mynet = Network() - - # import node - - # imp_prices = ResourcePrice( - # prices=[1+random.random() for i in range(number_intervals)], - # volumes=None) - - 
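# note (annotation): in this scenario node A demands 0.4 in interval 1 only,
# while node B demands 0.2 in interval 0 and supplies 0.6 in interval 1, so the
# undirected arc must carry flow from A to B in interval 0 and from B to A in
# interval 1; the assertions further below check the selected flow senses, the
# import volumes (0.35 and 0.15) and where the 0.10 static loss is placed for
# each static_losses_mode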
imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + random.random(), volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 0.4}) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2, (q, 1): -0.6}) - - # add arcs - - # IA arc - - mynet.add_infinite_capacity_arc( - node_key_a=imp_node_key, node_key_b=node_A, efficiency=None, static_loss=None - ) - - AB_efficiency = {(q, 0): 0.8, (q, 1): 0.8} - - BA_efficiency = {(q, 0): 0.5, (q, 1): 0.5} - - if use_new_arcs: - # AB arc - - if invert_original_direction: - arc_tech_AB = Arcs( - name="AB", - efficiency=BA_efficiency, - efficiency_reverse=AB_efficiency, - validate=False, - capacity=[1.0], - minimum_cost=[0.01], - specific_capacity_cost=0, - capacity_is_instantaneous=False, - static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB - ) - - else: - arc_tech_AB = Arcs( - name="AB", - efficiency=AB_efficiency, - efficiency_reverse=BA_efficiency, - validate=False, - capacity=[1.0], - minimum_cost=[0.01], - specific_capacity_cost=0, - capacity_is_instantaneous=False, - static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB - ) - - else: - # pre-existing arcs - - if invert_original_direction: - arc_key_AB_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_B, - node_key_b=node_A, - efficiency=BA_efficiency, - efficiency_reverse=AB_efficiency, - static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, - capacity=1.0, - capacity_is_instantaneous=False, - ) - - else: - arc_key_AB_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency=AB_efficiency, - efficiency_reverse=BA_efficiency, - static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, - capacity=1.0, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - if invert_original_direction: - assert ( - True - in ipp.networks["mynet"] - .edges[(node_B, node_A, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - else: - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, 
arc_key_AB_und)][Network.KEY_ARC_TECH]
-            .options_selected
-        )
-
-    # overview
-
-    (
-        flow_in,
-        flow_in_k,
-        flow_out,
-        flow_in_cost,
-        flow_out_revenue,
-    ) = compute_cost_volume_metrics(ipp.instance, True)
-
-    # the flow through AB should be from A to B during interval 0
-
-    abs_tol = 1e-6
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_zeta_sns_glljqk[
-                ("mynet", node_A, node_B, arc_key_AB_und, q, 0)
-            ]
-        ),
-        1,
-        abs_tol=abs_tol,
-    )
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_zeta_sns_glljqk[
-                ("mynet", node_B, node_A, arc_key_AB_und, q, 0)
-            ]
-        ),
-        0,
-        abs_tol=abs_tol,
-    )
-
-    # the flow through AB should be from B to A during interval 1
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_zeta_sns_glljqk[
-                ("mynet", node_A, node_B, arc_key_AB_und, q, 1)
-            ]
-        ),
-        0,
-        abs_tol=abs_tol,
-    )
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_zeta_sns_glljqk[
-                ("mynet", node_B, node_A, arc_key_AB_und, q, 1)
-            ]
-        ),
-        1,
-        abs_tol=abs_tol,
-    )
-
-    # there should be imports
-
-    abs_tol = 1e-6
-
-    assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol)
-
-    # there should be no exports
-
-    abs_tol = 1e-6
-
-    assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
-
-    # flow through IA must be 0.35 during time interval 0
-    # flow through IA must be 0.15 during time interval 1
-
-    abs_tol = 1e-6
-
-    assert math.isclose(
-        pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]),
-        0.35,
-        abs_tol=abs_tol,
-    )
-
-    abs_tol = 1e-6
-
-    assert math.isclose(
-        pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 1)]),
-        0.15,
-        abs_tol=abs_tol,
-    )
-
-    # flow from B to A must be 0 during time interval 0
-    # flow from A to B must be 0 during time interval 1
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)]
-        ),
-        0.0,
-        abs_tol=abs_tol,
-    )
-
-    assert math.isclose(
-        pyo.value(
-            ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)]
-        ),
-        0.0,
-        abs_tol=abs_tol,
-    )
-
-    # validation
-
-    if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR:
-        # arrival node
-
-        if invert_original_direction:
-            # flow from A to B must be 0.25 during time interval 0
-
-            assert math.isclose(
-                pyo.value(
-                    ipp.instance.var_v_glljqk[
-                        ("mynet", node_A, node_B, arc_key_AB_und, q, 0)
-                    ]
-                ),
-                0.25,
-                abs_tol=abs_tol,
-            )
-
-            # flow from B to A must be 0.6 during time interval 1
-
-            assert math.isclose(
-                pyo.value(
-                    ipp.instance.var_v_glljqk[
-                        ("mynet", node_B, node_A, arc_key_AB_und, q, 1)
-                    ]
-                ),
-                0.6,
-                abs_tol=abs_tol,
-            )
-
-        else:
-            # flow from A to B must be 0.35 during time interval 0
-
-            assert math.isclose(
-                pyo.value(
-                    ipp.instance.var_v_glljqk[
-                        ("mynet", node_A, node_B, arc_key_AB_und, q, 0)
-                    ]
-                ),
-                0.35,
-                abs_tol=abs_tol,
-            )
-
-            # flow from B to A must be 0.5 during time interval 1
-
-            assert math.isclose(
-                pyo.value(
-                    ipp.instance.var_v_glljqk[
-                        ("mynet", node_B, node_A, arc_key_AB_und, q, 1)
-                    ]
-                ),
-                0.5,
-                abs_tol=abs_tol,
-            )
-
-    elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP:
-        # departure node
-
-        if invert_original_direction:
-            # flow from A to B must be 0.35 during time interval 0
-
-            assert math.isclose(
-                pyo.value(
-                    ipp.instance.var_v_glljqk[
-                        ("mynet", node_A, node_B, arc_key_AB_und, q, 0)
-                    ]
-                ),
-                0.35,
-                abs_tol=abs_tol,
-            )
-
-            # flow from B to A must be 0.5 during time interval 1
-
-            assert 
math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 1) - ] - ), - 0.5, - abs_tol=abs_tol, - ) - - else: - # flow from A to B must be 0.25 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, q, 0) - ] - ), - 0.25, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0.6 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 1) - ] - ), - 0.6, - abs_tol=abs_tol, - ) - - elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: - # upstream - - # flow from A to B must be 0.25 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, q, 0) - ] - ), - 0.25, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0.6 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 1) - ] - ), - 0.5, - abs_tol=abs_tol, - ) - - else: - # downstream - - # flow from A to B must be 0.35 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, q, 0) - ] - ), - 0.35, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0.6 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 1) - ] - ), - 0.6, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - -# test the capacity reduction when losses are upstream w/ undirected arcs - - -def example_undirected_arc_static_upstream( - solver, solver_options, use_new_arcs, static_losses_mode, init_aux_sets -): - q = 0 - - # time - - number_intervals = 2 - - number_periods = 2 - - # 4 nodes: two import nodes, two supply/demand nodes - - mynet = Network() - - # import nodes - - # imp1_prices = ResourcePrice( - # prices=[1, 2], - # volumes=None) - - imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp1_node_key, - prices={ - (q, p, k): ResourcePrice(prices=k + 1, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # imp2_prices = ResourcePrice( - # prices=[2, 1], - # volumes=None) - - imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp2_node_key, - prices={ - (q, p, k): ResourcePrice(prices=2 - k, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, base_flow={(0, 0): 0.0, (0, 1): 1.1} # 1.0 - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, base_flow={(0, 0): 1.1, (0, 1): 0.0} # 1.0 - ) - - # add arcs - - # I1A - - mynet.add_preexisting_directed_arc( - node_key_a=imp1_node_key, - node_key_b=node_A, - efficiency=None, - static_loss=None, - capacity=1.2, - capacity_is_instantaneous=False, - ) - - # I2B - - mynet.add_preexisting_directed_arc( - node_key_a=imp2_node_key, - node_key_b=node_B, - efficiency=None, - static_loss=None, - capacity=1.2, - capacity_is_instantaneous=False, - ) - - efficiency_AB = {(0, 0): 1, (0, 1): 1} - - efficiency_BA = {(0, 
0): 1, (0, 1): 1} - - if use_new_arcs: - # AB - - static_loss_AB = { - (0, q, 0): 0.1, - (0, q, 1): 0.1, - (1, q, 0): 0.1, - (1, q, 1): 0.1, - } - - arcs_ab = Arcs( - name="AB", - efficiency=efficiency_AB, - efficiency_reverse=efficiency_BA, - static_loss=static_loss_AB, - capacity=( - 0.5, - 1, - ), - minimum_cost=( - 0.025, - 0.05, - ), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab - ) - - else: - # AB - - static_loss_AB = {(0, q, 0): 0.1, (0, q, 1): 0.1} - - arc_key_AB_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency=efficiency_BA, - efficiency_reverse=efficiency_BA, - static_loss=static_loss_AB, - capacity=1, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # the flow through AB should be from A to B during interval 0 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value( - ipp.instance.var_zeta_sns_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, q, 0) - ] - ), - 1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_zeta_sns_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # the flow through AB should be from B to A during interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_zeta_sns_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, q, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_zeta_sns_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, q, 1) - ] - ), - 1, - abs_tol=abs_tol, - ) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose(flow_in[("mynet", 0, 0)], (1.2 + 1.2), abs_tol=abs_tol) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # flow through I1A must be 1.0 during time interval 0 - # flow through I1A must be 0.2 during time interval 1 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 
0, 0)]), - 1.0, - abs_tol=abs_tol, - ) - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), - 0.2, - abs_tol=abs_tol, - ) - - # flow through I2B must be 0.2 during time interval 0 - # flow through I2B must be 1.0 during time interval 1 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), - 0.2, - abs_tol=abs_tol, - ) - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), - 1.0, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0 during time interval 0 - # flow from A to B must be 0 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] - ), - 0.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] - ), - 0.0, - abs_tol=abs_tol, - ) - - # validation - - if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: - # arrival node - - # flow from A to B must be 1.0 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0.9 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: - # departure node - - # flow from A to B must be 0.9 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - # flow from B to A must be 1.0 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: - # upstream - - # flow from A to B must be 0.9 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - # flow from B to A must be 0.9 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - else: - # downstream - - # flow from A to B must be 1.0 during time interval 0 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - # flow from B to A must be 1.0 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - -# ****************************************************************************** -# ****************************************************************************** - -# test the capacity reduction when losses are upstream w/ undirected arcs - - -def example_undirected_arc_static_downstream( - solver, solver_options, use_new_arcs, static_losses_mode, init_aux_sets -): - q = 0 - - # time - - number_intervals = 4 - - number_periods = 2 - - # 4 nodes: two import 
nodes, two supply/demand nodes - - mynet = Network() - - # import nodes - - imp1_prices = [ResourcePrice(prices=k, volumes=None) for k in [1, 2, 1, 1]] - - imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp1_node_key, - prices={ - (q, p, k): imp1_prices[k] - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - imp2_prices = [ResourcePrice(prices=k, volumes=None) for k in [2, 1, 2, 2]] - - imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp2_node_key, - prices={ - (q, p, k): imp2_prices[k] - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # other nodes - - node_A = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_A, - base_flow={ - (0, 0): 1.0, # to be provided via I1 but AB losses have to be comp. - (0, 1): 0.0, - (0, 2): 0.0, - (0, 3): 0.0, - }, - ) - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node( - node_key=node_B, - base_flow={ - (0, 0): 0.0, - (0, 1): 1.0, # to be provided via I2 but AB losses have to be comp. - (0, 2): 2.0, # forces the undirected arc to be used and installed - (0, 3): 0.9, # forces the undirected arc to be used and installed - }, - ) - - # add arcs - - # I1A - - mynet.add_preexisting_directed_arc( - node_key_a=imp1_node_key, - node_key_b=node_A, - efficiency=None, - static_loss=None, - capacity=1.1, - capacity_is_instantaneous=False, - ) - - # I2B - - mynet.add_preexisting_directed_arc( - node_key_a=imp2_node_key, - node_key_b=node_B, - efficiency=None, - static_loss=None, - capacity=1.1, - capacity_is_instantaneous=False, - ) - - efficiency_AB = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} - - efficiency_BA = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} - - if use_new_arcs: - # AB - - static_loss_AB = { - (0, q, 0): 0.1, - (0, q, 1): 0.1, - (0, q, 2): 0.1, - (0, q, 3): 0.1, - } - - arcs_ab = Arcs( - name="AB", - efficiency=efficiency_AB, - efficiency_reverse=efficiency_BA, - static_loss=static_loss_AB, - capacity=(1,), - minimum_cost=(0.05,), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_AB_und = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab - ) - - else: - # AB - - static_loss_AB = { - (0, q, 0): 0.1, - (0, q, 1): 0.1, - (0, q, 2): 0.1, - (0, q, 3): 0.1, - } - - arc_key_AB_und = mynet.add_preexisting_undirected_arc( - node_key_a=node_A, - node_key_b=node_B, - efficiency=efficiency_BA, - efficiency_reverse=efficiency_BA, - static_loss=static_loss_AB, - capacity=1, - capacity_is_instantaneous=False, - ) - - # identify node types - - mynet.identify_node_types() - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # if print_model: - - # all arcs should be installed (they 
are not new) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] - .options_selected - ) - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = compute_cost_volume_metrics(ipp.instance, True) - - # there should be imports - - abs_tol = 1e-6 - - assert math.isclose( - flow_in[("mynet", 0, 0)], (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol - ) - - # there should be no exports - - abs_tol = 1e-6 - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # flow through I1A must be 1.1 during time interval 0 - # flow through I1A must be 0.0 during time interval 1 - # flow through I1A must be 1.0 during time interval 2 (flow from B to A) - # flow through I1A must be 1.0 during time interval 3 (because AB is used from B to A) - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), - 1.1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), - 0.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 2)]), - 1.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 3)]), - 1.0, - abs_tol=abs_tol, - ) - - # flow through I2B must be 0.0 during time interval 0 - # flow through I2B must be 1.1 during time interval 1 - # flow through I2B must be 1.1 during time interval 2 - # flow through I2B must be 0.0 during time interval 3 - - abs_tol = 1e-6 - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), - 0.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), - 1.1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 2)]), - 1.1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 3)]), - 0, - abs_tol=abs_tol, - ) - - # validation - - if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: - # arrival node - - # losses are always in B - - # flow from A to B must be 0.1 during time interval 0 - # flow from B to A must be 0 during time interval 0 - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0.1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0 during time interval 1 - # flow from B to A must be 0 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 1.0 during time interval 2 - # flow from B to A must 
be 0 during time interval 2 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 1.0 during time interval 3 - # flow from B to A must be 0 during time interval 3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) - ] - ), - 0, - abs_tol=abs_tol, - ) - - elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: - # departure node - - # losses are always in A - - # flow from A to B must be 0 during time interval 0 - # flow from B to A must be 0 during time interval 0 - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0 during time interval 1 - # flow from B to A must be 0.1 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0.1, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0.9 during time interval 2 - # flow from B to A must be 0 during time interval 2 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0.9 during time interval 3 - # flow from B to A must be 0 during time interval 3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) - ] - ), - 0, - abs_tol=abs_tol, - ) - - elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: - # upstream - - # flow from A to B must be 0 during time interval 0 - # flow from B to A must be 0 during time interval 0 - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0 during time interval 1 - # flow from B to A must be 0 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow 
from A to B must be 0.9 during time interval 2 - # flow from B to A must be 0 during time interval 2 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0.9 during time interval 3 - # flow from B to A must be 0 during time interval 3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) - ] - ), - 0.9, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) - ] - ), - 0, - abs_tol=abs_tol, - ) - - else: - # downstream - - # flow from A to B must be 0 during time interval 0 - # flow from B to A must be 0 during time interval 0 - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 0 during time interval 1 - # flow from B to A must be 0.1 during time interval 1 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) - ] - ), - 0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) - ] - ), - 0.1, - abs_tol=abs_tol, - ) - - # flow from A to B must be 1.0 during time interval 2 - # flow from B to A must be 0 during time interval 2 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) - ] - ), - 1, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) - ] - ), - 0, - abs_tol=abs_tol, - ) - - # flow from A to B must be 1.0 during time interval 3 - # flow from B to A must be 0 during time interval 3 - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) - ] - ), - 1.0, - abs_tol=abs_tol, - ) - - assert math.isclose( - pyo.value( - ipp.instance.var_v_glljqk[ - ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) - ] - ), - 0, - abs_tol=abs_tol, - ) - -# ***************************************************************************** -# ***************************************************************************** - -def example_arc_groups_individual_undirected( - solver, solver_options, use_arc_groups, static_losses_mode, init_aux_sets -): - # time - - number_intervals = 2 - - number_periods = 2 - - # 4 nodes: one import node, four regular nodes - - mynet = Network() - - q = 0 - - # ************************************************************************** - - # import nodes - - # imp_prices = ResourcePrice( - # prices=[1, - # 2], - # volumes=None) - - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_import_node( - node_key=imp_node_key, - prices={ - (q, p, k): ResourcePrice(prices=1 + k, volumes=None) - for p in range(number_periods) - for k in range(number_intervals) - }, - ) - - # ************************************************************************** - - # A - - node_A = 
generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 1.0}) - - # B - - node_B = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 1.0, (q, 1): -0.5}) - - # C - - node_C = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_C, base_flow={(q, 0): 0.0, (q, 1): 0.5}) - - # D - - node_D = generate_pseudo_unique_key(mynet.nodes()) - - mynet.add_source_sink_node(node_key=node_D, base_flow={(q, 0): 0.5, (q, 1): -0.25}) - - # ************************************************************************** - - # add arcs - - # IA - - mynet.add_preexisting_directed_arc( - node_key_a=imp_node_key, - node_key_b=node_A, - efficiency=None, - static_loss=None, - capacity=1.5, - capacity_is_instantaneous=False, - ) - - # IC - - mynet.add_preexisting_directed_arc( - node_key_a=imp_node_key, - node_key_b=node_C, - efficiency=None, - static_loss=None, - capacity=1.5, - capacity_is_instantaneous=False, - ) - - # AB - - efficiency_AB = { - (q, 0): 1.00, - (q, 1): 0.85, - } - - efficiency_BA = { - (q, 0): 0.95, - (q, 1): 0.80, - } - - static_loss_AB = { - (0, q, 0): 0.20, - (0, q, 1): 0.25, - (1, q, 0): 0.25, - (1, q, 1): 0.30, - (2, q, 0): 0.30, - (2, q, 1): 0.35, - } - - arcs_AB = Arcs( - name="AB", - efficiency=efficiency_AB, - efficiency_reverse=efficiency_BA, - static_loss=static_loss_AB, - capacity=(0.85, 1.5, 2.5), - minimum_cost=(1, 2, 3), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_AB = mynet.add_undirected_arc( - node_key_a=node_A, node_key_b=node_B, arcs=arcs_AB - ) - - # CD - - efficiency_CD = { - (q, 0): 1.00, - (q, 1): 0.85, - } - - efficiency_DC = {(q, 0): 0.95, (q, 1): 0.80} - - static_loss_CD = { - (0, q, 0): 0.010, - (0, q, 1): 0.015, - (1, q, 0): 0.015, - (1, q, 1): 0.020, - (2, q, 0): 0.020, - (2, q, 1): 0.025, - } - - arcs_CD = Arcs( - name="CD", - efficiency=efficiency_CD, - efficiency_reverse=efficiency_DC, - static_loss=static_loss_CD, - capacity=(0.85, 1.5, 2.5), - minimum_cost=(1, 2, 3), - specific_capacity_cost=0, - capacity_is_instantaneous=False, - validate=True, - ) - - arc_key_CD = mynet.add_undirected_arc( - node_key_a=node_C, node_key_b=node_D, arcs=arcs_CD - ) - - if use_arc_groups: - arc_groups_dict = { - 0: ( - ("mynet", node_A, node_B, arc_key_AB), - ("mynet", node_C, node_D, arc_key_CD), - ) - } - - else: - arc_groups_dict = {} - - # identify node types - - mynet.identify_node_types() - - # solver settings - - solver_options["relative_mip_gap"] = 0 - solver_options["absolute_mip_gap"] = 1e-4 - - # no sos, regular time intervals - - ipp = build_solve_ipp( - solver=solver, - solver_options=solver_options, - use_sos_arcs=False, - arc_sos_weight_key=None, - arc_use_real_variables_if_possible=False, - use_sos_sense=False, - sense_sos_weight_key=None, - sense_use_real_variables_if_possible=False, - sense_use_arc_interfaces=False, - perform_analysis=False, - plot_results=False, # True, - print_solver_output=False, - irregular_time_intervals=False, - networks={"mynet": mynet}, - number_intraperiod_time_intervals=number_intervals, - static_losses_mode=static_losses_mode, - mandatory_arcs=[], - max_number_parallel_arcs={}, - arc_groups_dict=arc_groups_dict, - init_aux_sets=init_aux_sets, - ) - - # ************************************************************************** - - # overview - - ( - flow_in, - flow_in_k, - flow_out, - flow_in_cost, - flow_out_revenue, - ) = 
compute_cost_volume_metrics(ipp.instance, True) - - # print('**********(((((((((((((((())))))))))))))))))))))') - # print('flow in') - # print(flow_in) - # print('flow out') - # print(flow_out) - # print('var_capex') - # print(pyo.value(ipp.instance.var_capex)) - # print('var_sdncf') - # print(pyo.value(ipp.instance.var_sdncf)) - # print('var_sdext') - # print(pyo.value(ipp.instance.var_sdext)) - - capex_ind = 3 - capex_group = 4 - - imp_ind = 2.912 - imp_group = 2.9210000000000003 - - sdncf_ind = -7.72035753459824 - sdncf_group = -7.745053560176434 - - sdnext_ind = 0 - sdnext_group = 0 - - obj_ind = sdnext_ind + sdncf_ind - capex_ind - obj_group = sdnext_group + sdncf_group - capex_group - - losses_ind = sum( - static_loss_AB[(1, q, k)] + static_loss_CD[(0, q, k)] - for k in range(number_intervals) - ) - losses_group = sum( - static_loss_AB[(1, q, k)] + static_loss_CD[(1, q, k)] - for k in range(number_intervals) - ) - - losses_model = sum( - pyo.value( - ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)] - ) - + pyo.value( - ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)] - ) - for k in range(number_intervals) - ) - - assert capex_group > capex_ind - # # assert math.isclose(losses_group, losses_ind, abs_tol=1e-3) - assert losses_group > losses_ind - assert imp_group > imp_ind - - if use_arc_groups: - # all arcs have to be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH] - .options_selected - ) - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the same option has to be selected in all arcs - - h1 = ( - ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH] - .options_selected.index(True) - ) - - h2 = ( - ipp.networks["mynet"] - .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH] - .options_selected.index(True) - ) - - assert h1 == h2 - - # the capex have to be higher than those of the best individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol - ) - - # there should be no exports - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # the imports should be higher than with individual arcs - - abs_tol = 1e-3 - - assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol) - - assert imp_group > imp_ind - - # the operating results should be lower than with an individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol - ) - - # the externalities should be zero - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdext_q[q]), sdnext_group, abs_tol=abs_tol - ) - - # the objective function should be -6.3639758220728595-1.5 - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol) - - # the imports should be greater than or equal to the losses for all arx - - losses_model = sum( - pyo.value( - ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)] - ) - + pyo.value( - ipp.instance.var_w_glljqk[("mynet", node_C, node_D, arc_key_CD, q, k)] - ) - for k in range(number_intervals) - ) - - losses_data = sum( - static_loss_AB[(h1, q, k)] + static_loss_CD[(h2, q, k)] - for k in range(number_intervals) - ) - - assert math.isclose(losses_model, losses_data, abs_tol=abs_tol) - - assert math.isclose(losses_data, losses_group, 
abs_tol=abs_tol) - - else: - # at least one arc has to be installed - - assert ( - True - in ipp.networks["mynet"] - .edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH] - .options_selected - or True - in ipp.networks["mynet"] - .edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the capex have to be lower than with a group of arcs - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_capex), capex_ind, abs_tol=abs_tol - ) - - # there should be no exports - - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) - - # the imports should be lower than with a group of arcs - - abs_tol = 1e-3 - - assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol) - - # the operating results should be lower than with an individual arc - - abs_tol = 1e-3 - - assert math.isclose( - pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol - ) - - # the externalities should be zero - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol) - - # the objective function should be -6.3639758220728595-1.5 - - abs_tol = 1e-3 - - assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol) - - # the imports should be greater than or equal to the losses for all arx - - assert math.isclose(losses_model, losses_ind, abs_tol=abs_tol) - - -# ****************************************************************************** -# ****************************************************************************** diff --git a/tests/test_all.py b/tests/test_all.py index 8fce4ffa3dee1e8fc8b6775b4c6d06ef986909f9..ca594de50f7d23941698e2c4c485e4c7d450b224 100644 --- a/tests/test_all.py +++ b/tests/test_all.py @@ -5,9 +5,6 @@ import random from topupheat.pipes.single import StandardisedPipeDatabase from topupheat.common.fluids import FluidDatabase # , Fluid -from examples_esipp_network import examples as examples_esipp_network -from examples_esipp_problem import examples as examples_esipp_problem -from examples_esipp import examples as examples_esipp from examples_signal import examples as examples_signal # ****************************************************************************** @@ -17,17 +14,6 @@ from examples_signal import examples as examples_signal def test_suite(): - test_examples_dynsys = True - # test_examples_dynsys = False - - test_examples_esipp_network = True - # test_examples_esipp_network = False - - test_examples_esipp_problem = True - # test_examples_esipp_problem = False - - test_examples_esipp = True - # test_examples_esipp = False test_examples_signal = True # test_examples_signal = False @@ -123,45 +109,7 @@ def test_suite(): # source=airdata_file) # load osm/osmnx data - - # ************************************************************************** - # ************************************************************************** - - # esipp-network - - if test_examples_esipp_network: - print("'esipp-network': testing about to start...") - - examples_esipp_network() - - print("'esipp-network': testing complete.") - - # ************************************************************************** - - # esipp-problem - - if test_examples_esipp_problem: - print("'esipp-problem': testing about to start...") - - examples_esipp_problem(solver, solver_options, init_aux_sets=False) - - examples_esipp_problem(solver, solver_options, init_aux_sets=True) - - print("'esipp-problem': testing complete.") - - # 
************************************************************************** - - # esipp - - if test_examples_esipp: - print("'esipp': testing about to start...") - - examples_esipp(solver, solver_options, seed_number, init_aux_sets=False) - - examples_esipp(solver, solver_options, seed_number, init_aux_sets=True) - - print("'esipp-problem': testing complete.") - + # ************************************************************************** # signal diff --git a/tests/test_esipp_converter.py b/tests/test_esipp_converter.py index 15e64c27bdbfce3f5d3761519da8c367c48e1e95..0778807b3adb6da8dc7ff82623e04110a137dc6d 100644 --- a/tests/test_esipp_converter.py +++ b/tests/test_esipp_converter.py @@ -273,6 +273,7 @@ def method_full_converter(time_step_durations: list): # create a converter cvn1 = cvn.Converter( sys=ds, + time_frame=None, initial_states=x0, turn_key_cost=3, inputs=inputs, @@ -281,9 +282,7 @@ def method_full_converter(time_step_durations: list): ) # get the dictionaries - (a_innk, b_inmk, c_irnk, d_irmk, e_x_ink, e_y_irk) = cvn1.matrix_dictionaries( - "cvn1" - ) + (a_innk, b_inmk, c_irnk, d_irmk, e_x_ink, e_y_irk) = cvn1.matrix_dictionaries() # TODO: check the dicts diff --git a/tests/test_esipp_dynsys.py b/tests/test_esipp_dynsys.py index 70bbb412a8dea1260fab22ee68e39485d848861b..25d74f1943f21abee51f961f6e0fab50c50818a2 100644 --- a/tests/test_esipp_dynsys.py +++ b/tests/test_esipp_dynsys.py @@ -15,157 +15,6 @@ import src.topupopt.problems.esipp.dynsys as dynsys class TestDynsys: - # ************************************************************************* - # ************************************************************************* - - # seed_number = random.randint(1,int(1e5)) - - # print_outputs = True - - # # with states and outputs - - # # test multi-ODE, multi-output dynamic systems while integrating outputs - - # examples_dynsys_multiode_multiout(True, print_outputs, seed_number) - - # # test multi-ODE, multi-output dynamic systems without integrating outputs - - # examples_dynsys_multiode_multiout(False, print_outputs, seed_number) - - # # test single ODE, multi-output dynamic systems while integrating outputs - - # examples_dynsys_singleode_multiout(True, print_outputs, seed_number) - - # # test single ODE, multi-output dynamic systems without integrating outputs - - # examples_dynsys_singleode_multiout(False, print_outputs, seed_number) - - # # test multi-ODE, single-output dynamic systems while integrating outputs - - # examples_dynsys_multiode_multiout(True, print_outputs, seed_number, 1) - - # # test multi-ODE, single-output dynamic systems without integrating outputs - - # examples_dynsys_multiode_multiout(False, print_outputs, seed_number, 1) - - # # test single-ODE, single-output dynamic systems while integrating outputs - - # examples_dynsys_singleode_multiout(True, print_outputs, seed_number, 1) - - # # test single-ODE, single-output dynamic systems without integrating outputs - - # examples_dynsys_singleode_multiout(False, print_outputs, seed_number, 1) - - # # ************************************************************************* - - # # outputless - - # # test single-ODE, outputless dynamic systems while integrating outputs - - # examples_dynsys_singleode_multiout(True, print_outputs, seed_number, 0) - - # # test multi-ODE, outputless dynamic systems while integrating outputs - - # examples_dynsys_multiode_multiout(True, print_outputs, seed_number, 0) - - # # test single-ODE, outputless dynamic systems without integrating outputs - - # 
examples_dynsys_singleode_multiout(False, print_outputs, seed_number, 0) - - # # test multi-ODE, outputless dynamic systems without integrating outputs - - # examples_dynsys_multiode_multiout(False, print_outputs, seed_number, 0) - - # # outputless system via dynsys subclass - - # example_outputless_system_object() - - # # ************************************************************************* - - # # stateless - - # # test stateless, single-output dynamic systems while integrating outputs - - # examples_dynsys_stateless_multiout(True, print_outputs, seed_number, 1) - - # # test stateless, multi-output dynamic systems without integrating outputs - - # examples_dynsys_stateless_multiout(False, print_outputs, seed_number, 2) - - # # stateless system via dynsys subclass - - # example_stateless_system_object(True) - # example_stateless_system_object(False) - - # # ************************************************************************* - # # ************************************************************************* - - # # trigger errors - - # # test stateless, outputless dynamic systems while integrating outputs - - # number_errors = 0 - - # try: - # examples_dynsys_stateless_multiout(True, False, seed_number, 0) - # except Exception: - # number_errors += 1 - - # assert number_errors == 1 - - # # test stateless, outputless dynamic systems without integrating outputs - - # number_errors = 0 - - # try: - # examples_dynsys_stateless_multiout(False, False, seed_number, 0) - # except Exception: - # number_errors += 1 - - # assert number_errors == 1 - - # # test negative time duration - - # example_incorrect_time_step_durations() - - # # test unrecognised matrix formats - - # example_unrecognised_matrix_formats() - - # # different matrix sizes for the same problem, all other things being equal - - # example_varying_matrix_sizes(True) - # example_varying_matrix_sizes(False) - - # # test multiple A matrices and multiple non-matching time intervals - - # example_nonmatching_time_steps_and_matrices() - - # # test non-square A matrices - - # example_nonsquare_A_matrices() - - # # test incompatible A and B matrices (different number of rows) - - # example_incompatible_AB_matrices() - - # # test incompatible C and D matrices (different number of rows) - - # example_incompatible_CD_matrices() - - # # test incompatible A and C matrices (different number of columns) - - # example_incompatible_AC_matrices() - - # # test incompatible B and D matrices (different number of columns) - - # example_incompatible_BD_matrices() - - # # trigger incorrect input signal format error when simulating - - # example_single_time_step_model_incorrect_inputs() - - # # TODO: test only some matrices as being time invariant # ************************************************************************* # ************************************************************************* @@ -1642,11 +1491,8 @@ def method_dynsys_singleode_multiout(integrate_outputs: bool, number_outputs: in # generate time invariant problem # data - Ci, Ria, Aw, min_rel_heat, x0 = get_single_ode_model_data() - # matrices - (A_matrix, B_matrix, C_matrix, D_matrix) = single_node_model( Ci, Ria, Aw, min_rel_heat ) diff --git a/tests/test_esipp_network.py b/tests/test_esipp_network.py index b30b1f95afeb8832358df7dfe8bdf6bb06a72d25..0136b0fdf00847ed5623ad979ae55c0b7dcb3b62 100644 --- a/tests/test_esipp_network.py +++ b/tests/test_esipp_network.py @@ -26,39 +26,6 @@ from src.topupopt.data.misc.utils import generate_pseudo_unique_key # TODO: add test for directed arcs 
between import and export nodes with static losses
 class TestNetwork:
-    # *************************************************************************
-    # *************************************************************************
-
-    def test_tree_topology(self):
-        # create a network object with a tree topology
-
-        tree_network = binomial_tree(3, create_using=MultiDiGraph)
-
-        network = Network(tree_network)
-
-        for edge_key in network.edges(keys=True):
-            arc = ArcsWithoutLosses(
-                name=str(edge_key),
-                capacity=[5, 10],
-                minimum_cost=[3, 6],
-                specific_capacity_cost=0,
-                capacity_is_instantaneous=False,
-            )
-
-            network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc})
-
-        # assert that it does not have a tree topology
-
-        assert not network.has_tree_topology()
-
-        # select all the nodes
-
-        for edge_key in network.edges(keys=True):
-            network.edges[edge_key][Network.KEY_ARC_TECH].options_selected[0] = True
-
-        # assert that it has a tree topology
-
-        assert network.has_tree_topology()

     # *************************************************************************
     # *************************************************************************
@@ -1931,7 +1898,6 @@ class TestNetwork:
     # *************************************************************************

     def test_network_disallowed_cases(self):
-
         # *********************************************************************

         net = Network()
@@ -2035,7 +2001,6 @@ class TestNetwork:
             error_triggered = True
         assert error_triggered

-        # *********************************************************************
         # *********************************************************************

         # trigger errors using non-identified nodes
@@ -2195,6 +2160,40 @@ class TestNetwork:
             except ValueError:
                 error_raised = True
             assert error_raised
+
+    # *************************************************************************
+    # *************************************************************************
+
+    def test_tree_topology(self):
+        # create a network object with a tree topology
+
+        tree_network = binomial_tree(3, create_using=MultiDiGraph)
+
+        network = Network(tree_network)
+
+        for edge_key in network.edges(keys=True):
+            arc = ArcsWithoutLosses(
+                name=str(edge_key),
+                capacity=[5, 10],
+                minimum_cost=[3, 6],
+                specific_capacity_cost=0,
+                capacity_is_instantaneous=False,
+            )
+
+            network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc})
+
+        # assert that it does not have a tree topology
+
+        assert not network.has_tree_topology()
+
+        # select all the nodes
+
+        for edge_key in network.edges(keys=True):
+            network.edges[edge_key][Network.KEY_ARC_TECH].options_selected[0] = True
+
+        # assert that it has a tree topology
+
+        assert network.has_tree_topology()

     # *************************************************************************
     # *************************************************************************
@@ -2250,6 +2249,82 @@ class TestNetwork:
        except Exception:
            error_triggered = True
        assert error_triggered
+
+    # *************************************************************************
+    # *************************************************************************
+
+    def test_imp_exp_static_losses(self):
+
+        # assessment
+        q = 0
+        # 2 nodes: one import node, one export node
+        mynet = Network()
+
+        # import node
+        imp_node_key = generate_pseudo_unique_key(mynet.nodes())
+        imp_prices = {
+            qpk: ResourcePrice(
+                prices=0.5,
+                volumes=None,
+            )
+            for qpk in [(0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1)]
+        }
+        mynet.add_import_node(
+            node_key=imp_node_key,
+            prices=imp_prices
+        )
+
+        # 
export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + exp_prices = { + qpk: ResourcePrice( + prices=1.5, + volumes=None, + ) + for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)] + } + mynet.add_export_node( + node_key=exp_node_key, + prices=exp_prices, + ) + + # add arc with fixed losses from import node to export + + arc_tech_IE_fix = Arcs( + name="IE_fix", + # efficiency=[1, 1, 1, 1], + efficiency={(q, 0): 1, (q, 1): 1}, + efficiency_reverse=None, + validate=False, + capacity=[0.5, 1.0, 2.0], + minimum_cost=[5, 5.1, 5.2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + # static_losses=[ + # [0.10, 0.15, 0.20, 0.25], + # [0.15, 0.20, 0.25, 0.30], + # [0.20, 0.25, 0.30, 0.35]] + static_loss={ + (0, q, 0): 0.10, + (0, q, 1): 0.15, + (1, q, 0): 0.15, + (1, q, 1): 0.20, + (2, q, 0): 0.20, + (2, q, 1): 0.25, + }, + ) + + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix + ) + + error_raised = False + try: + # identify node types + mynet.identify_node_types() + except ValueError: + error_raised = True + assert error_raised # ***************************************************************************** # ***************************************************************************** diff --git a/tests/test_esipp_problem.py b/tests/test_esipp_problem.py index d695e5b00505b7f5681f213acb244f8f19a6de7e..af3ca638af0a4d642bcc6e40e15df6f05aedd649 100644 --- a/tests/test_esipp_problem.py +++ b/tests/test_esipp_problem.py @@ -2,7 +2,6 @@ # standard import math -from statistics import mean # local # import numpy as np @@ -15,10 +14,9 @@ from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem from src.topupopt.problems.esipp.network import Arcs, Network from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses from src.topupopt.problems.esipp.resource import ResourcePrice -from src.topupopt.problems.esipp.problem import simplify_peak_total_problem -from src.topupopt.problems.esipp.problem import is_peak_total_problem from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics from src.topupopt.problems.esipp.time import EconomicTimeFrame +from src.topupopt.problems.esipp.converter import Converter # ***************************************************************************** # ***************************************************************************** @@ -211,7 +209,7 @@ class TestESIPPProblem: # ********************************************************************* if simplify_problem: - ipp = simplify_peak_total_problem(ipp) + ipp.simplify_peak_total_assessments() # ********************************************************************* @@ -222,7 +220,6 @@ class TestESIPPProblem: ipp.instantiate(initialise_ancillary_sets=init_aux_sets) # optimise - ipp.optimise( solver_name=solver, solver_options=solver_options, @@ -244,7 +241,6 @@ class TestESIPPProblem: # assessment q = 0 - tf = EconomicTimeFrame( discount_rate=3.5/100, reporting_periods={q: (0, 1)}, @@ -261,26 +257,19 @@ class TestESIPPProblem: mynet.add_import_node( node_key=node_IMP, prices={ - # (q, p, k): ResourcePrice(prices=1.0, volumes=None) - # for p in range(number_periods) - # for k in range(number_intervals) qpk: ResourcePrice(prices=1.0, volumes=None) for qpk in tf.qpk() }, ) # other nodes - node_A = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_source_sink_node( node_key=node_A, - # base_flow=[0.5, 0.0, 1.0], base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, ) # arc IA - arc_tech_IA = Arcs( name="any", 
efficiency={qk: 0.5 for qk in tf.qk()}, @@ -292,49 +281,35 @@ class TestESIPPProblem: capacity_is_instantaneous=False, validate=False, ) - mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) # identify node types - mynet.identify_node_types() # no sos, regular time intervals - ipp = self.build_solve_ipp( - # solver=solver, solver_options={}, - # use_sos_arcs=use_sos_arcs, - # arc_sos_weight_key=sos_weight_key, - # arc_use_real_variables_if_possible=use_real_variables_if_possible, - # use_sos_sense=use_sos_sense, - # sense_sos_weight_key=sense_sos_weight_key, - # sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - # sense_use_arc_interfaces=use_arc_interfaces, perform_analysis=False, plot_results=False, # True, print_solver_output=False, - # irregular_time_intervals=irregular_time_intervals, time_frame=tf, networks={"mynet": mynet}, static_losses_mode=True, # just to reach a line, mandatory_arcs=[], max_number_parallel_arcs={}, - # discount_rates={0: tuple([0.035, 0.035])}, - # init_aux_sets=init_aux_sets, simplify_problem=False, ) - assert is_peak_total_problem(ipp) - assert ipp.results["Problem"][0]["Number of constraints"] == 24 - assert ipp.results["Problem"][0]["Number of variables"] == 22 - assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 - # ********************************************************************* # ********************************************************************* - + # validation + assert ipp.has_peak_total_assessments() + assert ipp.results["Problem"][0]["Number of constraints"] == 24 + assert ipp.results["Problem"][0]["Number of variables"] == 22 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 + # the arc should be installed since it is required for feasibility assert ( True @@ -469,7 +444,7 @@ class TestESIPPProblem: simplify_problem=False, ) - assert is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 42 assert ipp.results["Problem"][0]["Number of variables"] == 40 assert ipp.results["Problem"][0]["Number of nonzeros"] == 95 @@ -478,7 +453,7 @@ class TestESIPPProblem: # ********************************************************************* # validation - + # TODO: make a dict with the results and a for loop to reduce extent # the arc should be installed since it is required for feasibility assert ( True @@ -549,7 +524,6 @@ class TestESIPPProblem: # assessment q = 0 - tf = EconomicTimeFrame( discount_rate=3.5/100, reporting_periods={q: (0, 1)}, @@ -562,24 +536,17 @@ class TestESIPPProblem: mynet = Network() # import node - # node_IMP = generate_pseudo_unique_key(mynet.nodes()) node_IMP = "thatimpnode" mynet.add_import_node( node_key=node_IMP, prices={ - # (q, p, k): ResourcePrice(prices=1.0, volumes=None) - # for p in range(number_periods) - # for k in range(number_intervals) qpk: ResourcePrice(prices=1.0, volumes=None) for qpk in tf.qpk() }, ) # other nodes - - # node_A = generate_pseudo_unique_key(mynet.nodes()) node_A = "thatnodea" - mynet.add_source_sink_node( node_key=node_A, # base_flow=[0.5, 0.0, 1.0], @@ -587,7 +554,6 @@ class TestESIPPProblem: ) # arc IA - arc_tech_IA = Arcs( name="any", efficiency={qk: 0.5 for qk in tf.qk()}, @@ -599,25 +565,15 @@ class TestESIPPProblem: capacity_is_instantaneous=False, validate=False, ) - mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) # identify node types - mynet.identify_node_types() # no sos, regular time intervals - ipp = 
self.build_solve_ipp( # solver=solver, solver_options={}, - # use_sos_arcs=use_sos_arcs, - # arc_sos_weight_key=sos_weight_key, - # arc_use_real_variables_if_possible=use_real_variables_if_possible, - # use_sos_sense=use_sos_sense, - # sense_sos_weight_key=sense_sos_weight_key, - # sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - # sense_use_arc_interfaces=use_arc_interfaces, perform_analysis=False, plot_results=False, # True, print_solver_output=False, @@ -626,16 +582,14 @@ class TestESIPPProblem: static_losses_mode=True, # just to reach a line, mandatory_arcs=[], max_number_parallel_arcs={}, - # discount_rates={0: tuple([0.035, 0.035])}, - # init_aux_sets=init_aux_sets, simplify_problem=True, ) - assert is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 16 # 20 assert ipp.results["Problem"][0]["Number of variables"] == 15 # 19 assert ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 36 - + # ********************************************************************* # ********************************************************************* @@ -655,64 +609,6 @@ class TestESIPPProblem: # the objective function should be -9.7 assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) - # TODO: create method to automate getting data from the command line - import io - import sys - from contextlib import redirect_stdout - - # print('wow wow wow') - # ipp.instance.constr_imp_flow_cost.pprint() - expected_string = """constr_imp_flow_cost : Size=2, Index=constr_imp_flow_cost_index, Active=True\n Key : Lower : Body : Upper : Active\n ('mynet', 'thatimpnode', 'total', 0, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 1, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,1,0] : 0.0 : True\n""" - - cmd_output = io.StringIO() - sys.stdout = cmd_output - ipp.instance.constr_imp_flow_cost.pprint() - sys.stdout = sys.__stdout__ - assert cmd_output.getvalue() == expected_string - - expected_string = """constr_exp_flow_revenue : Size=0, Index=constr_exp_flow_revenue_index, Active=True\n Key : Lower : Body : Upper : Active\n""" - f = io.StringIO() - with redirect_stdout(f): - ipp.instance.constr_exp_flow_revenue.pprint() - assert f.getvalue() == expected_string - - # try the whole model - # print('wow wow wow') - # ipp.instance.pprint() - # expected_string = """constr_imp_flow_cost : Size=4, Index=constr_imp_flow_cost_index, Active=True\n Key : Lower : Body : Upper : Active\n ('mynet', 'thatimpnode', 'peak', 0, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'peak', 1, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,1,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 0, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,0,0] : 0.0 : True\n ('mynet', 'thatimpnode', 'total', 1, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,1,0] : 0.0 : True\n""" - # cmd_output = io.StringIO() - # sys.stdout = cmd_output - # ipp.instance.pprint() - # sys.stdout = sys.__stdout__ - # assert cmd_output.getvalue() == expected_string - - # from contextlib import redirect_stdout - # import io - - # 
ipp.instance.constr_imp_flow_cost.pprint() # only one constraint - # f = io.StringIO() - # with redirect_stdout(f): - # # ipp.instance.pprint() # full model - # ipp.instance.constr_imp_flow_cost.pprint() # only one constraint - - # expected_string = r"""constr_imp_flow_cost : Size=4, Index=constr_imp_flow_cost_index, Active=True - # Key : Lower : Body : Upper : Active - # ('mynet', 'thatimpnode', 'peak', 0, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,0,0] : 0.0 : True - # ('mynet', 'thatimpnode', 'peak', 1, 0) : 0.0 : 0*var_if_glqpks[mynet,thatimpnode,peak,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,peak,1,0] : 0.0 : True - # ('mynet', 'thatimpnode', 'total', 0, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,0,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,0,0] : 0.0 : True - # ('mynet', 'thatimpnode', 'total', 1, 0) : 0.0 : var_if_glqpks[mynet,thatimpnode,total,1,0,0] - var_ifc_glqpk[mynet,thatimpnode,total,1,0] : 0.0 : True - # """ - # assert expected_string == f.getvalue() - - # from contextlib import redirect_stdout - # import io - # f = io.StringIO() - # with redirect_stdout(f): - # print('foobar') - # print(12) - # 12+3 - # print('Got stdout: "{0}"'.format(f.getvalue())) - # ************************************************************************* # ************************************************************************* @@ -791,7 +687,7 @@ class TestESIPPProblem: # discount_rates={0: (0.0,)}, ) - assert not is_peak_total_problem(ipp) + assert not ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 10 assert ipp.results["Problem"][0]["Number of variables"] == 11 assert ipp.results["Problem"][0]["Number of nonzeros"] == 20 @@ -911,7 +807,7 @@ class TestESIPPProblem: # discount_rates={0: (0.0,)}, ) - assert not is_peak_total_problem(ipp) + assert not ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 10 assert ipp.results["Problem"][0]["Number of variables"] == 11 assert ipp.results["Problem"][0]["Number of nonzeros"] == 20 @@ -1048,7 +944,7 @@ class TestESIPPProblem: # discount_rates={0: (0.0,)}, ) - assert not is_peak_total_problem(ipp) + assert not ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 23 assert ipp.results["Problem"][0]["Number of variables"] == 26 assert ipp.results["Problem"][0]["Number of nonzeros"] == 57 @@ -1204,7 +1100,7 @@ class TestESIPPProblem: assessment_weights=assessment_weights, ) - assert is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 42 assert ipp.results["Problem"][0]["Number of variables"] == 38 assert ipp.results["Problem"][0]["Number of nonzeros"] == 87 @@ -1274,16 +1170,101 @@ class TestESIPPProblem: assert math.isclose(pyo.value(ipp.instance.obj_f), -11.096, abs_tol=3e-3) + # ************************************************************************* + # ************************************************************************* + + def test_problem_two_scenarios_simpler(self): + + # number_intraperiod_time_intervals = 4 + nominal_discount_rate = 0.035 + assessment_weights = {0: 0.7, 1: 0.3} + tf = EconomicTimeFrame( + discount_rate=nominal_discount_rate, + reporting_periods={0: (0, 1), 1: (0, 1, 2)}, + reporting_period_durations={0: (1, 1), 1: (1, 1, 1)}, # does not matter + time_intervals={0: (0, 1, 2), 1: (0, 1)}, + time_interval_durations={0: (1, 1, 1), 1: (1, 1)}, + ) + + # 2 nodes: one 
import, one regular + mynet = Network() + node_IMP = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=node_IMP, + prices={ + qpk: ResourcePrice(prices=1.0, volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + base_flow={ + (0, 0): 0.50, + (0, 1): 0.00, + (0, 2): 1.00, + (1, 0): 1.25, + (1, 1): 0.30, + }, + ) + + # arc IA + arc_tech_IA = Arcs( + name="any", + # efficiency=[0.5, 0.5, 0.5], + efficiency={(0, 0): 0.5, (0, 1): 0.5, (0, 2): 0.5, (1, 0): 0.5, (1, 1): 0.5}, + efficiency_reverse=None, + static_loss=None, + capacity=[3], + minimum_cost=[2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + validate=False, + ) + mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + # solver=solver, + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + converters={}, + time_frame=tf, + static_losses_mode=True, # just to reach a line, + mandatory_arcs=[], + max_number_parallel_arcs={}, + assessment_weights=assessment_weights, + simplify_problem=True + ) + + assert ipp.has_peak_total_assessments() + assert ipp.results["Problem"][0]["Number of constraints"] == 28 # 42 + assert ipp.results["Problem"][0]["Number of variables"] == 25 # 38 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 51 # 87 + + # ********************************************************************* + # validation + # capex should be 4.5 + assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) + # the objective function should be -11.096 + assert math.isclose(pyo.value(ipp.instance.obj_f), -11.096, abs_tol=3e-3) + # ************************************************************************* # ************************************************************************* def test_problem_two_scenarios_two_discount_rates(self): # two discount rates - assessment_weights = {0: 0.7, 1: 0.3} - tf = EconomicTimeFrame( discount_rates_q={0: (0.035, 0.035), 1: (0.1, 0.1, 0.1)}, reporting_periods={0: (0, 1), 1: (0, 1, 2)}, @@ -1293,11 +1274,8 @@ class TestESIPPProblem: ) # 2 nodes: one import, one regular - mynet = Network() - node_IMP = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_import_node( node_key=node_IMP, prices={ @@ -1307,9 +1285,7 @@ class TestESIPPProblem: ) # other nodes - node_A = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_source_sink_node( node_key=node_A, base_flow={ @@ -1322,7 +1298,6 @@ class TestESIPPProblem: ) # arc IA - arc_tech_IA = Arcs( name="any", # efficiency=[0.5, 0.5, 0.5], @@ -1335,15 +1310,12 @@ class TestESIPPProblem: capacity_is_instantaneous=False, validate=False, ) - mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) # identify node types - mynet.identify_node_types() # no sos, regular time intervals - ipp = self.build_solve_ipp( # solver=solver, solver_options={}, @@ -1357,9 +1329,8 @@ class TestESIPPProblem: mandatory_arcs=[], max_number_parallel_arcs={}, assessment_weights=assessment_weights, - ) - - assert is_peak_total_problem(ipp) + ) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 42 assert ipp.results["Problem"][0]["Number of variables"] == 38 assert ipp.results["Problem"][0]["Number of nonzeros"] 
== 87 @@ -1367,7 +1338,6 @@ class TestESIPPProblem: # ********************************************************************* # validation - # the arc should be installed since it is the only feasible solution assert ( True @@ -1375,7 +1345,6 @@ class TestESIPPProblem: .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] .options_selected ) - # the flows should be 1.0, 0.0 and 2.0 assert math.isclose( pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), @@ -1402,7 +1371,6 @@ class TestESIPPProblem: 0.6, abs_tol=1e-6, ) - # arc amplitude should be two assert math.isclose( pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), @@ -1412,10 +1380,8 @@ class TestESIPPProblem: # capex should be 4.5 assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) - # sdncf_q[0] should be -5.7 assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) - # the objective function should be -10.80213032963115 assert math.isclose(pyo.value(ipp.instance.obj_f), -10.80213032963115, abs_tol=3e-3) @@ -1493,63 +1459,24 @@ class TestESIPPProblem: simplify_problem=True ) - assert is_peak_total_problem(ipp) - assert ipp.results["Problem"][0]["Number of constraints"] == 42 - assert ipp.results["Problem"][0]["Number of variables"] == 38 - assert ipp.results["Problem"][0]["Number of nonzeros"] == 87 + assert ipp.has_peak_total_assessments() + assert ipp.results["Problem"][0]["Number of constraints"] == 28 # 42 + assert ipp.results["Problem"][0]["Number of variables"] == 25 # 38 + assert ipp.results["Problem"][0]["Number of nonzeros"] == 51 # 87 # ********************************************************************* # validation - - # the arc should be installed since it is the only feasible solution - assert ( - True - in ipp.networks["mynet"] - .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] - .options_selected - ) - - # the flows should be 1.0, 0.0 and 2.0 - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 0)]), - 1.0, - abs_tol=1e-6, - ) - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 1)]), - 0.0, - abs_tol=1e-6, - ) - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 0, 2)]), - 2.0, - abs_tol=1e-6, - ) - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 0)]), - 2.5, - abs_tol=1e-6, - ) - assert math.isclose( - pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, 1, 1)]), - 0.6, - abs_tol=1e-6, - ) - # arc amplitude should be two assert math.isclose( pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), 2.5, abs_tol=0.01, ) - # capex should be four assert math.isclose(pyo.value(ipp.instance.var_capex), 4.5, abs_tol=1e-3) - # sdncf_q[0] should be -5.7 - assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) - + # assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[0]), -5.7, abs_tol=1e-3) # the objective function should be -10.80213032963115 (or -10.8027723516153) assert math.isclose(pyo.value(ipp.instance.obj_f), -10.80213032963115, abs_tol=3e-3) @@ -1622,7 +1549,7 @@ class TestESIPPProblem: max_number_parallel_arcs={} ) - assert is_peak_total_problem(ipp) # TODO: make sure this is true + assert ipp.has_peak_total_assessments() # TODO: make sure this is true assert ipp.results["Problem"][0]["Number of constraints"] == 34 assert ipp.results["Problem"][0]["Number of variables"] == 28 assert 
ipp.results["Problem"][0]["Number of nonzeros"] == 105 @@ -1715,7 +1642,7 @@ class TestESIPPProblem: max_number_parallel_arcs={} ) - assert not is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 34 assert ipp.results["Problem"][0]["Number of variables"] == 24 assert ipp.results["Problem"][0]["Number of nonzeros"] == 77 @@ -1748,43 +1675,33 @@ class TestESIPPProblem: # ************************************************************************* # ************************************************************************* + + # preexisting, reference + # capacity is instantaneous + # use dedicated method for preexisting arcs + # capacity is instantaneous, using dedicated method + # use different technologies for the undirected arc + # use different technologies for the undirected arc, capacity is instant. + # use different technologies for the undirected arc, using specific method + # same as before but assuming the capacity is instantaneous + + def test_isolated_preexisting_undirected_network(self): - def test_nonisolated_undirected_network(self): + capacity_is_instantaneous = False - # scenario + # assessment q = 0 tf = EconomicTimeFrame( discount_rate=3.5/100, - reporting_periods={q: (0, 1)}, - reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + reporting_periods={q: (0,)}, + reporting_period_durations={q: (365 * 24 * 3600,)}, time_intervals={q: (0,1,2,3)}, time_interval_durations={q: (1,1,1,1)}, ) # 4 nodes: one import, one export, two supply/demand nodes - mynet = Network() - # import node - imp_node_key = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_import_node( - node_key=imp_node_key, - prices={ - qpk: ResourcePrice(prices=1+i*0.05, volumes=None) - for i, qpk in enumerate(tf.qpk()) - }, - ) - - # export node - exp_node_key = generate_pseudo_unique_key(mynet.nodes()) - mynet.add_export_node( - node_key=exp_node_key, - prices={ - qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) - for i, qpk in enumerate(tf.qpk()) - }, - ) - # other nodes node_A = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node( @@ -1792,49 +1709,207 @@ class TestESIPPProblem: # base_flow=[1, -1, 0.5, -0.5] base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, ) + node_B = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node( node_key=node_B, - # base_flow=[-1, 1, -0.5, 0.5] + # base_flow=[-1, 1, -0.5, 0.5], base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, ) # add arcs - - # import arc - arc_tech_IA = Arcs( - name="any", + # isotropic + mynet.add_preexisting_undirected_arc( + node_key_a=node_A, + node_key_b=node_B, # efficiency=[1, 1, 1, 1], efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, efficiency_reverse=None, static_loss=None, - validate=False, - ) - mynet.add_directed_arc( - node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + capacity=1.0, + capacity_is_instantaneous=capacity_is_instantaneous, ) - # export arc + # identify node types + mynet.identify_node_types() - arc_tech_BE = Arcs( - name="any", - # efficiency=[1, 1, 1, 1], - efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, - capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], - minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], - specific_capacity_cost=1, - capacity_is_instantaneous=False, - efficiency_reverse=None, - 
static_loss=None, - validate=False, - ) - mynet.add_directed_arc( - node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE - ) + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # validation + # there should be no opex (imports or exports) and no capex + assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 + assert pyo.value(ipp.instance.var_capex) == 0 + + # ************************************************************************* + # ************************************************************************* + + def test_isolated_preexisting_undirected_network_diff_tech(self): + + capacity_is_instantaneous = False + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,)}, + reporting_period_durations={q: (365 * 24 * 3600,)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5], + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + # anisotropic + mynet.add_preexisting_undirected_arc( + node_key_a=node_A, + node_key_b=node_B, + # efficiency=[0.9, 1, 0.9, 1], + efficiency={(0, 0): 0.9, (0, 1): 1, (0, 2): 0.9, (0, 3): 1}, + capacity=1.0, + capacity_is_instantaneous=capacity_is_instantaneous, + # efficiency_reverse=[1, 0.9, 1, 0.9], + efficiency_reverse={(0, 0): 1, (0, 1): 0.9, (0, 2): 1, (0, 3): 0.9}, + static_loss=None, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # validation + # there should be no opex (imports or exports) and no capex + assert pyo.value(ipp.instance.var_sdncf_q[q]) == 0 + assert pyo.value(ipp.instance.var_capex) == 0 + + # ************************************************************************* + # ************************************************************************* + + def test_nonisolated_undirected_network(self): + + # scenario + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1,2,3)}, + time_interval_durations={q: (1,1,1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + 
mynet.add_export_node( + node_key=exp_node_key, + prices={ + qpk: ResourcePrice(prices=0.1+i*0.05, volumes=None) + for i, qpk in enumerate(tf.qpk()) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + # base_flow=[1, -1, 0.5, -0.5] + base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + # base_flow=[-1, 1, -0.5, 0.5] + base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, + ) + + # add arcs + + # import arc + arc_tech_IA = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA + ) + + # export arc + + arc_tech_BE = Arcs( + name="any", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + capacity=[0.5, 0.75, 1.0, 1.25, 1.5, 2.0], + minimum_cost=[10, 10.1, 10.2, 10.3, 10.4, 10.5], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + efficiency_reverse=None, + static_loss=None, + validate=False, + ) + mynet.add_directed_arc( + node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE + ) # undirected arc arc_tech_AB = Arcs( @@ -1869,7 +1944,7 @@ class TestESIPPProblem: max_number_parallel_arcs={} ) - assert not is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 80 assert ipp.results["Problem"][0]["Number of variables"] == 84 assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 @@ -2039,7 +2114,7 @@ class TestESIPPProblem: max_number_parallel_arcs={} ) - assert not is_peak_total_problem(ipp) + assert ipp.has_peak_total_assessments() assert ipp.results["Problem"][0]["Number of constraints"] == 80 assert ipp.results["Problem"][0]["Number of variables"] == 84 assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 @@ -2084,194 +2159,7 @@ class TestESIPPProblem: # ********************************************************************* # ********************************************************************* - - # ************************************************************************* - # ************************************************************************* - - # def test_problem_converter_sink(self): - # # scenario - # q = 0 - # # time - # number_intervals = 3 - # # periods - # number_periods = 1 - - # tf = EconomicTimeFrame( - # discount_rate=3.5/100, - # reporting_periods={q: (0,)}, - # reporting_period_durations={q: (365 * 24 * 3600,)}, - # time_intervals={q: (0,1,2)}, - # time_interval_durations={q: (1,1,1)}, - # ) - - # # 2 nodes: one import, one regular - # mynet = Network() - - # # import node - # node_IMP = generate_pseudo_unique_key(mynet.nodes()) - # mynet.add_import_node( - # node_key=node_IMP, - # prices={ - # (q, p, k): ResourcePrice(prices=1.0, volumes=None) - # for p in range(number_periods) - # for k in range(number_intervals) - # }, - # ) - - # # other nodes - - # node_A = generate_pseudo_unique_key(mynet.nodes()) - - # mynet.add_source_sink_node( - # node_key=node_A, - # # base_flow=[0.5, 0.0, 1.0], - # base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, - # ) - - # # 
arc IA - - # arc_tech_IA = Arcs( - # name="any", - # # efficiency=[0.5, 0.5, 0.5], - # efficiency={(q, 0): 0.5, (q, 1): 0.5, (q, 2): 0.5}, - # efficiency_reverse=None, - # static_loss=None, - # capacity=[3], - # minimum_cost=[2], - # specific_capacity_cost=1, - # capacity_is_instantaneous=False, - # validate=False, - # ) - - # mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) - - # # identify node types - - # mynet.identify_node_types() - - # # converters - - # # number of samples - # time_step_durations = [1, 1, 1] - # number_time_steps = len(time_step_durations) - - # # get the coefficients - # import numpy as np - - # # a_innk - # a_innk = { - # ("cvt1", 0, 0, 0): 0.95, - # ("cvt1", 0, 0, 1): 0.95, - # ("cvt1", 0, 0, 2): 0.95, - # } - - # # b_inmk - # b_inmk = {("cvt1", 0, 0, 0): 3, ("cvt1", 0, 0, 1): 3, ("cvt1", 0, 0, 2): 3} - - # # c_irnk - # c_irnk = {} - # # d_irmk - # d_irmk = {} - # # e_x_ink: depends on fixed signals - # e_x_ink = {} - # # e_y_irk: depends on fixed signals - # e_y_irk = {} - - # # get the signals - # inputs, states, outputs = get_two_node_model_signals(number_time_steps) - - # # create a dynamic system - # ds = dynsys.DynamicSystem( - # time_interval_durations=time_step_durations, A=a, B=b, C=c, D=d - # ) - - # # create a converter - # cvn1 = cvn.Converter( - # "cvn1", - # sys=ds, - # initial_states=x0, - # turn_key_cost=3, - # inputs=inputs, - # states=states, - # outputs=outputs, - # ) - - # # no sos, regular time intervals - - # ipp = self.build_solve_ipp( - # # solver=solver, - # solver_options={}, - # # use_sos_arcs=use_sos_arcs, - # # arc_sos_weight_key=sos_weight_key, - # # arc_use_real_variables_if_possible=use_real_variables_if_possible, - # # use_sos_sense=use_sos_sense, - # # sense_sos_weight_key=sense_sos_weight_key, - # # sense_use_real_variables_if_possible=sense_use_real_variables_if_possible, - # # sense_use_arc_interfaces=use_arc_interfaces, - # perform_analysis=False, - # plot_results=False, # True, - # print_solver_output=False, - # time_frame=tf, - # networks={"mynet": mynet}, - # converters={"mycvt": cvt}, - # static_losses_mode=True, # just to reach a line, - # mandatory_arcs=[], - # max_number_parallel_arcs={}, - # # init_aux_sets=init_aux_sets, - # simplify_problem=False, - # ) - - # assert is_peak_total_problem(ipp) - # assert ipp.results["Problem"][0]["Number of constraints"] == 24 - # assert ipp.results["Problem"][0]["Number of variables"] == 22 - # assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 - - # # ********************************************************************* - # # ********************************************************************* - - # # validation - - # # the arc should be installed since it is required for feasibility - # assert ( - # True - # in ipp.networks["mynet"] - # .edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH] - # .options_selected - # ) - - # # the flows should be 1.0, 0.0 and 2.0 - # assert math.isclose( - # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), - # 1.0, - # abs_tol=1e-6, - # ) - # assert math.isclose( - # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 1)]), - # 0.0, - # abs_tol=1e-6, - # ) - # assert math.isclose( - # pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 2)]), - # 2.0, - # abs_tol=1e-6, - # ) - - # # arc amplitude should be two - # assert math.isclose( - # pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), - # 2.0, - # abs_tol=0.01, - # ) - - # 
# capex should be four - # assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) - - # # sdncf should be -5.7 - # assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -5.7, abs_tol=1e-3) - - # # the objective function should be -9.7 - # assert math.isclose(pyo.value(ipp.instance.obj_f), -9.7, abs_tol=1e-3) - + # ************************************************************************* # ************************************************************************* @@ -3597,8 +3485,9 @@ class TestESIPPProblem: node_D = generate_pseudo_unique_key(mynet.nodes()) mynet.add_source_sink_node(node_key=node_D, base_flow={(q, 0): 0.5, (q, 1): -0.25}) - # ************************************************************************** - + # ********************************************************************* + # ********************************************************************* + # add arcs # IA mynet.add_preexisting_directed_arc( @@ -3751,16 +3640,7 @@ class TestESIPPProblem: static_loss_AB[(1, q, k)] + static_loss_CD[(1, q, k)] for k in range(tf.number_time_intervals(q)) ) - print('hey') - # print(static_losses_mode) - print(ipp.networks['mynet'].edges[(node_A, node_B, arc_key_AB)][Network.KEY_ARC_TECH].options_selected) - print(ipp.networks['mynet'].edges[(node_C, node_D, arc_key_CD)][Network.KEY_ARC_TECH].options_selected) - print(ipp.instance.set_GLLJ_static_pre.pprint()) - print(ipp.instance.set_GLLJ_static_new.pprint()) - # print(ipp.static_losses_departure_node) - # print(ipp.static_losses_arrival_node) - # print(ipp.static_losses_upstream) - # print(ipp.static_losses_downstream) + losses_model = sum( pyo.value( ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)] @@ -3777,7 +3657,6 @@ class TestESIPPProblem: assert imp_group > imp_ind # at least one arc has to be installed - assert ( True in ipp.networks["mynet"] @@ -3790,46 +3669,4457 @@ class TestESIPPProblem: ) # the capex have to be lower than with a group of arcs - abs_tol = 1e-3 - assert math.isclose( pyo.value(ipp.instance.var_capex), capex_ind, abs_tol=abs_tol ) # there should be no exports - assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) # the imports should be lower than with a group of arcs - abs_tol = 1e-3 - assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol) # the operating results should be lower than with an individual arc - abs_tol = 1e-3 - assert math.isclose( pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol ) # the externalities should be zero - abs_tol = 1e-3 - assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol) # the objective function should be -6.3639758220728595-1.5 - abs_tol = 1e-3 - assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol) # the imports should be greater than or equal to the losses for all arx - assert math.isclose(losses_model, losses_ind, abs_tol=abs_tol) + + # ************************************************************************* + # ************************************************************************* + + # TODO: trigger error with static losses + + def test_direct_imp_exp_network(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = 
generate_pseudo_unique_key(mynet.nodes()) + imp_prices = { + qpk: ResourcePrice( + prices=1.5, + volumes=None, + ) + for qpk in tf.qpk() + } + mynet.add_import_node( + node_key=imp_node_key, + prices=imp_prices + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + exp_prices = { + qpk: ResourcePrice( + prices=0.5, + volumes=None, + ) + for qpk in tf.qpk() + } + mynet.add_export_node( + node_key=exp_node_key, + prices=exp_prices, + ) + + # add arc without fixed losses from import node to export + arc_tech_IE = Arcs( + name="IE", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 1.0, 2.0], + minimum_cost=[5, 5.1, 5.2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # ********************************************************************* + # ********************************************************************* + + # import prices are higher: it makes no sense to install the arc + # the arc should not be installed (unless prices allow for it) + + assert ( + True + not in ipp.networks["mynet"] + .edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be no imports + + abs_tol = 1e-6 + + assert math.isclose(flow_in[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) + + assert math.isclose(flow_in_cost[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) + + # there should be no exports + + abs_tol = 1e-2 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) + + assert math.isclose(flow_out_revenue[("mynet", 0, 0)], 0.0, abs_tol=abs_tol) + + # there should be no capex + + abs_tol = 1e-6 + + assert math.isclose(pyo.value(ipp.instance.var_capex), 0.0, abs_tol=abs_tol) + + # ************************************************************************* + # ************************************************************************* + + def test_direct_imp_exp_network_higher_exp_prices(self): + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600,365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 2 nodes: one import, one export + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + imp_prices = { + qpk: ResourcePrice( + prices=0.5, + volumes=None, + ) + for qpk in tf.qpk() + } + mynet.add_import_node( + node_key=imp_node_key, + prices=imp_prices + ) + + # export node + exp_node_key = generate_pseudo_unique_key(mynet.nodes()) + exp_prices = { + qpk: ResourcePrice( + prices=1.5, + volumes=None, + ) + for qpk in tf.qpk() + } + mynet.add_export_node( + node_key=exp_node_key, + prices=exp_prices, + ) + + # add arc without fixed losses from import node
to export + arc_tech_IE = Arcs( + name="IE", + # efficiency=[1, 1, 1, 1], + efficiency={(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1}, + efficiency_reverse=None, + static_loss=None, + validate=False, + capacity=[0.5, 1.0, 2.0], + minimum_cost=[5, 5.1, 5.2], + specific_capacity_cost=1, + capacity_is_instantaneous=False, + ) + mynet.add_directed_arc( + node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # export prices are higher: it makes sense to install the arc since the + # revenue (@ max. cap.) exceeds the cost of installing the arc + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + + abs_tol = 1e-6 + + assert flow_in[("mynet", 0, 0)] > 0.0 - abs_tol + + assert flow_in_cost[("mynet", 0, 0)] > 0.0 - abs_tol + + # there should be exports + + abs_tol = 1e-2 + + assert flow_out[("mynet", 0, 0)] > 0.0 - abs_tol + + assert flow_out_revenue[("mynet", 0, 0)] > 0.0 - abs_tol + + # the revenue should exceed the costs + + abs_tol = 1e-2 + + assert ( + flow_out_revenue[("mynet", 0, 0)] > flow_in_cost[("mynet", 0, 0)] - abs_tol + ) + + # the capex should be positive + + abs_tol = 1e-6 + + assert pyo.value(ipp.instance.var_capex) > 0 - abs_tol + + # ************************************************************************* + # ************************************************************************* + + def test_undirected_arc_static_upstream_new(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0, 1)}, + time_interval_durations={q: (1, 1)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + + mynet = Network() + + # import nodes + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + qpk: ResourcePrice(prices=qpk[2] + 1, volumes=None) + for qpk in tf.qpk() + }, + ) + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + qpk: ResourcePrice(prices=2-qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, base_flow={(0, 0): 0.0, (0, 1): 1.1} + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, base_flow={(0, 0): 1.1, (0, 1): 0.0} + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.2, + capacity_is_instantaneous=False, + ) + + # I2B + mynet.add_preexisting_directed_arc( + node_key_a=imp2_node_key, + node_key_b=node_B, + efficiency=None, + static_loss=None, + capacity=1.2, + capacity_is_instantaneous=False, + ) + efficiency_AB = {(0, 0): 1, (0,
1): 1} + efficiency_BA = {(0, 0): 1, (0, 1): 1} + + # AB + static_loss_AB = { + (0, q, 0): 0.1, + (0, q, 1): 0.1, + (1, q, 0): 0.1, + (1, q, 1): 0.1, + } + + arcs_ab = Arcs( + name="AB", + efficiency=efficiency_AB, + efficiency_reverse=efficiency_BA, + static_loss=static_loss_AB, + capacity=( + 0.5, + 1, + ), + minimum_cost=( + 0.025, + 0.05, + ), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab + ) + + + # identify node types + + mynet.identify_node_types() + + # no sos, regular time intervals + + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # reset decisions if necessary + if True in mynet.edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(imp1_node_key, node_A, 0)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(imp1_node_key, node_A, 0)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + if True in mynet.edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(imp2_node_key, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(imp2_node_key, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + if True in mynet.edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + use_sos_arcs=False, + arc_sos_weight_key=None, + arc_use_real_variables_if_possible=False, + use_sos_sense=False, + sense_sos_weight_key=None, + sense_use_real_variables_if_possible=False, + sense_use_arc_interfaces=False, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # the flow through AB should be from B to A during interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + + # there should be imports + 
+ abs_tol = 1e-6 + + assert math.isclose(flow_in[("mynet", 0, 0)], (1.2 + 1.2), abs_tol=abs_tol) + + # there should be no exports + + abs_tol = 1e-6 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through I1A must be 1.0 during time interval 0 + # flow through I1A must be 0.2 during time interval 1 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1.0, + abs_tol=abs_tol, + ) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), + 0.2, + abs_tol=abs_tol, + ) + + # flow through I2B must be 0.2 during time interval 0 + # flow through I2B must be 1.0 during time interval 1 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.2, + abs_tol=abs_tol, + ) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0 during time interval 0 + # flow from A to B must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + + # flow from A to B must be 1.0 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0.9 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + + # flow from A to B must be 0.9 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + # flow from B to A must be 1.0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + + # flow from A to B must be 0.9 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0.9 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + else: + # downstream + + # flow from A to B must be 1.0 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 1.0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # 
********************************************************************* + # ********************************************************************* + + # ************************************************************************* + # ************************************************************************* + + def test_undirected_arc_static_upstream_preexisting(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0, 1)}, + time_interval_durations={q: (1, 1)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + + mynet = Network() + + # import nodes + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + qpk: ResourcePrice(prices=qpk[2] + 1, volumes=None) + for qpk in tf.qpk() + }, + ) + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + qpk: ResourcePrice(prices=2-qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, base_flow={(0, 0): 0.0, (0, 1): 1.1} + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, base_flow={(0, 0): 1.1, (0, 1): 0.0} + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.2, + capacity_is_instantaneous=False, + ) + + # I2B + mynet.add_preexisting_directed_arc( + node_key_a=imp2_node_key, + node_key_b=node_B, + efficiency=None, + static_loss=None, + capacity=1.2, + capacity_is_instantaneous=False, + ) + efficiency_AB = {(0, 0): 1, (0, 1): 1} + efficiency_BA = {(0, 0): 1, (0, 1): 1} + + # AB + static_loss_AB = {(0, q, 0): 0.1, (0, q, 1): 0.1} + arc_key_AB_und = mynet.add_preexisting_undirected_arc( + node_key_a=node_A, + node_key_b=node_B, + efficiency=efficiency_AB, + efficiency_reverse=efficiency_BA, + static_loss=static_loss_AB, + capacity=1, + capacity_is_instantaneous=False, + ) + + # identify node types + + mynet.identify_node_types() + + # no sos, regular time intervals + + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # reset decisions if necessary + if True in mynet.edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(imp1_node_key, node_A, 0)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(imp1_node_key, node_A, 0)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + if True in mynet.edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(imp2_node_key, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(imp2_node_key, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + if True in mynet.edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + use_sos_arcs=False, + arc_sos_weight_key=None, + arc_use_real_variables_if_possible=False, + use_sos_sense=False, + sense_sos_weight_key=None, + sense_use_real_variables_if_possible=False, + 
sense_use_arc_interfaces=False, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # the flow through AB should be from B to A during interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + + # there should be imports + + abs_tol = 1e-6 + + assert math.isclose(flow_in[("mynet", 0, 0)], (1.2 + 1.2), abs_tol=abs_tol) + + # there should be no exports + + abs_tol = 1e-6 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through I1A must be 1.0 during time interval 0 + # flow through I1A must be 0.2 during time interval 1 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1.0, + abs_tol=abs_tol, + ) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), + 0.2, + abs_tol=abs_tol, + ) + + # flow through I2B must be 0.2 during time interval 0 + # flow through I2B must be 1.0 during time interval 1 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.2, + abs_tol=abs_tol, + ) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0 during time interval 0 + # flow from A to B must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + + # flow from A to B must be 1.0 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0.9 during time interval 
1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + + # flow from A to B must be 0.9 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + # flow from B to A must be 1.0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + + # flow from A to B must be 0.9 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + # flow from B to A must be 0.9 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + else: + # downstream + + # flow from A to B must be 1.0 during time interval 0 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # flow from B to A must be 1.0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_undirected_arc_static_downstream_new(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0, 1, 2, 3)}, + time_interval_durations={q: (1, 1, 1, 1)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + mynet = Network() + + # import nodes + imp1_prices = [ResourcePrice(prices=k, volumes=None) for k in [1, 2, 1, 1]] + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + qpk: imp1_prices[qpk[2]] + for qpk in tf.qpk() + }, + ) + imp2_prices = [ResourcePrice(prices=k, volumes=None) for k in [2, 1, 2, 2]] + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + qpk: imp2_prices[qpk[2]] + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + base_flow={ + (0, 0): 1.0, # to be provided via I1 but AB losses have to be comp. + (0, 1): 0.0, + (0, 2): 0.0, + (0, 3): 0.0, + }, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + base_flow={ + (0, 0): 0.0, + (0, 1): 1.0, # to be provided via I2 but AB losses have to be comp. 
+ (0, 2): 2.0, # forces the undirected arc to be used and installed + (0, 3): 0.9, # forces the undirected arc to be used and installed + }, + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.1, + capacity_is_instantaneous=False, + ) + # I2B + mynet.add_preexisting_directed_arc( + node_key_a=imp2_node_key, + node_key_b=node_B, + efficiency=None, + static_loss=None, + capacity=1.1, + capacity_is_instantaneous=False, + ) + efficiency_AB = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} + efficiency_BA = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} + + # AB + static_loss_AB = { + (0, q, 0): 0.1, + (0, q, 1): 0.1, + (0, q, 2): 0.1, + (0, q, 3): 0.1, + } + + arcs_ab = Arcs( + name="AB", + efficiency=efficiency_AB, + efficiency_reverse=efficiency_BA, + static_loss=static_loss_AB, + capacity=(1,), + minimum_cost=(0.05,), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + + abs_tol = 1e-6 + + assert math.isclose( + flow_in[("mynet", 0, 0)], (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol + ) + + # there should be no exports + + abs_tol = 1e-6 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through I1A must be 1.1 during time interval 0 + # flow through I1A must be 0.0 during time interval 1 + # flow through I1A must be 1.0 during time interval 2 (flow from B to A) + # flow through I1A must be 1.0 during time interval 3 (because AB is used from B to A) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), + 0.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 2)]), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 3)]), + 1.0, + abs_tol=abs_tol, + ) + + # flow through I2B must be 0.0 during time interval 0 + # flow through I2B must be 1.1 during time interval 1 + # flow through I2B must be 1.1 during time interval 2 + # flow through I2B must be 0.0 during time interval 3 + + abs_tol = 1e-6 + + assert 
math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 2)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 3)]), + 0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + + # losses are always in B + + # flow from A to B must be 0.1 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + + # losses are always in A + + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0.1 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ 
+ ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + else: + # downstream + + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0.1 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, 
arc_key_AB_und, 0, 2) + ] + ), + 1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_undirected_arc_static_downstream_preexisting(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0, 1, 2, 3)}, + time_interval_durations={q: (1, 1, 1, 1)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + mynet = Network() + + # import nodes + imp1_prices = [ResourcePrice(prices=k, volumes=None) for k in [1, 2, 1, 1]] + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + qpk: imp1_prices[qpk[2]] + for qpk in tf.qpk() + }, + ) + imp2_prices = [ResourcePrice(prices=k, volumes=None) for k in [2, 1, 2, 2]] + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + qpk: imp2_prices[qpk[2]] + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_A, + base_flow={ + (0, 0): 1.0, # to be provided via I1 but AB losses have to be comp. + (0, 1): 0.0, + (0, 2): 0.0, + (0, 3): 0.0, + }, + ) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + base_flow={ + (0, 0): 0.0, + (0, 1): 1.0, # to be provided via I2 but AB losses have to be comp. 
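+                # as in the new-arc variant above, the 2.0 demand in interval 2
+                # exceeds the I2B capacity of 1.1, so the pre-existing undirected
+                # AB arc must carry flow and its 0.1 static loss must be compensated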
+ (0, 2): 2.0, # forces the undirected arc to be used and installed + (0, 3): 0.9, # forces the undirected arc to be used and installed + }, + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.1, + capacity_is_instantaneous=False, + ) + # I2B + mynet.add_preexisting_directed_arc( + node_key_a=imp2_node_key, + node_key_b=node_B, + efficiency=None, + static_loss=None, + capacity=1.1, + capacity_is_instantaneous=False, + ) + efficiency_AB = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} + efficiency_BA = {(0, 0): 1, (0, 1): 1, (0, 2): 1, (0, 3): 1} + + # AB + + static_loss_AB = { + (0, q, 0): 0.1, + (0, q, 1): 0.1, + (0, q, 2): 0.1, + (0, q, 3): 0.1, + } + + arc_key_AB_und = mynet.add_preexisting_undirected_arc( + node_key_a=node_A, + node_key_b=node_B, + efficiency=efficiency_AB, + efficiency_reverse=efficiency_BA, + static_loss=static_loss_AB, + capacity=1, + capacity_is_instantaneous=False, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + + abs_tol = 1e-6 + + assert math.isclose( + flow_in[("mynet", 0, 0)], (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol + ) + + # there should be no exports + + abs_tol = 1e-6 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through I1A must be 1.1 during time interval 0 + # flow through I1A must be 0.0 during time interval 1 + # flow through I1A must be 1.0 during time interval 2 (flow from B to A) + # flow through I1A must be 1.0 during time interval 3 (because AB is used from B to A) + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 1)]), + 0.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 2)]), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 3)]), + 1.0, + abs_tol=abs_tol, + ) + + # flow through I2B must be 0.0 during time interval 0 + # flow through I2B must be 1.1 during time interval 1 + # flow through I2B must be 1.1 during time interval 2 + # flow through I2B must be 0.0 during time interval 3 + + abs_tol = 1e-6 + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.0, + 
abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 1)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 2)]), + 1.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 3)]), + 0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + + # losses are always in B + + # flow from A to B must be 0.1 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 1.0 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + + # losses are always in A + + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0.1 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 2 + # flow from B to A must be 0 during time interval 2 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert 
math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 3 + # flow from B to A must be 0 during time interval 3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + + abs_tol = 1e-3 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0 during time interval 1 + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # flow from A to B must be 0.9 during time interval 2 + # flow from B to A must be 0 during time interval 2 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + # flow from A to B must be 0.9 during time interval 3 + # flow from B to A must be 0 during time interval 3 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 0.9, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + else: # downstream + # flow from A to B must be 0 during time interval 0 + # flow from B to A must be 0 during time interval 0 + abs_tol = 1e-3 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + # flow from A to B must be 0 during time interval 1 + # flow from B to A must be 0.1 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 1) + ] + ), + 0.1, + abs_tol=abs_tol, + ) + # flow from A to B must be 1.0 during time interval 2 + # flow from B to A must be 0 during time interval 2 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 2) + ] + ), + 1, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", 
node_B, node_A, arc_key_AB_und, 0, 2) + ] + ), + 0, + abs_tol=abs_tol, + ) + # flow from A to B must be 1.0 during time interval 3 + # flow from B to A must be 0 during time interval 3 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, 0, 3) + ] + ), + 1.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, 0, 3) + ] + ), + 0, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_directed_network_static_losses_new(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + 0.05*qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_waypoint_node(node_key=node_A) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, + node_key_b=node_A, + efficiency={(q, 0): 1}, + static_loss=None, + ) + # AB arc + arc_tech_AB = Arcs( + name="AB", + efficiency={(q, 0): 0.8}, + efficiency_reverse=None, + validate=False, + capacity=[1.0], + minimum_cost=[0], # [0] + specific_capacity_cost=0, + capacity_is_instantaneous=False, + static_loss={(0, q, 0): 0.10}, + ) + mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in [ + InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, + InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP + ]: + + # reset decisions if necessary + if True in mynet.edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, 0)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, + print_solver_output=True, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], 0.35, abs_tol=abs_tol) + + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through IA must be 0.35 + abs_tol = 1e-6 + 
assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), + 0.35, + abs_tol=abs_tol, + ) + + # validation + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # losses are downstream + # flow through AB must be 0.35 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), + 0.35, + abs_tol=abs_tol, + ) + else: + # losses are upstream + # flow through AB must be 0.25 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), + 0.25, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_directed_network_static_losses_pre(self): + + # assessment + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0, 1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + 0.05*qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_waypoint_node(node_key=node_A) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, + node_key_b=node_A, + efficiency={(q, 0): 1}, + static_loss=None, + ) + # AB arc + mynet.add_preexisting_directed_arc( + node_key_a=node_A, + node_key_b=node_B, + efficiency={(q, 0): 0.8}, + static_loss={(0, q, 0): 0.10}, + capacity=1.0, + capacity_is_instantaneous=False, + ) + # arc_tech_AB = Arcs( + # name="AB", + # efficiency={(q, 0): 0.8}, + # efficiency_reverse=None, + # validate=False, + # capacity=[1.0], + # minimum_cost=[0], # [0] + # specific_capacity_cost=0, + # capacity_is_instantaneous=False, + # static_loss={(0, q, 0): 0.10}, + # ) + # arc_tech_AB.options_selected[0] = True + # mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in [ + InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, + InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP + ]: + # TODO: make this work with GLPK and SCIP + ipp = self.build_solve_ipp( + solver='cbc', # does not work with GLPK nor SCIP + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=True, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], 0.35, 
abs_tol=abs_tol) + + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through IA must be 0.35 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + + # validation + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # losses are downstream + # flow through AB must be 0.35 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + else: + # losses are upstream + # flow through AB must be 0.25 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, q, 0)]), + 0.25, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_undirected_network_static_losses_new_nom(self): + + # static losses on undirected arcs (example from the report) + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 3 nodes: one import, two regular nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 0.4}) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2, (q, 1): -0.6}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, node_key_b=node_A, efficiency=None, static_loss=None + ) + AB_efficiency = {(q, 0): 0.8, (q, 1): 0.8} + BA_efficiency = {(q, 0): 0.5, (q, 1): 0.5} + + # new AB arc + arc_tech_AB = Arcs( + name="AB", + efficiency=AB_efficiency, + efficiency_reverse=BA_efficiency, + validate=False, + capacity=[1.0], + minimum_cost=[0.01], + specific_capacity_cost=0, + capacity_is_instantaneous=False, + static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, + ) + + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # reset decisions if necessary + if True in mynet.edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in 
ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + abs_tol = 1e-6 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + # the flow through AB should be from B to A during interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol) + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + # flow through IA must be 0.35 during time interval 0 + # flow through IA must be 0.15 during time interval 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 1)]), + 0.15, + abs_tol=abs_tol, + ) + # flow from B to A must be 0 durng time interval 0 + # flow from A to B must be 0 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + 
pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + else: + # downstream + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_undirected_network_static_losses_pre_nom(self): + + # static losses on undirected arcs (example from the report) + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 3 nodes: one import, two regular nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 0.4}) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2, (q, 1): -0.6}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, node_key_b=node_A, efficiency=None, static_loss=None + ) + AB_efficiency = {(q, 0): 0.8, (q, 1): 0.8} + BA_efficiency = {(q, 0): 0.5, (q, 1): 0.5} + + # pre-existing AB arc + arc_key_AB_und = mynet.add_preexisting_undirected_arc( + node_key_a=node_A, + node_key_b=node_B, + efficiency=AB_efficiency, + efficiency_reverse=BA_efficiency, + static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, + capacity=1.0, + capacity_is_instantaneous=False, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # reset decisions if necessary + if True in mynet.edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_A, node_B, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + abs_tol = 
1e-6 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + # the flow through AB should be from B to A during interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol) + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + # flow through IA must be 0.35 during time interval 0 + # flow through IA must be 0.15 during time interval 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 1)]), + 0.15, + abs_tol=abs_tol, + ) + # flow from B to A must be 0 durng time interval 0 + # flow from A to B must be 0 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + else: + # downstream + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, 
+ abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_undirected_network_static_losses_new_rev(self): + + # static losses on undirected arcs (example from the report) + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 3 nodes: one import, two regular nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 0.4}) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2, (q, 1): -0.6}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, node_key_b=node_A, efficiency=None, static_loss=None + ) + AB_efficiency = {(q, 0): 0.8, (q, 1): 0.8} + BA_efficiency = {(q, 0): 0.5, (q, 1): 0.5} + + # new AB arc + arc_tech_AB = Arcs( + name="AB", + efficiency=BA_efficiency, + efficiency_reverse=AB_efficiency, + validate=False, + capacity=[1.0], + minimum_cost=[0.01], + specific_capacity_cost=0, + capacity_is_instantaneous=False, + static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, + ) + + arc_key_AB_und = mynet.add_undirected_arc( + node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # reset decisions if necessary + if True in mynet.edges[(node_B, node_A, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + mynet.edges[(node_B, node_A, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected[ + mynet.edges[(node_B, node_A, arc_key_AB_und)][ + Network.KEY_ARC_TECH].options_selected.index(True) + ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_B, node_A, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + abs_tol = 1e-6 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + 
("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + # the flow through AB should be from B to A during interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol) + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + # flow through IA must be 0.35 during time interval 0 + # flow through IA must be 0.15 during time interval 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 1)]), + 0.15, + abs_tol=abs_tol, + ) + # flow from B to A must be 0 durng time interval 0 + # flow from A to B must be 0 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + # arrival node + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + else: + # downstream + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + 
abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_report_undirected_network_static_losses_pre_rev(self): + + # static losses on undirected arcs (example from the report) + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,1)}, + time_interval_durations={q: (1,1)}, + ) + + # 3 nodes: one import, two regular nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + qpk: ResourcePrice(prices=1 + qpk[2], volumes=None) + for qpk in tf.qpk() + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.0, (q, 1): 0.4}) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_B, base_flow={(q, 0): 0.2, (q, 1): -0.6}) + + # add arcs + # IA arc + mynet.add_infinite_capacity_arc( + node_key_a=imp_node_key, node_key_b=node_A, efficiency=None, static_loss=None + ) + AB_efficiency = {(q, 0): 0.8, (q, 1): 0.8} + BA_efficiency = {(q, 0): 0.5, (q, 1): 0.5} + + # pre-existing AB arc + arc_key_AB_und = mynet.add_preexisting_undirected_arc( + node_key_a=node_B, + node_key_b=node_A, + efficiency=BA_efficiency, + efficiency_reverse=AB_efficiency, + static_loss={(0, q, 0): 0.10, (0, q, 1): 0.10}, + capacity=1.0, + capacity_is_instantaneous=False, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: + + # # reset decisions if necessary + # if True in mynet.edges[(node_A, node_B, arc_key_AB_und)][Network.KEY_ARC_TECH].options_selected: + # mynet.edges[(node_A, node_B, arc_key_AB_und)][ + # Network.KEY_ARC_TECH].options_selected[ + # mynet.edges[(node_A, node_B, arc_key_AB_und)][ + # Network.KEY_ARC_TECH].options_selected.index(True) + # ] = False + + ipp = self.build_solve_ipp( + solver_options={}, + perform_analysis=False, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=static_losses_mode, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(node_B, node_A, arc_key_AB_und)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # the flow through AB should be from A to B during interval 0 + abs_tol = 1e-6 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 1, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 0) + ] + ), + 0, + abs_tol=abs_tol, + ) + # the flow through AB should be from B to A during interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 1) + ] + ), + 0, + abs_tol=abs_tol, 
+ ) + assert math.isclose( + pyo.value( + ipp.instance.var_zeta_sns_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 1, + abs_tol=abs_tol, + ) + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol) + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + # flow through IA must be 0.35 during time interval 0 + # flow through IA must be 0.15 during time interval 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 0)]), + 0.35, + abs_tol=abs_tol, + ) + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, q, 1)]), + 0.15, + abs_tol=abs_tol, + ) + # flow from B to A must be 0 durng time interval 0 + # flow from A to B must be 0 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_B, node_A, arc_key_AB_und, 0, 0)] + ), + 0.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[("mynet", node_A, node_B, arc_key_AB_und, 0, 1)] + ), + 0.0, + abs_tol=abs_tol, + ) + + # validation + + if static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR: + # arrival node + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP: + # departure node + # arrival node + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + elif static_losses_mode == InfrastructurePlanningProblem.STATIC_LOSS_MODE_US: + # upstream + # flow from A to B must be 0.25 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.25, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.5, + abs_tol=abs_tol, + ) + else: + # downstream + # flow from A to B must be 0.35 during time interval 0 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_A, node_B, arc_key_AB_und, q, 0) + ] + ), + 0.35, + abs_tol=abs_tol, + ) + # flow from B to A must be 0.6 during time interval 1 + assert math.isclose( + pyo.value( + ipp.instance.var_v_glljqk[ + ("mynet", node_B, node_A, arc_key_AB_und, q, 1) + ] + ), + 0.6, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_directed_arc_static_upstream_new(self): + + # time + number_intervals = 1 + number_periods = 2 + + # time frame + q = 0 + 
tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + mynet = Network() + + # import nodes + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + (q, p, k): ResourcePrice(prices=1, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + (q, p, k): ResourcePrice(prices=2, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_waypoint_node(node_key=node_A) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + base_flow={ + (q, 0): 1.0, + }, + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1, + capacity_is_instantaneous=False, + ) + + # I2B + arcs_i2b = Arcs( + name="I2B", + efficiency=None, + efficiency_reverse=None, + static_loss=None, + capacity=(0.1,), + minimum_cost=(0.025,), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + mynet.add_directed_arc( + node_key_a=imp2_node_key, node_key_b=node_B, arcs=arcs_i2b + ) + + # AB + arcs_ab = Arcs( + name="IA1", + efficiency=None, + efficiency_reverse=None, + static_loss={(0, q, 0): 0.1}, + capacity=(1,), + minimum_cost=(0.05,), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + + ipp = self.build_solve_ipp( + solver_options={}, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], 1.1, abs_tol=abs_tol) + + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # interval 0: flow through IA1 must be 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1, + abs_tol=abs_tol, + ) + + # interval 0: flow through AB must be 0.9 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), + 0.9, + abs_tol=abs_tol, + ) + + # interval 0: flow through IB2 must be 0.1 + assert 
math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.1, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_directed_arc_static_upstream_pre(self): + + # time + number_intervals = 1 + number_periods = 2 + + # time frame + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + + # 4 nodes: two import nodes, two supply/demand nodes + mynet = Network() + + # import nodes + imp1_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp1_node_key, + prices={ + (q, p, k): ResourcePrice(prices=1, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + imp2_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp2_node_key, + prices={ + (q, p, k): ResourcePrice(prices=2, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_waypoint_node(node_key=node_A) + node_B = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node( + node_key=node_B, + base_flow={ + (q, 0): 1.0, + }, + ) + + # add arcs + # I1A + mynet.add_preexisting_directed_arc( + node_key_a=imp1_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1, + capacity_is_instantaneous=False, + ) + + # I2B + mynet.add_preexisting_directed_arc( + node_key_a=imp2_node_key, + node_key_b=node_B, + efficiency=None, + static_loss=None, + capacity=0.1, + capacity_is_instantaneous=False, + ) + + # AB + mynet.add_preexisting_directed_arc( + node_key_a=node_A, + node_key_b=node_B, + efficiency=None, + static_loss={(0, q, 0): 0.1}, + capacity=1, + capacity_is_instantaneous=False, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + + ipp = self.build_solve_ipp( + solver='cbc', # TODO: make this work with other solvers + solver_options={}, + plot_results=False, # True, + print_solver_output=False, + time_frame=tf, + networks={"mynet": mynet}, + static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp1_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp2_node_key, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(node_A, node_B, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], 1.1, abs_tol=abs_tol) + + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # interval 0: flow through IA1 must be 1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp1_node_key, node_A, 0, 0, 0)]), + 1, + abs_tol=abs_tol, + ) + + # interval 0: flow 
through AB must be 0.9 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", node_A, node_B, 0, 0, 0)]), + 0.9, + abs_tol=abs_tol, + ) + + # interval 0: flow through IB2 must be 0.1 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp2_node_key, node_B, 0, 0, 0)]), + 0.1, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_directed_arc_static_downstream_new(self): + + # time + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + number_intervals = 2 + number_periods = 2 + + # 4 nodes: one import, one export, two supply/demand nodes + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + (q, p, k): ResourcePrice(prices=0.1, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0, (q, 1): 1.3}) + + # add arcs + + # IA1 + arcs_ia1 = Arcs( + name="IA1", + efficiency={(q, 0): 0.9, (q, 1): 0.9}, + efficiency_reverse=None, + static_loss={(0, q, 0): 0.0, (0, q, 1): 0.1}, + capacity=tuple([0.5 / 0.9]), + minimum_cost=tuple([0.1]), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia1) + + # IA2 + arcs_ia2 = Arcs( + name="IA2", + efficiency=None, + efficiency_reverse=None, + static_loss=None, + capacity=tuple([1.2]), + minimum_cost=tuple([0.1]), + specific_capacity_cost=0, + capacity_is_instantaneous=False, + validate=True, + ) + mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia2) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + # solver=solver, + solver_options={}, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=True, + mandatory_arcs=[], + max_number_parallel_arcs={} + ) + + # all arcs should be installed (they are not new) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 1)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose( + flow_in[("mynet", 0, 0)], (1.2 + 0.1 / 0.9 + 1.0 + 0.1), abs_tol=abs_tol + ) + + # there should be no exports + abs_tol = 1e-6 + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # interval 0: flow through IA1 must be 0 + # interval 1: flow through IA1 must be 0.1+0.1/0.9 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), + 0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 1)]), + 0.1 + 0.1 / 0.9, + abs_tol=abs_tol, + ) + + # interval 
0: flow through IA2 must be 1.0 + # interval 1: flow through IA2 must be 1.2 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 0)]), + 1.0, + abs_tol=abs_tol, + ) + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 1)]), + 1.2, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + def test_directed_arc_static_downstream_pre(self): + + # time + q = 0 + tf = EconomicTimeFrame( + discount_rate=3.5/100, + reporting_periods={q: (0,1)}, + reporting_period_durations={q: (365 * 24 * 3600, 365 * 24 * 3600)}, + time_intervals={q: (0,)}, + time_interval_durations={q: (1,)}, + ) + number_intervals = 1 + number_periods = 2 + + # 4 nodes: one import, one export, two supply/demand nodes + + mynet = Network() + + # import node + imp_node_key = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_import_node( + node_key=imp_node_key, + prices={ + (q, p, k): ResourcePrice(prices=1 + 0.1, volumes=None) + for p in range(number_periods) + for k in range(number_intervals) + }, + ) + + # other nodes + node_A = generate_pseudo_unique_key(mynet.nodes()) + mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0}) + + # add arcs + # IA1 + mynet.add_preexisting_directed_arc( + node_key_a=imp_node_key, + node_key_b=node_A, + efficiency={(q, 0): 0.9}, + static_loss={(q, 0, 0): 0.1}, + capacity=0.5, + capacity_is_instantaneous=False, + ) + + # IA2 + mynet.add_preexisting_directed_arc( + node_key_a=imp_node_key, + node_key_b=node_A, + efficiency=None, + static_loss=None, + capacity=1.2, + capacity_is_instantaneous=False, + ) + + # identify node types + mynet.identify_node_types() + + # no sos, regular time intervals + ipp = self.build_solve_ipp( + solver='cbc', # TODO: make this work with other solvers + solver_options={}, + plot_results=False, # True, + print_solver_output=False, + networks={"mynet": mynet}, + time_frame=tf, + static_losses_mode=True, + mandatory_arcs=[], + max_number_parallel_arcs={}, + ) + + # ************************************************************************** + + # all arcs should be installed (they are not new) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 0)][Network.KEY_ARC_TECH] + .options_selected + ) + + assert ( + True + in ipp.networks["mynet"] + .edges[(imp_node_key, node_A, 1)][Network.KEY_ARC_TECH] + .options_selected + ) + + # overview + ( + flow_in, + flow_in_k, + flow_out, + flow_in_cost, + flow_out_revenue, + ) = compute_cost_volume_metrics(ipp.instance, True) + + # there should be imports + abs_tol = 1e-6 + assert math.isclose(flow_in[("mynet", 0, 0)], (1.0 + 0.1), abs_tol=abs_tol) + + # there should be no exports + + abs_tol = 1e-6 + + assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol) + + # flow through IA1 must be 0.1 + abs_tol = 1e-6 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 0, 0, 0)]), + 0.1, + abs_tol=abs_tol, + ) + + # flow through IA2 must be 1.0 + assert math.isclose( + pyo.value(ipp.instance.var_v_glljqk[("mynet", imp_node_key, node_A, 1, 0, 0)]), + 1.0, + abs_tol=abs_tol, + ) + + # ************************************************************************* + # ************************************************************************* + + # def test_problem_converter_sink(self): + + # # assessment + # q = 0 + # tf = EconomicTimeFrame( + # 
discount_rate=3.5/100, + # reporting_periods={q: (0,)}, + # reporting_period_durations={q: (365 * 24 * 3600,)}, + # time_intervals={q: (0,1,2)}, + # time_interval_durations={q: (1,1,1)}, + # ) + + # # 2 nodes: one import, one regular + # mynet = Network() + + # # import node + # node_IMP = generate_pseudo_unique_key(mynet.nodes()) + # mynet.add_import_node( + # node_key=node_IMP, + # prices={ + # qpk: ResourcePrice(prices=1.0, volumes=None) + # for qpk in tf.qpk() + # }, + # ) + + # # other nodes + # node_A = generate_pseudo_unique_key(mynet.nodes()) + # mynet.add_source_sink_node( + # node_key=node_A, + # base_flow={(q, 0): 0.50, (q, 1): 0.00, (q, 2): 1.00}, + # ) + + # # arc IA + # arc_tech_IA = Arcs( + # name="any", + # # efficiency=[0.5, 0.5, 0.5], + # efficiency={(q, 0): 0.5, (q, 1): 0.5, (q, 2): 0.5}, + # efficiency_reverse=None, + # static_loss=None, + # capacity=[3], + # minimum_cost=[2], + # specific_capacity_cost=1, + # capacity_is_instantaneous=False, + # validate=False, + # ) + # mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) + + # # identify node types + # mynet.identify_node_types() + + # # converter + # a_nnk = { + # (0, 0, 0): 0.95, + # (0, 0, 1): 0.95, + # (0, 0, 2): 0.95, + # } + # b_nmk = { + # (0, 0, 0): 3, + # (0, 0, 1): 3, + # (0, 0, 2): 3 + # } + # x_n0 = {0: 18} + + # # get the signals + # inputs, states, outputs = get_two_node_model_signals( + # tf.number_time_intervals(q) + # ) + + # # create a discretised dynamic system from dictionaries + # dds = dynsys.DiscretisedDynamicSystem( + # a_nnk=a_nnk, + # b_nmk=b_nmk, + # x_n0=x_n0, + # time_frame=tf + # ) + + # # create a converter + # cvt = Converter( + # time_frame=tf, + # dds=dds, + # turn_key_cost=3, + # inputs=inputs, + # states=states, + # outputs=outputs, + # ) + + # # no sos, regular time intervals + + # ipp = self.build_solve_ipp( + # solver_options={}, + # perform_analysis=False, + # plot_results=False, # True, + # print_solver_output=False, + # time_frame=tf, + # networks={"mynet": mynet}, + # converters={"mycvt": cvt}, + # static_losses_mode=False, + # mandatory_arcs=[], + # max_number_parallel_arcs={}, + # # init_aux_sets=init_aux_sets, + # simplify_problem=False, + # ) + + # assert not ipp.has_peak_total_assessments() + # assert ipp.results["Problem"][0]["Number of constraints"] == 24 + # assert ipp.results["Problem"][0]["Number of variables"] == 22 + # assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 + + # # ********************************************************************* + # # ********************************************************************* + + # # validation + + # # if uC,M 1,q,0 = 0, then xC,N 1,q,1 = 17.1 # infeasible + # # if uC,M 1,q,0 = 1, then xC,N 1,q,1 = 20.1. 
# only feasible option + # # if uC,M 1,q,1 = 0, then xC,N 1,q,2 = 19.095 # only feasible option + # # if uC,M 1,q,1 = 1, then xC,N 1,q,2 = 22.095 # infeasible + # # if uC,M 1,q,2 = 0, then xC,N 1,q,3 = 18.14025 # feasible + # # if uC,M 1,q,2 = 1, then xC,N 1,q,3 = 21.14025 # feasible + + # true_u_imqk = { + # ('mycvt', 0, q, 0): 1, + # ('mycvt', 0, q, 1): 0, + # ('mycvt', 0, q, 2): 0, # could also be 1 + # } + + # true_x_inqk = { + # ('mycvt', 0, q, 0): 20.1, + # ('mycvt', 0, q, 1): 19.095, + # ('mycvt', 0, q, 2): 18.14025, # could also be 21.14025 + # } + + # # check the inputs + # for imqk, u in true_u_imqk.items(): + # assert math.isclose( + # pyo.value(ipp.instance.var_u_imqk[imqk]), + # u, + # abs_tol=1e-6, + # ) + + # # check the states + # for inqk, x in true_x_inqk.items(): + # assert math.isclose( + # pyo.value(ipp.instance.var_x_inqk[inqk]), + # x, + # abs_tol=1e-6, + # ) + + + + # ************************************************************************* + # ************************************************************************* + + # TODO: test non-simplifiable problems with time varying prices on select assessments + # TODO: test non-simplifiable problems with volume varying prices on select assessments # ***************************************************************************** # ***************************************************************************** \ No newline at end of file diff --git a/tests/test_solvers.py b/tests/test_solvers.py index 87e2da1fc947863c1439299270879c0fcf38317d..48773c650ed106cec9f66f43599e82c8cc2312c6 100644 --- a/tests/test_solvers.py +++ b/tests/test_solvers.py @@ -5,6 +5,7 @@ from src.topupopt.solvers.interface import SolverInterface from pyomo.opt.results.solver import TerminationCondition import pyomo.environ as pyo +from pyomo.common.errors import ApplicationError import random @@ -40,6 +41,339 @@ class TestSolvers: solver_name = "glpk" results, solver_interface = self.optimise(solver_name, solver_options, problem) + + # ************************************************************************* + # ************************************************************************* + + def test_problems_scip(self): + # test a collection of problems using different solvers + + solver = "scip" + scip_exec_path = '/usr/bin/scip' + solver_options = {'executable': scip_exec_path} + # solver_options = {} + + # list of problems + + list_concrete_models = [ + self.problem_qp_optimal(), + self.problem_qp_infeasible(), + self.problem_lp_unbounded(), + self.problem_lp_infeasible(), + self.problem_lp_optimal(), + self.problem_milp_unbounded(), + self.problem_milp_infeasible(), + self.problem_milp_optimal(), + self.problem_milp_feasible(), + self.problem_milp_feasible(15, 64), + self.problem_milp_feasible(10, 46), + ] + + # list of problem types + + list_problem_types = [ + SolverInterface.PROBLEM_QP, + SolverInterface.PROBLEM_QP, + SolverInterface.PROBLEM_LP, + SolverInterface.PROBLEM_LP, + SolverInterface.PROBLEM_LP, + SolverInterface.PROBLEM_MILP, + SolverInterface.PROBLEM_MILP, + SolverInterface.PROBLEM_MILP, + SolverInterface.PROBLEM_MILP, + SolverInterface.PROBLEM_MILP, + "unknown_problem_type", + ] + + # expected + + list_problem_termination_conditions = [ + TerminationCondition.optimal, + TerminationCondition.infeasible, + TerminationCondition.unbounded, + TerminationCondition.infeasible, + TerminationCondition.optimal, + TerminationCondition.unbounded, + TerminationCondition.infeasible, + TerminationCondition.optimal, + None, # if we don't know what to 
expect, + None, # if we don't know what to expect, + None, # if we don't know what to expect + ] + + list_problem_optimisation_sucess = [ + True, + True, + False, + False, + True, + False, + False, + True, + True, + True, + True, + ] + + # solver settings + solver_timelimit = 10 + solver_abs_mip_gap = 0.001 + solver_rel_mip_gap = 0.01 + solver_options.update( + { + "time_limit": solver_timelimit, + "relative_mip_gap": solver_rel_mip_gap, + "absolute_mip_gap": solver_abs_mip_gap, + } + ) + + for problem_index, problem in enumerate(list_concrete_models): + print('******************') + print(problem_index) + + # check problem and solver compatibility + + problem_type = list_problem_types[problem_index] + + if not SolverInterface.problem_and_solver_are_compatible( + solver, problem_type + ): + continue + + try: + # optimise + results, solver_interface = self.optimise( + solver, solver_options, problem, print_solver_output=False + ) + except ApplicationError: + print(problem_index) + print(problem) + assert False + + except SolverInterface.UnknownSolverError: + continue + + except SolverInterface.UnknownProblemTypeError: + continue + + # ************************************************************* + # ************************************************************* + + # termination condition + + exp_term_cond = list_problem_termination_conditions[problem_index] + + term_cond = results.solver.termination_condition + + if ( + exp_term_cond == None + or ( + solver == "glpk" + and exp_term_cond == TerminationCondition.unbounded + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.unbounded + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.optimal + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.infeasible + ) + ): + # exceptions in need of correction + + pass + + else: + # print(solver_name) + # print(results) + assert exp_term_cond == term_cond + + # ********************************************************************* + # ********************************************************************* + + # solver status + + if ( + ( + solver == "glpk" + and term_cond == TerminationCondition.infeasible + ) + or ( + solver == "cplex" + and term_cond == TerminationCondition.unknown + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.unbounded + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.infeasible + ) + ): + pass + + else: + # check if the solver status matches the one one would expect + # if the termination condition was correct + + assert ( + TerminationCondition.to_solver_status(term_cond) + == results.solver.status + ) + + # if valid, it means the results object is coherent + + # ********************************************************************* + # ********************************************************************* + + if ( + exp_term_cond == None + or ( + solver == "glpk" + and exp_term_cond == TerminationCondition.unbounded + ) + or ( + solver == "glpk" + and exp_term_cond == TerminationCondition.infeasible + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.unknown + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.unbounded + ) + or ( + solver == "cplex" + and exp_term_cond == TerminationCondition.infeasible + ) + ): + pass + + else: + # check if the solver status matches the one one would expect + # if the termination condition predicted was obtained + + assert ( + TerminationCondition.to_solver_status(exp_term_cond) + == 
results.solver.status + ) + + # if valid, the solver status is correct despite other issues + + # ************************************************************* + # ************************************************************* + + # make sure the optimisation went as expected + + exp_optim_result = list_problem_optimisation_sucess[problem_index] + + if ( + TerminationCondition.to_solver_status( + results.solver.termination_condition + ) + != results.solver.status + ): + # this can be removed once the aforementioned issues have + # been fixed (e.g. for the cplex and glpk solvers) + + pass + + else: + optim_result = solver_interface.was_optimisation_sucessful( + results, problem_type + ) + + # ************************************************************* + # ************************************************************* + + if ( + TerminationCondition.to_solver_status( + results.solver.termination_condition + ) + != results.solver.status + or exp_term_cond == TerminationCondition.unbounded + ): + # this can be removed once the aforementioned issues have + # been fixed (e.g. for the cplex and glpk solvers) + + pass + + else: + assert optim_result == exp_optim_result + + # ************************************************************* + # ************************************************************* + + # test additional scenarios + + if optim_result == False: + continue + + # force unknown solver status error + + results.solver.status = "false_solver_status" + + try: + _ = solver_interface.was_optimisation_sucessful( + results, problem_type + ) + + except solver_interface.UnknownSolverStatusError: + assert True + + # force unknown termination condition error + + results.solver.termination_condition = "false_termin_condition" + + try: + _ = solver_interface.was_optimisation_sucessful( + results, problem_type + ) + + except solver_interface.UnknownTerminationConditionError: + assert True + + # force an InconsistentSolverStatusError + + results.solver.termination_condition = TerminationCondition.optimal + + results.solver.status = TerminationCondition.to_solver_status( + results.solver.termination_condition + ) + + results.solver.termination_condition = TerminationCondition.unknown + + try: + _ = solver_interface.was_optimisation_sucessful( + results, problem_type + ) + + except solver_interface.InconsistentSolverStatusError: + assert True + + # force an InconsistentProblemTypeAndSolverError + + if problem_type == SolverInterface.PROBLEM_LP and solver == "glpk": + problem_type = SolverInterface.PROBLEM_QP + + try: + _ = solver_interface.was_optimisation_sucessful( + results, problem_type + ) + + except solver_interface.InconsistentProblemTypeAndSolverError: + assert True + + # ********************************************************************* + # ********************************************************************* # ************************************************************************* # *************************************************************************
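+    # TODO: the forced-error checks above still pass if no exception is
+    # raised; assuming pytest is the test runner, they could be tightened
+    # along these lines (sketch only, not wired into the test yet):
+    #
+    # import pytest
+    #
+    # with pytest.raises(solver_interface.UnknownSolverStatusError):
+    #     solver_interface.was_optimisation_sucessful(results, problem_type)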