Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • pmag/topupopt
1 result
Show changes
Commits on Source (18)
Showing
with 3248 additions and 2290 deletions
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# from . import mvesipp \ No newline at end of file
# -*- coding: utf-8 -*-
\ No newline at end of file
# imports
import pyomo.environ as pyo
# *****************************************************************************
# *****************************************************************************
def price_delta_block(
    model: pyo.AbstractModel,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """
    Add a piecewise-linear resource price model (delta formulation) to
    *model*, using one pyomo Block per import/export node and time slot.

    Each block declares its own price-segment set, price/volume parameters,
    a total transshipment flow variable and per-segment usage variables,
    plus the constraints tying those to the parent model's monetary flow
    (var_ifc_glqpk / var_efr_glqpk) and network flow (var_v_glljqk)
    variables. For non-convex price functions, binary variables enforce
    that segments are filled in order.

    Parameters
    ----------
    model : pyo.AbstractModel
        The model to extend in place; must already declare set_GL_exp_imp,
        set_QPK, set_GL_imp and the flow/cost components referenced below.
    enable_default_values, enable_validation, enable_initialisation : bool
        Not referenced in this function; kept for signature consistency
        with the sibling constructors.
    """
    # auxiliary set for pyomo: cross product of trade nodes and time slots
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK

    # create a block for a transshipment node during a given time interval
    def rule_block_prices(b, g, l, q, p, k):
        # *********************************************************************
        # sets
        # set of price segments (contents supplied via block data)
        b.set_S = pyo.Set()
        # *********************************************************************
        # parameters
        # resource prices, one per segment
        b.param_p_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # price function convexity; if False, ordering binaries are added
        b.param_price_function_is_convex = pyo.Param(within=pyo.Boolean)
        # maximum resource volumes for each price segment
        b.param_v_max_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # *********************************************************************
        # variables
        # TODO: consider replacing None in the bounds with inf in the parameter
        # import and export flows
        def bounds_var_trans_flows(b):
            # EAFP: if any segment volume is undefined, leave the flow
            # unbounded above instead of failing during construction
            try:
                return (0, sum(b.param_v_max_s[s] for s in b.set_S))
            except Exception:
                return (0, None)
        b.var_trans_flows = pyo.Var(
            # within=pyo.NonNegativeReals,
            bounds=bounds_var_trans_flows
        )
        # segment usage variables: fraction of each segment's volume used
        b.var_segment_usage_s = pyo.Var(
            b.set_S,
            within=pyo.UnitInterval,
        )
        # *********************************************************************
        # import flow costs and export flow revenues (y function)
        def rule_constr_trans_monetary_flows(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # import node: segment cost sum defines the import flow cost
                return (
                    sum(
                        b.var_segment_usage_s[s]*b.param_p_s[s]*b.param_v_max_s[s]
                        for s in b.set_S
                    )
                    == b.parent_block().var_ifc_glqpk[(g,l,q,p,k)]
                )
            else:
                # export node: segment sum defines the export flow revenue
                return (
                    sum(
                        b.var_segment_usage_s[s]*b.param_p_s[s]*b.param_v_max_s[s]
                        for s in b.set_S
                    )
                    == b.parent_block().var_efr_glqpk[(g,l,q,p,k)]
                )
        b.constr_trans_monetary_flows = pyo.Constraint(
            rule=rule_constr_trans_monetary_flows
        )
        # imported and exported flows (x function)
        def rule_constr_trans_flows_seg(b):
            # total transshipment flow equals the sum of used segment volumes
            return sum(
                b.var_segment_usage_s[s]*b.param_v_max_s[s]
                for s in b.set_S
            ) == b.var_trans_flows
        b.constr_trans_flows_seg = pyo.Constraint(rule=rule_constr_trans_flows_seg)
        # imported and exported flows: energy system defines the x value
        def rule_constr_trans_flows_sys(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # flow leaving the import node towards non-import nodes
                return sum(
                    b.parent_block().var_v_glljqk[(g,l,l_star,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_imp[g]
                    for j in b.parent_block().set_J[(g,l,l_star)] # only directed arcs
                ) == b.var_trans_flows
            else:
                # efficiency-adjusted flow arriving at the export node
                return sum(
                    b.parent_block().var_v_glljqk[(g,l_star,l,j,q,k)]
                    * b.parent_block().param_eta_glljqk[(g,l_star,l,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_exp[g]
                    for j in b.parent_block().set_J[(g,l_star,l)] # only directed arcs
                ) == b.var_trans_flows
        b.constr_trans_flows_sys = pyo.Constraint(rule=rule_constr_trans_flows_sys)
        # *********************************************************************
        # non-convex price functions: force segments to fill in order via
        # auxiliary binaries (not needed for convex tariffs)
        if not b.param_price_function_is_convex:
            # binary variables indicating whether a segment is fully used
            b.var_segment_is_full = pyo.Var(
                b.set_S,
                within=pyo.Binary
            )
            # if next level is active, bin. var. for current level must be one
            # if bin. var. for current level is one, segment must be used fully
            # if previous level is not full, bin. var for current level must be zero
            # NOTE(review): the rules below assume segments are indexed
            # 0..len(set_S)-1 — confirm against the block data format
            def rule_constr_nonconvex_p1(b, s):
                if s == len(b.set_S)-1:
                    # last segment, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_segment_usage_s[s+1] <=
                    b.var_segment_is_full[s]
                )
            b.constr_nonconvex_p1 = pyo.Constraint(
                b.set_S, rule=rule_constr_nonconvex_p1
            )
            def rule_constr_nonconvex_p2(b, s):
                if s == len(b.set_S)-1:
                    # last segment, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_segment_usage_s[s] >=
                    b.var_segment_is_full[s]
                )
            b.constr_nonconvex_p2 = pyo.Constraint(
                b.set_S, rule=rule_constr_nonconvex_p2
            )
            def rule_constr_nonconvex_p3(b, s):
                if s == len(b.set_S)-1:
                    # last segment, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_segment_usage_s[s] >=
                    b.var_segment_is_full[s+1]
                )
            b.constr_nonconvex_p3 = pyo.Constraint(
                b.set_S, rule=rule_constr_nonconvex_p3
            )
            def rule_constr_nonconvex_p4(b, s):
                if s == len(b.set_S)-1:
                    # last segment, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_segment_usage_s[s+1] <=
                    b.var_segment_is_full[s+1]
                )
            b.constr_nonconvex_p4 = pyo.Constraint(
                b.set_S, rule=rule_constr_nonconvex_p4
            )
        # *********************************************************************
    model.block_prices = pyo.Block(model.set_GLQPK, rule=rule_block_prices)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
def price_delta_no_block(
    model: pyo.AbstractModel,
    # convex_price_function: bool = True,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """
    Add a piecewise-linear resource price model (delta formulation) to
    *model* using flat, tuple-indexed components instead of pyomo Blocks.

    Mirrors ``price_delta_block``: per-segment usage variables define the
    monetary flows (var_ifc_glqpk / var_efr_glqpk) and the transshipment
    flow tied to the network variables (var_v_glljqk). For non-convex
    price functions, binary variables enforce that segments fill in order.

    Fixes relative to the previous revision:
    - the transshipment flow upper bound indexed ``param_v_max_glqpks``
      with a bare segment index instead of the full 6-tuple, so the lookup
      always raised and the bound silently degraded to unbounded;
    - the non-convex ordering constraints were generated even for convex
      tariffs, unlike the block-based and SOS2-based siblings; they are
      now skipped when the price function is convex.

    Parameters
    ----------
    model : pyo.AbstractModel
        Model to extend in place; must already declare set_GL_exp_imp,
        set_QPK, set_GL_imp and the flow/cost components referenced below.
    enable_default_values : bool
        Currently unused in this function.
    enable_validation : bool
        Currently unused in this function.
    enable_initialisation : bool
        If True, initialise set_GLQPKS from the per-node segment sets.
    """
    # auxiliary set for pyomo: cross product of trade nodes and time slots
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK
    # set of price segments, indexed per (g, l, q, p, k)
    model.set_S = pyo.Set(model.set_GLQPK)
    # set of GLQPKS tuples: flattened (node, time slot, segment) index
    def init_set_GLQPKS(m):
        return (
            (g, l, q, p, k, s)
            for (g, l, q, p, k) in m.set_S
            for s in m.set_S[(g, l, q, p, k)]
        )
    model.set_GLQPKS = pyo.Set(
        dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
    )
    # *************************************************************************
    # parameters
    # resource prices, one per segment
    model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)
    # price function convexity, one flag per node and time slot
    model.param_price_function_is_convex = pyo.Param(
        model.set_GLQPK,
        within=pyo.Boolean
    )
    # maximum resource volumes for each price segment
    model.param_v_max_glqpks = pyo.Param(
        model.set_GLQPKS,
        within=pyo.NonNegativeReals,
        # default=math.inf
    )
    # *************************************************************************
    # variables
    # import and export flows
    def bounds_var_trans_flows_glqpk(m, g, l, q, p, k):
        try:
            # bug fix: param_v_max_glqpks is indexed by 6-tuples; the old
            # lookup used the bare segment index, always raised, and the
            # bound silently fell back to (0, None)
            return (
                0,
                sum(
                    m.param_v_max_glqpks[(g, l, q, p, k, s)]
                    for s in m.set_S[(g, l, q, p, k)]
                ),
            )
        except Exception:
            # undefined segment volume: leave the flow unbounded above
            return (0, None)
    model.var_trans_flows_glqpk = pyo.Var(
        model.set_GLQPK, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flows_glqpk
    )
    # segment usage variables: fraction of each segment's volume used
    model.var_segment_usage_glqpks = pyo.Var(
        model.set_GLQPKS,
        within=pyo.UnitInterval,
    )
    # *************************************************************************
    # import flow costs and export flow revenues (y function)
    def rule_constr_trans_monetary_flows(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            # import node: segment cost sum defines the import flow cost
            return (
                sum(
                    m.var_segment_usage_glqpks[(g, l, q, p, k, s)]*
                    m.param_p_glqpks[(g, l, q, p, k, s)]*
                    m.param_v_max_glqpks[(g, l, q, p, k, s)]
                    for s in m.set_S[(g, l, q, p, k)]
                )
                == m.var_ifc_glqpk[(g,l,q,p,k)]
            )
        else:
            # export node: segment sum defines the export flow revenue
            return (
                sum(
                    m.var_segment_usage_glqpks[(g, l, q, p, k, s)]*
                    m.param_p_glqpks[(g, l, q, p, k, s)]*
                    m.param_v_max_glqpks[(g, l, q, p, k, s)]
                    for s in m.set_S[(g, l, q, p, k)]
                )
                == m.var_efr_glqpk[(g,l,q,p,k)]
            )
    model.constr_trans_monetary_flows = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_trans_monetary_flows
    )
    # imported and exported flows (x function)
    def rule_constr_trans_flows_seg(m, g, l, q, p, k):
        # total transshipment flow equals the sum of used segment volumes
        return sum(
            m.var_segment_usage_glqpks[(g, l, q, p, k, s)]*m.param_v_max_glqpks[(g, l, q, p, k, s)]
            for s in m.set_S[(g, l, q, p, k)]
        ) == m.var_trans_flows_glqpk[(g, l, q, p, k)]
    model.constr_trans_flows_seg = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_trans_flows_seg
    )
    # imported and exported flows: energy system defines the x value
    def rule_constr_trans_flows_sys(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            # flow leaving the import node towards non-import nodes
            return sum(
                m.var_v_glljqk[(g,l,l_star,j,q,k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_imp[g]
                for j in m.set_J[(g,l,l_star)] # only directed arcs
            ) == m.var_trans_flows_glqpk[(g, l, q, p, k)]
        else:
            # efficiency-adjusted flow arriving at the export node
            return sum(
                m.var_v_glljqk[(g,l_star,l,j,q,k)]
                * m.param_eta_glljqk[(g,l_star,l,j,q,k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_exp[g]
                for j in m.set_J[(g,l_star,l)] # only directed arcs
            ) == m.var_trans_flows_glqpk[(g, l, q, p, k)]
    model.constr_trans_flows_sys = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_trans_flows_sys
    )
    # *************************************************************************
    # non-convex price functions: force segments to fill in order via
    # auxiliary binaries (skipped for convex tariffs, matching the
    # block-based and SOS2-based formulations)
    # binary variables indicating whether a segment is fully used
    model.var_segment_is_full = pyo.Var(
        model.set_GLQPKS,
        within=pyo.Binary
    )
    # if next level is active, bin. var. for current level must be one
    # if bin. var. for current level is one, segment must be used fully
    # if previous level is not full, bin. var for current level must be zero
    def rule_constr_nonconvex_p1(m, g, l, q, p, k, s):
        if (s == len(m.set_S[(g, l, q, p, k)])-1
                or m.param_price_function_is_convex[(g, l, q, p, k)]):
            # last segment or convex tariff: skip
            return pyo.Constraint.Skip
        return (
            m.var_segment_usage_glqpks[(g,l,q,p,k,s+1)] <=
            m.var_segment_is_full[(g,l,q,p,k,s)]
        )
    model.constr_nonconvex_p1 = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_nonconvex_p1
    )
    def rule_constr_nonconvex_p2(m, g, l, q, p, k, s):
        if (s == len(m.set_S[(g, l, q, p, k)])-1
                or m.param_price_function_is_convex[(g, l, q, p, k)]):
            # last segment or convex tariff: skip
            return pyo.Constraint.Skip
        return (
            m.var_segment_usage_glqpks[(g,l,q,p,k,s)] >=
            m.var_segment_is_full[(g,l,q,p,k,s)]
        )
    model.constr_nonconvex_p2 = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_nonconvex_p2
    )
    def rule_constr_nonconvex_p3(m, g, l, q, p, k, s):
        if (s == len(m.set_S[(g, l, q, p, k)])-1
                or m.param_price_function_is_convex[(g, l, q, p, k)]):
            # last segment or convex tariff: skip
            return pyo.Constraint.Skip
        return (
            m.var_segment_usage_glqpks[(g,l,q,p,k,s)] >=
            m.var_segment_is_full[(g,l,q,p,k,s+1)]
        )
    model.constr_nonconvex_p3 = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_nonconvex_p3
    )
    def rule_constr_nonconvex_p4(m, g, l, q, p, k, s):
        if (s == len(m.set_S[(g, l, q, p, k)])-1
                or m.param_price_function_is_convex[(g, l, q, p, k)]):
            # last segment or convex tariff: skip
            return pyo.Constraint.Skip
        return (
            m.var_segment_usage_glqpks[(g,l,q,p,k,s+1)] <=
            m.var_segment_is_full[(g,l,q,p,k,s+1)]
        )
    model.constr_nonconvex_p4 = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_nonconvex_p4
    )
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# imports
import pyomo.environ as pyo
# *****************************************************************************
# *****************************************************************************
def price_lambda_block(
    model: pyo.AbstractModel,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """
    Add a piecewise-linear price model (lambda/convex-combination
    formulation) to *model*, using one pyomo Block per import/export node
    and time slot.

    Breakpoint coordinates (param_x_o, param_y_o) are derived from the
    per-segment volumes and prices; interpoint weight variables select a
    point on the price curve, and an SOS2 condition restricts the weights
    when the price function is non-convex.

    Fixes relative to the previous revision:
    - the generator loop variable ``p`` shadowed the block's period index
      ``p`` inside the constraint rules (the y-equation even mixed both in
      one expression, relying on generator-expression scoping); the loop
      variable is now ``o``;
    - ``rule_constr_x_equation`` had byte-identical if/else branches; the
      dead duplication was removed.

    Parameters
    ----------
    model : pyo.AbstractModel
        Model to extend in place; must already declare set_GL_exp_imp,
        set_QPK, set_GL_imp and the flow/cost components referenced below.
    enable_default_values, enable_validation, enable_initialisation : bool
        Not referenced in this function; kept for signature consistency
        with the sibling constructors.
    """
    # auxiliary set for pyomo: cross product of trade nodes and time slots
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK

    # create a block for a transshipment node during a given time interval
    def rule_block_prices(b, g, l, q, p, k):
        # *********************************************************************
        # sets
        # set of price segments (contents supplied via block data)
        b.set_S = pyo.Set()
        # *********************************************************************
        # parameters
        # resource prices, one per segment
        b.param_p_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # price function convexity; if False, an SOS2 condition is added
        b.param_price_function_is_convex = pyo.Param(within=pyo.Boolean)
        # maximum resource volumes for each price segment
        b.param_v_max_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # *********************************************************************
        # variables
        # set of breakpoints: one more than the number of segments
        b.set_O = pyo.Set(initialize=[i for i in range(len(b.set_S)+1)])
        # x coordinates (cumulative volumes) of the breakpoints
        def init_param_x_o(b, o):
            return 0 if o == 0 else sum(b.param_v_max_s[i] for i in range(o))
        b.param_x_o = pyo.Param(
            b.set_O,
            within=pyo.NonNegativeReals,
            initialize=init_param_x_o
        )
        # y coordinates (cumulative cost/revenue) of the breakpoints
        def init_param_y_o(b, o):
            # NOTE(review): reads param_y_o[o-1] while the parameter is still
            # being initialised — confirm pyomo fills entries in ascending
            # index order
            return 0 if o == 0 else (b.param_x_o[o]-b.param_x_o[o-1])*b.param_p_s[o-1]+b.param_y_o[o-1]
        b.param_y_o = pyo.Param(
            b.set_O,
            within=pyo.NonNegativeReals,
            initialize=init_param_y_o
        )
        # interpoint weights
        b.var_weights_o = pyo.Var(b.set_O, within=pyo.UnitInterval)
        # TODO: consider replacing None in the bounds with inf in the parameter
        # transshipment flows
        def bounds_var_trans_flows(b):
            # EAFP: if any segment volume is undefined, leave the flow
            # unbounded above instead of failing during construction
            try:
                return (0, sum(b.param_v_max_s[s] for s in b.set_S))
            except Exception:
                return (0, None)
        b.var_trans_flows = pyo.Var(bounds=bounds_var_trans_flows)
        # *********************************************************************
        # the weights must form a convex combination
        def rule_constr_stick_to_line(b):
            return sum(b.var_weights_o[o] for o in b.set_O) == 1
        b.constr_stick_to_line = pyo.Constraint(rule=rule_constr_stick_to_line)
        # *********************************************************************
        # import flow costs and export flow revenues (y equation)
        def rule_constr_y_equation(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # import node: weighted y defines the import flow cost
                return (
                    sum(b.var_weights_o[o]*b.param_y_o[o] for o in b.set_O)
                    == b.parent_block().var_ifc_glqpk[(g,l,q,p,k)]
                )
            else:
                # export node: weighted y defines the export flow revenue
                return (
                    sum(b.var_weights_o[o]*b.param_y_o[o] for o in b.set_O)
                    == b.parent_block().var_efr_glqpk[(g,l,q,p,k)]
                )
        b.constr_y_equation = pyo.Constraint(rule=rule_constr_y_equation)
        # imported and exported flows (x equation)
        def rule_constr_x_equation(b):
            # import and export nodes share the same expression
            return (
                sum(b.var_weights_o[o]*b.param_x_o[o] for o in b.set_O)
                == b.var_trans_flows
            )
        b.constr_x_equation = pyo.Constraint(rule=rule_constr_x_equation)
        # imported and exported flows (system equation)
        def rule_constr_sys_equation(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # flow leaving the import node towards non-import nodes
                return sum(
                    b.parent_block().var_v_glljqk[(g,l,l_star,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_imp[g]
                    for j in b.parent_block().set_J[(g,l,l_star)] # only directed arcs
                ) == b.var_trans_flows
            else:
                # efficiency-adjusted flow arriving at the export node
                return sum(
                    b.parent_block().var_v_glljqk[(g,l_star,l,j,q,k)]
                    * b.parent_block().param_eta_glljqk[(g,l_star,l,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_exp[g]
                    for j in b.parent_block().set_J[(g,l_star,l)] # only directed arcs
                ) == b.var_trans_flows
        b.constr_sys_equation = pyo.Constraint(rule=rule_constr_sys_equation)
        # *********************************************************************
        # non-convex price functions require an explicit SOS2 condition on
        # the weights
        if not b.param_price_function_is_convex:
            # declare SOS2
            def rule_constr_sos2_weights(b):
                return [b.var_weights_o[o] for o in b.set_O]
            b.constr_sos2_weights = pyo.SOSConstraint(rule=rule_constr_sos2_weights, sos=2)
        # *********************************************************************
    model.block_prices = pyo.Block(model.set_GLQPK, rule=rule_block_prices)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
def price_lambda_no_block(
    model: pyo.AbstractModel,
    # convex_price_function: bool = True,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """
    Add a piecewise-linear price model (lambda/convex-combination
    formulation) to *model* using flat, tuple-indexed components instead
    of pyomo Blocks.

    Breakpoint coordinates (param_x_glqpko, param_y_glqpko) are derived
    from the per-segment volumes and prices; interpoint weight variables
    select a point on the price curve, and an SOS2 condition restricts the
    weights for non-convex price functions.

    Fix relative to the previous revision: ``rule_constr_x_equation`` had
    byte-identical if/else branches; the dead duplication was removed.

    Parameters
    ----------
    model : pyo.AbstractModel
        Model to extend in place; must already declare set_GL_exp_imp,
        set_QPK, set_GL_imp and the flow/cost components referenced below.
    enable_default_values : bool
        Currently unused in this function.
    enable_validation : bool
        Currently unused in this function.
    enable_initialisation : bool
        If True, initialise the derived sets from the segment data.
    """
    # auxiliary set for pyomo: cross product of trade nodes and time slots
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK
    # set of price segments, indexed per (g, l, q, p, k)
    model.set_S = pyo.Set(model.set_GLQPK)
    # set of GLQPKS tuples: flattened (node, time slot, segment) index
    def init_set_GLQPKS(m):
        return (
            (g, l, q, p, k, s)
            for (g, l, q, p, k) in m.set_S
            for s in m.set_S[(g, l, q, p, k)]
        )
    model.set_GLQPKS = pyo.Set(
        dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
    )
    # *************************************************************************
    # parameters
    # resource prices, one per segment
    model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)
    # price function convexity, one flag per node and time slot
    model.param_price_function_is_convex = pyo.Param(
        model.set_GLQPK,
        within=pyo.Boolean
    )
    # maximum resource volumes for each price segment
    model.param_v_max_glqpks = pyo.Param(
        model.set_GLQPKS,
        within=pyo.NonNegativeReals,
        # default=math.inf
    )
    # *************************************************************************
    # variables
    # set of breakpoints: one more than the number of segments
    def init_set_O(m, g, l, q, p, k):
        return [i for i in range(len(m.set_S[(g,l,q,p,k)])+1)]
    model.set_O = pyo.Set(
        model.set_GLQPK,
        initialize=(init_set_O if enable_initialisation else None)
    )
    # set of GLQPKO tuples: flattened (node, time slot, breakpoint) index
    def init_set_GLQPKO(m):
        return (
            (g, l, q, p, k, o)
            for (g, l, q, p, k) in m.set_O
            for o in m.set_O[(g, l, q, p, k)]
        )
    model.set_GLQPKO = pyo.Set(
        dimen=6, initialize=(init_set_GLQPKO if enable_initialisation else None)
    )
    # x coordinates (cumulative volumes) of the breakpoints
    def init_param_x_glqpko(m, g, l, q, p, k, o):
        return 0 if o == 0 else sum(m.param_v_max_glqpks[(g,l,q,p,k,i)] for i in range(o))
    model.param_x_glqpko = pyo.Param(
        model.set_GLQPKO,
        within=pyo.NonNegativeReals,
        initialize=init_param_x_glqpko
    )
    # y coordinates (cumulative cost/revenue) of the breakpoints
    def init_param_y_glqpko(m, g, l, q, p, k, o):
        # NOTE(review): reads param_y_glqpko[..., o-1] while the parameter
        # is still being initialised — confirm pyomo fills entries in
        # ascending o order
        return (
            0
            if o == 0 else
            (m.param_x_glqpko[(g,l,q,p,k,o)]
             -m.param_x_glqpko[(g,l,q,p,k,o-1)])*
            m.param_p_glqpks[(g,l,q,p,k,o-1)]+
            m.param_y_glqpko[(g,l,q,p,k,o-1)]
        )
    model.param_y_glqpko = pyo.Param(
        model.set_GLQPKO,
        within=pyo.NonNegativeReals,
        initialize=init_param_y_glqpko
    )
    # interpoint weights
    model.var_weights_glqpko = pyo.Var(
        model.set_GLQPKO,
        within=pyo.UnitInterval
    )
    # TODO: consider replacing None in the bounds with inf in the parameter
    # transshipment flows
    def bounds_var_trans_flows_glqpk(m, g, l, q, p, k):
        # EAFP: if any segment volume is undefined, leave the flow
        # unbounded above instead of failing during construction
        try:
            return (0, sum(m.param_v_max_glqpks[(g, l, q, p, k, s)]
                           for s in m.set_S[(g, l, q, p, k)]))
        except Exception:
            return (0, None)
    model.var_trans_flows_glqpk = pyo.Var(
        model.set_GLQPK,
        bounds=bounds_var_trans_flows_glqpk
    )
    # *************************************************************************
    # the weights must form a convex combination
    def rule_constr_stick_to_line(m, g, l, q, p, k):
        return sum(m.var_weights_glqpko[(g,l,q,p,k,o)]
                   for o in m.set_O[(g,l,q,p,k)]) == 1
    model.constr_stick_to_line = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_stick_to_line
    )
    # *************************************************************************
    # import flow costs and export flow revenues (y equation)
    def rule_constr_y_equation(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            # import node: weighted y defines the import flow cost
            return (
                sum(m.var_weights_glqpko[(g,l,q,p,k,o)]*
                    m.param_y_glqpko[(g,l,q,p,k,o)]
                    for o in m.set_O[(g,l,q,p,k)])
                == m.var_ifc_glqpk[(g,l,q,p,k)]
            )
        else:
            # export node: weighted y defines the export flow revenue
            return (
                sum(m.var_weights_glqpko[(g,l,q,p,k,o)]*
                    m.param_y_glqpko[(g,l,q,p,k,o)]
                    for o in m.set_O[(g,l,q,p,k)])
                == m.var_efr_glqpk[(g,l,q,p,k)]
            )
    model.constr_y_equation = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_y_equation
    )
    # imported and exported flows (x equation)
    def rule_constr_x_equation(m, g, l, q, p, k):
        # import and export nodes share the same expression
        return (
            sum(m.var_weights_glqpko[(g,l,q,p,k,o)]*m.param_x_glqpko[(g,l,q,p,k,o)]
                for o in m.set_O[(g,l,q,p,k)])
            == m.var_trans_flows_glqpk[(g,l,q,p,k)]
        )
    model.constr_x_equation = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_x_equation
    )
    # imported and exported flows (system equation)
    def rule_constr_sys_equation(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            # flow leaving the import node towards non-import nodes
            return sum(
                m.var_v_glljqk[(g,l,l_star,j,q,k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_imp[g]
                for j in m.set_J[(g,l,l_star)] # only directed arcs
            ) == m.var_trans_flows_glqpk[(g,l,q,p,k)]
        else:
            # efficiency-adjusted flow arriving at the export node
            return sum(
                m.var_v_glljqk[(g,l_star,l,j,q,k)]
                * m.param_eta_glljqk[(g,l_star,l,j,q,k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_exp[g]
                for j in m.set_J[(g,l_star,l)] # only directed arcs
            ) == m.var_trans_flows_glqpk[(g,l,q,p,k)]
    model.constr_sys_equation = pyo.Constraint(
        model.set_GLQPK,
        rule=rule_constr_sys_equation
    )
    # *************************************************************************
    # non-convex price functions require an SOS2 condition on the weights
    def rule_constr_sos2_weights(m, g, l, q, p, k):
        if m.param_price_function_is_convex[(g,l,q,p,k)]:
            # convex tariff: no SOS2 condition needed
            return pyo.SOSConstraint.Skip
        return [m.var_weights_glqpko[(g,l,q,p,k,o)] for o in m.set_O[(g,l,q,p,k)]]
    model.constr_sos2_weights = pyo.SOSConstraint(
        model.set_GLQPK,
        rule=rule_constr_sos2_weights,
        sos=2)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# imports
import pyomo.environ as pyo
# *****************************************************************************
# *****************************************************************************
# description: variation of the delta formulation
# *****************************************************************************
# *****************************************************************************
def price_other_block(
    model: pyo.AbstractModel,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """
    Add a piecewise-linear price model (variation of the delta
    formulation) to *model*, using one pyomo Block per import/export node
    and time slot.

    Unlike ``price_delta_block``, each segment here gets its own flow
    variable (var_trans_flows_s) instead of a fractional usage variable;
    the segment flows are priced directly and summed to match the network
    flows. Non-convex price functions get binary activation variables
    that force segments to fill in order.

    Parameters
    ----------
    model : pyo.AbstractModel
        Model to extend in place; must already declare set_GL_exp_imp,
        set_QPK, set_GL_imp and the flow/cost components referenced below.
    enable_default_values, enable_validation, enable_initialisation : bool
        Not referenced in this function; kept for signature consistency
        with the sibling constructors.
    """
    # auxiliary set for pyomo: cross product of trade nodes and time slots
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK

    # create a block for a transshipment node during a given time interval
    def rule_block_prices(b, g, l, q, p, k):
        # *********************************************************************
        # sets
        # set of price segments (contents supplied via block data)
        b.set_S = pyo.Set()
        # TODO: introduce a set of price segments for non-convex tariffs
        # def init_set_S_nonconvex(b, s):
        #     return (s for s in b.set_S if b.param_price_function_is_convex)
        # b.set_S_nonconvex = pyo.Set(within=b.set_S, initialize=init_set_S_nonconvex)
        # *********************************************************************
        # parameters
        # resource prices, one per segment
        b.param_p_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # price function convexity; if False, ordering binaries are added
        b.param_price_function_is_convex = pyo.Param(within=pyo.Boolean)
        # maximum resource volumes for each price segment
        b.param_v_max_s = pyo.Param(b.set_S, within=pyo.NonNegativeReals)
        # *********************************************************************
        # variables
        # TODO: consider replacing None in the bounds with inf in the parameter
        # import and export flows, one variable per price segment
        def bounds_var_trans_flows_s(b, s):
            if s in b.param_v_max_s:
                # predefined finite capacity
                return (0, b.param_v_max_s[s])
            else:
                # infinite capacity
                return (0, None)
        b.var_trans_flows_s = pyo.Var(
            b.set_S,
            within=pyo.NonNegativeReals,
            bounds=bounds_var_trans_flows_s
        )
        # *********************************************************************
        # import flow costs and export flow revenues
        def rule_constr_trans_monetary_flows(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # import node: priced segment flows define the import cost
                return (
                    sum(b.var_trans_flows_s[s]*b.param_p_s[s]
                        for s in b.set_S)
                    == b.parent_block().var_ifc_glqpk[(g,l,q,p,k)]
                )
            else:
                # export node: priced segment flows define the export revenue
                return (
                    sum(b.var_trans_flows_s[s]*b.param_p_s[s]
                        for s in b.set_S)
                    == b.parent_block().var_efr_glqpk[(g,l,q,p,k)]
                )
        b.constr_trans_monetary_flows = pyo.Constraint(
            rule=rule_constr_trans_monetary_flows
        )
        # imported and exported flows
        def rule_constr_trans_flows(b):
            if (g,l) in b.parent_block().set_GL_imp:
                # flow leaving the import node towards non-import nodes
                return sum(
                    b.parent_block().var_v_glljqk[(g,l,l_star,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_imp[g]
                    for j in b.parent_block().set_J[(g,l,l_star)] # only directed arcs
                ) == sum(b.var_trans_flows_s[s] for s in b.set_S)
            else:
                # efficiency-adjusted flow arriving at the export node
                return sum(
                    b.parent_block().var_v_glljqk[(g,l_star,l,j,q,k)]
                    * b.parent_block().param_eta_glljqk[(g,l_star,l,j,q,k)]
                    for l_star in b.parent_block().set_L[g]
                    if l_star not in b.parent_block().set_L_exp[g]
                    for j in b.parent_block().set_J[(g,l_star,l)] # only directed arcs
                ) == sum(b.var_trans_flows_s[s] for s in b.set_S)
        b.constr_trans_flows = pyo.Constraint(rule=rule_constr_trans_flows)
        # *********************************************************************
        # non-convex price functions: force segments to fill in order
        if not b.param_price_function_is_convex:
            # delta variables: one binary per segment marking it as active
            b.var_active_segment_s = pyo.Var(b.set_S, within=pyo.Binary)
            # segments must be empty if the respective delta variable is zero
            # NOTE(review): the convexity test inside the rules below is
            # redundant — this branch already implies a non-convex tariff
            def rule_constr_empty_segment_if_delta_zero(b, s):
                if len(b.set_S) == 1 or b.param_price_function_is_convex:
                    # single segment, skip
                    # convex, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_trans_flows_s[s] <=
                    b.param_v_max_s[s]*b.var_active_segment_s[s]
                )
            b.constr_empty_segment_if_delta_zero = pyo.Constraint(
                b.set_S, rule=rule_constr_empty_segment_if_delta_zero
            )
            # if delta var is one, previous ones must be one too
            # if delta var is zero, the next ones must also be zero
            def rule_constr_delta_summing_logic(b, s):
                if s == len(b.set_S)-1 or b.param_price_function_is_convex:
                    # last segment, skip
                    # convex, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_active_segment_s[s] >=
                    b.var_active_segment_s[s+1]
                )
            b.constr_delta_summing_logic = pyo.Constraint(
                b.set_S, rule=rule_constr_delta_summing_logic
            )
            # if a segment is not completely used, the next ones must remain empty
            def rule_constr_fill_up_segment_before_next(b, s):
                if s == len(b.set_S)-1 or b.param_price_function_is_convex:
                    # last segment, skip
                    # convex, skip
                    return pyo.Constraint.Skip
                return (
                    b.var_trans_flows_s[s] >=
                    b.var_active_segment_s[s+1]*
                    b.param_v_max_s[s]
                )
            b.constr_fill_up_segment_before_next = pyo.Constraint(
                b.set_S, rule=rule_constr_fill_up_segment_before_next
            )
        # *********************************************************************
    model.block_prices = pyo.Block(model.set_GLQPK, rule=rule_block_prices)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
def price_other_no_block(
    model: pyo.AbstractModel,
    # convex_price_function: bool = True,
    enable_default_values: bool = True,
    enable_validation: bool = True,
    enable_initialisation: bool = True
):
    """Add segmented (tiered) import/export price machinery directly on the model.

    Declares, without using Pyomo Blocks, the sets, parameters, variables and
    constraints that price transshipment flows at import/export nodes using a
    piecewise-linear (segment-wise) price function, including the binary
    "delta" big-M machinery needed when the price function is non-convex.

    Expects the model to already declare (referenced but not created here):
    ``set_GL_exp_imp``, ``set_QPK``, ``set_GL_imp``, ``set_L``, ``set_L_imp``,
    ``set_L_exp``, ``set_J``, ``var_v_glljqk``, ``param_eta_glljqk``,
    ``var_ifc_glqpk`` and ``var_efr_glqpk``.

    Components added: ``set_GLQPK``, ``set_S``, ``set_GLQPKS``,
    ``param_p_glqpks``, ``param_price_function_is_convex``,
    ``param_v_max_glqpks``, ``var_trans_flows_glqpks``,
    ``var_active_segment_glqpks``, and the constraints
    ``constr_trans_monetary_flows``, ``constr_trans_flows``,
    ``constr_empty_segment_if_delta_zero``, ``constr_delta_summing_logic``
    and ``constr_fill_up_segment_before_next``.

    Parameters
    ----------
    model : pyo.AbstractModel
        The abstract model to extend in place.
    enable_default_values : bool
        Accepted for interface symmetry with sibling builders; not used in
        this function's body.
    enable_validation : bool
        Accepted for interface symmetry; not used in this function's body.
    enable_initialisation : bool
        If True, ``set_GLQPKS`` is given an initialisation rule; otherwise it
        must be initialised via data.
    """
    # auxiliary set for pyomo: cartesian product indexing (network, node,
    # assessment, period, time interval) for every price-bearing node
    model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK

    # set of price segments, indexed per (g, l, q, p, k) tuple
    model.set_S = pyo.Set(model.set_GLQPK)

    # set of GLQKS tuples
    # NOTE: iterating an indexed Set yields its index keys, hence the
    # (g, l, q, p, k) unpacking below
    def init_set_GLQPKS(m):
        return (
            (g, l, q, p, k, s)
            # for (g,l) in m.set_GL_exp_imp
            # for (q,k) in m.set_QK
            for (g, l, q, p, k) in m.set_S
            for s in m.set_S[(g, l, q, p, k)]
        )
    model.set_GLQPKS = pyo.Set(
        dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
    )

    # *************************************************************************
    # *************************************************************************

    # parameters

    # resource prices (per segment)
    model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)

    # price function convexity: when True, the binary-segment constraints
    # below are skipped for that (g, l, q, p, k) tuple
    model.param_price_function_is_convex = pyo.Param(
        model.set_GLQPK,
        within=pyo.Boolean
    )

    # maximum resource volumes for each prices
    # NOTE(review): declared without a default, so a missing key means
    # "infinite capacity" and membership must be tested before indexing
    # (as the bounds rule below does) — confirm every non-convex tuple has
    # finite volumes, since the big-M constraints index this param directly
    model.param_v_max_glqpks = pyo.Param(
        model.set_GLQPKS,
        within=pyo.NonNegativeReals,
        # default=math.inf
    )

    # *************************************************************************
    # *************************************************************************

    # variables

    # *************************************************************************
    # *************************************************************************

    # import and export flows
    # bounds: cap at the segment volume when one is defined, else unbounded
    def bounds_var_trans_flows_glqpks(m, g, l, q, p, k, s):
        # return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
        if (g, l, q, p, k, s) in m.param_v_max_glqpks:
            # predefined finite capacity
            return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
        else:
            # infinite capacity
            return (0, None)
    model.var_trans_flows_glqpks = pyo.Var(
        model.set_GLQPKS, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flows_glqpks
    )

    # *************************************************************************
    # *************************************************************************

    # import flow costs and export flow revenues
    # one constraint per (g, l, q, p, k): sum over segments of flow*price
    # equals the node's cost (imports) or revenue (exports) variable
    def rule_constr_trans_monetary_flows(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            return (
                sum(
                    m.var_trans_flows_glqpks[(g, l, q, p, k, s)]
                    * m.param_p_glqpks[(g, l, q, p, k, s)]
                    for s in m.set_S[(g, l, q, p, k)]
                )
                == m.var_ifc_glqpk[(g, l, q, p, k)]
            )
        else:
            return (
                sum(
                    m.var_trans_flows_glqpks[(g, l, q, p, k, s)]
                    * m.param_p_glqpks[(g, l, q, p, k, s)]
                    for s in m.set_S[(g, l, q, p, k)]
                )
                == m.var_efr_glqpk[(g, l, q, p, k)]
            )
    model.constr_trans_monetary_flows = pyo.Constraint(
        model.set_GLQPK, rule=rule_constr_trans_monetary_flows
    )

    # imported and exported flows
    # ties the arc flows incident to the node to the sum of its segment flows;
    # export flows are scaled by the arc efficiency param_eta_glljqk
    def rule_constr_trans_flows(m, g, l, q, p, k):
        if (g,l) in m.set_GL_imp:
            return sum(
                m.var_v_glljqk[(g, l, l_star, j, q, k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_imp[g]
                for j in m.set_J[(g, l, l_star)] # only directed arcs
            ) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
        else:
            return sum(
                m.var_v_glljqk[(g, l_star, l, j, q, k)]
                * m.param_eta_glljqk[(g, l_star, l, j, q, k)]
                for l_star in m.set_L[g]
                if l_star not in m.set_L_exp[g]
                for j in m.set_J[(g, l_star, l)] # only directed arcs
            ) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
    model.constr_trans_flows = pyo.Constraint(
        model.set_GLQPK, rule=rule_constr_trans_flows
    )

    # *************************************************************************
    # *************************************************************************

    # non-convex price functions
    # TODO: remove these variables from the model if they are not needed

    # delta variables: segment s is "active" (may carry flow) iff delta = 1
    model.var_active_segment_glqpks = pyo.Var(
        model.set_GLQPKS, within=pyo.Binary
    )

    # segments must be empty if the respective delta variable is zero
    # (big-M with M = the segment's maximum volume)
    def rule_constr_empty_segment_if_delta_zero(m, g, l, q, p, k, s):
        if len(m.set_S[(g,l,q,p,k)]) == 1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
            # single segment, skip
            # convex, skip
            return pyo.Constraint.Skip
        return (
            m.var_trans_flows_glqpks[(g,l,q,p,k,s)] <=
            m.param_v_max_glqpks[(g,l,q,p,k,s)]*
            m.var_active_segment_glqpks[(g,l,q,p,k,s)]
        )
    model.constr_empty_segment_if_delta_zero = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_empty_segment_if_delta_zero
    )

    # if delta var is one, previous ones must be one too
    # if delta var is zero, the next ones must also be zero
    # NOTE(review): the s+1 / len-1 arithmetic assumes segments are indexed
    # 0..n-1 contiguously — confirm against the model data
    def rule_constr_delta_summing_logic(m, g, l, q, p, k, s):
        if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
            # last segment, skip
            # convex, skip
            return pyo.Constraint.Skip
        return (
            m.var_active_segment_glqpks[(g,l,q,p,k,s)] >=
            m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
        )
    model.constr_delta_summing_logic = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_delta_summing_logic
    )

    # if a segment is not completely used, the next ones must remain empty
    def rule_constr_fill_up_segment_before_next(m, g, l, q, p, k, s):
        if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
            # last segment, skip
            # convex, skip
            return pyo.Constraint.Skip
        return (
            m.var_trans_flows_glqpks[(g,l,q,p,k,s)] >=
            m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]*
            m.param_v_max_glqpks[(g,l,q,p,k,s)]
        )
        # return (
        #     m.var_if_glqpks[(g,l,q,p,k,s)]/m.param_v_max_glqpks[(g,l,q,p,k,s)] >=
        #     m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
        #     )
        # return (
        #     m.param_v_max_glqpks[(g,l,q,p,k,s)]-m.var_if_glqpks[(g,l,q,p,k,s)] <=
        #     m.param_v_max_glqpks[(g,l,q,p,k,s)]*(1- m.var_active_segment_glqpks[(g,l,q,p,k,s+1)])
        #     )
    model.constr_fill_up_segment_before_next = pyo.Constraint(
        model.set_GLQPKS, rule=rule_constr_fill_up_segment_before_next
    )
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# imports # imports
import pyomo.environ as pyo import pyomo.environ as pyo
# ***************************************************************************** from .other import price_other_block, price_other_no_block
# ***************************************************************************** from .delta import price_delta_block, price_delta_no_block
from .lambda_ import price_lambda_block, price_lambda_no_block
def add_prices_block(
model: pyo.AbstractModel,
**kwargs
):
# *************************************************************************
# *************************************************************************
# model.node_price_block = pyo.Block(model.set_QPK)
price_other(model, **kwargs)
# price_block_other(model, **kwargs)
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
# TODO: try to implement it as a block
def price_block_other(
model: pyo.AbstractModel,
enable_default_values: bool = True,
enable_validation: bool = True,
enable_initialisation: bool = True
):
model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK
def rule_node_prices(b, g, l, q, p, k):
# imported flow
def bounds_var_if_glqpks(m, g, l, q, p, k, s):
if (g, l, q, p, k, s) in m.param_v_max_glqpks:
# predefined finite capacity
return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
else:
# infinite capacity
return (0, None)
b.var_trans_flow_s = pyo.Var(
b.set_GLQPKS, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flow_s
)
# imported flow cost
def rule_constr_imp_flow_cost(m, g, l, q, p, k):
return (
sum(
m.var_if_glqpks[(g, l, q, p, k, s)]
* m.param_p_glqpks[(g, l, q, p, k, s)]
for s in m.set_S[(g, l, q, p, k)]
)
== m.var_ifc_glqpk[(g, l, q, p, k)]
)
model.constr_imp_flow_cost = pyo.Constraint(
model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flow_cost
)
# imported flows
def rule_constr_imp_flows(m, g, l, q, p, k):
return sum(
m.var_v_glljqk[(g, l, l_star, j, q, k)]
for l_star in m.set_L[g]
if l_star not in m.set_L_imp[g]
for j in m.set_J[(g, l, l_star)] # only directed arcs
) == sum(m.var_if_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
model.constr_imp_flows = pyo.Constraint(
model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flows
)
# if (g,l) in b.parent_block().set_GL_imp:
# # import node
# pass
# elif (g,l) in b.parent_block().set_GL_exp:
# # export node
# pass
# otherwise: do nothing
model.node_price_block = pyo.Block(model.set_GLQPK, rule=rule_node_prices)
# set of price segments
model.node_price_block.set_S = pyo.Set()
# set of GLQKS tuples
def init_set_GLQPKS(m):
return (
(g, l, q, p, k, s)
# for (g,l) in m.set_GL_exp_imp
# for (q,k) in m.set_QK
for (g, l, q, p, k) in m.node_price_block.set_S
for s in m.node_price_block.set_S[(g, l, q, p, k)]
)
model.node_price_block.set_GLQPKS = pyo.Set(
dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
)
def init_set_GLQPKS_exp(m):
return (
glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_exp[glqpks[0]]
)
model.node_price_block.set_GLQPKS_exp = pyo.Set(
dimen=6, initialize=(init_set_GLQPKS_exp if enable_initialisation else None)
)
def init_set_GLQPKS_imp(m):
return (
glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_imp[glqpks[0]]
)
model.node_price_block.set_GLQPKS_imp = pyo.Set(
dimen=6, initialize=(init_set_GLQPKS_imp if enable_initialisation else None)
)
# *************************************************************************
# *************************************************************************
# parameters
# resource prices
model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)
# maximum resource volumes for each prices
model.param_v_max_glqpks = pyo.Param(
model.set_GLQPKS,
within=pyo.NonNegativeReals
)
# ************************************************************************* NODE_PRICE_LAMBDA = 'lambda'
# ************************************************************************* NODE_PRICE_DELTA = 'delta'
NODE_PRICE_OTHER = None
# variables NODE_PRICES = (
NODE_PRICE_LAMBDA,
# ************************************************************************* NODE_PRICE_DELTA,
# ************************************************************************* NODE_PRICE_OTHER
# exported flow
# TODO: validate the bounds by ensuring inf. cap. only exists in last segm.
def bounds_var_ef_glqpks(m, g, l, q, p, k, s):
if (g, l, q, p, k, s) in m.param_v_max_glqpks:
# predefined finite capacity
return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
else:
# infinite capacity
return (0, None)
model.var_ef_glqpks = pyo.Var(
model.set_GLQPKS_exp, within=pyo.NonNegativeReals, bounds=bounds_var_ef_glqpks
) )
def add_price_functions(
# *************************************************************************
# *************************************************************************
# exported flow revenue
def rule_constr_exp_flow_revenue(m, g, l, q, p, k):
return (
sum(
m.var_ef_glqpks[(g, l, q, p, k, s)]
* m.param_p_glqpks[(g, l, q, p, k, s)]
for s in m.set_S[(g, l, q, p, k)]
)
== m.var_efr_glqpk[(g, l, q, p, k)]
)
model.constr_exp_flow_revenue = pyo.Constraint(
model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flow_revenue
)
# exported flows
def rule_constr_exp_flows(m, g, l, q, p, k):
return sum(
m.var_v_glljqk[(g, l_star, l, j, q, k)]
* m.param_eta_glljqk[(g, l_star, l, j, q, k)]
for l_star in m.set_L[g]
if l_star not in m.set_L_exp[g]
for j in m.set_J[(g, l_star, l)] # only directed arcs
) == sum(m.var_ef_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
model.constr_exp_flows = pyo.Constraint(
model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flows
)
# *************************************************************************
# *************************************************************************
# # non-convex price functions
# if not convex_price_function:
# # delta variables
# model.var_active_segment_glqpks = pyo.Var(
# model.set_GLQPKS, within=pyo.Binary
# )
# # segments must be empty if the respective delta variable is zero
# def rule_constr_empty_segment_if_delta_zero_imp(m, g, l, q, p, k, s):
# return (
# m.var_if_glqpks[(g,l,q,p,k,s)] <=
# m.param_v_max_glqpks[(g,l,q,p,k,s)]*
# m.var_active_segment_glqpks[(g,l,q,p,k,s)]
# )
# model.constr_empty_segment_if_delta_zero_imp = pyo.Constraint(
# model.set_GLQPKS_imp, rule=rule_constr_empty_segment_if_delta_zero_imp
# )
# # segments must be empty if the respective delta variable is zero
# def rule_constr_empty_segment_if_delta_zero_exp(m, g, l, q, p, k, s):
# return (
# m.var_ef_glqpks[(g,l,q,p,k,s)] <=
# m.param_v_max_glqpks[(g,l,q,p,k,s)]*
# m.var_active_segment_glqpks[(g,l,q,p,k,s)]
# )
# model.constr_empty_segment_if_delta_zero_exp = pyo.Constraint(
# model.set_GLQPKS_exp, rule=rule_constr_empty_segment_if_delta_zero_exp
# )
# # if delta var is one, previous ones must be one too
# def rule_constr_delta_summing_logic(m, g, l, q, p, k, s):
# if s == len(m.set_S)-1:
# return pyo.Constraint.Skip
# return (
# m.var_active_segment_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
# )
# model.constr_delta_summing_logic = pyo.Constraint(
# model.set_GLQPKS, rule=rule_constr_delta_summing_logic
# )
# # if delta var is zero, subsequent ones must also be zero
# def rule_constr_delta_next_zeros(m, g, l, q, p, k, s):
# if s == len(m.set_S)-1:
# return pyo.Constraint.Skip
# return (
# 1-m.var_active_segment_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
# )
# model.constr_delta_next_zeros = pyo.Constraint(
# model.set_GLQPKS, rule=rule_constr_delta_next_zeros
# )
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# def price_other2(
# model: pyo.AbstractModel,
# convex_price_function: bool = False,
# enable_default_values: bool = True,
# enable_validation: bool = True,
# enable_initialisation: bool = True
# ):
# # set of price segments
# model.set_S = pyo.Set(model.set_GL_exp_imp, model.set_QPK)
# # set of GLQKS tuples
# def init_set_GLQPKS(m):
# return (
# (g, l, q, p, k, s)
# # for (g,l) in m.set_GL_exp_imp
# # for (q,k) in m.set_QK
# for (g, l, q, p, k) in m.set_S
# for s in m.set_S[(g, l, q, p, k)]
# )
# model.set_GLQPKS = pyo.Set(
# dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
# )
# def init_set_GLQPKS_exp(m):
# return (
# glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_exp[glqpks[0]]
# )
# model.set_GLQPKS_exp = pyo.Set(
# dimen=6, initialize=(init_set_GLQPKS_exp if enable_initialisation else None)
# )
# def init_set_GLQPKS_imp(m):
# return (
# glqpks for glqpks in m.set_GLQPKS if glqpks[1] in m.set_L_imp[glqpks[0]]
# )
# model.set_GLQPKS_imp = pyo.Set(
# dimen=6, initialize=(init_set_GLQPKS_imp if enable_initialisation else None)
# )
# # *************************************************************************
# # *************************************************************************
# # parameters
# # resource prices
# model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)
# # maximum resource volumes for each prices
# model.param_v_max_glqpks = pyo.Param(
# model.set_GLQPKS,
# within=pyo.NonNegativeReals
# )
# # *************************************************************************
# # *************************************************************************
# # variables
# # *************************************************************************
# # *************************************************************************
# # exported flow
# # TODO: validate the bounds by ensuring inf. cap. only exists in last segm.
# def bounds_var_ef_glqpks(m, g, l, q, p, k, s):
# if (g, l, q, p, k, s) in m.param_v_max_glqpks:
# # predefined finite capacity
# return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
# else:
# # infinite capacity
# return (0, None)
# model.var_ef_glqpks = pyo.Var(
# model.set_GLQPKS_exp, within=pyo.NonNegativeReals, bounds=bounds_var_ef_glqpks
# )
# # imported flow
# def bounds_var_if_glqpks(m, g, l, q, p, k, s):
# if (g, l, q, p, k, s) in m.param_v_max_glqpks:
# # predefined finite capacity
# return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
# else:
# # infinite capacity
# return (0, None)
# model.var_if_glqpks = pyo.Var(
# model.set_GLQPKS_imp, within=pyo.NonNegativeReals, bounds=bounds_var_if_glqpks
# )
# # *************************************************************************
# # *************************************************************************
# # exported flow revenue
# def rule_constr_exp_flow_revenue(m, g, l, q, p, k):
# return (
# sum(
# m.var_ef_glqpks[(g, l, q, p, k, s)]
# * m.param_p_glqpks[(g, l, q, p, k, s)]
# for s in m.set_S[(g, l, q, p, k)]
# )
# == m.var_efr_glqpk[(g, l, q, p, k)]
# )
# model.constr_exp_flow_revenue = pyo.Constraint(
# model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flow_revenue
# )
# # imported flow cost
# def rule_constr_imp_flow_cost(m, g, l, q, p, k):
# return (
# sum(
# m.var_if_glqpks[(g, l, q, p, k, s)]
# * m.param_p_glqpks[(g, l, q, p, k, s)]
# for s in m.set_S[(g, l, q, p, k)]
# )
# == m.var_ifc_glqpk[(g, l, q, p, k)]
# )
# model.constr_imp_flow_cost = pyo.Constraint(
# model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flow_cost
# )
# # exported flows
# def rule_constr_exp_flows(m, g, l, q, p, k):
# return sum(
# m.var_v_glljqk[(g, l_star, l, j, q, k)]
# * m.param_eta_glljqk[(g, l_star, l, j, q, k)]
# for l_star in m.set_L[g]
# if l_star not in m.set_L_exp[g]
# for j in m.set_J[(g, l_star, l)] # only directed arcs
# ) == sum(m.var_ef_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
# model.constr_exp_flows = pyo.Constraint(
# model.set_GL_exp, model.set_QPK, rule=rule_constr_exp_flows
# )
# # imported flows
# def rule_constr_imp_flows(m, g, l, q, p, k):
# return sum(
# m.var_v_glljqk[(g, l, l_star, j, q, k)]
# for l_star in m.set_L[g]
# if l_star not in m.set_L_imp[g]
# for j in m.set_J[(g, l, l_star)] # only directed arcs
# ) == sum(m.var_if_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
# model.constr_imp_flows = pyo.Constraint(
# model.set_GL_imp, model.set_QPK, rule=rule_constr_imp_flows
# )
# # *************************************************************************
# # *************************************************************************
# # non-convex price functions
# if not convex_price_function:
# # delta variables
# model.var_active_segment_glqpks = pyo.Var(
# model.set_GLQPKS, within=pyo.Binary
# )
# # segments must be empty if the respective delta variable is zero
# def rule_constr_empty_segment_if_delta_zero_imp(m, g, l, q, p, k, s):
# return (
# m.var_if_glqpks[(g,l,q,p,k,s)] <=
# m.param_v_max_glqpks[(g,l,q,p,k,s)]*
# m.var_active_segment_glqpks[(g,l,q,p,k,s)]
# )
# model.constr_empty_segment_if_delta_zero_imp = pyo.Constraint(
# model.set_GLQPKS_imp, rule=rule_constr_empty_segment_if_delta_zero_imp
# )
# # segments must be empty if the respective delta variable is zero
# def rule_constr_empty_segment_if_delta_zero_exp(m, g, l, q, p, k, s):
# return (
# m.var_ef_glqpks[(g,l,q,p,k,s)] <=
# m.param_v_max_glqpks[(g,l,q,p,k,s)]*
# m.var_active_segment_glqpks[(g,l,q,p,k,s)]
# )
# model.constr_empty_segment_if_delta_zero_exp = pyo.Constraint(
# model.set_GLQPKS_exp, rule=rule_constr_empty_segment_if_delta_zero_exp
# )
# # if delta var is one, previous ones must be one too
# # if delta var is zero, the next ones must also be zero
# def rule_constr_delta_summing_logic(m, g, l, q, p, k, s):
# if s == len(m.set_S[(g,l,q,p,k)])-1:
# # last segment, skip
# return pyo.Constraint.Skip
# return (
# m.var_active_segment_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
# )
# model.constr_delta_summing_logic = pyo.Constraint(
# model.set_GLQPKS, rule=rule_constr_delta_summing_logic
# )
# # if a segment is not completely used, the next ones must remain empty
# def rule_constr_fill_up_segment_before_next(m, g, l, q, p, k, s):
# if s == len(m.set_S[(g,l,q,p,k)])-1:
# # last segment, skip
# return pyo.Constraint.Skip
# if (g,l) in m.set_GL_imp:
# return (
# m.var_if_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]*
# m.param_v_max_glqpks[(g,l,q,p,k,s)]
# )
# else:
# return (
# m.var_ef_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]*
# m.param_v_max_glqpks[(g,l,q,p,k,s)]
# )
# # return (
# # m.var_if_glqpks[(g,l,q,p,k,s)]/m.param_v_max_glqpks[(g,l,q,p,k,s)] >=
# # m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
# # )
# # return (
# # m.param_v_max_glqpks[(g,l,q,p,k,s)]-m.var_if_glqpks[(g,l,q,p,k,s)] <=
# # m.param_v_max_glqpks[(g,l,q,p,k,s)]*(1- m.var_active_segment_glqpks[(g,l,q,p,k,s+1)])
# # )
# model.constr_fill_up_segment_before_next = pyo.Constraint(
# model.set_GLQPKS, rule=rule_constr_fill_up_segment_before_next
# )
# *****************************************************************************
# *****************************************************************************
def price_other(
model: pyo.AbstractModel, model: pyo.AbstractModel,
convex_price_function: bool = True, use_blocks: bool = True,
enable_default_values: bool = True, node_price_model = NODE_PRICE_OTHER,
enable_validation: bool = True, **kwargs
enable_initialisation: bool = True ):
):
# auxiliary set for pyomo
model.set_GLQPK = model.set_GL_exp_imp*model.set_QPK
# set of price segments
model.set_S = pyo.Set(model.set_GLQPK)
# set of GLQKS tuples
def init_set_GLQPKS(m):
return (
(g, l, q, p, k, s)
# for (g,l) in m.set_GL_exp_imp
# for (q,k) in m.set_QK
for (g, l, q, p, k) in m.set_S
for s in m.set_S[(g, l, q, p, k)]
)
model.set_GLQPKS = pyo.Set(
dimen=6, initialize=(init_set_GLQPKS if enable_initialisation else None)
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# parameters
# resource prices
model.param_p_glqpks = pyo.Param(model.set_GLQPKS, within=pyo.NonNegativeReals)
# price function convexity if use_blocks:
# with blocks
model.param_price_function_is_convex = pyo.Param( if node_price_model == NODE_PRICE_LAMBDA:
model.set_GLQPK, price_lambda_block(model, **kwargs)
within=pyo.Boolean elif node_price_model == NODE_PRICE_DELTA:
) price_delta_block(model, **kwargs)
# maximum resource volumes for each prices
model.param_v_max_glqpks = pyo.Param(
model.set_GLQPKS,
within=pyo.NonNegativeReals
)
# *************************************************************************
# *************************************************************************
# variables
# *************************************************************************
# *************************************************************************
# import and export flows
def bounds_var_trans_flows_glqpks(m, g, l, q, p, k, s):
if (g, l, q, p, k, s) in m.param_v_max_glqpks:
# predefined finite capacity
return (0, m.param_v_max_glqpks[(g, l, q, p, k, s)])
else: else:
# infinite capacity price_other_block(model, **kwargs)
return (0, None) else:
model.var_trans_flows_glqpks = pyo.Var( # without blocks
model.set_GLQPKS, within=pyo.NonNegativeReals, bounds=bounds_var_trans_flows_glqpks if node_price_model == NODE_PRICE_LAMBDA:
) price_lambda_no_block(model, **kwargs)
elif node_price_model == NODE_PRICE_DELTA:
# ************************************************************************* price_delta_no_block(model, **kwargs)
# *************************************************************************
# import flow costs and export flow revenues
def rule_constr_trans_monetary_flows(m, g, l, q, p, k):
if (g,l) in m.set_GL_imp:
return (
sum(
m.var_trans_flows_glqpks[(g, l, q, p, k, s)]
* m.param_p_glqpks[(g, l, q, p, k, s)]
for s in m.set_S[(g, l, q, p, k)]
)
== m.var_ifc_glqpk[(g, l, q, p, k)]
)
else: else:
return ( price_other_no_block(model, **kwargs)
sum(
m.var_trans_flows_glqpks[(g, l, q, p, k, s)]
* m.param_p_glqpks[(g, l, q, p, k, s)]
for s in m.set_S[(g, l, q, p, k)]
)
== m.var_efr_glqpk[(g, l, q, p, k)]
)
model.constr_trans_monetary_flows = pyo.Constraint(
model.set_GLQPK, rule=rule_constr_trans_monetary_flows
)
# imported and exported flows
def rule_constr_trans_flows(m, g, l, q, p, k):
if (g,l) in m.set_GL_imp:
return sum(
m.var_v_glljqk[(g, l, l_star, j, q, k)]
for l_star in m.set_L[g]
if l_star not in m.set_L_imp[g]
for j in m.set_J[(g, l, l_star)] # only directed arcs
) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
else:
return sum(
m.var_v_glljqk[(g, l_star, l, j, q, k)]
* m.param_eta_glljqk[(g, l_star, l, j, q, k)]
for l_star in m.set_L[g]
if l_star not in m.set_L_exp[g]
for j in m.set_J[(g, l_star, l)] # only directed arcs
) == sum(m.var_trans_flows_glqpks[(g, l, q, p, k, s)] for s in m.set_S[(g, l, q, p, k)])
model.constr_trans_flows = pyo.Constraint(
model.set_GLQPK, rule=rule_constr_trans_flows
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# non-convex price functions
# delta variables
model.var_active_segment_glqpks = pyo.Var(
model.set_GLQPKS, within=pyo.Binary
)
# segments must be empty if the respective delta variable is zero
def rule_constr_empty_segment_if_delta_zero(m, g, l, q, p, k, s):
if len(m.set_S[(g,l,q,p,k)]) == 1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
# single segment, skip
# convex, skip
return pyo.Constraint.Skip
return (
m.var_trans_flows_glqpks[(g,l,q,p,k,s)] <=
m.param_v_max_glqpks[(g,l,q,p,k,s)]*
m.var_active_segment_glqpks[(g,l,q,p,k,s)]
)
model.constr_empty_segment_if_delta_zero = pyo.Constraint(
model.set_GLQPKS, rule=rule_constr_empty_segment_if_delta_zero
)
# if delta var is one, previous ones must be one too
# if delta var is zero, the next ones must also be zero
def rule_constr_delta_summing_logic(m, g, l, q, p, k, s):
if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
# last segment, skip
# convex, skip
return pyo.Constraint.Skip
return (
m.var_active_segment_glqpks[(g,l,q,p,k,s)] >=
m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
)
model.constr_delta_summing_logic = pyo.Constraint(
model.set_GLQPKS, rule=rule_constr_delta_summing_logic
)
# if a segment is not completely used, the next ones must remain empty
def rule_constr_fill_up_segment_before_next(m, g, l, q, p, k, s):
if s == len(m.set_S[(g,l,q,p,k)])-1 or m.param_price_function_is_convex[(g,l,q,p,k)]:
# last segment, skip
# convex, skip
return pyo.Constraint.Skip
return (
m.var_trans_flows_glqpks[(g,l,q,p,k,s)] >=
m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]*
m.param_v_max_glqpks[(g,l,q,p,k,s)]
)
# return (
# m.var_if_glqpks[(g,l,q,p,k,s)]/m.param_v_max_glqpks[(g,l,q,p,k,s)] >=
# m.var_active_segment_glqpks[(g,l,q,p,k,s+1)]
# )
# return (
# m.param_v_max_glqpks[(g,l,q,p,k,s)]-m.var_if_glqpks[(g,l,q,p,k,s)] <=
# m.param_v_max_glqpks[(g,l,q,p,k,s)]*(1- m.var_active_segment_glqpks[(g,l,q,p,k,s+1)])
# )
model.constr_fill_up_segment_before_next = pyo.Constraint(
model.set_GLQPKS, rule=rule_constr_fill_up_segment_before_next
)
# *****************************************************************************
# *****************************************************************************
def price_block_lambda(model: pyo.AbstractModel, **kwargs):
raise NotImplementedError
# *****************************************************************************
# *****************************************************************************
def price_block_delta(model: pyo.AbstractModel, **kwargs):
raise NotImplementedError
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
\ No newline at end of file
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
import pyomo.environ as pyo import pyomo.environ as pyo
from .blocks.networks import add_network_restrictions from .blocks.networks import add_network_restrictions
from .blocks.prices import add_prices_block from .blocks.prices import add_price_functions
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
...@@ -11,9 +11,11 @@ from .blocks.prices import add_prices_block ...@@ -11,9 +11,11 @@ from .blocks.prices import add_prices_block
def create_model( def create_model(
name: str, name: str,
use_prices_block: bool,
node_price_model: str,
enable_default_values: bool = True, enable_default_values: bool = True,
enable_validation: bool = True, enable_validation: bool = True,
enable_initialisation: bool = True, enable_initialisation: bool = True
): ):
# TODO: test initialisation # TODO: test initialisation
...@@ -57,33 +59,26 @@ def create_model( ...@@ -57,33 +59,26 @@ def create_model(
# ************************************************************************* # *************************************************************************
# set of assessments # set of assessments
model.set_Q = pyo.Set() model.set_Q = pyo.Set()
# TODO: use rangesets for time-related sets # TODO: use rangesets for time-related sets
# set of time step intervals for each assessment # set of time step intervals for each assessment
model.set_K_q = pyo.Set(model.set_Q) model.set_K_q = pyo.Set(model.set_Q)
# set of representative evaluation periods for each assessment # set of representative evaluation periods for each assessment
model.set_P_q = pyo.Set(model.set_Q) model.set_P_q = pyo.Set(model.set_Q)
# set of networks # set of networks
model.set_G = pyo.Set() model.set_G = pyo.Set()
# set of nodes on each network # set of nodes on each network
model.set_L = pyo.Set(model.set_G) model.set_L = pyo.Set(model.set_G)
# set of importing nodes on each network # set of importing nodes on each network
model.set_L_imp = pyo.Set(model.set_G, within=model.set_L) model.set_L_imp = pyo.Set(model.set_G, within=model.set_L)
# set of exporting nodes on each network # set of exporting nodes on each network
model.set_L_exp = pyo.Set(model.set_G, within=model.set_L) model.set_L_exp = pyo.Set(model.set_G, within=model.set_L)
# ************************************************************************* # *************************************************************************
...@@ -963,20 +958,6 @@ def create_model( ...@@ -963,20 +958,6 @@ def create_model(
initialize=(init_set_GLLJH_arc_inv_sos1 if enable_initialisation else None), initialize=(init_set_GLLJH_arc_inv_sos1 if enable_initialisation else None),
) )
# set of GLLJ-indexed GLLJH tuples for new arcs modelled using SOS1
def init_set_GLLJH_arc_inv_sos1_gllj(m, g, l1, l2, j):
return ((g, l1, l2, j, h) for h in m.set_H_gllj[(g, l1, l2, j)])
model.set_GLLJH_arc_inv_sos1_gllj = pyo.Set(
model.set_GLLJ_arc_inv_sos1,
dimen=5,
within=model.set_GLLJH_arc_inv_sos1,
initialize=(
init_set_GLLJH_arc_inv_sos1_gllj if enable_initialisation else None
),
)
# ************************************************************************* # *************************************************************************
# flow sense determination using SOS1 # flow sense determination using SOS1
...@@ -1315,18 +1296,6 @@ def create_model( ...@@ -1315,18 +1296,6 @@ def create_model(
dimen=2, within=model.set_TH, initialize=init_set_TH_arc_inv_sos1 dimen=2, within=model.set_TH, initialize=init_set_TH_arc_inv_sos1
) )
# set of t-indexed TH tuples for groups of arcs relying on SOS1
def init_set_TH_arc_inv_sos1_t(m, t):
return ((t, h) for h in m.set_H_t[t])
model.set_TH_arc_inv_sos1_t = pyo.Set(
model.set_T_sos1,
dimen=2,
within=model.set_TH_arc_inv_sos1,
initialize=init_set_TH_arc_inv_sos1_t,
)
# minimum cost of a group of arcs # minimum cost of a group of arcs
model.param_c_arc_min_th = pyo.Param(model.set_TH, within=pyo.NonNegativeReals) model.param_c_arc_min_th = pyo.Param(model.set_TH, within=pyo.NonNegativeReals)
...@@ -2163,7 +2132,11 @@ def create_model( ...@@ -2163,7 +2132,11 @@ def create_model(
) )
# prices # prices
add_prices_block(model) add_price_functions(
model,
use_blocks=use_prices_block,
node_price_model=node_price_model
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -2435,14 +2408,21 @@ def create_model( ...@@ -2435,14 +2408,21 @@ def create_model(
# ************************************************************************* # *************************************************************************
# SOS1 constraints for arc selection # SOS1 constraints for arc selection
def rule_constr_arc_sos1(m, g, l1, l2, j):
    "Return the (variables, weights) pair for the SOS1 constraint of one arc."
    # build both lists in a single pass over the investment options (h)
    variables = []
    weights = []
    for h in m.set_H_gllj[(g, l1, l2, j)]:
        option = (g, l1, l2, j, h)
        variables.append(m.var_delta_arc_inv_glljh[option])
        weights.append(m.param_arc_inv_sos1_weights_glljh[option])
    return (variables, weights)
# reconstructed: the scraped line fused the old (index=/weights=) and the
# new (rule=) variants of this statement into invalid text
model.constr_arc_sos1 = pyo.SOSConstraint(
    model.set_GLLJ_arc_inv_sos1,  # one constraint per (un)directed new arc
    rule=rule_constr_arc_sos1,  # supplies (variables, weights) per arc
    sos=1,
)
# ************************************************************************* # *************************************************************************
...@@ -2572,16 +2552,29 @@ def create_model( ...@@ -2572,16 +2552,29 @@ def create_model(
# ************************************************************************* # *************************************************************************
# SOS1 constraints for flow sense determination (undirected arcs) # SOS1 constraints for flow sense determination (undirected arcs)
# model.constr_sns_sos1 = pyo.SOSConstraint(
# model.set_GLLJ_und, # one constraint per undirected arc
# model.set_QK, # and time interval
# var=model.var_zeta_sns_glljqk, # set_GLLJ_und_red and set_K
# index=model.set_GLLJQK_und_sns_sos1_red_gllj, #
# weights=model.param_arc_sns_sos1_weights_glljqk,
# sos=1,
# )
# TODO: make the weight list dependent on model options rather than a literal
def rule_constr_sns_sos1(m, g, l1, l2, j, q, k):
    """Return (variables, weights) for the flow-sense SOS1 constraint of the
    undirected arc (g, l1, l2, j) during assessment q and interval k."""
    # one flow-sense variable per direction: l1->l2 first, then l2->l1
    var_list = [
        m.var_zeta_sns_glljqk[(g, l1, l2, j, q, k)],
        m.var_zeta_sns_glljqk[(g, l2, l1, j, q, k)],
    ]
    # fixed priority weights; the original `[1, 2] if True else [2, 1]`
    # contained a dead branch and always evaluated to [1, 2]
    weight_list = [1, 2]
    return (var_list, weight_list)
# reconstructed: the scraped line fused the old (index=/weights=) and the
# new (rule=) variants of this statement into invalid text
model.constr_sns_sos1 = pyo.SOSConstraint(
    model.set_GLLJ_und,  # one constraint per undirected arc
    model.set_QK,  # and per assessment/time interval pair
    rule=rule_constr_sns_sos1,  # supplies (variables, weights) per arc+interval
    sos=1,
)
# ************************************************************************* # *************************************************************************
# static losses # static losses
...@@ -2941,13 +2934,20 @@ def create_model( ...@@ -2941,13 +2934,20 @@ def create_model(
# ************************************************************************* # *************************************************************************
# SOS1 constraints for arc group selection # SOS1 constraints for arc group selection
def rule_constr_arc_group_sos1(m, t):
    "Return the (variables, weights) pair for the SOS1 constraint of group t."
    # snapshot the investment options for group t, then index both containers
    options = tuple(m.set_H_t[t])
    return (
        [m.var_delta_arc_inv_th[(t, h)] for h in options],
        [m.param_arc_inv_sos1_weights_th[(t, h)] for h in options],
    )
# reconstructed: the scraped line fused the old (index=/weights=) and the
# new (rule=) variants of this statement into invalid text
model.constr_arc_group_sos1 = pyo.SOSConstraint(
    model.set_T_sos1,  # one constraint per arc group relying on SOS1
    rule=rule_constr_arc_group_sos1,  # supplies (variables, weights) per group
    sos=1,
)
# ************************************************************************* # *************************************************************************
......
...@@ -15,9 +15,9 @@ from math import inf ...@@ -15,9 +15,9 @@ from math import inf
# import numpy as np # import numpy as np
import networkx as nx import networkx as nx
from ...data.gis.identify import get_edges_involving_node
from ...data.gis.identify import find_unconnected_nodes from ...data.gis.identify import find_unconnected_nodes
from .resource import are_prices_time_invariant # from .resource import are_prices_time_invariant, ResourcePrice
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
...@@ -624,20 +624,34 @@ class Network(nx.MultiDiGraph): ...@@ -624,20 +624,34 @@ class Network(nx.MultiDiGraph):
) )
def __init__(self, network_type = NET_TYPE_HYBRID, **kwargs):
    """Initialise the network, validate its type, and classify its nodes.

    Parameters
    ----------
    network_type : one of NET_TYPES
        The directed-arc policy for the network; must be a known type.
    **kwargs
        Forwarded to nx.MultiDiGraph.__init__ (may carry initial graph data).

    Raises
    ------
    ValueError
        If network_type is not among NET_TYPES.
    """
    # NOTE(review): reconstructed from fused diff text — confirm against the
    # repository version of this revision.
    # initialise the node type registries
    self.import_nodes = set()
    self.export_nodes = set()
    self.source_sink_nodes = set()
    self.waypoint_nodes = set()
    # declare variables for the nodes without directed arc limitations
    if network_type not in self.NET_TYPES:
        raise ValueError('Unknown network type.')
    self.network_type = network_type
    # nodes without incoming/outgoing directed arc limitations
    self.nodes_w_in_dir_arc_limitations = dict()
    self.nodes_w_out_dir_arc_limitations = dict()
    # run base class init routine (may populate nodes from incoming data)
    nx.MultiDiGraph.__init__(self, **kwargs)
    # classify any nodes supplied via the base initialiser
    for node_key in self.nodes():
        self._process_node_data(node_key, data=self.nodes[node_key])
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -671,16 +685,226 @@ class Network(nx.MultiDiGraph): ...@@ -671,16 +685,226 @@ class Network(nx.MultiDiGraph):
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def _process_node_data(self, node_key, data: dict, **kwargs):
"Check the data, determine the node type and update the structures."
# find out whicht type of node it is
# no data: waypoint
# prices: transshipment (imp/exp)
# flows: source/sink
if type(data) == type(None):
self.waypoint_nodes.add(node_key)
elif type(data) == dict:
# transshipment or source/sink
if self.KEY_NODE_BASE_FLOW in data and len(data) == 1:
# source/sink
self.source_sink_nodes.add(node_key)
elif self.KEY_NODE_PRICES in data and self.KEY_NODE_TYPE in data and data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP:
# import node
self.import_nodes.add(node_key)
elif self.KEY_NODE_PRICES in data and self.KEY_NODE_TYPE in data and data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP:
# export node
self.export_nodes.add(node_key)
elif self.KEY_NODE_PRICES not in data:
# waypoint node
self.waypoint_nodes.add(node_key)
else:
# error
raise TypeError('Invalid input data combination.')
else:
raise TypeError('Invalid type for node data.')
# set up the node
self._set_up_node(node_key, **kwargs)
# TODO: automatically identify import and export nodes (without defining them explicitly)
# *************************************************************************
# *************************************************************************
def is_export_node(self, node_key) -> bool:
    "Return True if the given key identifies an export node."
    registered = self.export_nodes
    return node_key in registered
def is_import_node(self, node_key) -> bool:
    "Return True if the given key identifies an import node."
    registered = self.import_nodes
    return node_key in registered
def is_waypoint_node(self, node_key) -> bool:
    "Return True if the given key identifies a waypoint node."
    registered = self.waypoint_nodes
    return node_key in registered
def is_source_sink_node(self, node_key) -> bool:
    "Return True if the given key identifies a source or sink node."
    registered = self.source_sink_nodes
    return node_key in registered
# *************************************************************************
# *************************************************************************
def _reset_node_type(self, node_key):
if self.is_export_node(node_key):
# export node
self.export_nodes.remove(node_key)
elif self.is_import_node(node_key):
# import node
self.import_nodes.remove(node_key)
elif self.is_source_sink_node(node_key):
# source/sink node
self.source_sink_nodes.remove(node_key)
elif self.is_waypoint_node(node_key):
# has to be a waypoint node
self.waypoint_nodes.remove(node_key)
# No need to reset node but this could mean something is up
# else:
# raise ValueError('Unknown node type.')
# *************************************************************************
# *************************************************************************
# TODO: use a decorator function to prevent the original method(s) from being used inappropriately
def add_node(self, node_key, **kwargs):
    "Validate the node data and add (or update) the node accordingly."
    # check if the node can be added and add it
    # (delegates to _handle_node, which also updates the type registries)
    self._handle_node(node_key, **kwargs)
# *************************************************************************
# *************************************************************************
def modify_node(self, node_key, **kwargs):
    "Modify an existing node; raises ValueError if the node does not exist."
    if self.has_node(node_key):
        self._handle_node(node_key, **kwargs)
    else:
        raise ValueError('The node indicated does not exist.')
# *************************************************************************
# *************************************************************************
# TODO: automatically check if node already exists and implications when "adding" one
# def add_nodes(self, node_key_data: list):
# # process the input data
# for entry in node_key_data:
# if type(entry) != tuple :
# raise ValueError('The input must be a list of tuples.')
# # self._handle_node(entry[0], **entry[1])
# self._process_node_data(entry[0], entry[1])
# # add the nodes
# nx.MultiDiGraph.add_nodes_from(self, node_key_data)
# *************************************************************************
# *************************************************************************
def add_nodes_from(self, nodes_for_adding, **kwargs):
    """Add several nodes at once.

    Accepted input formats:
    1) a container of node keys
    2) a container of (node_key, data_dict) tuples
    Keyword arguments apply to every node; per-node data take precedence.
    """
    for entry in nodes_for_adding:
        is_keyed_pair = (
            type(entry) == tuple
            and len(entry) == 2
            and type(entry[1]) == dict
        )
        if is_keyed_pair:
            # format 2: merge shared kwargs with the per-node data
            node_key, node_data = entry
            self._handle_node(node_key, **{**kwargs, **node_data})
        else:
            # format 1: shared kwargs only
            self._handle_node(entry, **kwargs)
# *************************************************************************
# *************************************************************************
def modify_nodes_from(self, nodes_for_adding, **kwargs):
    """Modify several existing nodes at once.

    Accepted input formats (same as add_nodes_from):
    1) a container of node keys
    2) a container of (node_key, data_dict) tuples
    Raises ValueError if any referenced node does not exist.
    """
    for entry in nodes_for_adding:
        is_keyed_pair = (
            type(entry) == tuple
            and len(entry) == 2
            and type(entry[1]) == dict
        )
        if is_keyed_pair:
            # format 2: merge shared kwargs with the per-node data
            merged = {**kwargs, **entry[1]}
            if not self.has_node(entry[0]):
                raise ValueError('The node indicated does not exist.')
            self._handle_node(entry[0], **merged)
        else:
            # format 1: shared kwargs only
            if not self.has_node(entry):
                raise ValueError('The node indicated does not exist.')
            self._handle_node(entry, **kwargs)
# *************************************************************************
# *************************************************************************
def _handle_node(self, node_key, **kwargs):
    """Validate and apply node data, for both new and existing nodes.

    For an existing node, the changes must be compatible with the arcs
    involving it:
    - a node with outgoing arcs cannot become an export node
    - a node with incoming arcs cannot become an import node
    - a node with no arcs can become anything
    The node type registries are updated accordingly.

    Raises
    ------
    ValueError
        If the requested type change is incompatible with existing arcs.
    """
    if self.has_node(node_key):
        # node exists
        # BUGFIX: the previous test was `type(kwargs) != type(None)`, which
        # is always true because **kwargs is a dict — the "no data" branch
        # below was unreachable; testing for emptiness restores the
        # documented behaviour (no data => waypoint, clear node data)
        if kwargs:
            # has data: check if the node type changes
            if self.KEY_NODE_TYPE in kwargs:
                # the node type is specified: possible change of node type
                _edges = get_edges_involving_node(self, node_key, include_self_loops=False)
                if kwargs[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP and not self.is_export_node(node_key):
                    # change to an export node: it cannot have outgoing arcs
                    for _edge in _edges:
                        if _edge[0] == node_key:
                            # outgoing arc, raise error
                            raise ValueError(
                                'A node with outgoing arcs cannot be an '
                                'export node.'
                            )
                elif kwargs[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP and not self.is_import_node(node_key):
                    # change to an import node: it cannot have incoming arcs
                    for _edge in _edges:
                        if _edge[1] == node_key:
                            # incoming arc, raise error
                            raise ValueError(
                                'A node with incoming arcs cannot be an '
                                'import node.'
                            )
                # everything seems to be okay: reset the node type
                self._reset_node_type(node_key)
            # NOTE(review): when kwargs carry data without KEY_NODE_TYPE,
            # the previous registry entry is not reset before reprocessing —
            # confirm this is intended
        else:
            # no data: waypoint node, clear the node data
            self._reset_node_type(node_key)
            for key in (self.KEY_NODE_BASE_FLOW, self.KEY_NODE_PRICES):
                if key in self.nodes[node_key]:
                    self.nodes[node_key].pop(key)
        # the changes seem okay: reclassify and store the data
        self._process_node_data(node_key, kwargs)
        nx.MultiDiGraph.add_node(self, node_key, **kwargs)
    else:
        # new node: process the input data and add it
        self._process_node_data(node_key, kwargs)
        nx.MultiDiGraph.add_node(self, node_key, **kwargs)
# *************************************************************************
# *************************************************************************
# add a new import node # add a new import node
# reconstructed: the scraped lines fused the old and new variants of this
# method into invalid text; this is the new revision (no time-invariance key)
def add_import_node(self, node_key, prices: dict):
    "Add a new import node with the given prices."
    node_dict = {
        self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_IMP,
        self.KEY_NODE_PRICES: prices
    }
    self.add_node(node_key, **node_dict)
# ************************************************************************* # *************************************************************************
...@@ -691,10 +915,8 @@ class Network(nx.MultiDiGraph): ...@@ -691,10 +915,8 @@ class Network(nx.MultiDiGraph):
# reconstructed: the scraped lines fused the old and new variants of this
# method into invalid text; this is the new revision (no time-invariance key)
def add_export_node(self, node_key, prices: dict):
    "Add a new export node with the given prices."
    node_dict = {
        self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_EXP,
        self.KEY_NODE_PRICES: prices
    }
    self.add_node(node_key, **node_dict)
# ************************************************************************* # *************************************************************************
...@@ -704,169 +926,164 @@ class Network(nx.MultiDiGraph): ...@@ -704,169 +926,164 @@ class Network(nx.MultiDiGraph):
# reconstructed: the scraped lines fused the old and new variants of this
# method into invalid text; this is the new revision (type key and explicit
# _set_up_node call removed — the node type is now inferred from the data)
def add_source_sink_node(self, node_key, base_flow: dict, **kwargs):
    "Add a new source/sink node with the given base flow."
    # kwargs kept for interface compatibility; add_node handles the set-up
    node_dict = {
        self.KEY_NODE_BASE_FLOW: base_flow,
    }
    self.add_node(node_key, **node_dict)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# add a new waypoint node # add a new waypoint node
# reconstructed: the scraped lines fused the old and new variants of this
# method into invalid text; this is the new revision (no data => waypoint)
def add_waypoint_node(self, node_key):
    "Add a new waypoint node (a node without any data)."
    self.add_node(node_key)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# modify an existing network node # # modify an existing network node
def modify_network_node(self, node_key, node_data: dict): # def modify_network_node(self, node_key, node_data: dict, **kwargs):
""" # """
Modifies a node in the network object. # Modifies a node in the network object.
Parameters # Parameters
---------- # ----------
node_key : hashable-type # node_key : hashable-type
The key that identifies the node. # The key that identifies the node.
node_data : dict # node_data : dict
A dictionary with the data that one wishes to change in the object. # A dictionary with the data that one wishes to change in the object.
Raises
------
ValueError
Errors are raised if the node does not exist in the network object,
and if the node changed has arcs that are incompatible with its new
version, namely in terms of incoming and outgoing arcs.
Returns # Raises
------- # ------
None. # ValueError
# Errors are raised if the node does not exist in the network object,
# and if the node changed has arcs that are incompatible with its new
# version, namely in terms of incoming and outgoing arcs.
""" # Returns
# -------
# None.
if self.has_node(node_key): # """
# check if there will be changes to the type of node
if ( # if self.has_node(node_key):
self.KEY_NODE_TYPE in node_data # # check if there will be changes to the type of node
and self.KEY_NODE_TYPE in self.nodes[node_key]
):
if (
node_data[self.KEY_NODE_TYPE]
!= self.nodes[node_key][self.KEY_NODE_TYPE]
):
# the node type changed: check if final node is imp./exp.
# to export nodes # if (
# self.KEY_NODE_TYPE in node_data
# and self.KEY_NODE_TYPE in self.nodes[node_key]
# ):
# if (
# node_data[self.KEY_NODE_TYPE]
# != self.nodes[node_key][self.KEY_NODE_TYPE]
# ):
# # the node type changed: check if final node is imp./exp.
if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP: # # to export nodes
# export nodes cannot have outgoing arcs
# check if there are outgoing arcs involving this node
number_out_arcs = len( # if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP:
tuple( # # export nodes cannot have outgoing arcs
arc_key # # check if there are outgoing arcs involving this node
for arc_key in self.edges(keys=True)
if arc_key[0] == node_key # is source
)
)
if number_out_arcs > 0: # number_out_arcs = len(
raise ValueError( # tuple(
"A node with outgoing arcs cannot be changed" # arc_key
+ " into an export node, since export nodes " # for arc_key in self.edges(keys=True)
+ " cannot have outgoing arcs." # if arc_key[0] == node_key # is source
) # )
# )
# to import nodes
if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP:
# import nodes cannot have incoming arcs
# check if there are incoming arcs involving this node
number_in_arcs = len(
tuple(
arc_key
for arc_key in self.edges(keys=True)
if arc_key[1] == node_key # is destination
)
)
if number_in_arcs > 0: # if number_out_arcs > 0:
raise ValueError( # raise ValueError(
"A node with incoming arcs cannot be changed" # "A node with outgoing arcs cannot be changed"
+ " into an import node, since import nodes " # + " into an export node, since export nodes "
+ " cannot have incoming arcs." # + " cannot have outgoing arcs."
) # )
# all good # # to import nodes
self.add_node(node_key, **node_data) # if node_data[self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP:
# # import nodes cannot have incoming arcs
# # check if there are incoming arcs involving this node
else: # number_in_arcs = len(
raise ValueError("No such node was found.") # tuple(
# arc_key
# for arc_key in self.edges(keys=True)
# if arc_key[1] == node_key # is destination
# )
# )
# ************************************************************************* # if number_in_arcs > 0:
# ************************************************************************* # raise ValueError(
# "A node with incoming arcs cannot be changed"
# + " into an import node, since import nodes "
# + " cannot have incoming arcs."
# )
# identify importing nodes # # all good
def identify_import_nodes(self): # self.add_node(node_key, **node_data)
self.import_nodes = tuple( # self._set_up_node(node_key, **kwargs)
node_key
for node_key in self.nodes # else:
if self.KEY_NODE_TYPE in self.nodes[node_key] # raise ValueError("No such node was found.")
if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP)
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# identify exporting nodes # # identify importing nodes
def identify_export_nodes(self): # def identify_import_nodes(self):
self.export_nodes = tuple( # self.import_nodes = tuple(
node_key # node_key
for node_key in self.nodes # for node_key in self.nodes
if self.KEY_NODE_TYPE in self.nodes[node_key] # if self.KEY_NODE_TYPE in self.nodes[node_key]
if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP) # if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_IMP)
) # )
# ************************************************************************* # # *************************************************************************
# ************************************************************************* # # *************************************************************************
# identify waypoint nodes # # identify exporting nodes
# def identify_export_nodes(self):
# self.export_nodes = tuple(
# node_key
# for node_key in self.nodes
# if self.KEY_NODE_TYPE in self.nodes[node_key]
# if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_EXP)
# )
def identify_waypoint_nodes(self): # # *************************************************************************
self.waypoint_nodes = tuple( # # *************************************************************************
node_key
for node_key in self.nodes
if self.KEY_NODE_TYPE in self.nodes[node_key]
if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_WAY)
)
# ************************************************************************* # # identify waypoint nodes
# *************************************************************************
# identify source sink nodes # def identify_waypoint_nodes(self):
# self.waypoint_nodes = tuple(
# node_key
# for node_key in self.nodes
# if self.KEY_NODE_TYPE in self.nodes[node_key]
# if (self.nodes[node_key][self.KEY_NODE_TYPE] == self.KEY_NODE_TYPE_WAY)
# )
def identify_source_sink_nodes(self): # # *************************************************************************
self.source_sink_nodes = tuple( # # *************************************************************************
node_key
for node_key in self.nodes # # identify source sink nodes
if self.KEY_NODE_TYPE in self.nodes[node_key]
if ( # def identify_source_sink_nodes(self):
self.nodes[node_key][self.KEY_NODE_TYPE] # self.source_sink_nodes = tuple(
== self.KEY_NODE_TYPE_SOURCE_SINK # node_key
) # for node_key in self.nodes
) # if self.KEY_NODE_TYPE in self.nodes[node_key]
# if (
# self.nodes[node_key][self.KEY_NODE_TYPE]
# == self.KEY_NODE_TYPE_SOURCE_SINK
# )
# )
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -910,52 +1127,47 @@ class Network(nx.MultiDiGraph): ...@@ -910,52 +1127,47 @@ class Network(nx.MultiDiGraph):
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
# identify node types # # identify node types
def identify_node_types(self): # def identify_node_types(self):
"Identifies the node type for each node in the network objects." # "Identifies the node type for each node in the network objects."
# identify import nodes # # identify import nodes
self.identify_import_nodes() # self.identify_import_nodes()
# identify export nodes # # identify export nodes
self.identify_export_nodes() # self.identify_export_nodes()
# identify source/sink nodes # # identify source/sink nodes
self.identify_source_sink_nodes() # self.identify_source_sink_nodes()
# identify waypoint nodes # # identify waypoint nodes
self.identify_waypoint_nodes() # self.identify_waypoint_nodes()
# validate # # validate
self.validate() # self.validate()
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def add_directed_arc(self, node_key_a, node_key_b, arcs: Arcs): def add_directed_arc(self, node_key_a, node_key_b, arcs: Arcs):
# check if the arc ends in an import node # check if the arc ends in an import node
if node_key_b in self.import_nodes: if node_key_b in self.import_nodes:
raise ValueError("Directed arcs cannot end in an import node.") raise ValueError("Directed arcs cannot end in an import node.")
# check if the arc starts in an export node # check if the arc starts in an export node
if node_key_a in self.export_nodes: if node_key_a in self.export_nodes:
raise ValueError("Directed arcs cannot start in an export node.") raise ValueError("Directed arcs cannot start in an export node.")
# check the arc is between import and export nodes # check the arc is between import and export nodes
if node_key_a in self.import_nodes and node_key_b in self.export_nodes: if node_key_a in self.import_nodes and node_key_b in self.export_nodes:
# it is between import and export nodes # it is between import and export nodes
# check if it involves static losses # check if it involves static losses
if arcs.has_static_losses(): if arcs.has_static_losses():
raise ValueError( raise ValueError(
"Arcs between import and export nodes cannot have static " "Arcs between import and export nodes cannot have static "
...@@ -963,7 +1175,6 @@ class Network(nx.MultiDiGraph): ...@@ -963,7 +1175,6 @@ class Network(nx.MultiDiGraph):
) )
# add a new arc # add a new arc
return self.add_edge( return self.add_edge(
node_key_a, node_key_b, **{self.KEY_ARC_TECH: arcs, self.KEY_ARC_UND: False} node_key_a, node_key_b, **{self.KEY_ARC_TECH: arcs, self.KEY_ARC_UND: False}
) )
......
...@@ -16,6 +16,7 @@ from .system import EnergySystem ...@@ -16,6 +16,7 @@ from .system import EnergySystem
from .resource import ResourcePrice from .resource import ResourcePrice
from .time import EconomicTimeFrame from .time import EconomicTimeFrame
from .time import entries_are_invariant from .time import entries_are_invariant
from .blocks.prices import NODE_PRICE_OTHER, NODE_PRICES
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
...@@ -63,15 +64,6 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -63,15 +64,6 @@ class InfrastructurePlanningProblem(EnergySystem):
STATIC_LOSS_MODE_US, STATIC_LOSS_MODE_US,
STATIC_LOSS_MODE_DS, STATIC_LOSS_MODE_DS,
) )
NODE_PRICE_LAMBDA = 1
NODE_PRICE_DELTA = 2
NODE_PRICE_OTHER = 3
NODE_PRICES = (
NODE_PRICE_LAMBDA,
NODE_PRICE_DELTA,
NODE_PRICE_OTHER
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -89,7 +81,8 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -89,7 +81,8 @@ class InfrastructurePlanningProblem(EnergySystem):
converters: dict = None, converters: dict = None,
prepare_model: bool = True, prepare_model: bool = True,
validate_inputs: bool = True, validate_inputs: bool = True,
node_price_model = NODE_PRICE_DELTA node_price_model = NODE_PRICE_OTHER,
use_prices_block: bool = True #False
): # TODO: switch to False when everything is more mature ): # TODO: switch to False when everything is more mature
# ********************************************************************* # *********************************************************************
...@@ -122,6 +115,16 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -122,6 +115,16 @@ class InfrastructurePlanningProblem(EnergySystem):
# self.number_time_intervals = { # self.number_time_intervals = {
# q: len(time_frame.time_intervals[q]) for q in self.time_frame.assessments # q: len(time_frame.time_intervals[q]) for q in self.time_frame.assessments
# } # }
# *********************************************************************
# *********************************************************************
# options
self.use_prices_block = use_prices_block
self.node_price_model = node_price_model
# *********************************************************************
# *********************************************************************
# initialise # initialise
...@@ -470,6 +473,7 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -470,6 +473,7 @@ class InfrastructurePlanningProblem(EnergySystem):
use_sos1: bool = False, use_sos1: bool = False,
use_interface: bool = False, use_interface: bool = False,
use_nnr_variables_if_possible: bool = False, use_nnr_variables_if_possible: bool = False,
sos1_weight_method: int = SOS1_ARC_WEIGHTS_NONE
) -> int: ) -> int:
""" """
Create a group of arcs whose invesment is to be decided collectively. Create a group of arcs whose invesment is to be decided collectively.
...@@ -490,14 +494,14 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -490,14 +494,14 @@ class InfrastructurePlanningProblem(EnergySystem):
Returns the key that identifies the arc group, an integer. Returns the key that identifies the arc group, an integer.
""" """
# make sure there is at least one arc # make sure there is at least one arc
if len(gllj_tuples) < 2: if len(gllj_tuples) < 2:
raise ValueError( raise ValueError(
"At least two arcs need to be identified to create a group." "At least two arcs need to be identified to create a group."
) )
for arc_number, gllj in enumerate(gllj_tuples): for arc_number, gllj in enumerate(gllj_tuples):
# does the network exist? # does the network exist?
...@@ -555,10 +559,10 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -555,10 +559,10 @@ class InfrastructurePlanningProblem(EnergySystem):
# groups using interfaces # groups using interfaces
self.groups_int[new_t] = use_interface self.groups_int[new_t] = use_interface
# TODO: allow users to set the weights
# groups using sos1 for arc selection # groups using sos1 for arc selection
if use_sos1:
self.groups_arc_sos1[new_t] = use_sos1 self.groups_arc_sos1[new_t] = [i for i in range(number_options)]
# groups using nnr for arc selection # groups using nnr for arc selection
...@@ -1625,28 +1629,22 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -1625,28 +1629,22 @@ class InfrastructurePlanningProblem(EnergySystem):
raise ValueError( raise ValueError(
"The method to determine the SOS1 weights was not recognised." "The method to determine the SOS1 weights was not recognised."
) )
if method == self.SOS1_ARC_WEIGHTS_CAP: if method == self.SOS1_ARC_WEIGHTS_CAP:
sos1_weights = tuple(capacity for capacity in arcs.capacity) sos1_weights = tuple(capacity for capacity in arcs.capacity)
elif method == self.SOS1_ARC_WEIGHTS_COST: elif method == self.SOS1_ARC_WEIGHTS_COST:
sos1_weights = tuple(cost for cost in arcs.minimum_cost) sos1_weights = tuple(cost for cost in arcs.minimum_cost)
elif method == self.SOS1_ARC_WEIGHTS_SPEC_CAP: elif method == self.SOS1_ARC_WEIGHTS_SPEC_CAP:
sos1_weights = tuple( sos1_weights = tuple(
cap / cost for cap, cost in zip(arcs.capacity, arcs.minimum_cost) cap / cost for cap, cost in zip(arcs.capacity, arcs.minimum_cost)
) )
elif method == self.SOS1_ARC_WEIGHTS_SPEC_COST: elif method == self.SOS1_ARC_WEIGHTS_SPEC_COST:
sos1_weights = tuple( sos1_weights = tuple(
cost / cap for cap, cost in zip(arcs.capacity, arcs.minimum_cost) cost / cap for cap, cost in zip(arcs.capacity, arcs.minimum_cost)
) )
else: # SOS1_ARC_WEIGHTS_NONE else: # SOS1_ARC_WEIGHTS_NONE
return None return None
# make sure they are unique # make sure they are unique
if verify_weights: if verify_weights:
for weight in sos1_weights: for weight in sos1_weights:
if sos1_weights.count(weight) >= 2: # TODO: reach this point if sos1_weights.count(weight) >= 2: # TODO: reach this point
...@@ -1655,7 +1653,6 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -1655,7 +1653,6 @@ class InfrastructurePlanningProblem(EnergySystem):
+ "special ordered sets of type 1 (SOS1)," + "special ordered sets of type 1 (SOS1),"
+ " since some weights are not unique." + " since some weights are not unique."
) )
return sos1_weights return sos1_weights
# ************************************************************************* # *************************************************************************
...@@ -1663,10 +1660,12 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -1663,10 +1660,12 @@ class InfrastructurePlanningProblem(EnergySystem):
def prepare(self, name = None): def prepare(self, name = None):
"""Sets up the problem model with which instances can be built.""" """Sets up the problem model with which instances can be built."""
# create pyomo model (AbstractModel) # create pyomo model (AbstractModel)
self.model = create_model(
self.model = create_model(name) name,
use_prices_block=self.use_prices_block,
node_price_model=self.node_price_model
)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -1883,19 +1882,6 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -1883,19 +1882,6 @@ class InfrastructurePlanningProblem(EnergySystem):
.number_segments() .number_segments()
) )
) )
if not self.networks[g].nodes[l][Network.KEY_NODE_PRICES_TIME_INVARIANT]
else tuple(
s
for s in range(
self.networks[g]
.nodes[l][Network.KEY_NODE_PRICES][(q, p, k)]
.number_segments()
)
)
# for g in self.networks.keys()
# for l in self.networks[g].nodes
# if (l in self.networks[g].import_nodes or
# l in self.networks[g].export_nodes)
for (g, l) in set_GL_exp_imp for (g, l) in set_GL_exp_imp
for (q, p, k) in set_QPK for (q, p, k) in set_QPK
} }
...@@ -2467,7 +2453,7 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -2467,7 +2453,7 @@ class InfrastructurePlanningProblem(EnergySystem):
# set of arc groups relying on SOS1 # set of arc groups relying on SOS1
set_T_sos1 = tuple(t for t in set_T if self.groups_arc_sos1[t]) set_T_sos1 = tuple(t for t in set_T if t in self.groups_arc_sos1)
# set of arg groups relying on non-negative real variables # set of arg groups relying on non-negative real variables
...@@ -2561,7 +2547,7 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -2561,7 +2547,7 @@ class InfrastructurePlanningProblem(EnergySystem):
} }
# maximum resource volume per segment (infinity is the default) # maximum resource volume per segment (infinity is the default)
param_v_max_glqpks = { param_v_max_glqpks = {
(g, l, q, p, k, s): self.networks[g] (g, l, q, p, k, s): self.networks[g]
.nodes[l][Network.KEY_NODE_PRICES][(q, p, k)] .nodes[l][Network.KEY_NODE_PRICES][(q, p, k)]
...@@ -3145,12 +3131,6 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -3145,12 +3131,6 @@ class InfrastructurePlanningProblem(EnergySystem):
set_GLLJH_arc_inv_sos1 = tuple(param_arc_inv_sos1_weights_glljh.keys()) set_GLLJH_arc_inv_sos1 = tuple(param_arc_inv_sos1_weights_glljh.keys())
set_GLLJH_arc_inv_sos1_gllj = {
(g, u, v, j): tuple((g, u, v, j, h) for h in set_H_gllj[(g, u, v, j)])
for (g, u, v) in set_J_arc_sos1
for j in set_J_arc_sos1[(g, u, v)]
}
# sos1 weights for flow sense determination # sos1 weights for flow sense determination
param_arc_sns_sos1_weights_glljqk = { param_arc_sns_sos1_weights_glljqk = {
...@@ -3214,7 +3194,7 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -3214,7 +3194,7 @@ class InfrastructurePlanningProblem(EnergySystem):
} }
param_arc_inv_sos1_weights_th = { param_arc_inv_sos1_weights_th = {
(t, h): h (t, h): self.groups_arc_sos1[t][h]
# (t, h): mean( # (t, h): mean(
# (self.use_sos1_arc_inv[(g,u,v,j)][h] # (self.use_sos1_arc_inv[(g,u,v,j)][h]
# if self.use_sos1_arc_inv[(g,u,v,j)] is not None else h) # if self.use_sos1_arc_inv[(g,u,v,j)] is not None else h)
...@@ -3309,6 +3289,8 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -3309,6 +3289,8 @@ class InfrastructurePlanningProblem(EnergySystem):
# ********************************************************************* # *********************************************************************
# produce a dictionary with the data for the problem # produce a dictionary with the data for the problem
# TODO: built the dict as a function of the price submodel
data_dict = { data_dict = {
None: { None: {
...@@ -3433,7 +3415,6 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -3433,7 +3415,6 @@ class InfrastructurePlanningProblem(EnergySystem):
"set_GLLJ_pre_fin_red": set_GLLJ_pre_fin_red, "set_GLLJ_pre_fin_red": set_GLLJ_pre_fin_red,
"set_GLLJ_arc_inv_sos1": set_GLLJ_arc_inv_sos1, "set_GLLJ_arc_inv_sos1": set_GLLJ_arc_inv_sos1,
"set_GLLJH_arc_inv_sos1": set_GLLJH_arc_inv_sos1, "set_GLLJH_arc_inv_sos1": set_GLLJH_arc_inv_sos1,
"set_GLLJH_arc_inv_sos1_gllj": set_GLLJH_arc_inv_sos1_gllj,
"set_GLLJQK_und_sns_sos1_red": set_GLLJQK_und_sns_sos1_red, "set_GLLJQK_und_sns_sos1_red": set_GLLJQK_und_sns_sos1_red,
"set_GLLJ_int": set_GLLJ_int, "set_GLLJ_int": set_GLLJ_int,
"set_GLLJ_static": set_GLLJ_static, "set_GLLJ_static": set_GLLJ_static,
...@@ -3524,6 +3505,16 @@ class InfrastructurePlanningProblem(EnergySystem): ...@@ -3524,6 +3505,16 @@ class InfrastructurePlanningProblem(EnergySystem):
"param_arc_sns_sos1_weights_glljqk": param_arc_sns_sos1_weights_glljqk, "param_arc_sns_sos1_weights_glljqk": param_arc_sns_sos1_weights_glljqk,
# ***************************************************************** # *****************************************************************
# ***************************************************************** # *****************************************************************
"block_prices": {
(*gl,*qpk): {
'param_price_function_is_convex': {None: param_price_function_is_convex[(*gl,*qpk)]},
'set_S': {None: set_S[(*gl,*qpk)]},
'param_p_s': {s: param_p_glqpks[(*gl,*qpk,s)] for s in set_S[(*gl,*qpk)]},
'param_v_max_s': {s: param_v_max_glqpks[(*gl,*qpk,s)] for s in set_S[(*gl,*qpk)] if (*gl,*qpk,s) in param_v_max_glqpks},
}
for gl in set_GL_exp_imp
for qpk in set_QPK
}
} }
} }
......
...@@ -13,6 +13,7 @@ from matplotlib import pyplot as plt ...@@ -13,6 +13,7 @@ from matplotlib import pyplot as plt
# local, internal # local, internal
from .problem import InfrastructurePlanningProblem from .problem import InfrastructurePlanningProblem
from .network import Network from .network import Network
from .blocks.prices import NODE_PRICE_DELTA, NODE_PRICE_LAMBDA #, NODE_PRICE_OTHER
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
...@@ -69,6 +70,7 @@ def statistics(ipp: InfrastructurePlanningProblem, ...@@ -69,6 +70,7 @@ def statistics(ipp: InfrastructurePlanningProblem,
other_node_keys: tuple = None): other_node_keys: tuple = None):
"Returns flow statistics using the optimisation results." "Returns flow statistics using the optimisation results."
prices_in_block = ipp.use_prices_block
if type(import_node_keys) == type(None): if type(import_node_keys) == type(None):
# pick all import nodes # pick all import nodes
...@@ -94,36 +96,116 @@ def statistics(ipp: InfrastructurePlanningProblem, ...@@ -94,36 +96,116 @@ def statistics(ipp: InfrastructurePlanningProblem,
for node_key in net.nodes() for node_key in net.nodes()
if Network.KEY_NODE_BASE_FLOW in net.nodes[node_key] if Network.KEY_NODE_BASE_FLOW in net.nodes[node_key]
) )
if prices_in_block:
# imports # imports
imports_qpk = { if ipp.node_price_model == NODE_PRICE_DELTA or ipp.node_price_model == NODE_PRICE_LAMBDA:
qpk: pyo.value( # imports
sum( imports_qpk = {
ipp.instance.var_trans_flows_glqpks[(g,l_imp,*qpk, s)] qpk: pyo.value(
for g, l_imp in import_node_keys sum(
# for g in ipp.networks ipp.instance.block_prices[(g,l_imp,*qpk)].var_trans_flows
# for l_imp in ipp.networks[g].import_nodes for g, l_imp in import_node_keys
for s in ipp.instance.set_S[(g,l_imp,*qpk)] )
) *ipp.instance.param_c_time_qpk[qpk]
*ipp.instance.param_c_time_qpk[qpk] )
) for qpk in ipp.time_frame.qpk()
for qpk in ipp.time_frame.qpk() }
} # exports
exports_qpk = {
# exports qpk: pyo.value(
exports_qpk = { sum(
qpk: pyo.value( ipp.instance.block_prices[(g,l_exp,*qpk)].var_trans_flows
sum( for g, l_exp in export_node_keys
ipp.instance.var_trans_flows_glqpks[(g,l_exp,*qpk, s)] )
for g, l_exp in export_node_keys *ipp.instance.param_c_time_qpk[qpk]
# for g in ipp.networks )
# for l_exp in ipp.networks[g].export_nodes for qpk in ipp.time_frame.qpk()
for s in ipp.instance.set_S[(g,l_exp,*qpk)] }
) else:
*ipp.instance.param_c_time_qpk[qpk] # imports
) imports_qpk = {
for qpk in ipp.time_frame.qpk() qpk: pyo.value(
} sum(
ipp.instance.block_prices[(g,l_imp,*qpk)].var_trans_flows_s[s]
for g, l_imp in import_node_keys
# for g in ipp.networks
# for l_imp in ipp.networks[g].import_nodes
for s in ipp.instance.block_prices[(g,l_imp,*qpk)].set_S
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
# exports
exports_qpk = {
qpk: pyo.value(
sum(
ipp.instance.block_prices[(g,l_exp,*qpk)].var_trans_flows_s[s]
for g, l_exp in export_node_keys
# for g in ipp.networks
# for l_exp in ipp.networks[g].export_nodes
for s in ipp.instance.block_prices[(g,l_exp,*qpk)].set_S
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
else:
# not in a block
if ipp.node_price_model == NODE_PRICE_DELTA or ipp.node_price_model == NODE_PRICE_LAMBDA:
# imports
imports_qpk = {
qpk: pyo.value(
sum(
ipp.instance.var_trans_flows_glqpk[(g,l_imp,*qpk)]
for g, l_imp in import_node_keys
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
# exports
exports_qpk = {
qpk: pyo.value(
sum(
ipp.instance.var_trans_flows_glqpk[(g,l_exp,*qpk)]
for g, l_exp in export_node_keys
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
else:
# imports
imports_qpk = {
qpk: pyo.value(
sum(
ipp.instance.var_trans_flows_glqpks[(g,l_imp,*qpk, s)]
for g, l_imp in import_node_keys
# for g in ipp.networks
# for l_imp in ipp.networks[g].import_nodes
for s in ipp.instance.set_S[(g,l_imp,*qpk)]
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
# exports
exports_qpk = {
qpk: pyo.value(
sum(
ipp.instance.var_trans_flows_glqpks[(g,l_exp,*qpk, s)]
for g, l_exp in export_node_keys
# for g in ipp.networks
# for l_exp in ipp.networks[g].export_nodes
for s in ipp.instance.set_S[(g,l_exp,*qpk)]
)
*ipp.instance.param_c_time_qpk[qpk]
)
for qpk in ipp.time_frame.qpk()
}
# balance # balance
balance_qpk = { balance_qpk = {
qpk: imports_qpk[qpk]-exports_qpk[qpk] qpk: imports_qpk[qpk]-exports_qpk[qpk]
......
...@@ -122,18 +122,18 @@ def example_amplitude_constrained_nnr_signals(): ...@@ -122,18 +122,18 @@ def example_amplitude_constrained_nnr_signals():
# by providing a non-numeric nr. of samples without specific lower bounds # by providing a non-numeric nr. of samples without specific lower bounds
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedNNRSignal( sig = signal.AmplitudeConstrainedNNRSignal(
number_samples=(number_intervals,), max_pos_amp_limit=10 number_samples=(number_intervals,), max_pos_amp_limit=10
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing negative lower bounds # by providing negative lower bounds
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedNNRSignal( sig = signal.AmplitudeConstrainedNNRSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -141,8 +141,8 @@ def example_amplitude_constrained_nnr_signals(): ...@@ -141,8 +141,8 @@ def example_amplitude_constrained_nnr_signals():
lower_bounds=[-1 for i in range(number_intervals)], lower_bounds=[-1 for i in range(number_intervals)],
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -378,7 +378,7 @@ def example_amplitude_constrained_signals(): ...@@ -378,7 +378,7 @@ def example_amplitude_constrained_signals():
# by providing negative 'positive' amplitude limits # by providing negative 'positive' amplitude limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -388,10 +388,10 @@ def example_amplitude_constrained_signals(): ...@@ -388,10 +388,10 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -401,12 +401,12 @@ def example_amplitude_constrained_signals(): ...@@ -401,12 +401,12 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing negative 'negative' amplitude limits # by providing negative 'negative' amplitude limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -416,10 +416,10 @@ def example_amplitude_constrained_signals(): ...@@ -416,10 +416,10 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=4, min_neg_amp_limit=4,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -429,12 +429,12 @@ def example_amplitude_constrained_signals(): ...@@ -429,12 +429,12 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=-4, min_neg_amp_limit=-4,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing non-numeric or not None amplitude limits (e.g. tuple) # by providing non-numeric or not None amplitude limits (e.g. tuple)
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -444,10 +444,10 @@ def example_amplitude_constrained_signals(): ...@@ -444,10 +444,10 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -457,10 +457,10 @@ def example_amplitude_constrained_signals(): ...@@ -457,10 +457,10 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -470,10 +470,10 @@ def example_amplitude_constrained_signals(): ...@@ -470,10 +470,10 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -483,12 +483,12 @@ def example_amplitude_constrained_signals(): ...@@ -483,12 +483,12 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=(3,), min_neg_amp_limit=(3,),
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing bounds incompatible with positive limits # by providing bounds incompatible with positive limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -500,12 +500,12 @@ def example_amplitude_constrained_signals(): ...@@ -500,12 +500,12 @@ def example_amplitude_constrained_signals():
lower_bounds=[10 for i in range(number_intervals)], lower_bounds=[10 for i in range(number_intervals)],
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing bounds incompatible with negative limits # by providing bounds incompatible with negative limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -517,12 +517,12 @@ def example_amplitude_constrained_signals(): ...@@ -517,12 +517,12 @@ def example_amplitude_constrained_signals():
lower_bounds=None, lower_bounds=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing incompatible maximum and minimum positive limits # by providing incompatible maximum and minimum positive limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -532,12 +532,12 @@ def example_amplitude_constrained_signals(): ...@@ -532,12 +532,12 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=None, min_neg_amp_limit=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing incompatible maximum and minimum negative limits # by providing incompatible maximum and minimum negative limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -547,12 +547,12 @@ def example_amplitude_constrained_signals(): ...@@ -547,12 +547,12 @@ def example_amplitude_constrained_signals():
min_neg_amp_limit=11, min_neg_amp_limit=11,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing non-numeric or not None amplitude limits (e.g. tuple) # by providing non-numeric or not None amplitude limits (e.g. tuple)
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -563,12 +563,12 @@ def example_amplitude_constrained_signals(): ...@@ -563,12 +563,12 @@ def example_amplitude_constrained_signals():
) )
sig.set_positive_amplitude(positive_amplitude=(5,)) sig.set_positive_amplitude(positive_amplitude=(5,))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing non-numeric or not None amplitude limits (e.g. tuple) # by providing non-numeric or not None amplitude limits (e.g. tuple)
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -579,12 +579,12 @@ def example_amplitude_constrained_signals(): ...@@ -579,12 +579,12 @@ def example_amplitude_constrained_signals():
) )
sig.set_negative_amplitude(negative_amplitude=(5,)) sig.set_negative_amplitude(negative_amplitude=(5,))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by checking if bounds have been violated without there being samples # by checking if bounds have been violated without there being samples
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -602,13 +602,13 @@ def example_amplitude_constrained_signals(): ...@@ -602,13 +602,13 @@ def example_amplitude_constrained_signals():
assert not sig.is_signal_fixed() # signal is not set assert not sig.is_signal_fixed() # signal is not set
assert not sig.violates_amplitude_limits() # since the sig is not set assert not sig.violates_amplitude_limits() # since the sig is not set
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a positive amplitude when there are no positive # by seeking to validate a positive amplitude when there are no positive
# amplitude limits # amplitude limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -619,13 +619,13 @@ def example_amplitude_constrained_signals(): ...@@ -619,13 +619,13 @@ def example_amplitude_constrained_signals():
) )
sig.validate_negative_amplitude() sig.validate_negative_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a negative amplitude when there are no negative # by seeking to validate a negative amplitude when there are no negative
# amplitude limits # amplitude limits
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -636,13 +636,13 @@ def example_amplitude_constrained_signals(): ...@@ -636,13 +636,13 @@ def example_amplitude_constrained_signals():
) )
sig.validate_positive_amplitude() sig.validate_positive_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a positive amplitude that exceeds its tolerated # by seeking to validate a positive amplitude that exceeds its tolerated
# maximum, using the internal positive amplitude # maximum, using the internal positive amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -654,13 +654,13 @@ def example_amplitude_constrained_signals(): ...@@ -654,13 +654,13 @@ def example_amplitude_constrained_signals():
sig.set_positive_amplitude(12) sig.set_positive_amplitude(12)
sig.validate_positive_amplitude() sig.validate_positive_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a positive amplitude that exceeds its tolerated # by seeking to validate a positive amplitude that exceeds its tolerated
# maximum, using an externally supplied amplitude # maximum, using an externally supplied amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -671,13 +671,13 @@ def example_amplitude_constrained_signals(): ...@@ -671,13 +671,13 @@ def example_amplitude_constrained_signals():
) )
sig.validate_positive_amplitude(12) sig.validate_positive_amplitude(12)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a positive amplitude that is below its tolerated # by seeking to validate a positive amplitude that is below its tolerated
# minimum, using the internal positive amplitude # minimum, using the internal positive amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -689,13 +689,13 @@ def example_amplitude_constrained_signals(): ...@@ -689,13 +689,13 @@ def example_amplitude_constrained_signals():
sig.set_positive_amplitude(2) sig.set_positive_amplitude(2)
sig.validate_positive_amplitude() sig.validate_positive_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a positive amplitude that is below its tolerated # by seeking to validate a positive amplitude that is below its tolerated
# minimum, using an externally supplied amplitude # minimum, using an externally supplied amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -706,13 +706,13 @@ def example_amplitude_constrained_signals(): ...@@ -706,13 +706,13 @@ def example_amplitude_constrained_signals():
) )
sig.validate_positive_amplitude(2) sig.validate_positive_amplitude(2)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a negative amplitude that exceeds its tolerated # by seeking to validate a negative amplitude that exceeds its tolerated
# maximum, using the internal negative amplitude # maximum, using the internal negative amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -724,13 +724,13 @@ def example_amplitude_constrained_signals(): ...@@ -724,13 +724,13 @@ def example_amplitude_constrained_signals():
sig.set_negative_amplitude(12) sig.set_negative_amplitude(12)
sig.validate_negative_amplitude() sig.validate_negative_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a negative amplitude that exceeds its tolerated # by seeking to validate a negative amplitude that exceeds its tolerated
# maximum, using an externally supplied amplitude # maximum, using an externally supplied amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -741,13 +741,13 @@ def example_amplitude_constrained_signals(): ...@@ -741,13 +741,13 @@ def example_amplitude_constrained_signals():
) )
sig.validate_negative_amplitude(12) sig.validate_negative_amplitude(12)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a negative amplitude that is below its tolerated # by seeking to validate a negative amplitude that is below its tolerated
# minimum, using the internal negative amplitude # minimum, using the internal negative amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -759,13 +759,13 @@ def example_amplitude_constrained_signals(): ...@@ -759,13 +759,13 @@ def example_amplitude_constrained_signals():
sig.set_negative_amplitude(2) sig.set_negative_amplitude(2)
sig.validate_negative_amplitude() sig.validate_negative_amplitude()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by seeking to validate a negative amplitude that is below its tolerated # by seeking to validate a negative amplitude that is below its tolerated
# minimum, using an externally supplied amplitude # minimum, using an externally supplied amplitude
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.AmplitudeConstrainedSignal( sig = signal.AmplitudeConstrainedSignal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -776,8 +776,8 @@ def example_amplitude_constrained_signals(): ...@@ -776,8 +776,8 @@ def example_amplitude_constrained_signals():
) )
sig.validate_negative_amplitude(2) sig.validate_negative_amplitude(2)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -793,7 +793,7 @@ def example_peculiar_errors(): ...@@ -793,7 +793,7 @@ def example_peculiar_errors():
# by providing samples as something other than a list, e.g. tuples # by providing samples as something other than a list, e.g. tuples
error_was_triggered = False error_was_raised = False
try: try:
_ = signal.Signal( _ = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -802,12 +802,12 @@ def example_peculiar_errors(): ...@@ -802,12 +802,12 @@ def example_peculiar_errors():
upper_bounds=None, upper_bounds=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing an incorrect number of samples # by providing an incorrect number of samples
error_was_triggered = False error_was_raised = False
try: try:
_ = signal.Signal( _ = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -816,8 +816,8 @@ def example_peculiar_errors(): ...@@ -816,8 +816,8 @@ def example_peculiar_errors():
upper_bounds=None, upper_bounds=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ************************************************************************** # **************************************************************************
# ************************************************************************** # **************************************************************************
...@@ -830,7 +830,7 @@ def example_peculiar_errors(): ...@@ -830,7 +830,7 @@ def example_peculiar_errors():
upper_bounds = [7 for i in range(number_intervals)] upper_bounds = [7 for i in range(number_intervals)]
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -841,12 +841,12 @@ def example_peculiar_errors(): ...@@ -841,12 +841,12 @@ def example_peculiar_errors():
sig.lower_bounds = [random.random() for i in range(number_intervals + 1)] sig.lower_bounds = [random.random() for i in range(number_intervals + 1)]
sig.has_lower_bounds() sig.has_lower_bounds()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing an incorrect number of upper bounds # by providing an incorrect number of upper bounds
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -857,12 +857,12 @@ def example_peculiar_errors(): ...@@ -857,12 +857,12 @@ def example_peculiar_errors():
sig.upper_bounds = [random.random() for i in range(number_intervals - 1)] sig.upper_bounds = [random.random() for i in range(number_intervals - 1)]
sig.has_upper_bounds() sig.has_upper_bounds()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing an incorrect number of samples # by providing an incorrect number of samples
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -873,12 +873,12 @@ def example_peculiar_errors(): ...@@ -873,12 +873,12 @@ def example_peculiar_errors():
sig.samples = [random.random() for i in range(number_intervals - 1)] sig.samples = [random.random() for i in range(number_intervals - 1)]
sig.is_signal_fixed() sig.is_signal_fixed()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by deleting the lower bounds after creating the object # by deleting the lower bounds after creating the object
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.NonNegativeRealSignal(number_samples=number_intervals) sig = signal.NonNegativeRealSignal(number_samples=number_intervals)
sig.lower_bounds = None sig.lower_bounds = None
...@@ -886,12 +886,12 @@ def example_peculiar_errors(): ...@@ -886,12 +886,12 @@ def example_peculiar_errors():
if not sig.are_bounds_nnr(): if not sig.are_bounds_nnr():
raise ValueError() raise ValueError()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing negative upper bounds (requires even lower lower bounds) # by providing negative upper bounds (requires even lower lower bounds)
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.NonNegativeRealSignal(number_samples=number_intervals) sig = signal.NonNegativeRealSignal(number_samples=number_intervals)
sig.is_upper_bounded = True sig.is_upper_bounded = True
...@@ -899,8 +899,8 @@ def example_peculiar_errors(): ...@@ -899,8 +899,8 @@ def example_peculiar_errors():
if not sig.are_bounds_nnr(): if not sig.are_bounds_nnr():
raise ValueError() raise ValueError()
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -1016,48 +1016,48 @@ def example_binary_signals(): ...@@ -1016,48 +1016,48 @@ def example_binary_signals():
# by specifying an integrality tolerance greater than or equal to 0.5 # by specifying an integrality tolerance greater than or equal to 0.5
error_was_triggered = False error_was_raised = False
try: try:
sig.is_signal_binary_only(integrality_tolerance=0.5) sig.is_signal_binary_only(integrality_tolerance=0.5)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by specifying an integrality tolerance greater than or equal to 0.5 # by specifying an integrality tolerance greater than or equal to 0.5
error_was_triggered = False error_was_raised = False
try: try:
sig.is_signal_integer_only(integrality_tolerance=0.5) sig.is_signal_integer_only(integrality_tolerance=0.5)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by specifying an integrality tolerance as a tuple # by specifying an integrality tolerance as a tuple
error_was_triggered = False error_was_raised = False
try: try:
sig.is_signal_binary_only(integrality_tolerance=(0.5,)) sig.is_signal_binary_only(integrality_tolerance=(0.5,))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by specifying an integrality tolerance as a tuple # by specifying an integrality tolerance as a tuple
error_was_triggered = False error_was_raised = False
try: try:
sig.is_signal_integer_only(integrality_tolerance=(0.5,)) sig.is_signal_integer_only(integrality_tolerance=(0.5,))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by specifying the number of samples as a float # by specifying the number of samples as a float
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.BinarySignal(number_samples=float(number_intervals)) sig = signal.BinarySignal(number_samples=float(number_intervals))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -1159,24 +1159,24 @@ def example_nnr_signals(): ...@@ -1159,24 +1159,24 @@ def example_nnr_signals():
# by providing a float as the number of intervals # by providing a float as the number of intervals
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.NonNegativeRealSignal(number_samples=float(number_intervals)) sig = signal.NonNegativeRealSignal(number_samples=float(number_intervals))
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing negative lower bounds # by providing negative lower bounds
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.NonNegativeRealSignal( sig = signal.NonNegativeRealSignal(
number_samples=number_intervals, number_samples=number_intervals,
lower_bounds=[-1 for i in range(number_intervals)], lower_bounds=[-1 for i in range(number_intervals)],
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing samples that are not nnr # by providing samples that are not nnr
...@@ -1184,23 +1184,23 @@ def example_nnr_signals(): ...@@ -1184,23 +1184,23 @@ def example_nnr_signals():
samples[-1] = -1 samples[-1] = -1
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.FixedNonNegativeRealSignal(samples=samples) sig = signal.FixedNonNegativeRealSignal(samples=samples)
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing samples as tuples # by providing samples as tuples
samples = (random.random() for i in range(number_intervals)) samples = (random.random() for i in range(number_intervals))
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.FixedNonNegativeRealSignal(samples=samples) sig = signal.FixedNonNegativeRealSignal(samples=samples)
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -1254,21 +1254,21 @@ def example_set_signal(): ...@@ -1254,21 +1254,21 @@ def example_set_signal():
# by providing an integer instead of a list # by providing an integer instead of a list
error_was_triggered = False error_was_raised = False
try: try:
sig.set_signal(samples=3) sig.set_signal(samples=3)
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing an incorrectly sized list # by providing an incorrectly sized list
error_was_triggered = False error_was_raised = False
try: try:
sig.set_signal(samples=[2 for i in range(number_intervals + 1)]) sig.set_signal(samples=[2 for i in range(number_intervals + 1)])
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ************************************************************************** # **************************************************************************
...@@ -1413,7 +1413,7 @@ def example_bounded_signals(): ...@@ -1413,7 +1413,7 @@ def example_bounded_signals():
# by providing upper bounds with an inconsistent number of samples # by providing upper bounds with an inconsistent number of samples
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1422,12 +1422,12 @@ def example_bounded_signals(): ...@@ -1422,12 +1422,12 @@ def example_bounded_signals():
upper_bounds=[10 for i in range(number_intervals - 1)], # one too few upper_bounds=[10 for i in range(number_intervals - 1)], # one too few
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing lower bounds with an inconsistent number of samples # by providing lower bounds with an inconsistent number of samples
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1436,12 +1436,12 @@ def example_bounded_signals(): ...@@ -1436,12 +1436,12 @@ def example_bounded_signals():
upper_bounds=None, upper_bounds=None,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing upper bounds not as a list but as a numeric type # by providing upper bounds not as a list but as a numeric type
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1450,12 +1450,12 @@ def example_bounded_signals(): ...@@ -1450,12 +1450,12 @@ def example_bounded_signals():
upper_bounds=6, upper_bounds=6,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing lower bounds not as a list but as a numeric type # by providing lower bounds not as a list but as a numeric type
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1464,8 +1464,8 @@ def example_bounded_signals(): ...@@ -1464,8 +1464,8 @@ def example_bounded_signals():
upper_bounds=[5 for i in range(number_intervals)], upper_bounds=[5 for i in range(number_intervals)],
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing upper bounds lower than the lower bounds # by providing upper bounds lower than the lower bounds
...@@ -1475,7 +1475,7 @@ def example_bounded_signals(): ...@@ -1475,7 +1475,7 @@ def example_bounded_signals():
upper_bounds[-1] = 3 upper_bounds[-1] = 3
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1484,8 +1484,8 @@ def example_bounded_signals(): ...@@ -1484,8 +1484,8 @@ def example_bounded_signals():
upper_bounds=upper_bounds, upper_bounds=upper_bounds,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing lower bounds higher than the uppper bounds # by providing lower bounds higher than the uppper bounds
...@@ -1495,7 +1495,7 @@ def example_bounded_signals(): ...@@ -1495,7 +1495,7 @@ def example_bounded_signals():
lower_bounds[-1] = 9 lower_bounds[-1] = 9
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=number_intervals, number_samples=number_intervals,
...@@ -1504,8 +1504,8 @@ def example_bounded_signals(): ...@@ -1504,8 +1504,8 @@ def example_bounded_signals():
upper_bounds=upper_bounds, upper_bounds=upper_bounds,
) )
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -1562,7 +1562,7 @@ def example_free_signals(): ...@@ -1562,7 +1562,7 @@ def example_free_signals():
# by providing a float as the number of intervals # by providing a float as the number of intervals
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=float(number_intervals), number_samples=float(number_intervals),
...@@ -1571,8 +1571,8 @@ def example_free_signals(): ...@@ -1571,8 +1571,8 @@ def example_free_signals():
upper_bounds=None, upper_bounds=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
...@@ -1619,25 +1619,25 @@ def example_fixed_signals(): ...@@ -1619,25 +1619,25 @@ def example_fixed_signals():
# by providing a None when creating a FixedSignal # by providing a None when creating a FixedSignal
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.FixedSignal(samples=None) sig = signal.FixedSignal(samples=None)
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing an empty list # by providing an empty list
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.FixedSignal(samples=[]) sig = signal.FixedSignal(samples=[])
except ValueError: except ValueError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# by providing the number of samples as a float # by providing the number of samples as a float
error_was_triggered = False error_was_raised = False
try: try:
sig = signal.Signal( sig = signal.Signal(
number_samples=float(number_intervals), number_samples=float(number_intervals),
...@@ -1646,8 +1646,8 @@ def example_fixed_signals(): ...@@ -1646,8 +1646,8 @@ def example_fixed_signals():
upper_bounds=None, upper_bounds=None,
) )
except TypeError: except TypeError:
error_was_triggered = True error_was_raised = True
assert error_was_triggered assert error_was_raised
# ****************************************************************************** # ******************************************************************************
......
...@@ -651,7 +651,7 @@ class TestDataFinance: ...@@ -651,7 +651,7 @@ class TestDataFinance:
# trigger ValueError # trigger ValueError
error_triggered = False error_raised = False
investment_period = analysis_period_span + 1 investment_period = analysis_period_span + 1
try: try:
npv_salvage = present_salvage_value_annuity( npv_salvage = present_salvage_value_annuity(
...@@ -662,8 +662,8 @@ class TestDataFinance: ...@@ -662,8 +662,8 @@ class TestDataFinance:
analysis_period_span=analysis_period_span, analysis_period_span=analysis_period_span,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
...@@ -1197,7 +1197,7 @@ class TestDataFinance: ...@@ -1197,7 +1197,7 @@ class TestDataFinance:
investment_period = analysis_period_span + 1 investment_period = analysis_period_span + 1
error_triggered = False error_raised = False
try: try:
residual_value = salvage_value_linear_depreciation( residual_value = salvage_value_linear_depreciation(
investment=investment, investment=investment,
...@@ -1206,8 +1206,8 @@ class TestDataFinance: ...@@ -1206,8 +1206,8 @@ class TestDataFinance:
analysis_period_span=analysis_period_span, analysis_period_span=analysis_period_span,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -1318,78 +1318,78 @@ class TestDataFinance: ...@@ -1318,78 +1318,78 @@ class TestDataFinance:
# TypeError('The discount rates must be provided as a tuple.') # TypeError('The discount rates must be provided as a tuple.')
error_triggered = False error_raised = False
try: try:
my_inv = Investment(list(i_t), R_t) my_inv = Investment(list(i_t), R_t)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# ValueError('The duration of the period under analysis must be positive.') # ValueError('The duration of the period under analysis must be positive.')
error_triggered = False error_raised = False
try: try:
my_inv = Investment(tuple()) my_inv = Investment(tuple())
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# TypeError('The discount rate must be provided as a float.') # TypeError('The discount rate must be provided as a float.')
error_triggered = False error_raised = False
try: try:
my_inv = Investment(None, None, 5, 10) my_inv = Investment(None, None, 5, 10)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# ValueError('The discount rate must be in the open interval between 0 and 1.) # ValueError('The discount rate must be in the open interval between 0 and 1.)
error_triggered = False error_raised = False
try: try:
my_inv = Investment(None, None, 1.35, 10) my_inv = Investment(None, None, 1.35, 10)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# TypeError('The duration of the period under consideration must be provided as an integer.') # TypeError('The duration of the period under consideration must be provided as an integer.')
error_triggered = False error_raised = False
try: try:
my_inv = Investment(None, None, 0.35, 10.0) my_inv = Investment(None, None, 0.35, 10.0)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# ValueError('The duration of the period under analysis must be positive.) # ValueError('The duration of the period under analysis must be positive.)
error_triggered = False error_raised = False
try: try:
my_inv = Investment(None, None, 0.35, 0) my_inv = Investment(None, None, 0.35, 0)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# TypeError('The net cash flows must be provided as a list.') # TypeError('The net cash flows must be provided as a list.')
error_triggered = False error_raised = False
try: try:
my_inv = Investment(i_t, tuple(R_t)) my_inv = Investment(i_t, tuple(R_t))
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
......
...@@ -140,7 +140,7 @@ class TestDataUtils: ...@@ -140,7 +140,7 @@ class TestDataUtils:
# raise exception # raise exception
error_triggered = False error_raised = False
time_interval_durations.pop(0) time_interval_durations.pop(0)
try: try:
new_profile = utils.create_profile_using_time_weighted_state( new_profile = utils.create_profile_using_time_weighted_state(
...@@ -151,8 +151,8 @@ class TestDataUtils: ...@@ -151,8 +151,8 @@ class TestDataUtils:
states_correlate_profile=True, states_correlate_profile=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -333,14 +333,14 @@ class TestDataUtils: ...@@ -333,14 +333,14 @@ class TestDataUtils:
# use zero iterations to force an error # use zero iterations to force an error
error_triggered = False error_raised = False
try: try:
new_key = utils.generate_pseudo_unique_key( new_key = utils.generate_pseudo_unique_key(
key_list=key_list, max_iterations=0 key_list=key_list, max_iterations=0
) )
except Exception: except Exception:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# use a seed number to trigger more iterations # use a seed number to trigger more iterations
......
...@@ -654,7 +654,7 @@ class TestDistrictHeatingNetwork: ...@@ -654,7 +654,7 @@ class TestDistrictHeatingNetwork:
# # single pipe, external cost, offset # # single pipe, external cost, offset
# error_triggered = False # error_raised = False
# try: # try:
# pipe_trench_obj = PipeTrench(name='hello', # pipe_trench_obj = PipeTrench(name='hello',
# trenches={0: trench_tech}, # trenches={0: trench_tech},
...@@ -665,12 +665,12 @@ class TestDistrictHeatingNetwork: ...@@ -665,12 +665,12 @@ class TestDistrictHeatingNetwork:
# minimum_cost_offset=external_cost, # minimum_cost_offset=external_cost,
# validate=True) # validate=True)
# except TypeError: # except TypeError:
# error_triggered = True # error_raised = True
# assert error_triggered # assert error_raised
# # use list as minimum cost offset # # use list as minimum cost offset
# error_triggered = False # error_raised = False
# try: # try:
# pipe_trench_obj = PipeTrench(name='hello', # pipe_trench_obj = PipeTrench(name='hello',
# trenches={0: trench_tech}, # trenches={0: trench_tech},
...@@ -684,8 +684,8 @@ class TestDistrictHeatingNetwork: ...@@ -684,8 +684,8 @@ class TestDistrictHeatingNetwork:
# ), # ),
# validate=True) # validate=True)
# except TypeError: # except TypeError:
# error_triggered = True # error_raised = True
# assert error_triggered # assert error_raised
# #************************************************************************** # #**************************************************************************
# #************************************************************************** # #**************************************************************************
...@@ -754,7 +754,7 @@ class TestDistrictHeatingNetwork: ...@@ -754,7 +754,7 @@ class TestDistrictHeatingNetwork:
# # single pipe, external cost, offset # # single pipe, external cost, offset
# error_triggered = False # error_raised = False
# try: # try:
# pipe_trench_obj = PipeTrench(name='hello', # pipe_trench_obj = PipeTrench(name='hello',
# trenches={0: trench_tech}, # trenches={0: trench_tech},
...@@ -765,12 +765,12 @@ class TestDistrictHeatingNetwork: ...@@ -765,12 +765,12 @@ class TestDistrictHeatingNetwork:
# minimum_cost_offset=external_cost, # minimum_cost_offset=external_cost,
# validate=True) # validate=True)
# except TypeError: # except TypeError:
# error_triggered = True # error_raised = True
# assert error_triggered # assert error_raised
# # use list as minimum cost offset # # use list as minimum cost offset
# error_triggered = False # error_raised = False
# try: # try:
# pipe_trench_obj = PipeTrench(name='hello', # pipe_trench_obj = PipeTrench(name='hello',
# trenches={0: trench_tech}, # trenches={0: trench_tech},
...@@ -784,8 +784,8 @@ class TestDistrictHeatingNetwork: ...@@ -784,8 +784,8 @@ class TestDistrictHeatingNetwork:
# ), # ),
# validate=True) # validate=True)
# except TypeError: # except TypeError:
# error_triggered = True # error_raised = True
# assert error_triggered # assert error_raised
# #************************************************************************** # #**************************************************************************
# #************************************************************************** # #**************************************************************************
......
...@@ -20,7 +20,6 @@ from topupheat.common.fluids import FluidDatabase # , Fluid ...@@ -20,7 +20,6 @@ from topupheat.common.fluids import FluidDatabase # , Fluid
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
class TestDistrictHeatingNetworkUtils: class TestDistrictHeatingNetworkUtils:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -528,7 +527,7 @@ class TestDistrictHeatingNetworkUtils: ...@@ -528,7 +527,7 @@ class TestDistrictHeatingNetworkUtils:
# update the nodes # update the nodes
network.add_node(0, x=55, y=12) network.add_node(0, x=55, y=12)
network.add_node(2, x=55.01, y=12.01) network.add_node(2, x=55.01, y=12.01)
# ********************************************************************* # *********************************************************************
utils.summarise_network_by_pipe_technology(network, False) utils.summarise_network_by_pipe_technology(network, False)
......
# imports
# local
# import numpy as np
# import networkx as nx
from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem
from src.topupopt.problems.esipp.network import Network
from src.topupopt.problems.esipp.time import EconomicTimeFrame
from src.topupopt.problems.esipp.blocks.prices import NODE_PRICE_OTHER
# *****************************************************************************
# *****************************************************************************
def check_problem_size(ipp: InfrastructurePlanningProblem, nc, nv, nnz):
    """Assert that the solved problem has the expected dimensions.

    Checks the constraint, variable and nonzero counts reported by the
    solver against the expected values *nc*, *nv* and *nnz*.
    """
    problem_info = ipp.results["Problem"][0]
    expected_sizes = {
        "Number of constraints": nc,
        "Number of variables": nv,
        "Number of nonzeros": nnz,
    }
    for field, expected_value in expected_sizes.items():
        assert problem_info[field] == expected_value
# *****************************************************************************
# *****************************************************************************
def build_solve_ipp(
    solver: str = 'glpk',
    solver_options: dict = None,
    use_sos_arcs: bool = False,
    use_sos_arc_groups: bool = False,
    arc_sos_weight_key: str = InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE,
    arc_use_real_variables_if_possible: bool = False,
    use_sos_sense: bool = False,
    sense_sos_weight_key: int = (
        InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER
    ),
    sense_use_real_variables_if_possible: bool = False,
    sense_use_arc_interfaces: bool = False,
    perform_analysis: bool = False,
    plot_results: bool = False,
    print_solver_output: bool = False,
    time_frame: EconomicTimeFrame = None,
    networks: dict = None,
    converters: dict = None,
    static_losses_mode=None,
    mandatory_arcs: list = None,
    max_number_parallel_arcs: dict = None,
    arc_groups_dict: dict = None,
    init_aux_sets: bool = False,
    # discount_rates: dict = None,
    assessment_weights: dict = None,
    simplify_problem: bool = False,
    use_prices_block: bool = False,
    node_price_model: int = NODE_PRICE_OTHER
):
    """Build, configure, instantiate and optimise an ESIPP problem.

    Test helper: assembles an InfrastructurePlanningProblem from the given
    networks/converters, applies the requested modelling options (SOS1
    constraints, arc interfaces, static-loss placement, arc groups,
    parallel-arc limits), instantiates it, runs the solver and returns the
    problem object with results attached.

    Parameters mirror the InfrastructurePlanningProblem API; optional dict
    arguments (networks, converters, assessment_weights,
    max_number_parallel_arcs) default to empty when omitted. A
    static_losses_mode must always be supplied: None (the default) raises
    ValueError, which keeps the tests explicit about the mode under test.

    Raises:
        ValueError: if static_losses_mode is not one of the recognised modes.

    Note: perform_analysis and plot_results are accepted for interface
    compatibility but are not used in this helper.
    """
    # normalise optional mapping arguments so the iterations below are safe
    if not isinstance(assessment_weights, dict):
        assessment_weights = {}  # default
    if not isinstance(converters, dict):
        converters = {}
    if not isinstance(networks, dict):
        networks = {}
    if not isinstance(max_number_parallel_arcs, dict):
        # fix: this was previously left as None, which made the
        # "for key in max_number_parallel_arcs" loop below raise a
        # TypeError whenever the argument was omitted
        max_number_parallel_arcs = {}
    # time weights
    # relative weight of time period
    # one interval twice as long as the average is worth twice
    # one interval half as long as the average is worth half
    # time_weights = [
    #     [time_period_duration/average_time_interval_duration
    #       for time_period_duration in intraperiod_time_interval_duration]
    #     for p in range(number_periods)]
    time_weights = None  # nothing yet
    normalised_time_interval_duration = None  # nothing yet
    # create problem object
    ipp = InfrastructurePlanningProblem(
        # discount_rates=discount_rates,
        time_frame=time_frame,
        # reporting_periods=time_frame.reporting_periods,
        # time_intervals=time_frame.time_interval_durations,
        time_weights=time_weights,
        normalised_time_interval_duration=normalised_time_interval_duration,
        assessment_weights=assessment_weights,
        use_prices_block=use_prices_block,
        node_price_model=node_price_model
    )
    # add networks and systems
    for netkey, net in networks.items():
        ipp.add_network(network_key=netkey, network=net)
    # add converters
    for cvtkey, cvt in converters.items():
        ipp.add_converter(converter_key=cvtkey, converter=cvt)
    # define arcs as mandatory
    if isinstance(mandatory_arcs, list):
        for full_arc_key in mandatory_arcs:
            # full_arc_key is (network_key, *arc_key)
            ipp.make_arc_mandatory(full_arc_key[0], full_arc_key[1:])
    # if make_all_arcs_mandatory:
    #     for network_key in ipp.networks:
    #         for arc_key in ipp.networks[network_key].edges(keys=True):
    #             # preexisting arcs are no good
    #             if ipp.networks[network_key].edges[arc_key][
    #                     Network.KEY_ARC_TECH].has_been_selected():
    #                 continue
    #             ipp.make_arc_mandatory(network_key, arc_key)
    # set up the use of sos for arc selection
    if use_sos_arcs:
        for network_key in ipp.networks:
            for arc_key in ipp.networks[network_key].edges(keys=True):
                if (
                    ipp.networks[network_key]
                    .edges[arc_key][Network.KEY_ARC_TECH]
                    .has_been_selected()
                ):
                    # skip arcs that have already been selected (pre-existing)
                    continue
                ipp.use_sos1_for_arc_selection(
                    network_key,
                    arc_key,
                    use_real_variables_if_possible=(
                        arc_use_real_variables_if_possible
                    ),
                    sos1_weight_method=arc_sos_weight_key,
                )
    # set up the use of sos for flow sense determination
    if use_sos_sense:
        for network_key in ipp.networks:
            for arc_key in ipp.networks[network_key].edges(keys=True):
                # only undirected arcs need a flow sense decision
                if not ipp.networks[network_key].edges[arc_key][
                    Network.KEY_ARC_UND
                ]:
                    continue
                ipp.use_sos1_for_flow_senses(
                    network_key,
                    arc_key,
                    use_real_variables_if_possible=(
                        sense_use_real_variables_if_possible
                    ),
                    use_interface_variables=sense_use_arc_interfaces,
                    sos1_weight_method=sense_sos_weight_key,
                )
    elif sense_use_arc_interfaces:  # set up the use of arc interfaces w/o sos1
        for network_key in ipp.networks:
            for arc_key in ipp.networks[network_key].edges(keys=True):
                if (
                    ipp.networks[network_key]
                    .edges[arc_key][Network.KEY_ARC_TECH]
                    .has_been_selected()
                ):
                    continue
                ipp.use_interface_variables_for_arc_selection(network_key, arc_key)
    # static losses: pick where in the arc the fixed losses are placed
    if static_losses_mode == ipp.STATIC_LOSS_MODE_ARR:
        ipp.place_static_losses_arrival_node()
    elif static_losses_mode == ipp.STATIC_LOSS_MODE_DEP:
        ipp.place_static_losses_departure_node()
    elif static_losses_mode == ipp.STATIC_LOSS_MODE_US:
        ipp.place_static_losses_upstream()
    elif static_losses_mode == ipp.STATIC_LOSS_MODE_DS:
        ipp.place_static_losses_downstream()
    else:
        raise ValueError("Unknown static loss modelling mode.")
    # *********************************************************************
    # groups
    if arc_groups_dict is not None:
        for key in arc_groups_dict:
            ipp.create_arc_group(
                arc_groups_dict[key],
                use_sos1=use_sos_arc_groups,
                sos1_weight_method=arc_sos_weight_key
            )
    # *********************************************************************
    # maximum number of parallel arcs
    for key in max_number_parallel_arcs:
        # key is (network_key, node_a, node_b)
        ipp.set_maximum_number_parallel_arcs(
            network_key=key[0],
            node_a=key[1],
            node_b=key[2],
            limit=max_number_parallel_arcs[key],
        )
    # *********************************************************************
    if simplify_problem:
        ipp.simplify_peak_total_assessments()
    # *********************************************************************
    # instantiate (disable the default case v-a-v fixed losses)
    # ipp.instantiate(place_fixed_losses_upstream_if_possible=False)
    ipp.instantiate(initialise_ancillary_sets=init_aux_sets)
    # optimise
    ipp.optimise(
        solver_name=solver,
        solver_options=solver_options,
        output_options={},
        print_solver_output=print_solver_output,
    )
    # ipp.instance.pprint()
    # return the problem object
    return ipp
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# imports # imports
# standard # standard
import pytest
import random import random
from networkx import binomial_tree, MultiDiGraph from networkx import binomial_tree, MultiDiGraph
# local # local
from src.topupopt.problems.esipp.network import Arcs, Network from src.topupopt.problems.esipp.network import Arcs, Network
from src.topupopt.problems.esipp.network import ArcsWithoutLosses from src.topupopt.problems.esipp.network import ArcsWithoutLosses
from src.topupopt.problems.esipp.network import ArcsWithoutProportionalLosses from src.topupopt.problems.esipp.network import ArcsWithoutProportionalLosses
from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses
from src.topupopt.problems.esipp.resource import ResourcePrice from src.topupopt.problems.esipp.resource import ResourcePrice
from src.topupopt.data.misc.utils import generate_pseudo_unique_key from src.topupopt.data.misc.utils import generate_pseudo_unique_key
# ***************************************************************************** # *****************************************************************************
...@@ -168,7 +161,7 @@ class TestNetwork: ...@@ -168,7 +161,7 @@ class TestNetwork:
# TypeError: The static losses should be given as a dict or None. # TypeError: The static losses should be given as a dict or None.
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="any", name="any",
...@@ -185,13 +178,13 @@ class TestNetwork: ...@@ -185,13 +178,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The static losses should be specified for each arc # ValueError('The static losses should be specified for each arc
# option.') # option.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="any", name="any",
...@@ -212,12 +205,12 @@ class TestNetwork: ...@@ -212,12 +205,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('The static losses must be specified via a list of lists.') # TypeError('The static losses must be specified via a list of lists.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="any", name="any",
...@@ -234,13 +227,13 @@ class TestNetwork: ...@@ -234,13 +227,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The static loss values are inconsistent with the number ' # ValueError('The static loss values are inconsistent with the number '
# 'of options, scenarios and intervals.') # 'of options, scenarios and intervals.')
error_triggered = False error_raised = False
try: try:
arc_tech = Arcs( arc_tech = Arcs(
name="any", name="any",
...@@ -267,12 +260,12 @@ class TestNetwork: ...@@ -267,12 +260,12 @@ class TestNetwork:
], ],
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('The static losses were not provided as numbers.') # TypeError('The static losses were not provided as numbers.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="any", name="any",
...@@ -291,12 +284,12 @@ class TestNetwork: ...@@ -291,12 +284,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The static losses must be positive or zero.') # ValueError('The static losses must be positive or zero.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="any", name="any",
...@@ -315,12 +308,12 @@ class TestNetwork: ...@@ -315,12 +308,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError: The static loss dict keys must be tuples # TypeError: The static loss dict keys must be tuples
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -334,12 +327,12 @@ class TestNetwork: ...@@ -334,12 +327,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError( 'The static loss dict keys must be tuples of size 3.') # ValueError( 'The static loss dict keys must be tuples of size 3.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -353,12 +346,12 @@ class TestNetwork: ...@@ -353,12 +346,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError(The staticl osses should be given as a dict or None.') # TypeError(The staticl osses should be given as a dict or None.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -372,14 +365,14 @@ class TestNetwork: ...@@ -372,14 +365,14 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError( # ValueError(
# 'No static loss values were provided. There should be one'+ # 'No static loss values were provided. There should be one'+
# ' value per option, scenario and time interval.') # ' value per option, scenario and time interval.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -393,8 +386,8 @@ class TestNetwork: ...@@ -393,8 +386,8 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -616,7 +609,7 @@ class TestNetwork: ...@@ -616,7 +609,7 @@ class TestNetwork:
# TypeError('The name attribute is not hashable.') # TypeError('The name attribute is not hashable.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name=[1, 2, 3], name=[1, 2, 3],
...@@ -630,12 +623,12 @@ class TestNetwork: ...@@ -630,12 +623,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError:The efficiency dict keys must be (scenario, interval) tuples # TypeError:The efficiency dict keys must be (scenario, interval) tuples
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -649,12 +642,12 @@ class TestNetwork: ...@@ -649,12 +642,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError( 'The efficiency dict keys must be tuples of size 2.') # ValueError( 'The efficiency dict keys must be tuples of size 2.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -668,12 +661,12 @@ class TestNetwork: ...@@ -668,12 +661,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError(The efficiency should be given as a dict or None.') # TypeError(The efficiency should be given as a dict or None.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -687,13 +680,13 @@ class TestNetwork: ...@@ -687,13 +680,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('The reverse efficiency has to match the nominal'+ # TypeError('The reverse efficiency has to match the nominal'+
# ' one when there are no proportional losses.') # ' one when there are no proportional losses.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -707,12 +700,12 @@ class TestNetwork: ...@@ -707,12 +700,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError:'The reverse efficiency should be given as a dict or None.' # TypeError:'The reverse efficiency should be given as a dict or None.'
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -726,14 +719,14 @@ class TestNetwork: ...@@ -726,14 +719,14 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError( # ValueError(
# 'No efficiency values were provided. There should be '+ # 'No efficiency values were provided. There should be '+
# 'one value per scenario and time interval.') # 'one value per scenario and time interval.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -747,12 +740,12 @@ class TestNetwork: ...@@ -747,12 +740,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError: The keys for the efficiency dicts do not match. # ValueError: The keys for the efficiency dicts do not match.
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -769,12 +762,12 @@ class TestNetwork: ...@@ -769,12 +762,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError: Efficiency values must be provided as numeric types. # TypeError: Efficiency values must be provided as numeric types.
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -791,12 +784,12 @@ class TestNetwork: ...@@ -791,12 +784,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('Efficiency values must be positive.') # ValueError('Efficiency values must be positive.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -812,12 +805,12 @@ class TestNetwork: ...@@ -812,12 +805,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('The capacity should be given as a list or tuple.') # TypeError('The capacity should be given as a list or tuple.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -831,12 +824,12 @@ class TestNetwork: ...@@ -831,12 +824,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError: The minimum cost values should be given as a list or tuple # TypeError: The minimum cost values should be given as a list or tuple
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -850,12 +843,12 @@ class TestNetwork: ...@@ -850,12 +843,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError: The specific capacity cost was not given as a numeric type # TypeError: The specific capacity cost was not given as a numeric type
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -869,12 +862,12 @@ class TestNetwork: ...@@ -869,12 +862,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError:The number of capacity and minimum cost entries must match # ValueError:The number of capacity and minimum cost entries must match
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -888,13 +881,13 @@ class TestNetwork: ...@@ -888,13 +881,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError: No entries for capacity and minimum cost were provided. # ValueError: No entries for capacity and minimum cost were provided.
# At least one option should be provided. # At least one option should be provided.
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -908,13 +901,13 @@ class TestNetwork: ...@@ -908,13 +901,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError: No entries for efficiency were provided. There should be # ValueError: No entries for efficiency were provided. There should be
# one entry per time interval. # one entry per time interval.
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -928,8 +921,8 @@ class TestNetwork: ...@@ -928,8 +921,8 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The number of efficiency values must match the number of # ValueError('The number of efficiency values must match the number of
# time intervals.') # time intervals.')
...@@ -950,7 +943,7 @@ class TestNetwork: ...@@ -950,7 +943,7 @@ class TestNetwork:
validate=True, validate=True,
) )
error_triggered = False error_raised = False
try: try:
arc_tech.validate_sizes( arc_tech.validate_sizes(
number_options=number_options, number_options=number_options,
...@@ -960,13 +953,13 @@ class TestNetwork: ...@@ -960,13 +953,13 @@ class TestNetwork:
], ],
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The number of efficiency values must match the number of # ValueError('The number of efficiency values must match the number of
# time intervals.') # time intervals.')
error_triggered = False error_raised = False
try: try:
arc_tech = Arcs( arc_tech = Arcs(
name="hey", name="hey",
...@@ -995,8 +988,8 @@ class TestNetwork: ...@@ -995,8 +988,8 @@ class TestNetwork:
], ],
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('The number of capacity values must match the number of # ValueError('The number of capacity values must match the number of
# options.') # options.')
...@@ -1013,7 +1006,7 @@ class TestNetwork: ...@@ -1013,7 +1006,7 @@ class TestNetwork:
validate=True, validate=True,
) )
error_triggered = False error_raised = False
try: try:
arc_tech.validate_sizes( arc_tech.validate_sizes(
number_options=number_options, number_options=number_options,
...@@ -1023,8 +1016,8 @@ class TestNetwork: ...@@ -1023,8 +1016,8 @@ class TestNetwork:
], ],
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError: The minimum cost values are inconsistent with the number # ValueError: The minimum cost values are inconsistent with the number
# of options. # of options.
...@@ -1041,7 +1034,7 @@ class TestNetwork: ...@@ -1041,7 +1034,7 @@ class TestNetwork:
validate=True, validate=True,
) )
error_triggered = False error_raised = False
try: try:
arc_tech.validate_sizes( arc_tech.validate_sizes(
number_options=number_options, number_options=number_options,
...@@ -1051,12 +1044,12 @@ class TestNetwork: ...@@ -1051,12 +1044,12 @@ class TestNetwork:
], ],
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('Efficiency values must be provided as numeric types.') # TypeError('Efficiency values must be provided as numeric types.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1072,12 +1065,12 @@ class TestNetwork: ...@@ -1072,12 +1065,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('Efficiency values must be positive.') # ValueError('Efficiency values must be positive.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1094,12 +1087,12 @@ class TestNetwork: ...@@ -1094,12 +1087,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('Capacity values must be provided as numeric types.') # TypeError('Capacity values must be provided as numeric types.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1113,12 +1106,12 @@ class TestNetwork: ...@@ -1113,12 +1106,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('Capacity values must be positive.') # ValueError('Capacity values must be positive.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1134,12 +1127,12 @@ class TestNetwork: ...@@ -1134,12 +1127,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('Minimum cost values must be provided as numeric types.') # TypeError('Minimum cost values must be provided as numeric types.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1153,12 +1146,12 @@ class TestNetwork: ...@@ -1153,12 +1146,12 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ValueError('Minimum cost values must be positive or zero.') # ValueError('Minimum cost values must be positive or zero.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1172,13 +1165,13 @@ class TestNetwork: ...@@ -1172,13 +1165,13 @@ class TestNetwork:
validate=True, validate=True,
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# TypeError('The information about capacities being instantaneous or not # TypeError('The information about capacities being instantaneous or not
# should be given as a boolean variable.') # should be given as a boolean variable.')
error_triggered = False error_raised = False
try: try:
_ = Arcs( _ = Arcs(
name="hey", name="hey",
...@@ -1192,8 +1185,8 @@ class TestNetwork: ...@@ -1192,8 +1185,8 @@ class TestNetwork:
validate=True, validate=True,
) )
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -1278,7 +1271,7 @@ class TestNetwork: ...@@ -1278,7 +1271,7 @@ class TestNetwork:
], ],
) )
net.add_import_node(node_key="G", prices={(0, 0, 0): imp_resource_price}) net.add_import_node("G", prices={(0, 0, 0): imp_resource_price})
# add export node # add export node
...@@ -1290,21 +1283,17 @@ class TestNetwork: ...@@ -1290,21 +1283,17 @@ class TestNetwork:
], ],
) )
net.add_export_node(node_key="H", prices={(0, 0, 0): exp_resource_price}) net.add_export_node("H", prices={(0, 0, 0): exp_resource_price})
net.add_waypoint_node(node_key="Z") net.add_waypoint_node("Z")
base_flow = {(i, j): random.random() for i in range(3) for j in range(4)} base_flow = {(i, j): random.random() for i in range(3) for j in range(4)}
net.add_source_sink_node(node_key="Y", base_flow=base_flow) net.add_source_sink_node("Y", base_flow=base_flow)
base_flow[(2, 3)] = random.random() base_flow[(2, 3)] = random.random()
net.modify_network_node( net.modify_node("Y", **{net.KEY_NODE_BASE_FLOW: base_flow})
node_key="Y", node_data={net.KEY_NODE_BASE_FLOW: base_flow}
)
net.identify_node_types()
assert "Z" in net.waypoint_nodes assert "Z" in net.waypoint_nodes
...@@ -1415,35 +1404,35 @@ class TestNetwork: ...@@ -1415,35 +1404,35 @@ class TestNetwork:
# add isolated import node # add isolated import node
net.add_import_node(node_key="I_iso", prices={(0, 0, 0): resource_price}) net.add_import_node("I_iso", prices={(0, 0, 0): resource_price})
# add import node with outgoing arcs # add import node with outgoing arcs
net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) net.add_import_node("I", prices={(0, 0, 0): resource_price})
# add isolated export node # add isolated export node
net.add_import_node(node_key="E_iso", prices={(0, 0, 0): resource_price}) net.add_import_node("E_iso", prices={(0, 0, 0): resource_price})
# add export node with incoming arcs # add export node with incoming arcs
net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) net.add_export_node("E", prices={(0, 0, 0): resource_price})
# add isolated normal node # add isolated normal node
net.add_source_sink_node(node_key="A_iso", base_flow=base_flow) net.add_source_sink_node("A_iso", base_flow=base_flow)
# add normal node with incoming arcs # add normal node with incoming arcs
net.add_source_sink_node(node_key="A_in", base_flow=base_flow) net.add_source_sink_node("A_in", base_flow=base_flow)
# add normal node with outgoing arcs # add normal node with outgoing arcs
net.add_source_sink_node(node_key="A_out", base_flow=base_flow) net.add_source_sink_node("A_out", base_flow=base_flow)
# add normal node with incoming and outgoing arcs # add normal node with incoming and outgoing arcs
net.add_source_sink_node(node_key="A", base_flow=base_flow) net.add_source_sink_node("A", base_flow=base_flow)
# ********************************************************************* # *********************************************************************
...@@ -1461,19 +1450,18 @@ class TestNetwork: ...@@ -1461,19 +1450,18 @@ class TestNetwork:
# change I_iso to regular: okay # change I_iso to regular: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", "I_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", "I_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1481,9 +1469,9 @@ class TestNetwork: ...@@ -1481,9 +1469,9 @@ class TestNetwork:
# change I_iso to export: okay # change I_iso to export: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", "I_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1491,9 +1479,9 @@ class TestNetwork: ...@@ -1491,9 +1479,9 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", "I_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1501,15 +1489,15 @@ class TestNetwork: ...@@ -1501,15 +1489,15 @@ class TestNetwork:
# change I_iso to waypoint: okay # change I_iso to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "I_iso"
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="I_iso", "I_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1519,19 +1507,18 @@ class TestNetwork: ...@@ -1519,19 +1507,18 @@ class TestNetwork:
# change E_iso to regular: okay # change E_iso to regular: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", "E_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", "E_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1539,9 +1526,9 @@ class TestNetwork: ...@@ -1539,9 +1526,9 @@ class TestNetwork:
# change E_iso to import: okay # change E_iso to import: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", "E_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1549,9 +1536,9 @@ class TestNetwork: ...@@ -1549,9 +1536,9 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", "E_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1559,15 +1546,15 @@ class TestNetwork: ...@@ -1559,15 +1546,15 @@ class TestNetwork:
# change E_iso to waypoint: okay # change E_iso to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "E_iso"
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="E_iso", "E_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1577,9 +1564,9 @@ class TestNetwork: ...@@ -1577,9 +1564,9 @@ class TestNetwork:
# change A_iso to export: okay # change A_iso to export: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", "A_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1587,19 +1574,18 @@ class TestNetwork: ...@@ -1587,19 +1574,18 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", "A_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# change A_iso to import: okay # change A_iso to import: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", "A_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1607,26 +1593,24 @@ class TestNetwork: ...@@ -1607,26 +1593,24 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", "A_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# change A_iso to waypoint: okay # change A_iso to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "A_iso"
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_iso", "A_iso",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
...@@ -1635,19 +1619,18 @@ class TestNetwork: ...@@ -1635,19 +1619,18 @@ class TestNetwork:
# change I to regular: okay # change I to regular: okay
net.modify_network_node( net.modify_node(
node_key="I", "I",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="I", "I",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1655,15 +1638,15 @@ class TestNetwork: ...@@ -1655,15 +1638,15 @@ class TestNetwork:
# change I to waypoint: okay # change I to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="I", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "I"
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="I", "I",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1673,19 +1656,18 @@ class TestNetwork: ...@@ -1673,19 +1656,18 @@ class TestNetwork:
# change E to regular: okay # change E to regular: okay
net.modify_network_node( net.modify_node(
node_key="E", "E",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="E", "E",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1693,15 +1675,15 @@ class TestNetwork: ...@@ -1693,15 +1675,15 @@ class TestNetwork:
# change E to waypoint: okay # change E to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="E", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "E"
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="E", "E",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1711,9 +1693,9 @@ class TestNetwork: ...@@ -1711,9 +1693,9 @@ class TestNetwork:
# change A_in to export: okay # change A_in to export: okay
net.modify_network_node( net.modify_node(
node_key="A_in", "A_in",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1721,26 +1703,24 @@ class TestNetwork: ...@@ -1721,26 +1703,24 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_in", "A_in",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# change A_in to waypoint: okay # change A_in to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="A_in", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "A_in", **{net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY}
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_in", "A_in",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
...@@ -1749,9 +1729,9 @@ class TestNetwork: ...@@ -1749,9 +1729,9 @@ class TestNetwork:
# change A_out to import: okay # change A_out to import: okay
net.modify_network_node( net.modify_node(
node_key="A_out", "A_out",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
...@@ -1759,26 +1739,24 @@ class TestNetwork: ...@@ -1759,26 +1739,24 @@ class TestNetwork:
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_out", "A_out",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
# change A_out to waypoint: okay # change A_out to waypoint: okay
net.modify_network_node( net.modify_node(
node_key="A_out", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY} "A_out", **{net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY}
) )
# reverse: okay # reverse: okay
net.modify_network_node( net.modify_node(
node_key="A_out", "A_out",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_SOURCE_SINK,
net.KEY_NODE_BASE_FLOW: base_flow, net.KEY_NODE_BASE_FLOW: base_flow,
}, },
) )
...@@ -1787,106 +1765,104 @@ class TestNetwork: ...@@ -1787,106 +1765,104 @@ class TestNetwork:
# change I to export: fail # change I to export: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="I", "I",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# change E to import: fail # change E to import: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="E", "E",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# change A_out to export: fail # change A_out to export: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="A_out", "A_out",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# change A_in to import: fail # change A_in to import: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="A_in", "A_in",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# change A to export: fail # change A to export: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="A", "A",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# change A to import: fail # change A to import: fail
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node(
node_key="A", "A",
node_data={ **{
net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP,
net.KEY_NODE_PRICES: resource_price, net.KEY_NODE_PRICES: resource_price,
}, },
) )
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# try to modify a non-existent node # try to modify a non-existent node
error_triggered = False error_raised = False
try: try:
net.modify_network_node( net.modify_node("ABCD")
node_key="ABCD", node_data={net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_WAY}
)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
...@@ -1925,28 +1901,24 @@ class TestNetwork: ...@@ -1925,28 +1901,24 @@ class TestNetwork:
# add import node I # add import node I
net.add_import_node(node_key="I", prices={(0, 0, 0): resource_price}) net.add_import_node("I", prices={(0, 0, 0): resource_price})
# add export node E # add export node E
net.add_export_node(node_key="E", prices={(0, 0, 0): resource_price}) net.add_export_node("E", prices={(0, 0, 0): resource_price})
# add regular node A # add regular node A
net.add_source_sink_node(node_key="A", base_flow=base_flow) net.add_source_sink_node("A", base_flow=base_flow)
# add regular node B # add regular node B
net.add_source_sink_node(node_key="B", base_flow=base_flow) net.add_source_sink_node("B", base_flow=base_flow)
# add a valid import-export arc # add a valid import-export arc
net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossless_arcs) net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossless_arcs)
# identify the nodes and validate
net.identify_node_types()
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -1954,57 +1926,57 @@ class TestNetwork: ...@@ -1954,57 +1926,57 @@ class TestNetwork:
# directed arcs cannot start in an export node: E -> B # directed arcs cannot start in an export node: E -> B
error_triggered = False error_raised = False
try: try:
net.add_directed_arc(node_key_a="E", node_key_b="B", arcs=lossless_arcs) net.add_directed_arc(node_key_a="E", node_key_b="B", arcs=lossless_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# directed arcs cannot end on an import node: A -> I # directed arcs cannot end on an import node: A -> I
error_triggered = False error_raised = False
try: try:
net.add_directed_arc(node_key_a="A", node_key_b="I", arcs=lossless_arcs) net.add_directed_arc(node_key_a="A", node_key_b="I", arcs=lossless_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# import-export nodes cannot have static losses # import-export nodes cannot have static losses
error_triggered = False error_raised = False
try: try:
net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs) net.add_directed_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# undirected arcs cannot involve import nor export nodes # undirected arcs cannot involve import nor export nodes
error_triggered = False error_raised = False
try: try:
net.add_undirected_arc(node_key_a="I", node_key_b="A", arcs=lossless_arcs) net.add_undirected_arc(node_key_a="I", node_key_b="A", arcs=lossless_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# undirected arcs cannot involve import nor export nodes # undirected arcs cannot involve import nor export nodes
error_triggered = False error_raised = False
try: try:
net.add_undirected_arc(node_key_a="B", node_key_b="E", arcs=lossless_arcs) net.add_undirected_arc(node_key_a="B", node_key_b="E", arcs=lossless_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# undirected arcs cannot involve import nor export nodes # undirected arcs cannot involve import nor export nodes
error_triggered = False error_raised = False
try: try:
net.add_undirected_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs) net.add_undirected_arc(node_key_a="I", node_key_b="E", arcs=lossy_arcs)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
...@@ -2014,46 +1986,41 @@ class TestNetwork: ...@@ -2014,46 +1986,41 @@ class TestNetwork:
# create a new export node # create a new export node
net.add_export_node(node_key="E1", prices={(0, 0, 0): resource_price}) net.add_export_node("E1", prices={(0, 0, 0): resource_price})
# create an arc starting in that export node # create an arc starting in that export node
error_triggered = False error_raised = False
try: try:
net.add_directed_arc(node_key_a="E1", node_key_b="B", arcs=lossless_arcs) net.add_directed_arc(node_key_a="E1", node_key_b="B", arcs=lossless_arcs)
net.identify_node_types()
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# remove the troublesome arc
net.remove_edge(u="E1", v="B") # # remove the troublesome arc
# net.remove_edge(u="E1", v="B")
# ********************************************************************* # *********************************************************************
# create a new import node # create a new import node
net.add_import_node(node_key="I1", prices={(0, 0, 0): resource_price}) net.add_import_node("I1", prices={(0, 0, 0): resource_price})
# create an arc ending in that import node # create an arc ending in that import node
error_triggered = False error_raised = False
try: try:
net.add_directed_arc(node_key_a="A", node_key_b="I1", arcs=lossless_arcs) net.add_directed_arc(node_key_a="A", node_key_b="I1", arcs=lossless_arcs)
net.identify_node_types()
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# remove the troublesome arc
net.remove_edge(u="A", v="I1") # # remove the troublesome arc
# net.remove_edge(u="A", v="I1")
# ********************************************************************* # *********************************************************************
# check non-existent arc # check non-existent arc
net.arc_is_undirected(("X", "Y", 1)) net.arc_is_undirected(("X", "Y", 1))
# ************************************************************************* # *************************************************************************
...@@ -2067,7 +2034,7 @@ class TestNetwork: ...@@ -2067,7 +2034,7 @@ class TestNetwork:
# import node # import node
imp_node_key = generate_pseudo_unique_key(mynet.nodes()) imp_node_key = generate_pseudo_unique_key(mynet.nodes())
mynet.add_import_node( mynet.add_import_node(
node_key=imp_node_key, imp_node_key,
prices={ prices={
(0, 0, 0): ResourcePrice(prices=1+0.05, volumes=None) (0, 0, 0): ResourcePrice(prices=1+0.05, volumes=None)
}, },
...@@ -2076,13 +2043,13 @@ class TestNetwork: ...@@ -2076,13 +2043,13 @@ class TestNetwork:
# other nodes # other nodes
node_A = generate_pseudo_unique_key(mynet.nodes()) node_A = generate_pseudo_unique_key(mynet.nodes())
mynet.add_source_sink_node( mynet.add_source_sink_node(
node_key=node_A, node_A,
# base_flow=[1, -1, 0.5, -0.5] # base_flow=[1, -1, 0.5, -0.5]
base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5}, base_flow={(0, 0): 1, (0, 1): -1, (0, 2): 0.5, (0, 3): -0.5},
) )
node_B = generate_pseudo_unique_key(mynet.nodes()) node_B = generate_pseudo_unique_key(mynet.nodes())
mynet.add_source_sink_node( mynet.add_source_sink_node(
node_key=node_B, node_B,
# base_flow=[-1, 1, -0.5, 0.5] # base_flow=[-1, 1, -0.5, 0.5]
base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5},
) )
...@@ -2102,14 +2069,13 @@ class TestNetwork: ...@@ -2102,14 +2069,13 @@ class TestNetwork:
static_loss=None, static_loss=None,
validate=False, validate=False,
) )
mynet.add_undirected_arc(
node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA
)
error_raised = False error_raised = False
try: try:
# identify node types # ValueError: Undirected arcs cannot involve import or export nodes.
mynet.identify_node_types() mynet.add_undirected_arc(
node_key_a=imp_node_key, node_key_b=node_A, arcs=arc_tech_IA
)
except ValueError: except ValueError:
error_raised = True error_raised = True
assert error_raised assert error_raised
...@@ -2128,7 +2094,7 @@ class TestNetwork: ...@@ -2128,7 +2094,7 @@ class TestNetwork:
# export node # export node
exp_node_key = generate_pseudo_unique_key(mynet.nodes()) exp_node_key = generate_pseudo_unique_key(mynet.nodes())
mynet.add_export_node( mynet.add_export_node(
node_key=exp_node_key, exp_node_key,
prices={ prices={
(0, 0, 0): ResourcePrice(prices=0.1+0.05, volumes=None) (0, 0, 0): ResourcePrice(prices=0.1+0.05, volumes=None)
}, },
...@@ -2137,7 +2103,7 @@ class TestNetwork: ...@@ -2137,7 +2103,7 @@ class TestNetwork:
# other nodes # other nodes
node_B = generate_pseudo_unique_key(mynet.nodes()) node_B = generate_pseudo_unique_key(mynet.nodes())
mynet.add_source_sink_node( mynet.add_source_sink_node(
node_key=node_B, node_B,
# base_flow=[-1, 1, -0.5, 0.5] # base_flow=[-1, 1, -0.5, 0.5]
base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5}, base_flow={(0, 0): -1, (0, 1): 1, (0, 2): -0.5, (0, 3): 0.5},
) )
...@@ -2154,14 +2120,13 @@ class TestNetwork: ...@@ -2154,14 +2120,13 @@ class TestNetwork:
static_loss=None, static_loss=None,
validate=False, validate=False,
) )
mynet.add_undirected_arc(
node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE
)
error_raised = False error_raised = False
try: try:
# identify node types # ValueError: Undirected arcs cannot involve import or export nodes.
mynet.identify_node_types() mynet.add_undirected_arc(
node_key_a=node_B, node_key_b=exp_node_key, arcs=arc_tech_BE
)
except ValueError: except ValueError:
error_raised = True error_raised = True
assert error_raised assert error_raised
...@@ -2173,7 +2138,7 @@ class TestNetwork: ...@@ -2173,7 +2138,7 @@ class TestNetwork:
# create a network object with a tree topology # create a network object with a tree topology
tree_network = binomial_tree(3, create_using=MultiDiGraph) tree_network = binomial_tree(3, create_using=MultiDiGraph)
network = Network(incoming_graph_data=tree_network) network = Network(network_type=Network.NET_TYPE_TREE, incoming_graph_data=tree_network)
for edge_key in network.edges(keys=True): for edge_key in network.edges(keys=True):
arc = ArcsWithoutLosses( arc = ArcsWithoutLosses(
name=str(edge_key), name=str(edge_key),
...@@ -2184,6 +2149,9 @@ class TestNetwork: ...@@ -2184,6 +2149,9 @@ class TestNetwork:
) )
network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc}) network.add_edge(*edge_key, **{Network.KEY_ARC_TECH: arc})
# assert that it should have a tree topology
assert network.should_be_tree_network()
# assert that it does not have a tree topology # assert that it does not have a tree topology
assert not network.has_tree_topology() assert not network.has_tree_topology()
...@@ -2193,6 +2161,8 @@ class TestNetwork: ...@@ -2193,6 +2161,8 @@ class TestNetwork:
# assert that it has a tree topology # assert that it has a tree topology
assert network.has_tree_topology() assert network.has_tree_topology()
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -2202,14 +2172,8 @@ class TestNetwork: ...@@ -2202,14 +2172,8 @@ class TestNetwork:
# create network # create network
network = Network() network = Network()
# add node A # add nodes A and B
network.add_waypoint_node(node_key="A") network.add_nodes_from(['A','B'])
# add node B
network.add_waypoint_node(node_key="B")
# identify nodes
network.identify_node_types()
# add arcs # add arcs
key_list = [ key_list = [
...@@ -2236,14 +2200,14 @@ class TestNetwork: ...@@ -2236,14 +2200,14 @@ class TestNetwork:
rand.seed(360) rand.seed(360)
uuid.uuid4 = lambda: uuid.UUID(int=rand.getrandbits(128), version=4) uuid.uuid4 = lambda: uuid.UUID(int=rand.getrandbits(128), version=4)
error_triggered = False error_raised = False
try: try:
_ = network.get_pseudo_unique_arc_key( _ = network.get_pseudo_unique_arc_key(
node_key_start="A", node_key_end="B", max_iterations=len(key_list) - 1 node_key_start="A", node_key_end="B", max_iterations=len(key_list) - 1
) )
except Exception: except Exception:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -2265,7 +2229,7 @@ class TestNetwork: ...@@ -2265,7 +2229,7 @@ class TestNetwork:
for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)] for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)]
} }
mynet.add_import_node( mynet.add_import_node(
node_key=imp_node_key, imp_node_key,
prices=imp_prices prices=imp_prices
) )
...@@ -2279,7 +2243,7 @@ class TestNetwork: ...@@ -2279,7 +2243,7 @@ class TestNetwork:
for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)] for qpk in [(0,0,0),(0,0,1),(0,1,0),(0,1,1)]
} }
mynet.add_export_node( mynet.add_export_node(
node_key=exp_node_key, exp_node_key,
prices=exp_prices, prices=exp_prices,
) )
...@@ -2308,15 +2272,13 @@ class TestNetwork: ...@@ -2308,15 +2272,13 @@ class TestNetwork:
(2, q, 1): 0.25, (2, q, 1): 0.25,
}, },
) )
mynet.add_directed_arc(
node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix
)
error_raised = False error_raised = False
try: try:
# identify node types # ValueError: Arcs between import and export nodes cannot have static losses.
mynet.identify_node_types() mynet.add_directed_arc(
node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE_fix
)
except ValueError: except ValueError:
error_raised = True error_raised = True
assert error_raised assert error_raised
...@@ -2331,11 +2293,12 @@ class TestNetwork: ...@@ -2331,11 +2293,12 @@ class TestNetwork:
# add nodes # add nodes
node_a = 'A' node_a = 'A'
net.add_waypoint_node(node_a) # net.add_waypoint_node(node_a)
node_b = 'B' node_b = 'B'
net.add_waypoint_node(node_b) # net.add_waypoint_node(node_b)
node_c = 'C' node_c = 'C'
net.add_waypoint_node(node_c) # net.add_waypoint_node(node_c)
net.add_nodes_from([node_a,node_b,node_c])
# add arcs # add arcs
node_pairs = ((node_a, node_b), (node_b, node_a),) node_pairs = ((node_a, node_b), (node_b, node_a),)
...@@ -2349,8 +2312,6 @@ class TestNetwork: ...@@ -2349,8 +2312,6 @@ class TestNetwork:
capacity=1, capacity=1,
capacity_is_instantaneous=False capacity_is_instantaneous=False
) )
# identify the node types
net.identify_node_types()
# assert that it can detected the selected antiparallel arcs # assert that it can detected the selected antiparallel arcs
assert net.has_selected_antiparallel_arcs() assert net.has_selected_antiparallel_arcs()
...@@ -2361,6 +2322,96 @@ class TestNetwork: ...@@ -2361,6 +2322,96 @@ class TestNetwork:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_add_nodes(self):
# create network
net = Network()
# add nodes
node_a = 'A'
net.add_node(node_a)
assert net.is_waypoint_node(node_a)
# source
node_b = 'B'
net.add_node(node_b, **{net.KEY_NODE_BASE_FLOW: {(0,0):-1}})
assert net.is_source_sink_node(node_b)
# sink
node_c = 'C'
net.add_node(node_c, **{net.KEY_NODE_BASE_FLOW: {(0,0):1}})
assert net.is_source_sink_node(node_c)
# import node
node_d = 'D'
net.add_node(node_d, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[1, 2], volumes=[1,None])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP})
assert net.is_import_node(node_d)
# export node
node_e = 'E'
net.add_node(node_e, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[2, 3], volumes=[4,None])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP})
assert net.is_export_node(node_e)
# modify nodes
# from waypoint to source/sink
net.modify_node(node_a, **{net.KEY_NODE_BASE_FLOW: {(0,0):-2}})
assert not net.is_waypoint_node(node_a)
assert net.is_source_sink_node(node_a)
# from source/sink to waypoint
net.modify_node(node_a)
assert not net.is_source_sink_node(node_a)
assert net.is_waypoint_node(node_a)
# from waypoint to import node
net.modify_node(node_a, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[5, 3.5], volumes=[2,4])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP})
assert not net.is_waypoint_node(node_a)
assert net.is_import_node(node_a)
# from import node to waypoint
net.modify_node(node_a)
assert not net.is_import_node(node_a)
assert net.is_waypoint_node(node_a)
# from waypoint node to export node
net.modify_node(node_a, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[4, 1], volumes=[3,6])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP})
assert not net.is_waypoint_node(node_a)
assert net.is_export_node(node_a)
# from export node to sink/source
net.modify_node(node_a, **{net.KEY_NODE_BASE_FLOW: {(0,0):-1}})
assert not net.is_export_node(node_a)
assert net.is_source_sink_node(node_a)
# from sink/source node to import node
net.modify_node(node_a, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[5, 3.5], volumes=[2,4])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP})
assert not net.is_source_sink_node(node_a)
assert net.is_import_node(node_a)
# from import node to export node
net.modify_node(node_a, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[4, 1], volumes=[3,6])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP})
assert not net.is_import_node(node_a)
assert net.is_export_node(node_a)
# from export node to waypoint node
net.modify_node(node_a)
assert not net.is_export_node(node_a)
assert net.is_waypoint_node(node_a)
# *********************************************************************
# test modifying nodes with preexisting arcs
# add arcs
# add arc between two waypoint nodes
net.add_preexisting_directed_arc(
node_key_a=node_a,
node_key_b=node_b,
efficiency=None,
static_loss=None,
capacity=3,
capacity_is_instantaneous=False
)
# modify nodes
# try to change the start node to an export node
with pytest.raises(ValueError):
net.modify_node(node_a, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[4, 1], volumes=[3,6])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_EXP})
# try to change the end node to an import node
with pytest.raises(ValueError):
net.modify_node(node_b, **{net.KEY_NODE_PRICES: {(0,0): ResourcePrice(prices=[4, 1], volumes=[3,6])}, net.KEY_NODE_TYPE: net.KEY_NODE_TYPE_IMP})
# *************************************************************************
# *************************************************************************
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
...@@ -2,12 +2,14 @@ ...@@ -2,12 +2,14 @@
# standard # standard
import math import math
import pytest
# local # local
# import numpy as np # import numpy as np
# import networkx as nx # import networkx as nx
import pyomo.environ as pyo import pyomo.environ as pyo
# import src.topupopt.problems.esipp.utils as utils # import src.topupopt.problems.esipp.utils as utils
from src.topupopt.data.misc.utils import generate_pseudo_unique_key from src.topupopt.data.misc.utils import generate_pseudo_unique_key
from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem
...@@ -17,233 +19,162 @@ from src.topupopt.problems.esipp.resource import ResourcePrice ...@@ -17,233 +19,162 @@ from src.topupopt.problems.esipp.resource import ResourcePrice
from src.topupopt.problems.esipp.utils import statistics from src.topupopt.problems.esipp.utils import statistics
from src.topupopt.problems.esipp.time import EconomicTimeFrame from src.topupopt.problems.esipp.time import EconomicTimeFrame
# from src.topupopt.problems.esipp.converter import Converter # from src.topupopt.problems.esipp.converter import Converter
from test_esipp import build_solve_ipp
from src.topupopt.problems.esipp.blocks.prices import NODE_PRICE_OTHER, NODE_PRICE_DELTA, NODE_PRICE_LAMBDA
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
class TestESIPPProblem: # TODO: test time-varying tariffs (convex/non-convex)
# TODO: check problem sizes
solver = 'glpk'
# solver = 'scip'
# solver = 'cbc'
def build_solve_ipp(
self,
solver: str = None,
solver_options: dict = None,
use_sos_arcs: bool = False,
arc_sos_weight_key: str = (InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
arc_use_real_variables_if_possible: bool = False,
use_sos_sense: bool = False,
sense_sos_weight_key: int = (
InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER
),
sense_use_real_variables_if_possible: bool = False,
sense_use_arc_interfaces: bool = False,
perform_analysis: bool = False,
plot_results: bool = False,
print_solver_output: bool = False,
time_frame: EconomicTimeFrame = None,
networks: dict = None,
converters: dict = None,
static_losses_mode=None,
mandatory_arcs: list = None,
max_number_parallel_arcs: dict = None,
arc_groups_dict: dict = None,
init_aux_sets: bool = False,
# discount_rates: dict = None,
assessment_weights: dict = None,
simplify_problem: bool = False,
):
if type(solver) == type(None):
solver = self.solver
if type(assessment_weights) != dict:
assessment_weights = {} # default
if type(converters) != dict:
converters = {}
# time weights
# relative weight of time period
# one interval twice as long as the average is worth twice
# one interval half as long as the average is worth half
# time_weights = [
# [time_period_duration/average_time_interval_duration
# for time_period_duration in intraperiod_time_interval_duration]
# for p in range(number_periods)]
time_weights = None # nothing yet
normalised_time_interval_duration = None # nothing yet class TestESIPPProblem:
# create problem object # *************************************************************************
# *************************************************************************
ipp = InfrastructurePlanningProblem( @pytest.mark.parametrize(
# discount_rates=discount_rates, "use_prices_block, node_price_model",
time_frame=time_frame, [(True, NODE_PRICE_OTHER),
# reporting_periods=time_frame.reporting_periods, (True, NODE_PRICE_DELTA),
# time_intervals=time_frame.time_interval_durations, (True, NODE_PRICE_LAMBDA),
time_weights=time_weights, (False, NODE_PRICE_OTHER),
normalised_time_interval_duration=normalised_time_interval_duration, (False, NODE_PRICE_DELTA),
assessment_weights=assessment_weights, (False, NODE_PRICE_LAMBDA)]
) )
def test_problem_increasing_imp_prices(self, use_prices_block, node_price_model):
# assessment
q = 0
# add networks and systems tf = EconomicTimeFrame(
discount_rate=0.0,
for netkey, net in networks.items(): reporting_periods={q: (0,)},
ipp.add_network(network_key=netkey, network=net) reporting_period_durations={q: (365 * 24 * 3600,)},
time_intervals={q: (0,)},
# add converters time_interval_durations={q: (1,)},
)
for cvtkey, cvt in converters.items():
ipp.add_converter(converter_key=cvtkey, converter=cvt)
# define arcs as mandatory
if type(mandatory_arcs) == list:
for full_arc_key in mandatory_arcs:
ipp.make_arc_mandatory(full_arc_key[0], full_arc_key[1:])
# if make_all_arcs_mandatory:
# for network_key in ipp.networks:
# for arc_key in ipp.networks[network_key].edges(keys=True):
# # preexisting arcs are no good
# if ipp.networks[network_key].edges[arc_key][
# Network.KEY_ARC_TECH].has_been_selected():
# continue
# ipp.make_arc_mandatory(network_key, arc_key)
# set up the use of sos for arc selection
if use_sos_arcs:
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if (
ipp.networks[network_key]
.edges[arc_key][Network.KEY_ARC_TECH]
.has_been_selected()
):
continue
ipp.use_sos1_for_arc_selection(
network_key,
arc_key,
use_real_variables_if_possible=(
arc_use_real_variables_if_possible
),
sos1_weight_method=arc_sos_weight_key,
)
# set up the use of sos for flow sense determination
if use_sos_sense:
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if not ipp.networks[network_key].edges[arc_key][
Network.KEY_ARC_UND
]:
continue
ipp.use_sos1_for_flow_senses(
network_key,
arc_key,
use_real_variables_if_possible=(
sense_use_real_variables_if_possible
),
use_interface_variables=sense_use_arc_interfaces,
sos1_weight_method=sense_sos_weight_key,
)
elif sense_use_arc_interfaces: # set up the use of arc interfaces w/o sos1
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if (
ipp.networks[network_key]
.edges[arc_key][Network.KEY_ARC_TECH]
.has_been_selected()
):
continue
ipp.use_interface_variables_for_arc_selection(network_key, arc_key)
# static losses
if static_losses_mode == ipp.STATIC_LOSS_MODE_ARR:
ipp.place_static_losses_arrival_node()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_DEP:
ipp.place_static_losses_departure_node()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_US: # 2 nodes: one import, one regular
ipp.place_static_losses_upstream() mynet = Network()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_DS: # import node
ipp.place_static_losses_downstream() node_IMP = 'I'
prices = [1.0, 2.0]
volumes = [0.5, None] if node_price_model == NODE_PRICE_OTHER else [0.5, 1e5]
mynet.add_import_node(
node_key=node_IMP,
prices={
qpk: ResourcePrice(prices=prices, volumes=volumes)
for qpk in tf.qpk()
},
)
else: # other nodes
raise ValueError("Unknown static loss modelling mode.") node_A = 'A'
mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0})
# ********************************************************************* # arc IA
arc_tech_IA = Arcs(
name="any",
efficiency={(q, 0): 0.5},
efficiency_reverse=None,
static_loss=None,
capacity=[3],
minimum_cost=[2],
specific_capacity_cost=1,
capacity_is_instantaneous=False,
validate=False,
)
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# groups # no sos, regular time intervals
ipp = build_solve_ipp(
solver_options={},
perform_analysis=False,
plot_results=False, # True,
print_solver_output=False,
time_frame=tf,
networks={"mynet": mynet},
static_losses_mode=True, # just to reach a line,
mandatory_arcs=[],
max_number_parallel_arcs={},
simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
)
if type(arc_groups_dict) != type(None): assert not ipp.has_peak_total_assessments()
for key in arc_groups_dict: # print('hey')
ipp.create_arc_group(arc_groups_dict[key]) # print((use_prices_block, node_price_model))
# print(ipp.results["Problem"][0])
if (use_prices_block, node_price_model) == (True, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 10
assert ipp.results["Problem"][0]["Number of variables"] == 11
assert ipp.results["Problem"][0]["Number of nonzeros"] == 20
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 10
assert ipp.results["Problem"][0]["Number of variables"] == 11
assert ipp.results["Problem"][0]["Number of nonzeros"] == 20
elif (use_prices_block, node_price_model) == (True, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 11
assert ipp.results["Problem"][0]["Number of variables"] == 12
assert ipp.results["Problem"][0]["Number of nonzeros"] == 22
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 15
assert ipp.results["Problem"][0]["Number of variables"] == 14
assert ipp.results["Problem"][0]["Number of nonzeros"] == 30
# ********************************************************************* # *********************************************************************
# maximum number of parallel arcs
for key in max_number_parallel_arcs:
ipp.set_maximum_number_parallel_arcs(
network_key=key[0],
node_a=key[1],
node_b=key[2],
limit=max_number_parallel_arcs[key],
)
# ********************************************************************* # *********************************************************************
if simplify_problem: # validation
ipp.simplify_peak_total_assessments()
# ********************************************************************* # the arc should be installed since it is required for feasibility
assert (
# instantiate (disable the default case v-a-v fixed losses) True
in ipp.networks["mynet"]
.edges[(node_IMP, node_A, 0)][Network.KEY_ARC_TECH]
.options_selected
)
# ipp.instantiate(place_fixed_losses_upstream_if_possible=False) # the flows should be 1.0, 0.0 and 2.0
assert math.isclose(
pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]),
2.0,
abs_tol=1e-6,
)
ipp.instantiate(initialise_ancillary_sets=init_aux_sets) # arc amplitude should be two
# ipp.instance.pprint() assert math.isclose(
# optimise pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]),
ipp.optimise( 2.0,
solver_name=solver, abs_tol=0.01,
solver_options=solver_options,
output_options={},
print_solver_output=print_solver_output,
) )
# ipp.instance.pprint()
# return the problem object
return ipp
# ********************************************************************* # capex should be four
# ********************************************************************* assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3)
# sdncf should be -3.5
assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -3.5, abs_tol=1e-3)
# the objective function should be -7.5
assert math.isclose(pyo.value(ipp.instance.obj_f), -7.5, abs_tol=1e-3)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_increasing_imp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)
]
)
def test_problem_decreasing_imp_prices(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
...@@ -264,7 +195,7 @@ class TestESIPPProblem: ...@@ -264,7 +195,7 @@ class TestESIPPProblem:
mynet.add_import_node( mynet.add_import_node(
node_key=node_IMP, node_key=node_IMP,
prices={ prices={
qpk: ResourcePrice(prices=[1.0, 2.0], volumes=[0.5, None]) qpk: ResourcePrice(prices=[2.0, 1.0], volumes=[0.5, 3.0])
for qpk in tf.qpk() for qpk in tf.qpk()
}, },
) )
...@@ -287,28 +218,44 @@ class TestESIPPProblem: ...@@ -287,28 +218,44 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip' if node_price_model == NODE_PRICE_LAMBDA else 'glpk',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
time_frame=tf, time_frame=tf,
networks={"mynet": mynet}, networks={"mynet": mynet},
static_losses_mode=True, # just to reach a line, static_losses_mode=True, # just to reach a line,
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
assert not ipp.has_peak_total_assessments() assert not ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 10 # print('hey')
assert ipp.results["Problem"][0]["Number of variables"] == 11 # print((use_prices_block, node_price_model))
assert ipp.results["Problem"][0]["Number of nonzeros"] == 20 # print(ipp.results["Problem"][0])
if (use_prices_block, node_price_model) == (True, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 14
assert ipp.results["Problem"][0]["Number of variables"] == 13
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 14
assert ipp.results["Problem"][0]["Number of variables"] == 13
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28
elif (use_prices_block, node_price_model) == (True, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 15
assert ipp.results["Problem"][0]["Number of variables"] == 14
assert ipp.results["Problem"][0]["Number of nonzeros"] == 30
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 15
assert ipp.results["Problem"][0]["Number of variables"] == 14
assert ipp.results["Problem"][0]["Number of nonzeros"] == 30
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -339,16 +286,26 @@ class TestESIPPProblem: ...@@ -339,16 +286,26 @@ class TestESIPPProblem:
# capex should be four # capex should be four
assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3)
# sdncf should be -3.5 # sdncf should be -2.5
assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -3.5, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -2.5, abs_tol=1e-3)
# the objective function should be -7.5 # the objective function should be -7.5
assert math.isclose(pyo.value(ipp.instance.obj_f), -7.5, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.obj_f), -6.5, abs_tol=1e-3)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_decreasing_imp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)
]
)
def test_problem_decreasing_imp_prices2(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
...@@ -376,7 +333,7 @@ class TestESIPPProblem: ...@@ -376,7 +333,7 @@ class TestESIPPProblem:
# other nodes # other nodes
node_A = 'A' node_A = 'A'
mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 1.0}) mynet.add_source_sink_node(node_key=node_A, base_flow={(q, 0): 0.25})
# arc IA # arc IA
arc_tech_IA = Arcs( arc_tech_IA = Arcs(
...@@ -392,11 +349,9 @@ class TestESIPPProblem: ...@@ -392,11 +349,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip' if node_price_model == NODE_PRICE_LAMBDA else 'glpk',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -406,14 +361,36 @@ class TestESIPPProblem: ...@@ -406,14 +361,36 @@ class TestESIPPProblem:
static_losses_mode=True, # just to reach a line, static_losses_mode=True, # just to reach a line,
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
assert not ipp.has_peak_total_assessments() assert not ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 14 # 10 prior to nonconvex block # print('hey')
assert ipp.results["Problem"][0]["Number of variables"] == 13 # 11 prior to nonconvex block # # print((use_prices_block, node_price_model))
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 20 prior to nonconvex block # print(ipp.results["Problem"][0])
# # capex should be four
# print(pyo.value(ipp.instance.var_capex))
# print(pyo.value(ipp.instance.var_sdncf_q[q]))
# print(pyo.value(ipp.instance.obj_f))
if (use_prices_block, node_price_model) == (True, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 14
assert ipp.results["Problem"][0]["Number of variables"] == 13
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_OTHER):
assert ipp.results["Problem"][0]["Number of constraints"] == 14
assert ipp.results["Problem"][0]["Number of variables"] == 13
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28
elif (use_prices_block, node_price_model) == (True, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 15
assert ipp.results["Problem"][0]["Number of variables"] == 14
assert ipp.results["Problem"][0]["Number of nonzeros"] == 30
elif (use_prices_block, node_price_model) == (False, NODE_PRICE_DELTA):
assert ipp.results["Problem"][0]["Number of constraints"] == 15
assert ipp.results["Problem"][0]["Number of variables"] == 14
assert ipp.results["Problem"][0]["Number of nonzeros"] == 30
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -427,33 +404,42 @@ class TestESIPPProblem: ...@@ -427,33 +404,42 @@ class TestESIPPProblem:
.options_selected .options_selected
) )
# the flows should be 1.0, 0.0 and 2.0 # the flows should be 0.5
assert math.isclose( assert math.isclose(
pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]), pyo.value(ipp.instance.var_v_glljqk[("mynet", node_IMP, node_A, 0, q, 0)]),
2.0, 0.5,
abs_tol=1e-6, abs_tol=1e-6,
) )
# arc amplitude should be two # arc amplitude should be 0.5
assert math.isclose( assert math.isclose(
pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]), pyo.value(ipp.instance.var_v_amp_gllj[("mynet", node_IMP, node_A, 0)]),
2.0, 0.5,
abs_tol=0.01, abs_tol=0.01,
) )
# capex should be four # capex should be four
assert math.isclose(pyo.value(ipp.instance.var_capex), 4.0, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.var_capex), 2.5, abs_tol=1e-3)
# sdncf should be -2.5 # sdncf should be -2.5
assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -2.5, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.var_sdncf_q[q]), -1.0, abs_tol=1e-3)
# the objective function should be -7.5 # the objective function should be -7.5
assert math.isclose(pyo.value(ipp.instance.obj_f), -6.5, abs_tol=1e-3) assert math.isclose(pyo.value(ipp.instance.obj_f), -3.5, abs_tol=1e-3)
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_decreasing_imp_prices_infinite_capacity(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_problem_decreasing_imp_prices_infinite_capacity(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
...@@ -496,15 +482,12 @@ class TestESIPPProblem: ...@@ -496,15 +482,12 @@ class TestESIPPProblem:
validate=False, validate=False,
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# trigger the error # trigger the error
error_raised = False error_raised = False
try: try:
# no sos, regular time intervals # no sos, regular time intervals
self.build_solve_ipp( build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -523,7 +506,16 @@ class TestESIPPProblem: ...@@ -523,7 +506,16 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_decreasing_exp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_problem_decreasing_exp_prices(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
# time # time
...@@ -544,10 +536,12 @@ class TestESIPPProblem: ...@@ -544,10 +536,12 @@ class TestESIPPProblem:
# import node # import node
node_EXP = generate_pseudo_unique_key(mynet.nodes()) node_EXP = generate_pseudo_unique_key(mynet.nodes())
prices = [2.0, 1.0]
volumes = [0.5, None] if node_price_model == NODE_PRICE_OTHER else [0.5, 1e5]
mynet.add_export_node( mynet.add_export_node(
node_key=node_EXP, node_key=node_EXP,
prices={ prices={
(q, p, k): ResourcePrice(prices=[2.0, 1.0], volumes=[0.5, None]) (q, p, k): ResourcePrice(prices=prices, volumes=volumes)
for p in range(number_periods) for p in range(number_periods)
for k in range(number_intervals) for k in range(number_intervals)
}, },
...@@ -571,11 +565,8 @@ class TestESIPPProblem: ...@@ -571,11 +565,8 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -586,12 +577,11 @@ class TestESIPPProblem: ...@@ -586,12 +577,11 @@ class TestESIPPProblem:
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False, simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
assert not ipp.has_peak_total_assessments() assert not ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 10
assert ipp.results["Problem"][0]["Number of variables"] == 11
assert ipp.results["Problem"][0]["Number of nonzeros"] == 20
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -632,7 +622,16 @@ class TestESIPPProblem: ...@@ -632,7 +622,16 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_increasing_exp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_problem_increasing_exp_prices(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
# time # time
...@@ -680,11 +679,9 @@ class TestESIPPProblem: ...@@ -680,11 +679,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip' if node_price_model == NODE_PRICE_LAMBDA else 'glpk',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -695,12 +692,11 @@ class TestESIPPProblem: ...@@ -695,12 +692,11 @@ class TestESIPPProblem:
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False, simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
assert not ipp.has_peak_total_assessments() assert not ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 14 # 10 before nonconvex block
assert ipp.results["Problem"][0]["Number of variables"] == 13 # 11 before nonconvex block
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 20 before nonconvex block
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -741,7 +737,16 @@ class TestESIPPProblem: ...@@ -741,7 +737,16 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_increasing_exp_prices_infinite_capacity(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_problem_increasing_exp_prices_infinite_capacity(self, use_prices_block, node_price_model):
# assessment # assessment
q = 0 q = 0
# time # time
...@@ -788,15 +793,12 @@ class TestESIPPProblem: ...@@ -788,15 +793,12 @@ class TestESIPPProblem:
validate=False, validate=False,
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# trigger the error # trigger the error
error_raised = False error_raised = False
try: try:
# no sos, regular time intervals # no sos, regular time intervals
self.build_solve_ipp( build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -807,6 +809,8 @@ class TestESIPPProblem: ...@@ -807,6 +809,8 @@ class TestESIPPProblem:
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False, simplify_problem=False,
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
except Exception: except Exception:
error_raised = True error_raised = True
...@@ -815,7 +819,16 @@ class TestESIPPProblem: ...@@ -815,7 +819,16 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_problem_increasing_imp_decreasing_exp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_problem_increasing_imp_decreasing_exp_prices(self, use_prices_block, node_price_model):
# scenario # scenario
q = 0 q = 0
# time # time
...@@ -836,10 +849,12 @@ class TestESIPPProblem: ...@@ -836,10 +849,12 @@ class TestESIPPProblem:
# import node # import node
node_IMP = 'I' node_IMP = 'I'
prices = [1.0, 2.0]
volumes = [0.5, None] if node_price_model == NODE_PRICE_OTHER else [0.5, 1e5]
mynet.add_import_node( mynet.add_import_node(
node_key=node_IMP, node_key=node_IMP,
prices={ prices={
(q, p, k): ResourcePrice(prices=[1.0, 2.0], volumes=[0.5, None]) (q, p, k): ResourcePrice(prices=prices, volumes=volumes)
for p in range(number_periods) for p in range(number_periods)
for k in range(number_intervals) for k in range(number_intervals)
}, },
...@@ -847,10 +862,12 @@ class TestESIPPProblem: ...@@ -847,10 +862,12 @@ class TestESIPPProblem:
# export node # export node
node_EXP = generate_pseudo_unique_key(mynet.nodes()) node_EXP = generate_pseudo_unique_key(mynet.nodes())
prices = [2.0, 1.0]
volumes = [0.5, None] if node_price_model == NODE_PRICE_OTHER else [0.5, 1e5]
mynet.add_export_node( mynet.add_export_node(
node_key=node_EXP, node_key=node_EXP,
prices={ prices={
(q, p, k): ResourcePrice(prices=[2.0, 1.0], volumes=[0.5, None]) (q, p, k): ResourcePrice(prices=prices, volumes=volumes)
for p in range(number_periods) for p in range(number_periods)
for k in range(number_intervals) for k in range(number_intervals)
}, },
...@@ -890,11 +907,8 @@ class TestESIPPProblem: ...@@ -890,11 +907,8 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -906,12 +920,11 @@ class TestESIPPProblem: ...@@ -906,12 +920,11 @@ class TestESIPPProblem:
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=False, simplify_problem=False,
# discount_rates={0: (0.0,)}, # discount_rates={0: (0.0,)},
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
assert not ipp.has_peak_total_assessments() assert not ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 23
assert ipp.results["Problem"][0]["Number of variables"] == 26
assert ipp.results["Problem"][0]["Number of nonzeros"] == 57
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -981,8 +994,17 @@ class TestESIPPProblem: ...@@ -981,8 +994,17 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_direct_imp_exp_network_higher_exp_prices(self): @pytest.mark.parametrize(
"use_prices_block, node_price_model",
[(True, NODE_PRICE_OTHER),
(True, NODE_PRICE_DELTA),
(True, NODE_PRICE_LAMBDA),
(False, NODE_PRICE_OTHER),
(False, NODE_PRICE_DELTA),
(False, NODE_PRICE_LAMBDA)]
)
def test_direct_imp_exp_network_higher_exp_prices(self, use_prices_block, node_price_model):
# time frame # time frame
q = 0 q = 0
...@@ -1002,7 +1024,7 @@ class TestESIPPProblem: ...@@ -1002,7 +1024,7 @@ class TestESIPPProblem:
imp_prices = { imp_prices = {
qpk: ResourcePrice( qpk: ResourcePrice(
prices=0.5, prices=0.5,
volumes=None, volumes=None if node_price_model == NODE_PRICE_OTHER else 1e4,
) )
for qpk in tf.qpk() for qpk in tf.qpk()
} }
...@@ -1016,7 +1038,7 @@ class TestESIPPProblem: ...@@ -1016,7 +1038,7 @@ class TestESIPPProblem:
exp_prices = { exp_prices = {
qpk: ResourcePrice( qpk: ResourcePrice(
prices=1.5, prices=1.5,
volumes=None, volumes=None if node_price_model == NODE_PRICE_OTHER else 1e4,
) )
for qpk in tf.qpk() for qpk in tf.qpk()
} }
...@@ -1042,11 +1064,8 @@ class TestESIPPProblem: ...@@ -1042,11 +1064,8 @@ class TestESIPPProblem:
node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -1055,7 +1074,9 @@ class TestESIPPProblem: ...@@ -1055,7 +1074,9 @@ class TestESIPPProblem:
time_frame=tf, time_frame=tf,
static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP, static_losses_mode=InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP,
mandatory_arcs=[], mandatory_arcs=[],
max_number_parallel_arcs={} max_number_parallel_arcs={},
use_prices_block=use_prices_block,
node_price_model=node_price_model
) )
# export prices are higher: it makes sense to install the arc since the # export prices are higher: it makes sense to install the arc since the
...@@ -1067,7 +1088,7 @@ class TestESIPPProblem: ...@@ -1067,7 +1088,7 @@ class TestESIPPProblem:
.edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH] .edges[(imp_node_key, exp_node_key, 0)][Network.KEY_ARC_TECH]
.options_selected .options_selected
) )
# overview # overview
(imports_qpk, (imports_qpk,
exports_qpk, exports_qpk,
......
...@@ -4,8 +4,7 @@ ...@@ -4,8 +4,7 @@
import math import math
# local # local
# import numpy as np import pytest
# import networkx as nx
import pyomo.environ as pyo import pyomo.environ as pyo
# import src.topupopt.problems.esipp.utils as utils # import src.topupopt.problems.esipp.utils as utils
...@@ -18,234 +17,22 @@ from src.topupopt.problems.esipp.resource import ResourcePrice ...@@ -18,234 +17,22 @@ from src.topupopt.problems.esipp.resource import ResourcePrice
from src.topupopt.problems.esipp.utils import statistics from src.topupopt.problems.esipp.utils import statistics
from src.topupopt.problems.esipp.time import EconomicTimeFrame from src.topupopt.problems.esipp.time import EconomicTimeFrame
# from src.topupopt.problems.esipp.converter import Converter # from src.topupopt.problems.esipp.converter import Converter
from test_esipp import build_solve_ipp, check_problem_size
# ***************************************************************************** # *****************************************************************************
# ***************************************************************************** # *****************************************************************************
class TestESIPPProblem: class TestESIPPProblem:
solver = 'glpk'
# solver = 'scip'
# solver = 'cbc'
def build_solve_ipp(
self,
solver: str = None,
solver_options: dict = None,
use_sos_arcs: bool = False,
arc_sos_weight_key: str = (InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
arc_use_real_variables_if_possible: bool = False,
use_sos_sense: bool = False,
sense_sos_weight_key: int = (
InfrastructurePlanningProblem.SOS1_SENSE_WEIGHT_NOMINAL_HIGHER
),
sense_use_real_variables_if_possible: bool = False,
sense_use_arc_interfaces: bool = False,
perform_analysis: bool = False,
plot_results: bool = False,
print_solver_output: bool = False,
time_frame: EconomicTimeFrame = None,
networks: dict = None,
converters: dict = None,
static_losses_mode=None,
mandatory_arcs: list = None,
max_number_parallel_arcs: dict = None,
arc_groups_dict: dict = None,
init_aux_sets: bool = False,
# discount_rates: dict = None,
assessment_weights: dict = None,
simplify_problem: bool = False,
):
if type(solver) == type(None):
solver = self.solver
if type(assessment_weights) != dict:
assessment_weights = {} # default
if type(converters) != dict:
converters = {}
# time weights
# relative weight of time period
# one interval twice as long as the average is worth twice
# one interval half as long as the average is worth half
# time_weights = [
# [time_period_duration/average_time_interval_duration
# for time_period_duration in intraperiod_time_interval_duration]
# for p in range(number_periods)]
time_weights = None # nothing yet
normalised_time_interval_duration = None # nothing yet
# create problem object
ipp = InfrastructurePlanningProblem(
# discount_rates=discount_rates,
time_frame=time_frame,
# reporting_periods=time_frame.reporting_periods,
# time_intervals=time_frame.time_interval_durations,
time_weights=time_weights,
normalised_time_interval_duration=normalised_time_interval_duration,
assessment_weights=assessment_weights,
)
# add networks and systems
for netkey, net in networks.items():
ipp.add_network(network_key=netkey, network=net)
# add converters
for cvtkey, cvt in converters.items():
ipp.add_converter(converter_key=cvtkey, converter=cvt)
# define arcs as mandatory
if type(mandatory_arcs) == list:
for full_arc_key in mandatory_arcs:
ipp.make_arc_mandatory(full_arc_key[0], full_arc_key[1:])
# if make_all_arcs_mandatory:
# for network_key in ipp.networks:
# for arc_key in ipp.networks[network_key].edges(keys=True):
# # preexisting arcs are no good
# if ipp.networks[network_key].edges[arc_key][
# Network.KEY_ARC_TECH].has_been_selected():
# continue
# ipp.make_arc_mandatory(network_key, arc_key)
# set up the use of sos for arc selection
if use_sos_arcs:
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if (
ipp.networks[network_key]
.edges[arc_key][Network.KEY_ARC_TECH]
.has_been_selected()
):
continue
ipp.use_sos1_for_arc_selection(
network_key,
arc_key,
use_real_variables_if_possible=(
arc_use_real_variables_if_possible
),
sos1_weight_method=arc_sos_weight_key,
)
# set up the use of sos for flow sense determination
if use_sos_sense:
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if not ipp.networks[network_key].edges[arc_key][
Network.KEY_ARC_UND
]:
continue
ipp.use_sos1_for_flow_senses(
network_key,
arc_key,
use_real_variables_if_possible=(
sense_use_real_variables_if_possible
),
use_interface_variables=sense_use_arc_interfaces,
sos1_weight_method=sense_sos_weight_key,
)
elif sense_use_arc_interfaces: # set up the use of arc interfaces w/o sos1
for network_key in ipp.networks:
for arc_key in ipp.networks[network_key].edges(keys=True):
if (
ipp.networks[network_key]
.edges[arc_key][Network.KEY_ARC_TECH]
.has_been_selected()
):
continue
ipp.use_interface_variables_for_arc_selection(network_key, arc_key)
# static losses
if static_losses_mode == ipp.STATIC_LOSS_MODE_ARR:
ipp.place_static_losses_arrival_node()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_DEP:
ipp.place_static_losses_departure_node()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_US:
ipp.place_static_losses_upstream()
elif static_losses_mode == ipp.STATIC_LOSS_MODE_DS:
ipp.place_static_losses_downstream()
else:
raise ValueError("Unknown static loss modelling mode.")
# *********************************************************************
# groups
if type(arc_groups_dict) != type(None):
for key in arc_groups_dict:
ipp.create_arc_group(arc_groups_dict[key])
# *********************************************************************
# maximum number of parallel arcs
for key in max_number_parallel_arcs:
ipp.set_maximum_number_parallel_arcs(
network_key=key[0],
node_a=key[1],
node_b=key[2],
limit=max_number_parallel_arcs[key],
)
# *********************************************************************
if simplify_problem:
ipp.simplify_peak_total_assessments()
# *********************************************************************
# instantiate (disable the default case v-a-v fixed losses)
# ipp.instantiate(place_fixed_losses_upstream_if_possible=False)
ipp.instantiate(initialise_ancillary_sets=init_aux_sets)
# optimise
ipp.optimise(
solver_name=solver,
solver_options=solver_options,
output_options={},
print_solver_output=print_solver_output,
)
# return the problem object
return ipp
# *********************************************************************
# *********************************************************************
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_single_network_single_arc_problem(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
('scip', False, None)]
)
def test_single_network_single_arc_problem(self, solver, use_sos_arcs, arc_sos_weight_key):
# assessment # assessment
q = 0 q = 0
...@@ -291,12 +78,12 @@ class TestESIPPProblem: ...@@ -291,12 +78,12 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -312,11 +99,12 @@ class TestESIPPProblem: ...@@ -312,11 +99,12 @@ class TestESIPPProblem:
# ********************************************************************* # *********************************************************************
# validation # validation
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments() assert ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 24 # assert ipp.results["Problem"][0]["Number of constraints"] == 24
assert ipp.results["Problem"][0]["Number of variables"] == 22 # assert ipp.results["Problem"][0]["Number of variables"] == 22
assert ipp.results["Problem"][0]["Number of nonzeros"] == 49 # assert ipp.results["Problem"][0]["Number of nonzeros"] == 49
# check_problem_size(ipp, 24, 22, 49)
# the arc should be installed since it is required for feasibility # the arc should be installed since it is required for feasibility
assert ( assert (
...@@ -361,7 +149,7 @@ class TestESIPPProblem: ...@@ -361,7 +149,7 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_single_network_two_arcs_problem(self): def test_single_network_two_arcs_problem(self):
# TODO: test simplifying this problem # TODO: test simplifying this problem
...@@ -434,13 +222,12 @@ class TestESIPPProblem: ...@@ -434,13 +222,12 @@ class TestESIPPProblem:
validate=False, validate=False,
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_EXP, arcs=arc_tech_AE)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
# use_sos_arcs=use_sos_arcs,
# arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -507,7 +294,12 @@ class TestESIPPProblem: ...@@ -507,7 +294,12 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_single_network_single_arc_problem_simpler(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST),
('scip', False, None)]
)
def test_single_network_single_arc_problem_simpler(self, solver, use_sos_arcs, arc_sos_weight_key):
# assessment # assessment
q = 0 q = 0
...@@ -554,11 +346,9 @@ class TestESIPPProblem: ...@@ -554,11 +346,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -570,11 +360,13 @@ class TestESIPPProblem: ...@@ -570,11 +360,13 @@ class TestESIPPProblem:
max_number_parallel_arcs={}, max_number_parallel_arcs={},
simplify_problem=True, simplify_problem=True,
) )
# validation
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments() assert ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 16 # 20 # assert ipp.results["Problem"][0]["Number of constraints"] == 16 # 20
assert ipp.results["Problem"][0]["Number of variables"] == 15 # 19 # assert ipp.results["Problem"][0]["Number of variables"] == 15 # 19
assert ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 36 # assert ipp.results["Problem"][0]["Number of nonzeros"] == 28 # 36
# check_problem_size(ipp, 16, 15, 28)
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -660,13 +452,9 @@ class TestESIPPProblem: ...@@ -660,13 +452,9 @@ class TestESIPPProblem:
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -785,11 +573,8 @@ class TestESIPPProblem: ...@@ -785,11 +573,8 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -871,11 +656,8 @@ class TestESIPPProblem: ...@@ -871,11 +656,8 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -997,11 +779,8 @@ class TestESIPPProblem: ...@@ -997,11 +779,8 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA) mynet.add_directed_arc(node_key_a=node_IMP, node_key_b=node_A, arcs=arc_tech_IA)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -1045,8 +824,17 @@ class TestESIPPProblem: ...@@ -1045,8 +824,17 @@ class TestESIPPProblem:
# problem with symmetrical nodes and one undirected arc, irregular steps # problem with symmetrical nodes and one undirected arc, irregular steps
# same problem as the previous one, except with interface variables # same problem as the previous one, except with interface variables
# problem with two symmetrical nodes and one undirected arc, w/ simple sos1 # problem with two symmetrical nodes and one undirected arc, w/ simple sos1
def test_isolated_undirected_network(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST),
('scip', False, None)]
)
def test_isolated_undirected_network(self, solver, use_sos_arcs, arc_sos_weight_key):
q = 0 q = 0
tf = EconomicTimeFrame( tf = EconomicTimeFrame(
...@@ -1090,11 +878,12 @@ class TestESIPPProblem: ...@@ -1090,11 +878,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( solver = 'scip'
ipp = build_solve_ipp(
solver=solver,
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -1106,10 +895,15 @@ class TestESIPPProblem: ...@@ -1106,10 +895,15 @@ class TestESIPPProblem:
max_number_parallel_arcs={} max_number_parallel_arcs={}
) )
if use_sos_arcs:
assert len(ipp.instance.constr_arc_sos1) != 0
else:
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments() # TODO: make sure this is true assert ipp.has_peak_total_assessments() # TODO: make sure this is true
assert ipp.results["Problem"][0]["Number of constraints"] == 34 # assert ipp.results["Problem"][0]["Number of constraints"] == 34
assert ipp.results["Problem"][0]["Number of variables"] == 28 # assert ipp.results["Problem"][0]["Number of variables"] == 28
assert ipp.results["Problem"][0]["Number of nonzeros"] == 105 # assert ipp.results["Problem"][0]["Number of nonzeros"] == 105
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -1183,11 +977,10 @@ class TestESIPPProblem: ...@@ -1183,11 +977,10 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( solver = 'scip'
ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, plot_results=False,
...@@ -1199,10 +992,18 @@ class TestESIPPProblem: ...@@ -1199,10 +992,18 @@ class TestESIPPProblem:
max_number_parallel_arcs={} max_number_parallel_arcs={}
) )
assert ipp.has_peak_total_assessments() if solver == 'scip':
assert ipp.results["Problem"][0]["Number of constraints"] == 34 assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.results["Problem"][0]["Number of variables"] == 24 assert ipp.has_peak_total_assessments() # TODO: make sure this is true
assert ipp.results["Problem"][0]["Number of nonzeros"] == 77 assert ipp.results["Problem"][0]["Number of constraints"] == 0 # 34
assert ipp.results["Problem"][0]["Number of variables"] == 23 # 24
# assert ipp.results["Problem"][0]["Number of nonzeros"] == 77
else:
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments() # TODO: make sure this is true
assert ipp.results["Problem"][0]["Number of constraints"] == 34
assert ipp.results["Problem"][0]["Number of variables"] == 24
assert ipp.results["Problem"][0]["Number of nonzeros"] == 77
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -1287,11 +1088,9 @@ class TestESIPPProblem: ...@@ -1287,11 +1088,9 @@ class TestESIPPProblem:
capacity_is_instantaneous=capacity_is_instantaneous, capacity_is_instantaneous=capacity_is_instantaneous,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -1357,11 +1156,9 @@ class TestESIPPProblem: ...@@ -1357,11 +1156,9 @@ class TestESIPPProblem:
static_loss=None, static_loss=None,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -1380,8 +1177,17 @@ class TestESIPPProblem: ...@@ -1380,8 +1177,17 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_nonisolated_undirected_network(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST),
('scip', False, None)]
)
def test_nonisolated_undirected_network(self, solver, use_sos_arcs, arc_sos_weight_key):
# scenario # scenario
q = 0 q = 0
...@@ -1485,12 +1291,12 @@ class TestESIPPProblem: ...@@ -1485,12 +1291,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -1501,10 +1307,19 @@ class TestESIPPProblem: ...@@ -1501,10 +1307,19 @@ class TestESIPPProblem:
max_number_parallel_arcs={} max_number_parallel_arcs={}
) )
assert ipp.has_peak_total_assessments() # validation
assert ipp.results["Problem"][0]["Number of constraints"] == 80 if use_sos_arcs:
assert ipp.results["Problem"][0]["Number of variables"] == 84 assert len(ipp.instance.constr_arc_sos1) == 3
assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 assert ipp.has_peak_total_assessments()
# assert ipp.results["Problem"][0]["Number of constraints"] == 0 # 80
# assert ipp.results["Problem"][0]["Number of variables"] == 83 # 84
# # assert ipp.results["Problem"][0]["Number of nonzeros"] == 253
else:
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments()
# assert ipp.results["Problem"][0]["Number of constraints"] == 0 # 80
# assert ipp.results["Problem"][0]["Number of variables"] == 83 # 84
# # assert ipp.results["Problem"][0]["Number of nonzeros"] == 253
# ************************************************************************** # **************************************************************************
...@@ -1551,8 +1366,17 @@ class TestESIPPProblem: ...@@ -1551,8 +1366,17 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_nonisolated_undirected_network_diff_tech(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP),
# ('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST),
('scip', False, None)]
)
def test_nonisolated_undirected_network_diff_tech(self, solver, use_sos_arcs, arc_sos_weight_key):
# scenario # scenario
q = 0 q = 0
...@@ -1655,12 +1479,12 @@ class TestESIPPProblem: ...@@ -1655,12 +1479,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -1671,10 +1495,25 @@ class TestESIPPProblem: ...@@ -1671,10 +1495,25 @@ class TestESIPPProblem:
max_number_parallel_arcs={} max_number_parallel_arcs={}
) )
assert ipp.has_peak_total_assessments() # validation
assert ipp.results["Problem"][0]["Number of constraints"] == 80 # ipp.instance.constr_arc_sos1.pprint()
assert ipp.results["Problem"][0]["Number of variables"] == 84 # print(ipp.results["Problem"][0])
assert ipp.results["Problem"][0]["Number of nonzeros"] == 253 if use_sos_arcs:
# print(ipp.results["Problem"][0])
assert len(ipp.instance.constr_arc_sos1) == 3
assert ipp.has_peak_total_assessments()
# assert ipp.results["Problem"][0]["Number of constraints"] == 0 # should be 80
# assert ipp.results["Problem"][0]["Number of variables"] == 83 # should be 84
# assert ipp.results["Problem"][0]["Number of nonzeros"] == 253
# check_problem_size(ipp, 0, 83, 253)
else:
# print(ipp.results["Problem"][0])
assert len(ipp.instance.constr_arc_sos1) == 0
assert ipp.has_peak_total_assessments()
# assert ipp.results["Problem"][0]["Number of constraints"] == 0 # should be 80
# assert ipp.results["Problem"][0]["Number of variables"] == 83 # should be 84
# assert ipp.results["Problem"][0]["Number of nonzeros"] == 253
# check_problem_size(ipp, 0, 83, 253)
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -1720,7 +1559,13 @@ class TestESIPPProblem: ...@@ -1720,7 +1559,13 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_nonisolated_network_preexisting_directed_arcs(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
('scip', False, None)]
)
def test_nonisolated_network_preexisting_directed_arcs(self, solver, use_sos_arcs, arc_sos_weight_key):
# time frame # time frame
q = 0 q = 0
...@@ -1820,12 +1665,12 @@ class TestESIPPProblem: ...@@ -1820,12 +1665,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options={}, solver_options={},
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -1839,6 +1684,10 @@ class TestESIPPProblem: ...@@ -1839,6 +1684,10 @@ class TestESIPPProblem:
# ********************************************************************* # *********************************************************************
# validation # validation
if use_sos_arcs:
assert len(ipp.instance.constr_arc_sos1) != 0
else:
assert len(ipp.instance.constr_arc_sos1) == 0
# network is still isolated # network is still isolated
# the undirected arc was installed # the undirected arc was installed
assert ( assert (
...@@ -1860,7 +1709,21 @@ class TestESIPPProblem: ...@@ -1860,7 +1709,21 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_nonisolated_network_preexisting_directed_arcs_diff_tech(self): @pytest.mark.parametrize(
"solver, use_sos_arcs, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_NONE),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_COST),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_CAP),
('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_SPEC_COST),
('scip', False, None)]
)
def test_nonisolated_network_preexisting_directed_arcs_diff_tech(
self,
solver,
use_sos_arcs,
arc_sos_weight_key
):
# time frame # time frame
q = 0 q = 0
...@@ -1961,12 +1824,12 @@ class TestESIPPProblem: ...@@ -1961,12 +1824,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver_options={},solver='scip', solver=solver,
solver_options={},
use_sos_arcs=use_sos_arcs,
arc_sos_weight_key=arc_sos_weight_key,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -1980,6 +1843,10 @@ class TestESIPPProblem: ...@@ -1980,6 +1843,10 @@ class TestESIPPProblem:
# ************************************************************************** # **************************************************************************
# validation # validation
if use_sos_arcs:
assert len(ipp.instance.constr_arc_sos1) != 0
else:
assert len(ipp.instance.constr_arc_sos1) == 0
# the undirected arc should be installed since it is cheaper tham imp. # the undirected arc should be installed since it is cheaper tham imp.
assert ( assert (
True True
...@@ -2187,15 +2054,13 @@ class TestESIPPProblem: ...@@ -2187,15 +2054,13 @@ class TestESIPPProblem:
) )
} }
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
solver_options = {} solver_options = {}
solver_options["relative_mip_gap"] = 0 solver_options["relative_mip_gap"] = 0
solver_options["absolute_mip_gap"] = 1e-4 solver_options["absolute_mip_gap"] = 1e-4
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options=solver_options, solver_options=solver_options,
use_sos_arcs=False, use_sos_arcs=False,
arc_sos_weight_key=None, arc_sos_weight_key=None,
...@@ -2218,6 +2083,7 @@ class TestESIPPProblem: ...@@ -2218,6 +2083,7 @@ class TestESIPPProblem:
# ********************************************************************* # *********************************************************************
# overview # overview
assert len(ipp.instance.constr_arc_sos1) == 0
(imports_qpk, (imports_qpk,
exports_qpk, exports_qpk,
...@@ -2524,18 +2390,16 @@ class TestESIPPProblem: ...@@ -2524,18 +2390,16 @@ class TestESIPPProblem:
# do not use arc groups # do not use arc groups
arc_groups_dict = {} arc_groups_dict = {}
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
solver_options = {} solver_options = {}
solver_options["relative_mip_gap"] = 0 solver_options["relative_mip_gap"] = 0
solver_options["absolute_mip_gap"] = 1e-4 solver_options["absolute_mip_gap"] = 1e-4
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='glpk',
solver_options=solver_options, solver_options=solver_options,
use_sos_arcs=False, use_sos_arcs=False,
use_sos_arc_groups=False,
arc_sos_weight_key=None, arc_sos_weight_key=None,
arc_use_real_variables_if_possible=False, arc_use_real_variables_if_possible=False,
use_sos_sense=False, use_sos_sense=False,
...@@ -2553,7 +2417,10 @@ class TestESIPPProblem: ...@@ -2553,7 +2417,10 @@ class TestESIPPProblem:
arc_groups_dict=arc_groups_dict arc_groups_dict=arc_groups_dict
) )
# ************************************************************************** # *********************************************************************
# validation
assert len(ipp.instance.constr_arc_group_sos1) == 0
# overview # overview
(imports_qpk, (imports_qpk,
...@@ -2660,7 +2527,12 @@ class TestESIPPProblem: ...@@ -2660,7 +2527,12 @@ class TestESIPPProblem:
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
def test_arc_groups_individual_undirected(self): @pytest.mark.parametrize(
"solver, use_sos_arc_groups, arc_sos_weight_key",
[('scip', True, InfrastructurePlanningProblem.SOS1_ARC_WEIGHTS_CAP),
('scip', False, None)]
)
def test_arc_groups_individual_undirected(self, solver, use_sos_arc_groups, arc_sos_weight_key):
# time frame # time frame
q = 0 q = 0
...@@ -2796,9 +2668,6 @@ class TestESIPPProblem: ...@@ -2796,9 +2668,6 @@ class TestESIPPProblem:
) )
} }
# identify node types
mynet.identify_node_types()
# solver settings # solver settings
solver_options = {} solver_options = {}
solver_options["relative_mip_gap"] = 0 solver_options["relative_mip_gap"] = 0
...@@ -2821,10 +2690,12 @@ class TestESIPPProblem: ...@@ -2821,10 +2690,12 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver=solver,
solver_options=solver_options, solver_options=solver_options,
use_sos_arcs=False, use_sos_arcs=False,
arc_sos_weight_key=None, use_sos_arc_groups=use_sos_arc_groups,
arc_sos_weight_key=arc_sos_weight_key,
arc_use_real_variables_if_possible=False, arc_use_real_variables_if_possible=False,
use_sos_sense=False, use_sos_sense=False,
sense_sos_weight_key=None, sense_sos_weight_key=None,
...@@ -2841,6 +2712,11 @@ class TestESIPPProblem: ...@@ -2841,6 +2712,11 @@ class TestESIPPProblem:
arc_groups_dict=arc_groups_dict arc_groups_dict=arc_groups_dict
) )
if use_sos_arc_groups:
assert len(ipp.instance.constr_arc_group_sos1) != 0
else:
assert len(ipp.instance.constr_arc_group_sos1) == 0
# overview # overview
(imports_qpk, (imports_qpk,
exports_qpk, exports_qpk,
...@@ -3107,9 +2983,6 @@ class TestESIPPProblem: ...@@ -3107,9 +2983,6 @@ class TestESIPPProblem:
# arc groups # arc groups
arc_groups_dict = {} arc_groups_dict = {}
# identify node types
mynet.identify_node_types()
# solver settings # solver settings
solver_options = {} solver_options = {}
solver_options["relative_mip_gap"] = 0 solver_options["relative_mip_gap"] = 0
...@@ -3132,7 +3005,8 @@ class TestESIPPProblem: ...@@ -3132,7 +3005,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options=solver_options, solver_options=solver_options,
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -3299,11 +3173,9 @@ class TestESIPPProblem: ...@@ -3299,11 +3173,9 @@ class TestESIPPProblem:
node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE node_key_a=imp_node_key, node_key_b=exp_node_key, arcs=arc_tech_IE
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -3462,11 +3334,6 @@ class TestESIPPProblem: ...@@ -3462,11 +3334,6 @@ class TestESIPPProblem:
arc_key_AB_und = mynet.add_undirected_arc( arc_key_AB_und = mynet.add_undirected_arc(
node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
...@@ -3492,7 +3359,8 @@ class TestESIPPProblem: ...@@ -3492,7 +3359,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
use_sos_arcs=False, use_sos_arcs=False,
arc_sos_weight_key=None, arc_sos_weight_key=None,
...@@ -3855,10 +3723,6 @@ class TestESIPPProblem: ...@@ -3855,10 +3723,6 @@ class TestESIPPProblem:
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
...@@ -3883,7 +3747,8 @@ class TestESIPPProblem: ...@@ -3883,7 +3747,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
use_sos_arcs=False, use_sos_arcs=False,
arc_sos_weight_key=None, arc_sos_weight_key=None,
...@@ -4270,14 +4135,12 @@ class TestESIPPProblem: ...@@ -4270,14 +4135,12 @@ class TestESIPPProblem:
node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -4895,14 +4758,12 @@ class TestESIPPProblem: ...@@ -4895,14 +4758,12 @@ class TestESIPPProblem:
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -5457,9 +5318,6 @@ class TestESIPPProblem: ...@@ -5457,9 +5318,6 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in [ for static_losses_mode in [
InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR,
...@@ -5474,7 +5332,8 @@ class TestESIPPProblem: ...@@ -5474,7 +5332,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, plot_results=False,
...@@ -5615,17 +5474,14 @@ class TestESIPPProblem: ...@@ -5615,17 +5474,14 @@ class TestESIPPProblem:
# arc_tech_AB.options_selected[0] = True # arc_tech_AB.options_selected[0] = True
# mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB) # mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in [ for static_losses_mode in [
InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR, InfrastructurePlanningProblem.STATIC_LOSS_MODE_ARR,
InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP InfrastructurePlanningProblem.STATIC_LOSS_MODE_DEP
]: ]:
# TODO: make this work with GLPK and SCIP # TODO: make this work with GLPK and SCIP
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='cbc', # does not work with GLPK nor SCIP solver='scip', # does not work with GLPK nor SCIP
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -5761,9 +5617,6 @@ class TestESIPPProblem: ...@@ -5761,9 +5617,6 @@ class TestESIPPProblem:
arc_key_AB_und = mynet.add_undirected_arc( arc_key_AB_und = mynet.add_undirected_arc(
node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB node_key_a=node_A, node_key_b=node_B, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
...@@ -5776,7 +5629,8 @@ class TestESIPPProblem: ...@@ -5776,7 +5629,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -6038,9 +5892,6 @@ class TestESIPPProblem: ...@@ -6038,9 +5892,6 @@ class TestESIPPProblem:
capacity=1.0, capacity=1.0,
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
...@@ -6053,7 +5904,8 @@ class TestESIPPProblem: ...@@ -6053,7 +5904,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -6321,9 +6173,6 @@ class TestESIPPProblem: ...@@ -6321,9 +6173,6 @@ class TestESIPPProblem:
arc_key_AB_und = mynet.add_undirected_arc( arc_key_AB_und = mynet.add_undirected_arc(
node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB node_key_a=node_B, node_key_b=node_A, arcs=arc_tech_AB
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
...@@ -6336,7 +6185,8 @@ class TestESIPPProblem: ...@@ -6336,7 +6185,8 @@ class TestESIPPProblem:
Network.KEY_ARC_TECH].options_selected.index(True) Network.KEY_ARC_TECH].options_selected.index(True)
] = False ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -6599,9 +6449,6 @@ class TestESIPPProblem: ...@@ -6599,9 +6449,6 @@ class TestESIPPProblem:
capacity=1.0, capacity=1.0,
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES: for static_losses_mode in InfrastructurePlanningProblem.STATIC_LOSS_MODES:
...@@ -6614,7 +6461,8 @@ class TestESIPPProblem: ...@@ -6614,7 +6461,8 @@ class TestESIPPProblem:
# Network.KEY_ARC_TECH].options_selected.index(True) # Network.KEY_ARC_TECH].options_selected.index(True)
# ] = False # ] = False
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -6917,12 +6765,10 @@ class TestESIPPProblem: ...@@ -6917,12 +6765,10 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab) mynet.add_directed_arc(node_key_a=node_A, node_key_b=node_B, arcs=arcs_ab)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -7085,13 +6931,10 @@ class TestESIPPProblem: ...@@ -7085,13 +6931,10 @@ class TestESIPPProblem:
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='cbc', # TODO: make this work with other solvers solver='scip', # TODO: make this work with other solvers
solver_options={}, solver_options={},
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -7229,12 +7072,9 @@ class TestESIPPProblem: ...@@ -7229,12 +7072,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia2) mynet.add_directed_arc(node_key_a=imp_node_key, node_key_b=node_A, arcs=arcs_ia2)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='cbc', # TODO: make this work with other solvers solver='scip', # TODO: make this work with other solvers
solver_options={}, solver_options={},
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -7359,12 +7199,9 @@ class TestESIPPProblem: ...@@ -7359,12 +7199,9 @@ class TestESIPPProblem:
capacity_is_instantaneous=False, capacity_is_instantaneous=False,
) )
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='cbc', # TODO: make this work with other solvers solver='scip', # TODO: make this work with other solvers
solver_options={}, solver_options={},
plot_results=False, # True, plot_results=False, # True,
print_solver_output=False, print_solver_output=False,
...@@ -7524,7 +7361,7 @@ class TestESIPPProblem: ...@@ -7524,7 +7361,7 @@ class TestESIPPProblem:
# # no sos, regular time intervals # # no sos, regular time intervals
# ipp = self.build_solve_ipp( # ipp = build_solve_ipp(
# solver_options={}, # solver_options={},
# perform_analysis=False, # perform_analysis=False,
# plot_results=False, # True, # plot_results=False, # True,
...@@ -7592,7 +7429,6 @@ class TestESIPPProblem: ...@@ -7592,7 +7429,6 @@ class TestESIPPProblem:
# TODO: test non-simplifiable problems with time varying prices on select assessments # TODO: test non-simplifiable problems with time varying prices on select assessments
# TODO: test non-simplifiable problems with volume varying prices on select assessments # TODO: test non-simplifiable problems with volume varying prices on select assessments
# ************************************************************************* # *************************************************************************
# ************************************************************************* # *************************************************************************
...@@ -7687,11 +7523,9 @@ class TestESIPPProblem: ...@@ -7687,11 +7523,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(*node_pair, arcs=new_arc_tech) mynet.add_directed_arc(*node_pair, arcs=new_arc_tech)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -7704,9 +7538,6 @@ class TestESIPPProblem: ...@@ -7704,9 +7538,6 @@ class TestESIPPProblem:
simplify_problem=True, simplify_problem=True,
) )
assert ipp.has_peak_total_assessments() assert ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 61
assert ipp.results["Problem"][0]["Number of variables"] == 53
assert ipp.results["Problem"][0]["Number of nonzeros"] == 143
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
...@@ -7833,11 +7664,9 @@ class TestESIPPProblem: ...@@ -7833,11 +7664,9 @@ class TestESIPPProblem:
) )
mynet.add_directed_arc(*node_pair, arcs=new_arc_tech) mynet.add_directed_arc(*node_pair, arcs=new_arc_tech)
# identify node types
mynet.identify_node_types()
# no sos, regular time intervals # no sos, regular time intervals
ipp = self.build_solve_ipp( ipp = build_solve_ipp(
solver='scip',
solver_options={}, solver_options={},
perform_analysis=False, perform_analysis=False,
plot_results=False, # True, plot_results=False, # True,
...@@ -7850,9 +7679,6 @@ class TestESIPPProblem: ...@@ -7850,9 +7679,6 @@ class TestESIPPProblem:
simplify_problem=True, simplify_problem=True,
) )
assert ipp.has_peak_total_assessments() assert ipp.has_peak_total_assessments()
assert ipp.results["Problem"][0]["Number of constraints"] == 61
assert ipp.results["Problem"][0]["Number of variables"] == 53
assert ipp.results["Problem"][0]["Number of nonzeros"] == 143 #
# ********************************************************************* # *********************************************************************
# ********************************************************************* # *********************************************************************
......
...@@ -598,145 +598,145 @@ class TestResourcePrice: ...@@ -598,145 +598,145 @@ class TestResourcePrice:
# create object without prices # create object without prices
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=None, volumes=volumes) _ = ResourcePrice(prices=None, volumes=volumes)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with negative prices in lists # create object with negative prices in lists
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, -3, 2], volumes=[3, 4, 5]) _ = ResourcePrice(prices=[7, -3, 2], volumes=[3, 4, 5])
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object where an intermediate segment has no volume limit # create object where an intermediate segment has no volume limit
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 4, 2], volumes=[3, None, 5]) _ = ResourcePrice(prices=[7, 4, 2], volumes=[3, None, 5])
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with negative volumes in lists # create object with negative volumes in lists
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, 2], volumes=[4, -1, 2]) _ = ResourcePrice(prices=[7, 3, 2], volumes=[4, -1, 2])
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with non-numeric prices in lists # create object with non-numeric prices in lists
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, "4", 2], volumes=[3, 4, 5]) _ = ResourcePrice(prices=[7, "4", 2], volumes=[3, 4, 5])
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with non-numeric volumes in lists # create object with non-numeric volumes in lists
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, 2], volumes=[4, "3", 2]) _ = ResourcePrice(prices=[7, 3, 2], volumes=[4, "3", 2])
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with mismatched price and volume lists # create object with mismatched price and volume lists
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, 2], volumes=[5, 7]) _ = ResourcePrice(prices=[7, 3, 2], volumes=[5, 7])
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with a price list as an input and an unsupported type # create object with a price list as an input and an unsupported type
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, 2], volumes="hello") _ = ResourcePrice(prices=[7, 3, 2], volumes="hello")
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with negative prices in lists (no volumes are provided) # create object with negative prices in lists (no volumes are provided)
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, -2], volumes=None) _ = ResourcePrice(prices=[7, 3, -2], volumes=None)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with non-numeric prices in lists (no volumes are provided) # create object with non-numeric prices in lists (no volumes are provided)
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=[7, 3, "a"], volumes=None) _ = ResourcePrice(prices=[7, 3, "a"], volumes=None)
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with non-numeric prices in lists (no volumes are provided) # create object with non-numeric prices in lists (no volumes are provided)
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=5, volumes=[7, 3, 4]) _ = ResourcePrice(prices=5, volumes=[7, 3, 4])
except TypeError: except TypeError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
# create object with negative prices # create object with negative prices
error_triggered = False error_raised = False
try: try:
_ = ResourcePrice(prices=-3, volumes=None) _ = ResourcePrice(prices=-3, volumes=None)
except ValueError: except ValueError:
error_triggered = True error_raised = True
assert error_triggered assert error_raised
# ********************************************************************* # *********************************************************************
......