diff --git a/src/topupopt/data/buildings/dk/heat.py b/src/topupopt/data/buildings/dk/heat.py
index 76bb32275c25d73446a0d3c6787f304bd26584a9..72625f9f7495a517fdb3cf3ca9019cc650c3da5c 100644
--- a/src/topupopt/data/buildings/dk/heat.py
+++ b/src/topupopt/data/buildings/dk/heat.py
@@ -4,7 +4,9 @@
 import numpy as np
 from geopandas import GeoDataFrame
 from ...misc.utils import discrete_sinusoid_matching_integral
-from ...misc.utils import create_profile_using_time_weighted_state
+# from ...misc.utils import create_profile_using_time_weighted_state
+from ...misc.utils import generate_manual_state_correlated_profile
+from ...misc.utils import generate_state_correlated_profile
 from .bbr import label_bbr_entrance_id, label_bbr_housing_area
 
 # *****************************************************************************
@@ -66,6 +68,8 @@ def heat_demand_dict_by_building_entrance(
     key_bbr_entr_id: str = label_bbr_entrance_id,
     avg_state: list = None,
     state_correlates_with_output: bool = False,
+    deviation_gain: float = 1.0,
+    solver: str = 'glpk'
 ) -> dict:
     
     # initialise dict for each building entrance
@@ -84,11 +88,8 @@ def heat_demand_dict_by_building_entrance(
         # for each building
         for building_index in building_indexes:
             # get relevant data
-            # base_load_avg_ratio = 0.3
-            # specific_demand = 107 # kWh/m2/year
             area = gdf_buildings.loc[building_index][label_bbr_housing_area]
-
-            # estimate its demand
+            # generate the profile
             if type(avg_state) == type(None):
                 # ignore states
                 heat_demand_profiles.append(
@@ -104,21 +105,49 @@ def heat_demand_dict_by_building_entrance(
                     )
                 )
 
-            else:
-                # states matter
+            elif type(deviation_gain) == type(None):
+                # states matter but the deviation gain must be determined via the LP
                 heat_demand_profiles.append(
                     np.array(
-                        create_profile_using_time_weighted_state(
+                        generate_state_correlated_profile(
                             integration_result=(
                                 bdg_specific_demand[building_index] * area
-                            ),
-                            avg_state=avg_state,
+                            ), 
+                            states=avg_state, 
                             time_interval_durations=time_interval_durations,
-                            min_to_max_ratio=bdg_ratio_min_max[building_index],
-                            state_correlates_with_output=state_correlates_with_output,
+                            states_correlate_profile=state_correlates_with_output,
+                            min_max_ratio=bdg_ratio_min_max[building_index],
+                            solver=solver
+                            )
+                        )
+                    )
+            else:
+                # states matter and the gain is predefined
+                heat_demand_profiles.append(
+                    np.array(
+                        generate_manual_state_correlated_profile(
+                            integration_result=(
+                                bdg_specific_demand[building_index] * area
+                            ), 
+                            states=avg_state, 
+                            time_interval_durations=time_interval_durations, 
+                            deviation_gain=deviation_gain
+                            )
                         )
                     )
-                )
 
         # add the profiles, time step by time step
         if len(heat_demand_profiles) == 0:
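
Reviewer note on the heat.py hunks above (not part of the patch): when `avg_state` is provided, the new `deviation_gain` argument decides whether the gain is fitted with the LP-based helper (`deviation_gain=None`, which requires the `solver` argument) or passed straight to the manual helper. A minimal sketch of that dispatch is shown below; the numbers are made up, the import path follows the test modules, and the LP branch assumes GLPK is installed.

```python
import numpy as np
from src.topupopt.data.misc import utils

states = [5.0, 10.0, 15.0]          # e.g. average outdoor temperatures per interval
dt = [3600.0, 3600.0, 3600.0]       # time interval durations in seconds
total = 30.0                        # integration result the profile must match

# deviation_gain is None: the gain is determined via the LP-based helper
profile, gain, offset = utils.generate_state_correlated_profile(
    integration_result=total,
    states=states,
    time_interval_durations=dt,
    states_correlate_profile=True,
    min_max_ratio=0.2,
    solver="glpk",
)

# deviation_gain is given: the manual helper is used and no solver is needed
manual_profile = np.array(
    utils.generate_manual_state_correlated_profile(
        integration_result=total,
        states=states,
        time_interval_durations=dt,
        deviation_gain=1.0,
    )
)
```
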
diff --git a/src/topupopt/data/misc/utils.py b/src/topupopt/data/misc/utils.py
index 5f15ab0a895dbb65071bbfd61ec82a184c6230b4..0077e519758cc5caab0fd9bb0119c1576f67e1cc 100644
--- a/src/topupopt/data/misc/utils.py
+++ b/src/topupopt/data/misc/utils.py
@@ -368,7 +368,7 @@ def generate_state_correlated_profile(
     states_correlate_profile: bool,
     min_max_ratio: float,
     solver: str
-) -> list:
+) -> tuple:
     """
     Returns a profile observing a number of conditions.
     
@@ -378,6 +378,23 @@ def generate_state_correlated_profile(
     must be related by a factor between 0 and 1.
     
     This method relies on linear programming. An LP solver must be used.
+
+    The profile for interval i is defined as follows:
+
+    P[i] = (dt[i]/dt_mean)*((x[i]-x_mean)*gain + offset)
+
+    where:
+
+    dt[i] is the duration of time interval i
+
+    dt_mean is the mean time interval duration
+
+    x[i] is the state during time interval i
+
+    x_mean is the mean state across the entire profile
+
+    The offset is defined as the integration result divided by the number
+    of time intervals, whereas the gain is determined via optimisation.
 
     Parameters
     ----------
@@ -402,14 +419,16 @@ def generate_state_correlated_profile(
 
     Returns
     -------
-    list
-        A profile matching the aforementioned characteristics.
+    tuple
+        A tuple containing the profile, the gain and the offset.
 
     """    
         
     # *************************************************************************
     # *************************************************************************
     
+    # TODO: find an alternative solution, as an LP is most likely overkill here
+    
     # internal model
     
     def model(states_correlate_profile: bool) -> pyo.AbstractModel:
@@ -477,10 +496,8 @@ def generate_state_correlated_profile(
     # *************************************************************************
     # *************************************************************************
     
-    
     dt_total = sum(time_interval_durations)
     dt_mean = mean(time_interval_durations)
-    # x_mean  = mean(states)
     x_mean = sum(
         deltat_k*x_k 
         for deltat_k, x_k in zip(time_interval_durations, states)
@@ -510,7 +527,11 @@ def generate_state_correlated_profile(
     problem = fit_model.create_instance(data=data_dict) 
     opt.solve(problem, tee=False)
-    # return profile
+    # return the profile, the gain (alpha) and the offset (beta)
-    return [pyo.value(problem.P_i[i]) for i in problem.I]
+    return (
+        [pyo.value(problem.P_i[i]) for i in problem.I], 
+        pyo.value(problem.alpha),
+        beta
+        )
 
 # *****************************************************************************
 # *****************************************************************************
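
Reviewer note on the utils.py hunks above (not part of the patch): `generate_state_correlated_profile` now returns a `(profile, gain, offset)` tuple instead of just the profile, so existing callers must unpack three values. The sketch below mirrors the updated tests with illustrative inputs (GLPK assumed available) and checks the documented identity P[i] = (dt[i]/dt_mean)*((x[i]-x_mean)*gain + offset); with equal durations the weighting factor dt[i]/dt_mean is simply 1.

```python
from statistics import mean
from src.topupopt.data.misc import utils

dt = [3600.0] * 4                   # equal durations, so dt[i]/dt_mean == 1
states = [2.0, 4.0, 6.0, 8.0]
total = 100.0

profile, gain, offset = utils.generate_state_correlated_profile(
    integration_result=total,
    states=states,
    time_interval_durations=dt,
    states_correlate_profile=True,
    min_max_ratio=0.2,
    solver="glpk",
)

# the offset should equal the integration result divided by the number of intervals
print(offset, total / len(dt))

# rebuild the profile from the documented expression and compare with the output
dt_mean = mean(dt)
x_mean = sum(d * x for d, x in zip(dt, states)) / sum(dt)
rebuilt = [(d / dt_mean) * ((x - x_mean) * gain + offset) for d, x in zip(dt, states)]
print([round(p - r, 6) for p, r in zip(profile, rebuilt)])
```
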
diff --git a/src/topupopt/problems/esipp/utils.py b/src/topupopt/problems/esipp/utils.py
index ebfacf8b9d58efabd93c83ebb0651a8a054068f5..94c3f790cc9049638b683fd2dc4443cabdc0d323 100644
--- a/src/topupopt/problems/esipp/utils.py
+++ b/src/topupopt/problems/esipp/utils.py
@@ -6,7 +6,6 @@
 # import uuid
 
 # local, external
-
 import networkx as nx
 import pyomo.environ as pyo
 from matplotlib import pyplot as plt
@@ -18,353 +17,194 @@ from .network import Network
 # *****************************************************************************
 # *****************************************************************************
 
-
-def run_mvesipp_analysis(
-    problem: InfrastructurePlanningProblem = None,
-    model_instance: pyo.ConcreteModel = None,
-    analyse_results: bool = False,
-    analyse_problem: bool = False,
-):
-    # *************************************************************************
-
-    if model_instance != None and analyse_problem:
-        describe_mves(model_instance)
-
-    # *************************************************************************
-
-    if model_instance != None and analyse_results:
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(model_instance)
-
-        present_summary_results(flow_in, flow_out, flow_in_cost, flow_out_revenue)
-
-    # *************************************************************************
-
-    if problem != None and analyse_results:
-        # paths
-
-        describe_solution(problem)
-
-    # *************************************************************************
-
+def pre_statistics(ipp: InfrastructurePlanningProblem,
+                   node_keys: tuple = None):
+    """Returns preliminary problem statistics: aggregate static demand, supply and balance."""
+    
+    if type(node_keys) == type(None):
+        # pick all
+        node_keys = tuple(
+            (g, node_key)
+            for g, net in ipp.networks.items()
+            for node_key in net.nodes()
+            if Network.KEY_NODE_BASE_FLOW in net.nodes[node_key] 
+            )
+    
+    # aggregate static (end use) demand
+    aggregate_static_demand_qk = {
+        qk: sum(
+            ipp.networks[g].nodes[node_key][Network.KEY_NODE_BASE_FLOW][qk]
+            for g, node_key in node_keys
+            if ipp.networks[g].nodes[node_key][Network.KEY_NODE_BASE_FLOW][qk] >= 0                      
+            ) 
+        for qk in ipp.time_frame.qk()
+        }
+    # aggregate static (primary) supply
+    aggregate_static_supply_qk = {
+        qk: - sum(
+            ipp.networks[g].nodes[node_key][Network.KEY_NODE_BASE_FLOW][qk]
+            for g, node_key in node_keys
+            if ipp.networks[g].nodes[node_key][Network.KEY_NODE_BASE_FLOW][qk] < 0                      
+            ) 
+        for qk in ipp.time_frame.qk()
+        }
+    # static nodal balance
+    aggregate_static_balance_qk = {
+        qk: aggregate_static_demand_qk[qk]-aggregate_static_supply_qk[qk]
+        for qk in ipp.time_frame.qk()
+        }    
+    
+    return (
+        aggregate_static_demand_qk,
+        aggregate_static_supply_qk,
+        aggregate_static_balance_qk
+        )
 
 # *****************************************************************************
 # *****************************************************************************
 
-# prepare a dictionary with the key results
-
-
-def compute_cost_volume_metrics(
-    instance: pyo.ConcreteModel, read_directly_if_possible: bool = True
-):
-    # select calculation method
-
-    if read_directly_if_possible:
-        # total flow imported
-
-        flow_in = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_if_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_imp[g]
-                    for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
-                )
+def statistics(ipp: InfrastructurePlanningProblem, 
+               import_node_keys: tuple = None,
+               export_node_keys: tuple = None,
+               other_node_keys: tuple = None):
+    "Returns flow statistics using the optimisation results."
+    
+    
+    if type(import_node_keys) == type(None):
+        # pick all import nodes
+        import_node_keys = tuple(
+            (g, l)
+            for g in ipp.networks
+            for l in ipp.networks[g].import_nodes
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
-        }
-
-        # total flow imported per network, scenario, period, time interval
-
-        flow_in_k = {
-            (g, q, p, k): pyo.value(
-                sum(
-                    instance.var_if_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_imp[g]
-                    # for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
-                )
+        
+    if type(export_node_keys) == type(None):
+        # pick all export nodes
+        export_node_keys = tuple(
+            (g, l)
+            for g in ipp.networks
+            for l in ipp.networks[g].export_nodes
             )
-            for g in instance.set_G
-            for q, p, k in instance.set_QPK
-        }
-
-        # total flow exported
-
-        flow_out = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_ef_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_exp[g]
-                    for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
-                )
+    
+    if type(other_node_keys) == type(None):
+        # pick all
+        other_node_keys = tuple(
+            (g, node_key)
+            for g, net in ipp.networks.items()
+            for node_key in net.nodes()
+            if Network.KEY_NODE_BASE_FLOW in net.nodes[node_key] 
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
-        }
-
-        # import costs
-
-        flow_in_cost = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_ifc_glqpk[(g, l, q, p, k)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_imp[g]
-                    for k in instance.set_K_q[q]
+    
+    # imports
+    imports_qpk = {
+        qpk: pyo.value( 
+            sum(
+                ipp.instance.var_if_glqpks[(g,l_imp,*qpk, s)]
+                for g, l_imp in import_node_keys
+                # for g in ipp.networks
+                # for l_imp in ipp.networks[g].import_nodes
+                for s in ipp.instance.set_S[(g,l_imp,*qpk)]
                 )
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+        for qpk in ipp.time_frame.qpk()
         }
-
-        # export revenue
-
-        flow_out_revenue = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_efr_glqpk[(g, l, q, p, k)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_exp[g]
-                    for k in instance.set_K_q[q]
+    
+    # exports
+    exports_qpk = {
+        qpk: pyo.value(
+            sum(
+                ipp.instance.var_ef_glqpks[(g,l_exp,*qpk, s)]
+                for g, l_exp in export_node_keys
+                # for g in ipp.networks
+                # for l_exp in ipp.networks[g].export_nodes
+                for s in ipp.instance.set_S[(g,l_exp,*qpk)]
                 )
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+        for qpk in ipp.time_frame.qpk()
         }
-
-    else:
-        # total flow imported
-
-        flow_in = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_v_glljqk[(g, l1, l2, j, q, k)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l1 in instance.set_L_imp[g]
-                    for l2 in instance.set_L[g] - instance.set_L_imp[g]
-                    for j in instance.set_J[(g, l1, l2)]
-                    for k in instance.set_K
-                )
-            )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+    # balance
+    balance_qpk = {
+        qpk: imports_qpk[qpk]-exports_qpk[qpk]
+        for qpk in ipp.time_frame.qpk()
         }
-
-        # total flow imported per network, scenario, period, time interval
-
-        flow_in_k = {
-            (g, q, p, k): pyo.value(
-                sum(
-                    instance.var_if_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_imp[g]
-                    # for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
+    # import costs
+    import_costs_qpk = {
+        qpk: pyo.value( 
+            sum(
+                ipp.instance.var_ifc_glqpk[(g,l_imp,*qpk)]
+                for g, l_imp in import_node_keys
+                # for g in ipp.networks
+                # for l_imp in ipp.networks[g].import_nodes
                 )
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p, k in instance.set_QPK
+        for qpk in ipp.time_frame.qpk()
         }
-
-        # total flow exported
-
-        flow_out = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_v_glljqk[(g, l1, l2, j, q, k)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l2 in instance.set_L_exp[g]
-                    for l1 in instance.set_L[g] - instance.set_L_exp[g]
-                    for j in instance.set_J[(g, l1, l2)]
-                    for k in instance.set_K
+    # export revenue
+    export_revenue_qpk = {
+        qpk: pyo.value(
+            sum(
+                ipp.instance.var_efr_glqpk[(g,l_exp,*qpk)]
+                for g, l_exp in export_node_keys
+                # for g in ipp.networks
+                # for l_exp in ipp.networks[g].export_nodes
                 )
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+        for qpk in ipp.time_frame.qpk()
         }
-
-        # import costs
-
-        flow_in_cost = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_if_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_p_glqks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_imp[g]
-                    for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
-                    # for (_g,l,q,p,k,s) in instance.var_if_glqpks
-                    # if g == _g
-                )
+    # net cash flow
+    ncf_qpk = {
+        qpk: import_costs_qpk[qpk]-export_revenue_qpk[qpk]
+        for qpk in ipp.time_frame.qpk()
+        }
+    # aggregate static (end use) demand
+    aggregate_static_demand_qpk = {
+        qpk: pyo.value(
+            sum(
+                ipp.instance.param_v_base_glqk[(g, l, qpk[0], qpk[2])]
+                for g, l in other_node_keys
+                # for g in ipp.networks
+                # for l in ipp.networks[g].source_sink_nodes
+                if ipp.instance.param_v_base_glqk[(g, l, qpk[0], qpk[2])] >= 0
+                )   
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+        for qpk in ipp.time_frame.qpk()
         }
-
-        # export revenue
-
-        flow_out_revenue = {
-            (g, q, p): pyo.value(
-                sum(
-                    instance.var_ef_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_p_glqpks[(g, l, q, p, k, s)]
-                    * instance.param_c_time_qpk[(q, p, k)]
-                    for l in instance.set_L_exp[g]
-                    for k in instance.set_K_q[q]
-                    for s in instance.set_S[(g, l, q, p, k)]
-                    # for (_g,l,q,p,k,s) in instance.var_ef_glqpks
-                    # if g == _g
-                )
+    # aggregate static (primary) supply
+    aggregate_static_supply_qpk = {
+        qpk: -pyo.value(
+            sum(
+                ipp.instance.param_v_base_glqk[(g, l, qpk[0], qpk[2])]
+                for g, l in other_node_keys
+                # for g in ipp.networks
+                # for l in ipp.networks[g].source_sink_nodes
+                if ipp.instance.param_v_base_glqk[(g, l, qpk[0], qpk[2])] < 0
+                ) 
+            *ipp.instance.param_c_time_qpk[qpk]
             )
-            for g in instance.set_G
-            for q, p in instance.set_QP
+        for qpk in ipp.time_frame.qpk()
         }
-
-    # *************************************************************************
-    # *************************************************************************
-
-    return flow_in, flow_in_k, flow_out, flow_in_cost, flow_out_revenue
-
-    # *************************************************************************
-    # *************************************************************************
-
-
-# *****************************************************************************
-# *****************************************************************************
-
-
-def compute_network_performance(solved_problem: InfrastructurePlanningProblem):
-    # gross results
-
-    network_flows_dict = compute_gross_network_flows(solved_problem)
-
-    # actual results
-
-    (
-        flow_in,
-        flow_in_k,
-        flow_out,
-        flow_in_cost,
-        flow_out_revenue,
-    ) = compute_cost_volume_metrics(solved_problem.instance)
-
-    # losses
-
-    losses_dict = {
-        (g, q, p): abs(
-            # imports
-            flow_in[(g, q, p)]
-            +
-            # local supply
-            network_flows_dict["gross_supply_gq"][(g, q)]
-            -
-            # exports
-            flow_out[(g, q, p)]
-            -
-            # local demand
-            network_flows_dict["gross_demand_gq"][(g, q)]
-        )
-        for q in solved_problem.time_frame.assessments
-        for p in solved_problem.time_frame.reporting_periods[q]
-        for g in solved_problem.networks
-    }
-
+    # static nodal balance
+    aggregate_static_balance_qpk = {
+        qpk: aggregate_static_demand_qpk[qpk]-aggregate_static_supply_qpk[qpk]
+        for qpk in ipp.time_frame.qpk()
+        }    
+    
     return (
-        network_flows_dict,
-        losses_dict,
-        flow_in,
-        flow_in_k,
-        flow_out,
-        flow_in_cost,
-        flow_out_revenue,
-    )
-
-
-# *****************************************************************************
-# *****************************************************************************
-
-# provide a summary of the results
-
-
-def present_summary_results(
-    flow_in: dict,
-    flow_out: dict,
-    flow_in_cost: dict,
-    flow_out_revenue: dict,
-    flow_unit: str = "MWh",
-    currency: str = "EUR",
-):
-    # *************************************************************************
-    # *************************************************************************
-
-    if len(flow_in) != 0:
-        print(">> Imports")
-
-    for g, q, p in flow_in:
-        print("Assessment: " + str(q))
-
-        print("Network: " + str(g))
-
-        print("Volume: " + str(flow_in[(g, q, p)]) + " " + str(flow_unit))
-
-        print("Cost: " + str(flow_in_cost[(g, q, p)]) + " " + str(currency))
-
-        if flow_in[(g, q, p)] != 0:
-            print(
-                "Average price: "
-                + str(flow_in_cost[(g, q, p)] / flow_in[(g, q, p)])
-                + " "
-                + str(currency)
-                + "/"
-                + str(flow_unit)
-            )
-
-        else:  # no flow
-            print("Average price: N/A (no flow imports are set to take place).")
-
-    # *************************************************************************
-    # *************************************************************************
-
-    if len(flow_out) != 0:
-        print(">> Exports")
-
-    for g, q, p in flow_out:
-        print("Assessment: " + str(q))
-
-        print("Network: " + str(g))
-
-        print("Volume: " + str(flow_out[(g, q, p)]) + " " + str(flow_unit))
-
-        print("Cost: " + str(flow_out_revenue[(g, q, p)]) + " " + str(currency))
-
-        if flow_out[(g, q, p)] != 0:
-            print(
-                "Average price: "
-                + str(flow_out_revenue[(g, q, p)] / flow_out[(g, q, p)])
-                + " "
-                + str(currency)
-                + "/"
-                + str(flow_unit)
-            )
-
-        else:  # no flow
-            print("Average price: N/A (no flow exports are set to take place).")
-
-    # *************************************************************************
-    # *************************************************************************
-
+        imports_qpk, 
+        exports_qpk, 
+        balance_qpk, 
+        import_costs_qpk, 
+        export_revenue_qpk, 
+        ncf_qpk, 
+        aggregate_static_demand_qpk,
+        aggregate_static_supply_qpk,
+        aggregate_static_balance_qpk
+        )
 
 # *****************************************************************************
 # *****************************************************************************
@@ -380,97 +220,6 @@ def unused_node_key(network: nx.MultiDiGraph):
             # it doesn't, return it
             return i
 
-
-# *****************************************************************************
-# *****************************************************************************
-
-# TODO: document
-
-
-def compute_gross_network_flows(problem: InfrastructurePlanningProblem) -> dict:
-    gross_supply_g = {}
-
-    gross_demand_g = {}
-
-    gross_supply_gq = {}
-
-    gross_demand_gq = {}
-
-    gross_supply_gqk = {}
-
-    gross_demand_gqk = {}
-
-    for g, net in problem.networks.items():
-        end_use_node_keys = tuple(
-            node_key
-            for node_key in net.nodes()
-            if Network.KEY_NODE_BASE_FLOW in net.nodes[node_key]
-            if len(net.nodes[node_key][Network.KEY_NODE_BASE_FLOW]) != 0
-        )
-
-        # flow: q, k
-
-        gross_demand_qk = {
-            (g, q, k): sum(
-                net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q, k)]
-                for node_key in end_use_node_keys
-                if net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q, k)] >= 0
-            )
-            for q in problem.time_frame.assessments
-            for k in problem.time_frame.time_intervals[q]
-        }
-
-        gross_supply_qk = {
-            (g, q, k): -sum(
-                net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q, k)]
-                for node_key in end_use_node_keys
-                if net.nodes[node_key][Network.KEY_NODE_BASE_FLOW][(q, k)] < 0
-            )
-            for q in problem.time_frame.assessments
-            for k in problem.time_frame.time_intervals[q]
-        }
-
-        # (g,q,k)
-
-        gross_supply_gqk.update(gross_supply_qk)
-        gross_demand_gqk.update(gross_demand_qk)
-
-        # (g,q)
-
-        gross_supply_gq.update(
-            {
-                (g, q): sum(
-                    gross_supply_qk[(g, q, k)]
-                    for k in problem.time_frame.time_intervals[q]
-                )
-                for q in problem.time_frame.assessments
-            }
-        )
-        gross_demand_gq.update(
-            {
-                (g, q): sum(
-                    gross_demand_qk[(g, q, k)]
-                    for k in problem.time_frame.time_intervals[q]
-                )
-                for q in problem.time_frame.assessments
-            }
-        )
-
-        # g
-
-        gross_supply_g.update({g: sum(supply for supply in gross_supply_qk.values())})
-        gross_demand_g.update({g: sum(demand for demand in gross_demand_qk.values())})
-
-    return {
-        "gross_supply_gqk": gross_supply_gqk,
-        "gross_demand_gqk": gross_demand_gqk,
-        "gross_supply_gq": gross_supply_gq,
-        "gross_demand_gq": gross_demand_gq,
-        "gross_supply_g": gross_supply_g,
-        "gross_demand_g": gross_demand_g,
-    }
-
-
 # *****************************************************************************
 # *****************************************************************************
 
@@ -660,7 +409,6 @@ def describe_mves(obj: pyo.ConcreteModel):
     print("******************************************************************")
     print("******************************************************************")
 
-
 # *****************************************************************************
 # *****************************************************************************
 
@@ -669,7 +417,6 @@ def describe_mves(obj: pyo.ConcreteModel):
 # *****************************************************************************
 # *****************************************************************************
 
-
 def plot_networks(
     ipp: InfrastructurePlanningProblem,
     ax=None,
@@ -958,7 +705,6 @@ def plot_networks(
 # *****************************************************************************
 # *****************************************************************************
 
-
 def is_integer(variable: float, integrality_tolerance: float) -> bool:
     """Returns True if a given number qualifies as an integer."""
     if integrality_tolerance >= 0.5:
@@ -970,381 +716,4 @@ def is_integer(variable: float, integrality_tolerance: float) -> bool:
 
 
 # *****************************************************************************
-# *****************************************************************************
-
-
-def describe_solution(ipp: InfrastructurePlanningProblem):
-    # *************************************************************************
-
-    print("******************************************************************")
-
-    # for each grid
-
-    for grid_key, net in ipp.networks.items():
-        # describe the path from import nodes to demand nodes
-
-        print("Flow path analysis: grid " + str(grid_key))
-
-        # for each node
-
-        for node_key in net.nodes:
-            # as long as it is an import node
-
-            if node_key not in net.import_nodes:
-                continue
-
-            # for every node
-
-            for node2_key in net.nodes:
-                # except node_key or other import nodes
-
-                if node_key is node2_key or node2_key in net.import_nodes:
-                    continue
-
-                # or if there is no path
-
-                if nx.has_path(net, node_key, node2_key) == False:
-                    continue
-
-                # for each viable/potential path
-
-                for path in nx.all_simple_paths(net, node_key, node2_key):
-                    # check if all the pairs of nodes on the path were selected
-
-                    # if multiple technologies were selected, add the capacities
-
-                    arc_flow_capacities = [
-                        sum(
-                            net.edges[(path[node_pair], path[node_pair + 1], j)][
-                                Network.KEY_ARC_TECH
-                            ].capacity[
-                                net.edges[(path[node_pair], path[node_pair + 1], j)][
-                                    Network.KEY_ARC_TECH
-                                ].options_selected.index(True)
-                            ]
-                            for j in net._adj[path[node_pair]][path[node_pair + 1]]
-                            if True
-                            in net.edges[(path[node_pair], path[node_pair + 1], j)][
-                                Network.KEY_ARC_TECH
-                            ].options_selected
-                        )
-                        for node_pair in range(len(path) - 1)
-                        if (path[node_pair], path[node_pair + 1]) in net.edges
-                    ]
-
-                    # skip if at least one arc has zero capacity
-
-                    if 0 in arc_flow_capacities:
-                        continue
-
-                    arc_tech_efficiencies = [
-                        (
-                            min(
-                                net.edges[(path[node_pair], path[node_pair + 1], uv_k)][
-                                    Network.KEY_ARC_TECH
-                                ].efficiency[(0, k)]
-                                for uv_k in net._adj[path[node_pair]][
-                                    path[node_pair + 1]
-                                ]
-                                if True
-                                in net.edges[
-                                    (path[node_pair], path[node_pair + 1], uv_k)
-                                ][Network.KEY_ARC_TECH].options_selected
-                                for k in range(
-                                    len(
-                                        net.edges[
-                                            (path[node_pair], path[node_pair + 1], uv_k)
-                                        ][Network.KEY_ARC_TECH].efficiency
-                                    )
-                                )
-                            ),
-                            max(
-                                net.edges[(path[node_pair], path[node_pair + 1], uv_k)][
-                                    Network.KEY_ARC_TECH
-                                ].efficiency[(0, k)]
-                                for uv_k in net._adj[path[node_pair]][
-                                    path[node_pair + 1]
-                                ]
-                                if True
-                                in net.edges[
-                                    (path[node_pair], path[node_pair + 1], uv_k)
-                                ][Network.KEY_ARC_TECH].options_selected
-                                for k in range(
-                                    len(
-                                        net.edges[
-                                            (path[node_pair], path[node_pair + 1], uv_k)
-                                        ][Network.KEY_ARC_TECH].efficiency
-                                    )
-                                )
-                            ),
-                        )
-                        for node_pair in range(len(path) - 1)
-                        if (path[node_pair], path[node_pair + 1]) in net.edges
-                    ]
-
-                    max_static_flow = [
-                        max(
-                            [
-                                net.nodes[node][Network.KEY_NODE_BASE_FLOW][(0, k)]
-                                for k in range(
-                                    len(
-                                        ipp.networks[grid_key].nodes[node][
-                                            Network.KEY_NODE_BASE_FLOW
-                                        ]
-                                    )
-                                )
-                            ]
-                        )
-                        if node in net.source_sink_nodes
-                        else 0
-                        for node in path
-                        if node in net.nodes
-                    ]
-
-                    min_static_flow = [
-                        min(
-                            [
-                                net.nodes[node][Network.KEY_NODE_BASE_FLOW][(0, k)]
-                                for k in range(
-                                    len(
-                                        ipp.networks[grid_key].nodes[node][
-                                            Network.KEY_NODE_BASE_FLOW
-                                        ]
-                                    )
-                                )
-                            ]
-                        )
-                        if node in net.source_sink_nodes
-                        else 0
-                        for node in path
-                        if node in net.nodes
-                    ]
-
-                    # for each pair of nodes on the path
-
-                    if len(arc_flow_capacities) == len(path) - 1:
-                        print("**********************************************")
-                        print("Path: " + str(path))
-                        print("Max. static flow: " + str(max_static_flow))
-                        print("Min. static flow: " + str(min_static_flow))
-                        print("Capacities: " + str(arc_flow_capacities))
-                        print("Efficiencies: " + str(arc_tech_efficiencies))
-
-                        for arc_flow_index in range(len(arc_flow_capacities) - 1):
-                            if (
-                                arc_flow_capacities[arc_flow_index]
-                                < arc_flow_capacities[arc_flow_index + 1]
-                            ):
-                                # the flow capacities are increasing, which
-                                # usually indicates suboptimality
-
-                                # tech_options_first = [
-                                #     tech[Network.KEY_ARC_TECH_CAPACITY]
-                                #     for tech in ipp.networks[
-                                #             grid_key].edges[
-                                #                 (path[arc_flow_index],
-                                #                  path[arc_flow_index+1])][
-                                #                     net.KEY_ARC_TECH]
-                                #     if True in tech.options_selected]
-
-                                # tech_options_sec = [
-                                #     tech[net.KEY_ARC_TECH_CAPACITY]
-                                #     for tech in ipp.networks[
-                                #             grid_key].edges[
-                                #                 (path[arc_flow_index+1],
-                                #                  path[arc_flow_index+2])][
-                                #                     net.KEY_ARC_TECH]
-                                #     if True in tech.options_selected]
-
-                                # print('******************')
-                                print(
-                                    "Increasing capacities along the flow path have been detected between nodes "
-                                    + str(path[arc_flow_index])
-                                    + " and "
-                                    + str(path[arc_flow_index + 2])
-                                    + "."
-                                )
-                                # print(tech_options_first)
-                                # print(tech_options_sec)
-                                # print('******************')
-
-            # *****************************************************************
-
-        # *********************************************************************
-
-        # for each node
-
-        for node_key in net.nodes:
-            # as long as it is an export node
-
-            if node_key not in net.export_nodes:
-                continue
-
-            # for every node
-
-            for node2_key in net.nodes:
-                # except node_key or other export nodes
-
-                if node_key is node2_key or node2_key in net.export_nodes:
-                    continue
-
-                # or if there is no path
-
-                if nx.has_path(net, node2_key, node_key) == False:
-                    continue
-
-                # for each viable/potential path
-
-                for path in nx.all_simple_paths(net, node2_key, node_key):
-                    # check if all the pairs of nodes on the path were selected
-
-                    # if multiple technologies were selected, add the capacities
-
-                    arc_flow_capacities = [
-                        sum(
-                            net.edges[(path[node_pair], path[node_pair + 1], k)][
-                                Network.KEY_ARC_TECH
-                            ].capacity[
-                                net.edges[(path[node_pair], path[node_pair + 1], k)][
-                                    Network.KEY_ARC_TECH
-                                ].options_selected.index(True)
-                            ]
-                            for k in net._adj[path[node_pair]][path[node_pair + 1]]
-                            if True
-                            in net.edges[(path[node_pair], path[node_pair + 1], k)][
-                                Network.KEY_ARC_TECH
-                            ].options_selected
-                        )
-                        for node_pair in range(len(path) - 1)
-                        if (path[node_pair], path[node_pair + 1]) in net.edges
-                    ]
-
-                    # skip if at least one arc has zero capacity
-
-                    if 0 in arc_flow_capacities:
-                        continue
-
-                    arc_tech_efficiencies = [
-                        (
-                            min(
-                                net.edges[(path[node_pair], path[node_pair + 1], uv_k)][
-                                    Network.KEY_ARC_TECH
-                                ].efficiency[(0, k)]
-                                for uv_k in net._adj[path[node_pair]][
-                                    path[node_pair + 1]
-                                ]
-                                if True
-                                in net.edges[
-                                    (path[node_pair], path[node_pair + 1], uv_k)
-                                ][Network.KEY_ARC_TECH].options_selected
-                                for k in range(
-                                    len(
-                                        net.edges[
-                                            (path[node_pair], path[node_pair + 1], uv_k)
-                                        ][Network.KEY_ARC_TECH].efficiency
-                                    )
-                                )
-                            ),
-                            max(
-                                net.edges[(path[node_pair], path[node_pair + 1], uv_k)][
-                                    Network.KEY_ARC_TECH
-                                ].efficiency[(0, k)]
-                                for uv_k in net._adj[path[node_pair]][
-                                    path[node_pair + 1]
-                                ]
-                                if True
-                                in net.edges[
-                                    (path[node_pair], path[node_pair + 1], uv_k)
-                                ][Network.KEY_ARC_TECH].options_selected
-                                for k in range(
-                                    len(
-                                        net.edges[
-                                            (path[node_pair], path[node_pair + 1], uv_k)
-                                        ][Network.KEY_ARC_TECH].efficiency
-                                    )
-                                )
-                            ),
-                        )
-                        for node_pair in range(len(path) - 1)
-                        if (path[node_pair], path[node_pair + 1]) in net.edges
-                    ]
-
-                    max_static_flow = [
-                        max(
-                            [
-                                net.nodes[node][Network.KEY_NODE_BASE_FLOW][(0, k)]
-                                for k in range(
-                                    len(
-                                        ipp.networks[grid_key].nodes[node][
-                                            Network.KEY_NODE_BASE_FLOW
-                                        ]
-                                    )
-                                )
-                            ]
-                        )
-                        if node in net.source_sink_nodes
-                        else 0
-                        for node in path
-                        if node in net.nodes
-                    ]
-
-                    min_static_flow = [
-                        min(
-                            [
-                                net.nodes[node][Network.KEY_NODE_BASE_FLOW][(0, k)]
-                                for k in range(
-                                    len(
-                                        ipp.networks[grid_key].nodes[node][
-                                            Network.KEY_NODE_BASE_FLOW
-                                        ]
-                                    )
-                                )
-                            ]
-                        )
-                        if node in net.source_sink_nodes
-                        else 0
-                        for node in path
-                        if node in net.nodes
-                    ]
-
-                    # for each pair of nodes on the path
-
-                    if len(arc_flow_capacities) == len(path) - 1:
-                        print("**********************************************")
-                        print("Path: " + str(path))
-                        print("Max. static flow: " + str(max_static_flow))
-                        print("Min. static flow: " + str(min_static_flow))
-                        print("Capacities: " + str(arc_flow_capacities))
-                        print("Efficiencies: " + str(arc_tech_efficiencies))
-
-                        for arc_flow_index in range(len(arc_flow_capacities) - 1):
-                            if (
-                                arc_flow_capacities[arc_flow_index]
-                                < arc_flow_capacities[arc_flow_index + 1]
-                            ):
-                                # the flow capacities are increasing, which
-                                # usually indicates suboptimality
-
-                                # print('******************')
-                                print(
-                                    "Increasing capacities along the flow path have been detected between nodes "
-                                    + str(path[arc_flow_index])
-                                    + " and "
-                                    + str(path[arc_flow_index + 2])
-                                    + "."
-                                )
-                                # print(tech_options_first)
-                                # print(tech_options_sec)
-                                # print('******************')
-
-            # *****************************************************************
-
-        # *********************************************************************
-
-    print("******************************************************************")
-
-    # *************************************************************************
-
-
-# *****************************************************************************
-# *****************************************************************************
+# *****************************************************************************
\ No newline at end of file
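
Reviewer note on the esipp/utils.py hunks above (not part of the patch): `compute_cost_volume_metrics` and its companions (`compute_network_performance`, `present_summary_results`, `describe_solution`) are removed in favour of `pre_statistics` and `statistics`, which take the problem object rather than the model instance and return qpk-indexed dictionaries. The sketch below shows how the new return tuple is consumed, mirroring the updated tests; `ipp` is assumed to be a solved `InfrastructurePlanningProblem` and `tf` its time frame.

```python
from src.topupopt.problems.esipp.utils import statistics

# ipp: a solved InfrastructurePlanningProblem; tf: its time frame (assumed to exist)
(imports_qpk,
 exports_qpk,
 balance_qpk,
 import_costs_qpk,
 export_revenue_qpk,
 ncf_qpk,
 aggregate_static_demand_qpk,
 aggregate_static_supply_qpk,
 aggregate_static_balance_qpk) = statistics(ipp)

# aggregate the per-(q, p, k) imports into a per-period figure, as the tests do
q, p = 0, 0
imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[0] == q and qpk[1] == p)
print(imports_qp)
```
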
diff --git a/tests/test_data_utils.py b/tests/test_data_utils.py
index 09d87af33c2727cddf6e865d30334248e7c32392..32aebc674834939cc590726aa1d7f5ffbd038f65 100644
--- a/tests/test_data_utils.py
+++ b/tests/test_data_utils.py
@@ -1,24 +1,14 @@
 # imports
 
 # standard
-
 import random
-
 import math
-
 from statistics import mean
 
 # local, internal
-
 from src.topupopt.data.misc import utils
 
-# ******************************************************************************
-# ******************************************************************************
-
-
 class TestDataUtils:
-    # *************************************************************************
-    # *************************************************************************
 
     def test_profile_synching2(self):
         integration_result = 10446
@@ -390,7 +380,7 @@ class TestDataUtils:
         states_correlate_profile = True
         min_max_ratio = 0.2
         
-        profile = utils.generate_state_correlated_profile(
+        profile, a, b = utils.generate_state_correlated_profile(
             integration_result=integration_result, 
             states=states, 
             time_interval_durations=time_interval_durations, 
@@ -400,6 +390,7 @@ class TestDataUtils:
             )
         
         # test profile 
+        assert a > 0 and b > 0
         assert len(profile) == number_time_intervals
         assert math.isclose(sum(profile), integration_result, abs_tol=1e-3)
         assert math.isclose(min(profile), max(profile)*min_max_ratio, abs_tol=1e-3)
@@ -413,7 +404,7 @@ class TestDataUtils:
         states_correlate_profile = False
         min_max_ratio = 0.2
         
-        profile = utils.generate_state_correlated_profile(
+        profile, a, b = utils.generate_state_correlated_profile(
             integration_result=integration_result, 
             states=states, 
             time_interval_durations=time_interval_durations, 
@@ -423,6 +414,7 @@ class TestDataUtils:
             )
         
         # test profile 
+        assert a < 0 and b > 0
         assert len(profile) == number_time_intervals
         assert math.isclose(sum(profile), integration_result, abs_tol=1e-3)
         assert math.isclose(min(profile), max(profile)*min_max_ratio, abs_tol=1e-3)
@@ -464,7 +456,6 @@ class TestDataUtils:
         # correlation: direct, inverse
         # states: positive, negative
         # time intervals: regular irregular
-        # 
         
         # profile with positive correlation, positive states, regular intervals
         number_time_intervals = 10
@@ -532,4 +523,4 @@ class TestDataUtils:
     # *************************************************************************
 
 # *****************************************************************************
-# *****************************************************************************
+# *****************************************************************************
\ No newline at end of file
diff --git a/tests/test_esipp_problem.py b/tests/test_esipp_problem.py
index c69bbc972f5b3151cc0de24afbc935ce95a1091c..fdb57243d007521d7b57af69a7f415c10f6ed309 100644
--- a/tests/test_esipp_problem.py
+++ b/tests/test_esipp_problem.py
@@ -14,7 +14,8 @@ from src.topupopt.problems.esipp.problem import InfrastructurePlanningProblem
 from src.topupopt.problems.esipp.network import Arcs, Network
 from src.topupopt.problems.esipp.network import ArcsWithoutStaticLosses
 from src.topupopt.problems.esipp.resource import ResourcePrice
-from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics
+# from src.topupopt.problems.esipp.utils import compute_cost_volume_metrics
+from src.topupopt.problems.esipp.utils import statistics
 from src.topupopt.problems.esipp.time import EconomicTimeFrame
 # from src.topupopt.problems.esipp.converter import Converter
 
@@ -2381,11 +2382,8 @@ class TestESIPPProblem:
         )
     
         # there should be no opex (imports or exports), only capex from arcs
-    
         assert pyo.value(ipp.instance.var_sdncf_q[q]) < 0
-    
         assert pyo.value(ipp.instance.var_capex) > 0
-    
         assert (
             pyo.value(
                 ipp.instance.var_capex_arc_gllj[
@@ -2603,14 +2601,16 @@ class TestESIPPProblem:
         # *********************************************************************
     
         # overview
-    
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
         
         q = 0
         capex_ind = 0.75
@@ -2660,45 +2660,38 @@ class TestESIPPProblem:
             .edges[(imp1_node_key, node_A, arc_key_I1A)][Network.KEY_ARC_TECH]
             .options_selected.index(True)
         )
-
         h2 = (
             ipp.networks["mynet"]
             .edges[(imp2_node_key, node_A, arc_key_I2A)][Network.KEY_ARC_TECH]
             .options_selected.index(True)
         )
-
         h3 = (
             ipp.networks["mynet"]
             .edges[(imp3_node_key, node_A, arc_key_I3A)][Network.KEY_ARC_TECH]
             .options_selected.index(True)
         )
-
         assert h1 == h2
-
         assert h1 == h3
 
         # the capex have to be higher than those of the best individual arc
-
         abs_tol = 1e-3
-
         assert math.isclose(
             pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol
         )
 
         # there should be no exports
-
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+        abs_tol = 1e-3
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
 
         # the imports should be higher than with individual arcs
-
         abs_tol = 1e-3
-
-        assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol)
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, imp_group, abs_tol=abs_tol)
 
         # the operating results should be lower than with an individual arc
 
         abs_tol = 1e-3
-
         assert math.isclose(
             pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol
         )
@@ -2706,13 +2699,11 @@ class TestESIPPProblem:
         # the externalities should be zero
 
         abs_tol = 1e-3
-
         assert math.isclose(pyo.value(ipp.instance.var_sdext_q[q]), 0, abs_tol=abs_tol)
 
         # the objective function should be -6.3639758220728595-1.5
 
         abs_tol = 1e-3
-
         assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol)
 
-        # the imports should be greater than or equal to the losses for all arx
+        # the imports should be greater than or equal to the losses for all arcs
@@ -2745,7 +2736,7 @@ class TestESIPPProblem:
 
         assert math.isclose(losses_model, losses_data, abs_tol=abs_tol)
 
-        assert flow_in[("mynet", 0, 0)] >= losses_model
+        assert imports_qp >= losses_model
         
     # *************************************************************************
     # *************************************************************************
@@ -2948,14 +2939,15 @@ class TestESIPPProblem:
         # **************************************************************************
     
         # overview
-    
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
         
         q = 0
         capex_ind = 0.75
@@ -3001,39 +2993,32 @@ class TestESIPPProblem:
         )
 
         # there should be no exports
-
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+        abs_tol = 1e-3
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
 
         # the imports should be lower than with a group of arcs
-
         abs_tol = 1e-3
-
-        assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol)
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, imp_ind, abs_tol=abs_tol)
 
         # the operating results should be lower than with an individual arc
-
         abs_tol = 1e-3
-
         assert math.isclose(
             pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_ind, abs_tol=abs_tol
         )
 
         # the externalities should be zero
-
         abs_tol = 1e-3
-
         assert math.isclose(
             pyo.value(ipp.instance.var_sdext_q[q]), sdext_ind, abs_tol=abs_tol
         )
 
         # the objective function should be -6.3639758220728595-1.5
-
         abs_tol = 1e-3
-
         assert math.isclose(pyo.value(ipp.instance.obj_f), obj_ind, abs_tol=abs_tol)
 
         # the imports should be greater than or equal to the losses for all arx
-
         losses_model = sum(
             pyo.value(
                 ipp.instance.var_w_glljqk[
@@ -3053,7 +3038,7 @@ class TestESIPPProblem:
             for k in range(tf.number_time_intervals(q))
         )
 
-        assert flow_in[("mynet", 0, 0)] >= losses_model
+        assert imports_qp >= losses_model
         
     # *************************************************************************
     # *************************************************************************
@@ -3240,14 +3225,15 @@ class TestESIPPProblem:
             )
             
             # overview
-    
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
     
             capex_ind = 3
             capex_group = 4
@@ -3318,49 +3304,39 @@ class TestESIPPProblem:
             assert h1 == h2
     
             # the capex have to be higher than those of the best individual arc
-    
             abs_tol = 1e-3
-    
             assert math.isclose(
                 pyo.value(ipp.instance.var_capex), capex_group, abs_tol=abs_tol
             )
     
             # there should be no exports
-    
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+            abs_tol = 1e-3
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
             # the imports should be higher than with individual arcs
-    
             abs_tol = 1e-3
-    
-            assert math.isclose(flow_in[("mynet", 0, 0)], imp_group, abs_tol=abs_tol)
-    
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, imp_group, abs_tol=abs_tol)
             assert imp_group > imp_ind
     
             # the operating results should be lower than with an individual arc
-    
             abs_tol = 1e-3
-    
             assert math.isclose(
                 pyo.value(ipp.instance.var_sdncf_q[q]), sdncf_group, abs_tol=abs_tol
             )
     
             # the externalities should be zero
-    
             abs_tol = 1e-3
-    
             assert math.isclose(
                 pyo.value(ipp.instance.var_sdext_q[q]), sdnext_group, abs_tol=abs_tol
             )
     
             # the objective function should be -6.3639758220728595-1.5
-    
             abs_tol = 1e-3
-    
             assert math.isclose(pyo.value(ipp.instance.obj_f), obj_group, abs_tol=abs_tol)
     
             # the imports should be greater than or equal to the losses for all arx
-    
             losses_model = sum(
                 pyo.value(
                     ipp.instance.var_w_glljqk[("mynet", node_A, node_B, arc_key_AB, q, k)]
@@ -3552,12 +3528,15 @@ class TestESIPPProblem:
             )
             
             # overview
-            (flow_in,
-             flow_in_k,
-             flow_out,
-             flow_in_cost,
-             flow_out_revenue
-             ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
     
             capex_ind = 3
             capex_group = 4
@@ -3614,11 +3593,14 @@ class TestESIPPProblem:
             )
     
             # there should be no exports
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+            abs_tol = 1e-3
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
             # the imports should be lower than with a group of arcs
             abs_tol = 1e-3
-            assert math.isclose(flow_in[("mynet", 0, 0)], imp_ind, abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, imp_ind, abs_tol=abs_tol)
     
             # the operating results should be lower than with an individual arc
             abs_tol = 1e-3
@@ -3730,35 +3712,38 @@ class TestESIPPProblem:
         )
 
         # overview
-
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
 
         # there should be no imports
 
         abs_tol = 1e-6
 
-        assert math.isclose(flow_in[("mynet", 0, 0)], 0.0, abs_tol=abs_tol)
+        abs_tol = 1e-3
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, 0.0, abs_tol=abs_tol)
 
-        assert math.isclose(flow_in_cost[("mynet", 0, 0)], 0.0, abs_tol=abs_tol)
+        abs_tol = 1e-3
+        import_costs_qp = sum(import_costs_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(import_costs_qp, 0.0, abs_tol=abs_tol)
 
         # there should be no exports
-
         abs_tol = 1e-2
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0.0, abs_tol=abs_tol)
 
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0.0, abs_tol=abs_tol)
-
-        assert math.isclose(flow_out_revenue[("mynet", 0, 0)], 0.0, abs_tol=abs_tol)
+        export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(export_revenue_qp, 0.0, abs_tol=abs_tol)
 
         # there should be no capex
-
         abs_tol = 1e-6
-
         assert math.isclose(pyo.value(ipp.instance.var_capex), 0.0, abs_tol=abs_tol)
             
     # *************************************************************************
@@ -3851,37 +3836,43 @@ class TestESIPPProblem:
         )
 
         # overview
-
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
 
         # there should be no imports
 
         abs_tol = 1e-6
+
+        abs_tol = 1e-3  # looser tolerance than the 1e-6 set above
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert imports_qp > 0.0 - abs_tol
 
-        assert flow_in[("mynet", 0, 0)] > 0.0 - abs_tol
-
-        assert flow_in_cost[("mynet", 0, 0)] > 0.0 - abs_tol
+        abs_tol = 1e-3
+        import_costs_qp = sum(import_costs_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert import_costs_qp > 0.0 - abs_tol
 
         # there should be no exports
 
         abs_tol = 1e-2
 
-        assert flow_out[("mynet", 0, 0)] > 0.0 - abs_tol
-
-        assert flow_out_revenue[("mynet", 0, 0)] > 0.0 - abs_tol
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert exports_qp > 0.0 - abs_tol
+        assert export_revenue_qp > 0.0 - abs_tol
 
         # the revenue should exceed the costs
 
         abs_tol = 1e-2
 
         assert (
-            flow_out_revenue[("mynet", 0, 0)] > flow_in_cost[("mynet", 0, 0)] - abs_tol
+            export_revenue_qp > import_costs_qp - abs_tol
         )
 
         # the capex should be positive
@@ -4062,14 +4053,15 @@ class TestESIPPProblem:
             )
         
             # overview
-        
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
         
@@ -4120,14 +4112,14 @@ class TestESIPPProblem:
             # there should be imports
         
             abs_tol = 1e-6
-        
-            assert math.isclose(flow_in[("mynet", 0, 0)], (1.2 + 1.2), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (1.2 + 1.2), abs_tol=abs_tol)
         
             # there should be no exports
         
             abs_tol = 1e-6
-        
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through I1A must be 1.0 during time interval 0
             # flow through I1A must be 0.2 during time interval 1
@@ -4452,14 +4444,15 @@ class TestESIPPProblem:
             )
         
             # overview
-        
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
         
@@ -4510,14 +4503,17 @@ class TestESIPPProblem:
             # there should be imports
         
             abs_tol = 1e-6
-        
-            assert math.isclose(flow_in[("mynet", 0, 0)], (1.2 + 1.2), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (1.2 + 1.2), abs_tol=abs_tol)
         
             # there should be no exports
         
             abs_tol = 1e-6
         
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through I1A must be 1.0 during time interval 0
             # flow through I1A must be 0.2 during time interval 1
@@ -4833,28 +4829,32 @@ class TestESIPPProblem:
             )
         
             # overview
-        
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # there should be imports
         
             abs_tol = 1e-6
-        
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
             assert math.isclose(
-                flow_in[("mynet", 0, 0)], (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol
+                imports_qp, (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol
             )
         
             # there should be no exports
         
             abs_tol = 1e-6
         
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through I1A must be 1.1 during time interval 0
             # flow through I1A must be 0.0 during time interval 1
@@ -5456,28 +5456,32 @@ class TestESIPPProblem:
             )
         
             # overview
-        
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # there should be imports
         
             abs_tol = 1e-6
-        
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
             assert math.isclose(
-                flow_in[("mynet", 0, 0)], (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol
+                imports_qp, (1 + 1 + 2 + 0.3 + 1), abs_tol=abs_tol
             )
         
             # there should be no exports
         
             abs_tol = 1e-6
-        
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            # aggregate the exports for assessment q and period 0
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through I1A must be 1.1 during time interval 0
             # flow through I1A must be 0.0 during time interval 1
@@ -6015,21 +6019,28 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], 0.35, abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, 0.35, abs_tol=abs_tol)
         
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            # aggregate the exports for assessment q and period 0
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through IA must be 0.35
             abs_tol = 1e-6
@@ -6159,21 +6170,28 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+
+            # gather the aggregated *_qpk dicts via the statistics helper
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], 0.35, abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, 0.35, abs_tol=abs_tol)
         
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
         
             # flow through IA must be 0.35
             abs_tol = 1e-6
@@ -6303,13 +6321,15 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
             abs_tol = 1e-6
@@ -6352,10 +6372,14 @@ class TestESIPPProblem:
             )
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (0.35 + 0.15), abs_tol=abs_tol)
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
             # flow through IA must be 0.35 during time interval 0
             # flow through IA must be 0.15 during time interval 1
             abs_tol = 1e-6
@@ -6574,13 +6598,15 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
             abs_tol = 1e-6
@@ -6623,10 +6649,14 @@ class TestESIPPProblem:
             )
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (0.35 + 0.15), abs_tol=abs_tol)
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
             # flow through IA must be 0.35 during time interval 0
             # flow through IA must be 0.15 during time interval 1
             abs_tol = 1e-6
@@ -6851,13 +6881,15 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
             abs_tol = 1e-6
@@ -6900,10 +6932,14 @@ class TestESIPPProblem:
             )
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (0.35 + 0.15), abs_tol=abs_tol)
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
             # flow through IA must be 0.35 during time interval 0
             # flow through IA must be 0.15 during time interval 1
             abs_tol = 1e-6
@@ -7123,13 +7159,15 @@ class TestESIPPProblem:
             )
         
             # overview
-            (
-                flow_in,
-                flow_in_k,
-                flow_out,
-                flow_in_cost,
-                flow_out_revenue,
-            ) = compute_cost_volume_metrics(ipp.instance, True)
+            (imports_qpk, 
+             exports_qpk, 
+             balance_qpk, 
+             import_costs_qpk, 
+             export_revenue_qpk, 
+             ncf_qpk, 
+             aggregate_static_demand_qpk,
+             aggregate_static_supply_qpk,
+             aggregate_static_balance_qpk) = statistics(ipp)
         
             # the flow through AB should be from A to B during interval 0
             abs_tol = 1e-6
@@ -7172,10 +7210,14 @@ class TestESIPPProblem:
             )
             # there should be imports
             abs_tol = 1e-6
-            assert math.isclose(flow_in[("mynet", 0, 0)], (0.35 + 0.15), abs_tol=abs_tol)
+            imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+            assert math.isclose(imports_qp, (0.35 + 0.15), abs_tol=abs_tol)
             # there should be no exports
             abs_tol = 1e-6
-            assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+            exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+            assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
             # flow through IA must be 0.35 during time interval 0
             # flow through IA must be 0.15 during time interval 1
             abs_tol = 1e-6
@@ -7433,21 +7475,27 @@ class TestESIPPProblem:
         )
     
         # overview
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
     
         # there should be imports
         abs_tol = 1e-6
-        assert math.isclose(flow_in[("mynet", 0, 0)], 1.1, abs_tol=abs_tol)
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, 1.1, abs_tol=abs_tol)
     
         # there should be no exports
         abs_tol = 1e-6
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
         # interval 0: flow through IA1 must be 1
         abs_tol = 1e-6
@@ -7595,21 +7643,27 @@ class TestESIPPProblem:
         )
     
         # overview
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
     
         # there should be imports
         abs_tol = 1e-6
-        assert math.isclose(flow_in[("mynet", 0, 0)], 1.1, abs_tol=abs_tol)
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, 1.1, abs_tol=abs_tol)
     
         # there should be no exports
         abs_tol = 1e-6
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
         # interval 0: flow through IA1 must be 1
         abs_tol = 1e-6
@@ -7730,23 +7784,28 @@ class TestESIPPProblem:
         )
     
         # overview
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
     
         # there should be imports
         abs_tol = 1e-6
-        assert math.isclose(flow_in[("mynet", 0, 0)], (1.0 + 0.1), abs_tol=abs_tol)
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, (1.0 + 0.1), abs_tol=abs_tol)
     
         # there should be no exports
     
         abs_tol = 1e-6
-    
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        export_revenue_qp = sum(export_revenue_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
         # flow through IA1 must be 0.1
         abs_tol = 1e-6
@@ -7856,23 +7915,31 @@ class TestESIPPProblem:
         )
     
         # overview
-        (
-            flow_in,
-            flow_in_k,
-            flow_out,
-            flow_in_cost,
-            flow_out_revenue,
-        ) = compute_cost_volume_metrics(ipp.instance, True)
+        (imports_qpk, 
+         exports_qpk, 
+         balance_qpk, 
+         import_costs_qpk, 
+         export_revenue_qpk, 
+         ncf_qpk, 
+         aggregate_static_demand_qpk,
+         aggregate_static_supply_qpk,
+         aggregate_static_balance_qpk) = statistics(ipp)
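+        # the *_qpk dicts above are indexed by (q, p, k) tuples in the checks below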
     
         # there should be imports
         abs_tol = 1e-6
-        assert math.isclose(flow_in[("mynet", 0, 0)], (1.0 + 0.1), abs_tol=abs_tol)
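+        # total imports across assessments and intervals for period p = 0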
+        imports_qp = sum(imports_qpk[qpk] for qpk in tf.qpk() if qpk[1] == 0)
+        assert math.isclose(imports_qp, (1.0 + 0.1), abs_tol=abs_tol)
     
         # there should be no exports
     
         abs_tol = 1e-6
     
-        assert math.isclose(flow_out[("mynet", 0, 0)], 0, abs_tol=abs_tol)
+
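+        # exports for assessment q and period 0, summed over its time intervals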
+        exports_qp = sum(exports_qpk[(q, 0, k)] for k in tf.time_intervals[q])
+        assert math.isclose(exports_qp, 0, abs_tol=abs_tol)
     
         # flow through IA1 must be 0.1
         abs_tol = 1e-6