Compare revisions (source: master → target: master)

Changes are shown as if the source revision were merged into the target revision.

25 files changed: +6818 −3014

@@ -5,6 +5,8 @@ import numpy as np
from geopandas import GeoDataFrame
from ...misc.utils import discrete_sinusoid_matching_integral
from ...misc.utils import create_profile_using_time_weighted_state
from ...misc.utils import generate_manual_state_correlated_profile
from ...misc.utils import generate_state_correlated_profile, generate_profile
from .bbr import label_bbr_entrance_id, label_bbr_housing_area

# *****************************************************************************
@@ -60,12 +62,15 @@ def heat_demand_dict_by_building_entrance(
    number_intervals: int,
    time_interval_durations: list,
    bdg_specific_demand: dict,
    bdg_ratio_min_max: dict,
    bdg_min_max_ratio: dict,
    bdg_demand_phase_shift: dict = None,
    key_osm_entr_id: str = label_osm_entrance_id,
    key_bbr_entr_id: str = label_bbr_entrance_id,
    avg_state: list = None,
    state_correlates_with_output: bool = False,
    deviation_gain: float = 1.0,
    solver: str = 'glpk',
    **kwargs
) -> dict:
    
    # initialise dict for each building entrance
@@ -84,11 +89,8 @@ def heat_demand_dict_by_building_entrance(
        # for each building
        for building_index in building_indexes:
            # get relevant data
            # base_load_avg_ratio = 0.3
            # specific_demand = 107 # kWh/m2/year
            area = gdf_buildings.loc[building_index][label_bbr_housing_area]

            # estimate its demand
            # generate the profile            
            if type(avg_state) == type(None):
                # ignore states
                heat_demand_profiles.append(
@@ -96,7 +98,7 @@ def heat_demand_dict_by_building_entrance(
                        discrete_sinusoid_matching_integral(
                            bdg_specific_demand[building_index] * area,
                            time_interval_durations=time_interval_durations,
                            min_to_max_ratio=bdg_ratio_min_max[building_index],
                            min_max_ratio=bdg_min_max_ratio[building_index],
                            phase_shift_radians=(
                                bdg_demand_phase_shift[building_index]
                            ),
@@ -104,18 +106,46 @@ def heat_demand_dict_by_building_entrance(
                    )
                )

            elif type(deviation_gain) == type(None):
                # states matter but the gain must be determined
                # heat_demand_profiles.append(
                #     np.array(
                #         create_profile_using_time_weighted_state(
                #             integration_result=(
                #                 bdg_specific_demand[building_index] * area
                #             ),
                #             avg_state=avg_state,
                #             time_interval_durations=time_interval_durations,
                #             min_max_ratio=bdg_min_max_ratio[building_index],
                #             state_correlates_with_output=state_correlates_with_output,
                #         )
                #     )
                # )
                heat_demand_profiles.append(
                    np.array(
                        generate_state_correlated_profile(
                            integration_result=(
                                bdg_specific_demand[building_index] * area
                            ), 
                            states=avg_state, 
                            time_interval_durations=time_interval_durations,
                            states_correlate_profile=state_correlates_with_output,
                            min_max_ratio=bdg_min_max_ratio[building_index],
                            solver=solver
                            )
                        )
                    )
            else:
                # states matter
                # states matter and the gain is predefined
                heat_demand_profiles.append(
                    np.array(
                        create_profile_using_time_weighted_state(
                        generate_manual_state_correlated_profile(
                            integration_result=(
                                bdg_specific_demand[building_index] * area
                            ), 
                            avg_state=avg_state,
                            states=avg_state, 
                            time_interval_durations=time_interval_durations, 
                            min_to_max_ratio=bdg_ratio_min_max[building_index],
                            state_correlates_with_output=state_correlates_with_output,
                            deviation_gain=deviation_gain
                            )
                        )
                    )
@@ -132,10 +162,74 @@ def heat_demand_dict_by_building_entrance(
    # return
    return demand_dict
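
The branching above selects one of three profile generators based solely on
avg_state and deviation_gain. A minimal standalone sketch of the dispatch
logic (illustrative names, not the package API):

def pick_generator(avg_state, deviation_gain):
    # mirrors the branches in heat_demand_dict_by_building_entrance
    if avg_state is None:
        # states play no role: plain discrete sinusoid
        return "discrete_sinusoid_matching_integral"
    if deviation_gain is None:
        # states matter but the gain is determined via an LP solver
        return "generate_state_correlated_profile"
    # states matter and the gain is predefined by the caller
    return "generate_manual_state_correlated_profile"

assert pick_generator(None, 1.0) == "discrete_sinusoid_matching_integral"
assert pick_generator([10.0, 12.0], None) == "generate_state_correlated_profile"
assert pick_generator([10.0, 12.0], 1.0) == "generate_manual_state_correlated_profile"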


# *****************************************************************************
# *****************************************************************************

# TODO: allow reusing the gain

def heat_demand_profiles(
    gdf_osm: GeoDataFrame,
    gdf_buildings: GeoDataFrame,
    time_interval_durations: list,
    assessments: list,
    annual_heat_demand: dict,
    air_temperature: dict = None, 
    reuse_deviation_gain: bool = True,
    **kwargs
) -> tuple:
    
    # calculate the total area
    total_area = total_heating_area(gdf_osm, gdf_buildings)
    # initialise data dict
    heat_demand_dict = {}
    # for each building entrance
    for osm_index in gdf_osm.index:
        # initialise dict for each building entrance
        bdg_entr_dict = {}
        # find the indexes for each building leading to the curr. cons. point
        building_indexes = gdf_buildings[
            gdf_buildings[label_bbr_entrance_id] == gdf_osm.loc[osm_index][label_osm_entrance_id]
        ].index
        for q in assessments:
            # define the specific heat demand
            specific_demand = annual_heat_demand[q]/total_area
            # 
            # initialise dict for each building consumption point
            bdg_profile_list = []
            # for each building
            for building_index in building_indexes:
                # get relevant data
                area = gdf_buildings.loc[building_index][label_bbr_housing_area]
                # handle states
                if type(air_temperature) != type(None):
                    kwargs['states'] = air_temperature[q]
                # append the profile for each building to the list
                _profile = generate_profile(
                    integration_result=specific_demand*area, 
                    time_interval_durations=time_interval_durations, 
                    **kwargs
                    # min_max_ratio, 
                    # states, 
                    # states_correlate_profile, 
                    # solver, 
                    # deviation_gain
                    )
                bdg_profile_list.append(np.array(_profile))
            # aggregate profiles for the same building entrance
            bdg_entr_profile = (
                [] 
                if len(bdg_profile_list) == 0 else 
                sum(profile for profile in bdg_profile_list)
                )
            # store the demand profile
            bdg_entr_dict[q] = bdg_entr_profile
        # add to the main dict
        heat_demand_dict[osm_index] = bdg_entr_dict
        
    return heat_demand_dict, total_area

# *****************************************************************************
# *****************************************************************************

def total_heating_area(
    gdf_osm: GeoDataFrame,
@@ -156,6 +250,5 @@ def total_heating_area(
            area += gdf_buildings.loc[building_index][label_bbr_housing_area]
    return area


# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
================================ next file ================================
@@ -1090,7 +1090,7 @@ def is_path_straight(

def find_simplifiable_paths(
    network: nx.MultiDiGraph,
    excluded_nodes: list,
    protected_nodes: list,
    ignore_self_loops: bool = False,
    consider_reversed_edges: bool = False,
    include_both_directions: bool = False,
@@ -1106,7 +1106,7 @@ def find_simplifiable_paths(
    ----------
    network : nx.MultiDiGraph
        The object describing the graph.
    excluded_nodes : list
    protected_nodes : list
        A list of keys for nodes that cannot be in any straight path.
    ignore_self_loops : bool, optional
        If True, paths including self-loops can still be straight. If False,
@@ -1139,7 +1139,7 @@ def find_simplifiable_paths(
            node_key
            for node_key in network.nodes()
            # the node cannot be among those excluded
            if node_key not in excluded_nodes
            if node_key not in protected_nodes
            # the node has to be linked to two other nodes other than itself
            if len(set(neighbours(network, node_key, ignore_self_loops=True))) == 2
            # exclude nodes with self-loops if desired:
================================ next file ================================
@@ -28,7 +28,7 @@ from .calculate import update_street_count, edge_lengths
# *****************************************************************************


def remove_self_loops(network: nx.MultiDiGraph):
def remove_self_loops(network: nx.MultiDiGraph) -> list:
    """
    Removes self-loops from a directed graph defined in a MultiDiGraph object.

@@ -39,7 +39,7 @@ def remove_self_loops(network: nx.MultiDiGraph):

    Returns
    -------
    generator-expression
    list
        The keys to the nodes whose self-loops were removed.

    """
@@ -276,7 +276,9 @@ def transform_roundabouts_into_crossroads(


def remove_dead_ends(
    network: nx.MultiDiGraph, keepers: tuple = None, max_iterations: int = 1
    network: nx.MultiDiGraph, 
    protected_nodes: list = None, 
    max_iterations: int = 1
) -> list:
    """
    Removes dead ends (non-cyclical branches) from a directed graph.
@@ -288,7 +290,7 @@ def remove_dead_ends(
    ----------
    network : nx.MultiDiGraph
        The object describing the directed graph.
    keepers : tuple, optional
    protected_nodes : list, optional
        A list of keys for the nodes that are not to be considered for removal.
        The default is None, which means all nodes are under consideration.
    max_iterations : int, optional
@@ -301,8 +303,8 @@ def remove_dead_ends(

    """

    if type(keepers) == type(None):
        keepers = []
    if type(protected_nodes) == type(None):
        protected_nodes = []

    # while true
    nodes_removed = []
@@ -313,7 +315,7 @@ def remove_dead_ends(
        target_nodes = [
            node_key
            for node_key in network.nodes()
            if node_key not in keepers
            if node_key not in protected_nodes
            # if it has at most one neighbour other than itself
            if len(set(gis_iden.neighbours(network, node_key, ignore_self_loops=True)))
            <= 1
@@ -505,20 +507,30 @@ def replace_path(network: nx.MultiDiGraph, path: list) -> tuple:


def remove_longer_parallel_edges(
    network: nx.MultiDiGraph, ignore_edge_directions: bool = False
    network: nx.MultiDiGraph, 
    distance_key: str = osm.KEY_OSMNX_LENGTH, 
    ignore_edge_directions: bool = True,
    protected_edges: list = None
) -> list:
    """
    Removes longer parallel edges from the network.
    
    Parallel edges are those connecting the same nodes in the same direction. If
    there are parallel edges between any given pair of nodes, the longer ones
    will be removed. If desired, edge directions can be ignored. In that case,
    only the shortest edge between the same pair of nodes will be retained.
    Parallel edges refer to multiple edges connecting the same nodes. If there
    are parallel edges between any given pair of nodes, only the shortest one
    will be retained. If desired, edge directions can be taken into account, so
    that only edges in the same direction count as parallel. By default, edge
    directions are ignored when selecting parallel edges.

    Parameters
    ----------
    network : nx.MultiDiGraph
        The object describing the graph.
    distance_key : str, optional
        The key used to obtain distances. The default is osm.KEY_OSMNX_LENGTH.
    ignore_edge_directions : bool, optional
        If True, edge directions are ignored. The default is True.
    protected_edges : list, optional
        A list of edges that should be retained. The default is None, which
        means all edges are eligible.

    Returns
    -------
@@ -526,12 +538,9 @@ def remove_longer_parallel_edges(
        A list of the edges removed.

    """

    # redundancy: having more than one edge between two nodes
    # solution: remove the longest one, leave the shortest one

    # for each node pair

    if type(protected_edges) == type(None):
        # default: empty list
        protected_edges = []
    edges_removed = []
    for node_one in network.nodes():
        for node_two in network.nodes():
@@ -547,28 +556,31 @@ def remove_longer_parallel_edges(
                list_edges = gis_iden.get_edges_from_a_to_b(
                    network, node_start=node_one, node_end=node_two
                )

            # if none exist, skip
            if len(list_edges) == 0:
                continue

            # otherwise, find out which is the shortest one
            # otherwise, sort them by distance (shorter distances first)
            # note: protected edges are considered in the sorting too
            sorted_edges = sorted(
                (network.edges[edge_key][osm.KEY_OSMNX_LENGTH], edge_key)
                (network.edges[edge_key][distance_key], edge_key)
                for edge_key in list_edges
            )

            network.remove_edges_from(edge_tuple[1] for edge_tuple in sorted_edges[1:])

            edges_removed.extend(edge_tuple[1] for edge_tuple in sorted_edges[1:])

            # edges to be removed (drop protected edges here)
            edges_for_removal = tuple(
                edge_tuple[1] 
                for edge_tuple in sorted_edges[1:]
                if edge_tuple[1] not in protected_edges
                )
            # remove all but the shortest edge
            network.remove_edges_from(edges_for_removal)
            # update the list of edges removed
            edges_removed.extend(edges_for_removal)
    # return statement
    return edges_removed
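
A hedged usage sketch of the revised signature on a toy graph, assuming the
osmnx length attribute is named 'length' (the function itself is defined
above; the import is omitted):

import networkx as nx

G = nx.MultiDiGraph()
G.add_edge('a', 'b', key=0, length=5.0)
G.add_edge('a', 'b', key=1, length=9.0)  # longer parallel edge
removed = remove_longer_parallel_edges(G, distance_key='length')
# expected: removed == [('a', 'b', 1)], leaving only the shortest edge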


# *****************************************************************************
# *****************************************************************************


def merge_points_into_linestring(
    line: LineString,
    points: tuple or list,
================================ next file ================================
@@ -11,8 +11,9 @@ from networkx import MultiDiGraph, MultiGraph
from pandas import MultiIndex, Series
from numpy import float64, int64
from geopandas import GeoDataFrame, read_file
from shapely.geometry import Point
from shapely.geometry import Point, LineString
import contextily as cx
from shapely import intersects

# local, internal

@@ -740,10 +741,13 @@ def get_directed(

def simplify_network(
    network: MultiDiGraph,
    protected_nodes: list,
    protected_nodes: list = None,
    protected_edges: list = None,
    dead_end_probing_depth: int = 5,
    remove_opposite_parallel_edges: bool = False,
    ignore_edge_directions: bool = True,
    update_street_count_per_node: bool = True,
    transform_roundabouts: bool = False,
    max_number_iterations: int = 5,
    **roundabout_conditions
):
    """
@@ -754,14 +758,21 @@ def simplify_network(
    network : MultiDiGraph
        The object describing the network.
    protected_nodes : list
        A list with the keys for the nodes that must be preserved.
    dead_end_probing_depth: int
        The maximum number of nodes a dead end can have to be detectable.
    remove_opposite_parallel_edges : bool, optional
        If True, longer parallel edges in opposite directions are also removed.
        The default is False.
        The keys for the nodes that must be preserved.
    protected_edges : list
        The keys for the edges that must be preserved.
    dead_end_probing_depth : int, optional
        The maximum number of nodes a dead end can have to be detectable. The 
        default is 5.
    ignore_edge_directions : bool, optional
        If True, direction is ignored in the search for parallel edges and
        simplifiable paths. The default is True.
    update_street_count_per_node : bool, optional
        If True, updates the street count on each node. The default is True.
    transform_roundabouts : bool, optional
        If True, roundabouts are to be transformed. The default is False.
    max_number_iterations : int, optional
        The maximum number of iterations. The default is 5.
    **roundabout_conditions : keyword and value pairs
        The conditions used to define which roundabouts are simplified.

@@ -771,26 +782,59 @@ def simplify_network(

    """
    
    # 1) remove dead ends (tends to create straight paths)
    gis_mod.remove_dead_ends(
        network, protected_nodes, max_iterations=dead_end_probing_depth
    if type(protected_nodes) == type(None):
        protected_nodes = []
    if type(protected_edges) == type(None):
        protected_edges = []
    else:
        # if there are protected edges, then the nodes involved in those edges
        # must also be preserved, otherwise they can be removed too
        protected_nodes.extend(
            # set(aaa for aa in a for aaa in aa[0:-1])
            set(nn for nnn in protected_edges for nn in nnn[0:-1])
            )
    iteration_counter = 0
    while iteration_counter < max_number_iterations:
        # remove self loops (can create straight paths and dead ends)
        looping_node_keys = gis_mod.remove_self_loops(network)
        # remove longer parallel edges (can create dead ends and straight paths)
        edge_keys = gis_mod.remove_longer_parallel_edges(
            network, 
            ignore_edge_directions=ignore_edge_directions,
            protected_edges=protected_edges,
        )
    # 2) remove longer parallel edges (tends to create straight paths)
    gis_mod.remove_longer_parallel_edges(
        network, ignore_edge_directions=remove_opposite_parallel_edges
        # remove dead ends (can create straight paths)
        node_keys = gis_mod.remove_dead_ends(
            network, 
            protected_nodes=protected_nodes, 
            max_iterations=dead_end_probing_depth
        )
    # 3) remove self loops (tends to create straight paths and dead ends)
    gis_mod.remove_self_loops(network)
    # 4) join segments (can create self-loops)
    simplifiable_paths = gis_iden.find_simplifiable_paths(network, protected_nodes)
    for path in simplifiable_paths:
        # join segments (can create self-loops and parallel edges)
        paths = gis_iden.find_simplifiable_paths(
            network, 
            protected_nodes=protected_nodes,
            consider_reversed_edges=ignore_edge_directions)
        for path in paths:
            gis_mod.replace_path(network, path)
    # 4) remove self loops (tends to create straight paths and dead ends)
    gis_mod.remove_self_loops(network)
    # 5) transform roundabouts into crossroads (can create straight paths)
        # update iteration counter
        iteration_counter += 1
        # check if it makes sense to break out of the loop
        if (len(looping_node_keys) == 0 and 
            len(edge_keys) == 0 and 
            len(paths) == 0 and 
            len(node_keys) == 0):
            # no self-loops
            # no edges were removed
            # no paths were simplified
            # no nodes were removed
            break
    
    # transform roundabouts into crossroads (can create straight paths)
    if transform_roundabouts:
        list_roundabout_nodes = gis_iden.find_roundabouts(network, **roundabout_conditions)
        gis_mod.transform_roundabouts_into_crossroads(network, list_roundabout_nodes)
    # 6) update street count
        
    # update street count
    if update_street_count_per_node:
        gis_calc.update_street_count(network)
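
A hedged usage sketch of the revised entry point. Note that the function
modifies the graph in place and, as implemented above, extends the caller's
protected_nodes list with the end nodes of any protected edges:

simplify_network(
    network,                            # a MultiDiGraph, e.g. built with osmnx
    protected_nodes=['n1'],             # never removed
    protected_edges=[('n2', 'n3', 0)],  # kept; 'n2' and 'n3' become protected too
    dead_end_probing_depth=5,
    ignore_edge_directions=True,
    max_number_iterations=5,
)
# the loop alternately removes self-loops, longer parallel edges and dead
# ends and merges simplifiable paths, until an iteration changes nothing
# or the iteration limit is reached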

@@ -1186,6 +1230,90 @@ def convert_edge_path(
    # return statement
    return node_path

# *****************************************************************************
# *****************************************************************************

def create_edge_geometry(
        network: MultiDiGraph, 
        edge_key,
        x_key = osm.KEY_OSMNX_X,
        y_key = osm.KEY_OSMNX_Y) -> LineString:
    "Returns a newly-created geometry for a given edge."
    
    return LineString(
        [(network.nodes[edge_key[0]][x_key],
          network.nodes[edge_key[0]][y_key]),
         (network.nodes[edge_key[1]][x_key],
          network.nodes[edge_key[1]][y_key])]
        )

# *****************************************************************************
# *****************************************************************************

def find_overlapping_edges(
        network: MultiDiGraph,
        excluded_edges: list = None
        ) -> list:
    """
    Returns a list of key pairs for edges whose geometries overlap.

    Parameters
    ----------
    network : MultiDiGraph
        The object describing the network.
    excluded_edges : list, optional
        A list of edges that should not be considered. The default is None, in
        which case all edges in the network object will be considered.

    Returns
    -------
    list
        A list containing key pairs for overlapping edges.

    """
    # check if there are excluded edges
    if type(excluded_edges) == type(None):
        excluded_edges = list()
    # initialise the list of edges to check
    edges = list(
        edge_key 
        for edge_key in network.edges(keys=True)
        if edge_key not in excluded_edges
        )
    visited_edges = []
    out = []
    # for each edge
    for edge_key in edges:
        # remove the current edge so it is not considered again
        visited_edges.append(edge_key)
        # for each other edge
        for other_edge_key in edges:
            # skip edges having nodes in common
            # this will also skip identical edges
            if edge_key[0] in other_edge_key[0:2] or edge_key[1] in other_edge_key[0:2]:
                # has nodes in common, skip
                continue
            # skip edges that have already been considered in the first loop
            if other_edge_key in visited_edges:
                # this edge has already been tested against all other edges
                continue
            # first edge                
            if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
                first_geo = network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY]
            else:
                first_geo = create_edge_geometry(network, edge_key)
            # second edge
            if osm.KEY_OSMNX_GEOMETRY in network.edges[other_edge_key]:
                second_geo = network.edges[other_edge_key][osm.KEY_OSMNX_GEOMETRY] 
            else:
                second_geo = create_edge_geometry(network, other_edge_key)
            # check if they intersect
            if intersects(first_geo, second_geo):
                # they do, add tuple of the edges to the output
                out.append((edge_key, other_edge_key))
    # return tuples of overlapping edges
    return out
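
A hedged usage sketch on a toy graph, assuming osm.KEY_OSMNX_X and
osm.KEY_OSMNX_Y are the osmnx node attribute names 'x' and 'y'. Note that
shapely's intersects() also flags edges that merely cross, not only edges
overlapping along a segment:

import networkx as nx

G = nx.MultiDiGraph()
# two diagonal segments with no nodes in common
G.add_node('a', x=0.0, y=0.0)
G.add_node('b', x=2.0, y=2.0)
G.add_node('c', x=0.0, y=2.0)
G.add_node('d', x=2.0, y=0.0)
G.add_edge('a', 'b', key=0)
G.add_edge('c', 'd', key=0)
pairs = find_overlapping_edges(G)
# expected: pairs == [(('a', 'b', 0), ('c', 'd', 0))], crossing at (1, 1)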

# *****************************************************************************
# *****************************************************************************
================================ next file ================================
@@ -2,14 +2,11 @@
# *****************************************************************************

# standard

import uuid

import math

from statistics import mean

# local, external
import pyomo.environ as pyo

# *****************************************************************************
# *****************************************************************************
@@ -40,7 +37,7 @@ def generate_pseudo_unique_key(key_list: tuple, max_iterations: int = 10) -> str
def discrete_sinusoid_matching_integral(
    integration_result: float,
    time_interval_durations: list,
    min_to_max_ratio: float,
    min_max_ratio: float,
    phase_shift_radians: float = None,
) -> list:
    """
@@ -57,7 +54,7 @@ def discrete_sinusoid_matching_integral(

    where:

    a = b*(1-min_to_max_ratio)/(1+min_to_max_ratio)
    a = b*(1-min_max_ratio)/(1+min_max_ratio)

    b = integration_result/integration_period

@@ -71,7 +68,7 @@ def discrete_sinusoid_matching_integral(
        The result of integrating the sinusoidal function for one period.
    time_interval_durations : list
        The time interval durations for each sample.
    min_to_max_ratio : float
    min_max_ratio : float
        The ratio between the minimum and maximum values of the function.
    phase_shift_radians : float, optional
        The phase shift for the sinusoidal function. The default is None, which
@@ -90,7 +87,7 @@ def discrete_sinusoid_matching_integral(

    b = integration_result / integration_period

    a = b * (1 - min_to_max_ratio) / (1 + min_to_max_ratio)
    a = b * (1 - min_max_ratio) / (1 + min_max_ratio)

    alpha = 2 * math.pi / integration_period

@@ -127,7 +124,7 @@ def synch_profile(profile: list, reference_profile: list, synch: bool = True) ->

    By default, the profiles are synched: the highest sample in one is placed
    in the same position as the highest sample in the other; the second highest
    sample is placede in the same position as the second highest sample in the
    sample is placed in the same position as the second highest sample in the
    other profile; and so on. Alternatively, the profiles can be synched in
    reverse: the highest sample in one profile is placed in the same position
    as the lowest sample in the other; and so on and so forth.
@@ -188,10 +185,10 @@ def synch_profile(profile: list, reference_profile: list, synch: bool = True) ->

def create_profile_using_time_weighted_state(
    integration_result: float,
    avg_state: list,
    states: list,
    time_interval_durations: list,
    min_to_max_ratio: float,
    state_correlates_with_output: bool = True,
    min_max_ratio: float,
    states_correlate_profile: bool = True,
) -> list:
    """
    Returns a profile that approximates a sinusoidal function in discrete time.
@@ -210,7 +207,7 @@ def create_profile_using_time_weighted_state(

    where:

    a = b*(1-min_to_max_ratio)/(1+min_to_max_ratio)
    a = b*(1-min_max_ratio)/(1+min_max_ratio)

    b = integration_result/integration_period

@@ -222,13 +219,13 @@ def create_profile_using_time_weighted_state(
    ----------
    integration_result : float
        The result of integrating the sinusoidal function for one period.
    avg_state : list
    states : list
        The average state during each time interval.
    time_interval_durations : list
        The time interval durations for each sample.
    min_to_max_ratio : float
    min_max_ratio : float
        The ratio between the minimum and maximum values of the function.
    state_correlates_with_output : bool, optional
    states_correlate_profile : bool, optional
        If True, the peak should happen when the state is at its highest point.
        If False, the peak should happen when the state is at its lowest point.
        The default is True.
@@ -246,26 +243,26 @@ def create_profile_using_time_weighted_state(

    """

    if len(avg_state) != len(time_interval_durations):
    if len(states) != len(time_interval_durations):
        raise ValueError("The inputs are inconsistent.")

    period = sum(time_interval_durations)

    avg_time_interval_duration = mean(time_interval_durations)

    avg_state_weighted = [
    states_weighted = [
        (
            x_k * delta_k / avg_time_interval_duration
            if state_correlates_with_output
            if states_correlate_profile
            else -x_k * delta_k / avg_time_interval_duration
        )
        for delta_k, x_k in zip(time_interval_durations, avg_state)
        for delta_k, x_k in zip(time_interval_durations, states)
    ]

    # find the peak

    _sorted = sorted(
        ((state, index) for index, state in enumerate(avg_state_weighted)), reverse=True
        ((state, index) for index, state in enumerate(states_weighted)), reverse=True
    )

    # create new list for time durations starting with that of the peak
@@ -280,7 +277,7 @@ def create_profile_using_time_weighted_state(
    new_profile = discrete_sinusoid_matching_integral(
        integration_result=integration_result,
        time_interval_durations=swapped_time_durations,
        min_to_max_ratio=min_to_max_ratio,
        min_max_ratio=min_max_ratio,
        phase_shift_radians=(
            math.pi / 2
            - 0.5 * (time_interval_durations[_sorted[0][1]] / period) * 2 * math.pi
@@ -291,6 +288,257 @@ def create_profile_using_time_weighted_state(
    n = len(time_interval_durations)
    return [*new_profile[n - _sorted[0][1] :], *new_profile[0 : n - _sorted[0][1]]]

# *****************************************************************************
# *****************************************************************************

def generate_manual_state_correlated_profile(
    integration_result: float,
    states: list,
    time_interval_durations: list,
    deviation_gain: float
) -> list:
    """
    Returns a profile matching a given integral and varying according to a
    sequence of time intervals and the respective mean states.

    The profile for interval i is defined as follows:

    P[i] = (dt[i]/dt_mean)*( (x[i]-x_mean)*gain + offset)

    where:

    dt[i] is the time interval duration for interval i
    
    dt_mean is the mean time interval duration
    
    x[i] is the state for interval i
    
    x_mean is the mean state for the entire profile
    
    The offset is defined as the integration result divided by the number of
    time intervals, whereas the gain is user-defined and real-valued.

    Parameters
    ----------
    integration_result : float
        The result of integrating the profile over all time intervals.
    states : list
        The average state during each time interval.
    time_interval_durations : list
        The time interval durations for each sample.
    deviation_gain : float
        The gain applied to each interval's deviation from the mean state.

    Raises
    ------
    ValueError
        This error is raised if the list inputs do not have the same size.

    Returns
    -------
    list
        A profile matching the aforementioned characteristics.

    """

    if len(states) != len(time_interval_durations):
        raise ValueError("The inputs are inconsistent.")

    dt_total = sum(time_interval_durations)
    dt_mean = mean(time_interval_durations)
    # x_mean  = mean(states)
    x_mean = sum(
        deltat_k*x_k 
        for deltat_k, x_k in zip(time_interval_durations, states)
        )/dt_total
    beta = integration_result/len(states)
    return [
        ((x_k-x_mean)*deviation_gain+beta )* deltat_k / dt_mean
        for deltat_k, x_k in zip(time_interval_durations, states)
    ]
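
A standalone sketch of the formula above (illustrative names, not the
package API). The profile always integrates to integration_result because
the duration-weighted deviations around x_mean cancel out:

from statistics import mean

def manual_state_profile(integration_result, states, time_interval_durations, gain):
    # P[i] = (dt[i]/dt_mean) * ((x[i] - x_mean)*gain + offset)
    dt_total = sum(time_interval_durations)
    dt_mean = mean(time_interval_durations)
    # duration-weighted mean state
    x_mean = sum(dt*x for dt, x in zip(time_interval_durations, states))/dt_total
    offset = integration_result/len(states)
    return [
        ((x - x_mean)*gain + offset)*dt/dt_mean
        for dt, x in zip(time_interval_durations, states)
    ]

profile = manual_state_profile(1000.0, [5, 10, 15, 10], [3600]*4, gain=2.0)
# profile == [240.0, 250.0, 260.0, 250.0] and sum(profile) == 1000.0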

# *****************************************************************************
# *****************************************************************************

def generate_state_correlated_profile(
    integration_result: float,
    states: list,
    time_interval_durations: list,
    states_correlate_profile: bool,
    min_max_ratio: float,
    solver: str
) -> tuple:
    """
    Returns a profile observing a number of conditions.
    
    The profile must correlate with a set of states averaged over certain time 
    intervals, whose durations may be irregular. Integration of the profile 
    over all time intervals must also match a certain value. Finally, the
    minimum and maximum profile values must be related by a given factor
    between 0 and 1.
    
    This method relies on linear programming. An LP solver must be used.
    
    The profile for interval i is defined as follows:

    P[i] = (dt[i]/dt_mean)*( (x[i]-x_mean)*gain + offset)

    where:

    dt[i] is the time interval duration for interval i
    
    dt_mean is the mean time interval duration
    
    x[i] is the state for interval i
    
    x_mean is the mean state for the entire profile
    
    The offset is defined as the integration result divided by the number of
    time intervals, whereas the gain is determined via optimisation.

    Parameters
    ----------
    integration_result : float
        The result of integrating the profile over all time intervals.
    states : list
        The average state during each time interval.
    time_interval_durations : list
        The duration of each time interval.
    states_correlate_profile : bool
        If True, higher state values must lead to higher profile values. 
        If False, lower state values must lead to higher profile values.
    min_max_ratio : float
        The ratio between the minimum and the maximum profile values.
    solver : str
        The name of the LP solver to use, according to Pyomo conventions.

    Raises
    ------
    ValueError
        This error is raised if the list inputs do not have the same size.

    Returns
    -------
    tuple
        A tuple containing the profile, the gain and the offset.

    """    
        
    # *************************************************************************
    # *************************************************************************
    
    # TODO: find alternative solution, as this is most likely overkill
    
    # internal model
    
    def model(states_correlate_profile: bool) -> pyo.AbstractModel:
    
        # abstract model
        model = pyo.AbstractModel()
        
        # sets
        model.I = pyo.Set()
        
        # decision variables
        model.P_i = pyo.Var(model.I, domain=pyo.NonNegativeReals)
        model.P_max = pyo.Var(domain=pyo.NonNegativeReals)
        model.P_min = pyo.Var(domain=pyo.NonNegativeReals)
        if states_correlate_profile:
            model.alpha = pyo.Var(domain=pyo.PositiveReals)
        else:
            model.alpha = pyo.Var(domain=pyo.NegativeReals)
        
        # parameters
        model.param_R = pyo.Param()
        model.param_VI = pyo.Param()
        model.param_X_i = pyo.Param(model.I)
        model.param_Y_i = pyo.Param(model.I)
        
        def obj_f(m):
            if states_correlate_profile:
                return m.alpha # if positive
            else:
                return -m.alpha # if negative
        # model.OBJ = pyo.Objective(rule=obj_f)
        model.OBJ = pyo.Objective(rule=obj_f, sense=pyo.maximize)
        
        # integral
        def constr_integral_rule(m):
            return sum(m.P_i[i] for i in m.I) == m.param_VI
        model.constr_integral = pyo.Constraint(rule=constr_integral_rule)
        
        # profile equations
        def constr_profile_equations_rule(m,i):
            return m.P_i[i] - m.param_X_i[i]*m.alpha == m.param_Y_i[i]
        model.constr_profile_equations = pyo.Constraint(model.I, rule=constr_profile_equations_rule)
        
        # upper bound
        def constr_max_upper_bound_rule(m,i):
            return m.P_i[i] <= m.P_max
        model.constr_max_upper_bound = pyo.Constraint(model.I, rule=constr_max_upper_bound_rule)
        
        # lower bound
        def constr_max_lower_bound_rule(m,i):
            return m.P_i[i] >= m.P_min
        model.constr_max_lower_bound = pyo.Constraint(model.I, rule=constr_max_lower_bound_rule)
        
        # ratio
        def constr_min_max_rule(m):
            return m.P_min == m.P_max*m.param_R
        model.constr_min_max = pyo.Constraint(rule=constr_min_max_rule)
        
        return model

    number_time_steps = len(time_interval_durations)
    if len(states) != number_time_steps:
        raise ValueError("The inputs are inconsistent.")
        
    # *************************************************************************
    # *************************************************************************
    
    dt_total = sum(time_interval_durations)
    dt_mean = mean(time_interval_durations)
    x_mean = sum(
        deltat_k*x_k 
        for deltat_k, x_k in zip(time_interval_durations, states)
        )/dt_total
    beta = integration_result/number_time_steps
    f = [dt_k/dt_mean for dt_k in time_interval_durations]
    set_I = tuple(i for i in range(number_time_steps))
    
    if len(set(states)) == 1:
        # alpha = 0, return trivial profile
        return (
            [fi*beta for fi in f], 
            0,
            beta
            )
    
    # create a dictionary with the data (using pyomo conventions)
    data_dict = {
        None: {
            # sets
            "I": {None: set_I},
            # parameters
            "param_VI": {None: integration_result},
            "param_R": {None: min_max_ratio},
            "param_X_i": {i:f[i]*(states[i]-x_mean) for i in set_I},
            "param_Y_i": {i:f[i]*beta for i in set_I},
        }
    }
    
    # *************************************************************************
    # *************************************************************************
    
    opt = pyo.SolverFactory(solver)
    fit_model = model(states_correlate_profile=states_correlate_profile)  
    problem = fit_model.create_instance(data=data_dict) 
    opt.solve(problem, tee=False)
    # return profile
    return (
        [pyo.value(problem.P_i[i]) for i in problem.I], 
        pyo.value(problem.alpha),
        beta
        )
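
A hedged usage sketch, assuming GLPK is installed and on the PATH; the
import path is inferred from the relative import in heat.py and may differ:

from src.topupopt.data.misc.utils import generate_state_correlated_profile

profile, gain, offset = generate_state_correlated_profile(
    integration_result=1000.0,
    states=[5, 10, 15, 10],
    time_interval_durations=[3600]*4,
    states_correlate_profile=True,
    min_max_ratio=0.2,
    solver='glpk',
)
# sum(profile) matches integration_result, the smallest sample is at least
# min_max_ratio times the largest, and gain > 0 because higher states must
# coincide with higher profile values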

# *****************************************************************************
# *****************************************************************************
@@ -300,7 +548,7 @@ def max_min_sinusoidal_profile(
    integration_result: float or int,
    period: float or int,
    time_interval_duration: float or int,
    min_to_max_ratio: float,
    min_max_ratio: float,
) -> tuple:
    """
    Returns the maximum and minimum amount for a given time interval, according
@@ -317,7 +565,7 @@ def max_min_sinusoidal_profile(

    where:

    a = b*(1-min_to_max_ratio)/(1+min_to_max_ratio)
    a = b*(1-min_max_ratio)/(1+min_max_ratio)

    b = integration_result/integration_period

@@ -331,7 +579,7 @@ def max_min_sinusoidal_profile(
        The result of integrating the sinusoidal function for one period.
    time_interval_durations : list
        The time interval durations for each sample.
    min_to_max_ratio : float
    min_max_ratio : float
        The ratio between the minimum and maximum values of the function.
    phase_shift_radians : float, optional
        The phase shift for the sinusoidal function. The default is None, which
@@ -345,7 +593,7 @@ def max_min_sinusoidal_profile(
    """

    b = integration_result / period
    a = b * (1 - min_to_max_ratio) / (1 + min_to_max_ratio)
    a = b * (1 - min_max_ratio) / (1 + min_max_ratio)
    alpha = 2 * math.pi / period
    amplitude = a * (2 / alpha) * math.sin(alpha * time_interval_duration / 2)

@@ -354,6 +602,87 @@ def max_min_sinusoidal_profile(
        b * time_interval_duration - amplitude,
    )

# *****************************************************************************
# *****************************************************************************

def generate_profile(
        integration_result: float,
        time_interval_durations: list,
        **kwargs
    ) -> list:
    """
    Returns a profile generated with one of several methods, selected
    according to the keyword arguments provided.

    Parameters
    ----------
    integration_result : float
        The value which must be obtained by adding up all samples.
    time_interval_durations : list
        A list with the durations of each time interval.
    **kwargs : 
        A sequence of key and value pairs for use in subsequent methods.

    Returns
    -------
    list
        The generated profile.

    """
    
    # generate the profile
    if 'states' not in kwargs: 
        # min_max_ratio is necessary
        # phase_shift_radians is optional
        # states play no role
        
        # # remove unnecessary arguments
        # if 'deviation_gain' in kwargs:
        #     kwargs.pop('deviation_gain')
        # if 'solver' in kwargs:
        #     kwargs.pop('solver')
        return discrete_sinusoid_matching_integral(
            integration_result=integration_result,
            time_interval_durations=time_interval_durations,
            **kwargs
            )
    elif 'deviation_gain' not in kwargs:
        # states matter but the gain must be determined
        
        if 'solver' in kwargs:
            # - states_correlate_profile is necessary
            # - min_max_ratio is necessary
            # - solver is necessary
            return generate_state_correlated_profile(
                integration_result=integration_result, 
                time_interval_durations=time_interval_durations,
                **kwargs
                )[0]
        else:
            # - states_correlate_profile is necessary
            # - min_max_ratio is necessary
            # - solver is not necessary
            return create_profile_using_time_weighted_state(
                integration_result=integration_result,
                time_interval_durations=time_interval_durations,
                **kwargs)
    else:
        # states matter and the gain is predefined
        # states are necessary
        # deviation gain is necessary
        # # remove unnecessary arguments
        # if 'phase_shift_radians' in kwargs:
        #     kwargs.pop('phase_shift_radians')
        return generate_manual_state_correlated_profile(
            integration_result=integration_result, 
            time_interval_durations=time_interval_durations,
            **kwargs
            )
    
    # integration_result: float,
    # states: list,
    # time_interval_durations: list,
    # min_max_ratio: float,
    # states_correlate_profile: bool = True,
    
# *****************************************************************************
# *****************************************************************************
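
In short, the keyword arguments alone determine the method. A hedged usage
sketch covering the four combinations (import path as assumed earlier):

temps = [8 + i for i in range(12)]
dts = [3600]*12

# no states: discrete sinusoid (min_max_ratio required, phase shift optional)
p1 = generate_profile(1000.0, dts, min_max_ratio=0.2)
# states, no gain, solver given: LP-based state-correlated profile
p2 = generate_profile(1000.0, dts, states=temps, states_correlate_profile=True,
                      min_max_ratio=0.2, solver='glpk')
# states, no gain, no solver: time-weighted-state sinusoid
p3 = generate_profile(1000.0, dts, states=temps, states_correlate_profile=True,
                      min_max_ratio=0.2)
# states plus a predefined gain: manual state-correlated profile
p4 = generate_profile(1000.0, dts, states=temps, deviation_gain=1.0)
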
================================ next file ================================
@@ -613,20 +613,61 @@ class Network(nx.MultiDiGraph):
        KEY_ARC_TECH_STATIC_LOSS,
    )
    
    def __init__(self, incoming_graph_data=None, **attr):
    NET_TYPE_HYBRID = 0
    NET_TYPE_TREE = 1
    NET_TYPE_REV_TREE = 2
    
    NET_TYPES = (
        NET_TYPE_HYBRID,
        NET_TYPE_TREE,
        NET_TYPE_REV_TREE
        )

    def __init__(self, network_type = NET_TYPE_HYBRID, **kwargs):
        # run base class init routine

        nx.MultiDiGraph.__init__(self, incoming_graph_data=incoming_graph_data, **attr)
        nx.MultiDiGraph.__init__(self, **kwargs)

        # identify node types

        self.identify_node_types()

        # declare variables for the nodes without directed arc limitations
        self.network_type = network_type

        self.nodes_w_in_dir_arc_limitations = dict()
    
        self.nodes_w_out_dir_arc_limitations = dict()

    # *************************************************************************
    # *************************************************************************
    
    def _set_up_node(self, node_key, max_number_in_arcs: int = None, max_number_out_arcs: int = None):
        
        if self.should_be_tree_network():
            # nodes have to be part of a tree: one incoming arc per node at most
            self.nodes_w_in_dir_arc_limitations[node_key] = 1
        elif self.should_be_reverse_tree_network():
            # nodes have to be part of a reverse tree: one outgoing arc per node at most
            self.nodes_w_out_dir_arc_limitations[node_key] = 1
        else: 
            # nodes have no peculiar restrictions or they are defined 1 by 1
            if type(max_number_in_arcs) != type(None):
                self.nodes_w_in_dir_arc_limitations[node_key] = max_number_in_arcs
            if type(max_number_out_arcs) != type(None):
                self.nodes_w_out_dir_arc_limitations[node_key] = max_number_out_arcs

    # *************************************************************************
    # *************************************************************************
    
    def should_be_tree_network(self) -> bool:
        return self.network_type == self.NET_TYPE_TREE

        self.nodes_wo_in_dir_arc_limitations = []
        self.nodes_wo_out_dir_arc_limitations = []
    # *************************************************************************
    # *************************************************************************
    
    def should_be_reverse_tree_network(self) -> bool:
        return self.network_type == self.NET_TYPE_REV_TREE

    # *************************************************************************
    # *************************************************************************
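
A hedged usage sketch of the new network types (the Network class location
and the base_flow contents are assumptions):

net = Network(network_type=Network.NET_TYPE_TREE)
net.add_waypoint_node('w0')
net.add_source_sink_node('n0', base_flow={(0, 0): 1.0})
# in a tree-type network each node may have at most one incoming arc:
# net.nodes_w_in_dir_arc_limitations == {'w0': 1, 'n0': 1}
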
@@ -661,23 +702,25 @@ class Network(nx.MultiDiGraph):

    # add a new supply/demand node

    def add_source_sink_node(self, node_key, base_flow: dict):
    def add_source_sink_node(self, node_key, base_flow: dict, **kwargs):
        node_dict = {
            self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_SOURCE_SINK,
            self.KEY_NODE_BASE_FLOW: base_flow,
        }

        self.add_node(node_key, **node_dict)
        self._set_up_node(node_key, **kwargs)

    # *************************************************************************
    # *************************************************************************

    # add a new waypoint node

    def add_waypoint_node(self, node_key):
    def add_waypoint_node(self, node_key, **kwargs):
        node_dict = {self.KEY_NODE_TYPE: self.KEY_NODE_TYPE_WAY}

        self.add_node(node_key, **node_dict)
        self._set_up_node(node_key, **kwargs)

    # *************************************************************************
    # *************************************************************************
@@ -1256,6 +1299,31 @@ class Network(nx.MultiDiGraph):

            return nx.is_tree(network_view)

    # *************************************************************************
    # *************************************************************************
    
    def has_selected_antiparallel_arcs(self) -> bool:
        "Returns True if any two nodes have selected arcs in both directions."
        return len(self.find_selected_antiparallel_arcs()) != 0

    # *************************************************************************
    # *************************************************************************
    
    def find_selected_antiparallel_arcs(self) -> list:
        """Returns True if any two nodes have (selected) forward and reverse arcs."""
        
        # check the existence of forward and reverse arcs in the same segment
        arcs = [  # get the arcs selected
            arc_key[0:2]
            for arc_key in self.edges(keys=True)
            if True in self.edges[arc_key][Network.KEY_ARC_TECH].options_selected
        ]
        arcs = [  # get the selected arcs that exist both ways
            arc_key
            for arc_key in arcs
            if (arc_key[1], arc_key[0]) in arcs
        ]
        return arcs
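
The reverse-membership test above is the core of the check; a standalone
sketch of the same pattern on plain node pairs:

selected = [('a', 'b'), ('b', 'a'), ('b', 'c')]
antiparallel = [uv for uv in selected if (uv[1], uv[0]) in selected]
# antiparallel == [('a', 'b'), ('b', 'a')]; ('b', 'c') has no reverse arc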

# *****************************************************************************
# *****************************************************************************
================================ next file ================================
@@ -64,6 +64,15 @@ class InfrastructurePlanningProblem(EnergySystem):
        STATIC_LOSS_MODE_DS,
    )
    
    NODE_PRICE_LAMBDA = 1
    NODE_PRICE_DELTA = 2
    NODE_PRICE_OTHER = 3
    NODE_PRICES = (
        NODE_PRICE_LAMBDA,
        NODE_PRICE_DELTA,
        NODE_PRICE_OTHER
        )

    # *************************************************************************
    # *************************************************************************

@@ -80,6 +89,7 @@ class InfrastructurePlanningProblem(EnergySystem):
        converters: dict = None,
        prepare_model: bool = True,
        validate_inputs: bool = True,
        node_price_model = NODE_PRICE_DELTA
    ):  # TODO: switch to False when everything is more mature
        # *********************************************************************

@@ -1830,22 +1840,14 @@ class InfrastructurePlanningProblem(EnergySystem):
        }

        set_L_max_in_g = {
            g: tuple(
                l
                for l in self.networks[g].nodes
                if l not in self.networks[g].nodes_wo_in_dir_arc_limitations
            )
            g: tuple(self.networks[g].nodes_w_in_dir_arc_limitations.keys())
            for g in self.networks.keys()
            }

        # set_L_max_out_g = {
        #     g: tuple(
        #         l
        #         for l in self.networks[g].nodes
        #         if l not in self.networks[g].nodes_wo_out_dir_arc_limitations
        #         )
        #     for g in self.networks.keys()
        #     }
        set_L_max_out_g = {
            g: tuple(self.networks[g].nodes_w_out_dir_arc_limitations.keys())
            for g in self.networks.keys()
            }

        set_GL = tuple((g, l) for g in set_G for l in set_L[g])

@@ -2547,6 +2549,17 @@ class InfrastructurePlanningProblem(EnergySystem):
            for s in set_S[(g, l, q, p, k)]
        }

        # price function convexity

        param_price_function_is_convex = {
            (g, l, q, p, k): (
                self.networks[g].nodes[l][Network.KEY_NODE_PRICES][(q, p, k)].price_monotonically_increasing_with_volume()
                if l in set_L_imp[g] else
                self.networks[g].nodes[l][Network.KEY_NODE_PRICES][(q, p, k)].price_monotonically_decreasing_with_volume()
                )
            for (g, l, q, p, k) in set_S
        }

        # maximum resource volume per segment (infinity is the default)

        param_v_max_glqpks = {
@@ -3317,7 +3330,7 @@ class InfrastructurePlanningProblem(EnergySystem):
                "set_L_imp": set_L_imp,
                "set_L_exp": set_L_exp,
                "set_L_max_in_g": set_L_max_in_g,
                #'set_L_max_out_g': set_L_max_out_g,
                'set_L_max_out_g': set_L_max_out_g,
                "set_GL": set_GL,
                "set_GL_exp": set_GL_exp,
                "set_GL_imp": set_GL_imp,
@@ -3449,6 +3462,7 @@ class InfrastructurePlanningProblem(EnergySystem):
                "param_c_df_qp": param_c_df_qp,
                "param_c_time_qpk": param_c_time_qpk,
                "param_p_glqpks": param_p_glqpks,
                "param_price_function_is_convex": param_price_function_is_convex,
                "param_v_max_glqpks": param_v_max_glqpks,
                # *****************************************************************
                # converters
================================ next file ================================
@@ -12,7 +12,11 @@ from numbers import Real
class ResourcePrice:
    """A class for piece-wise linear resource prices in network problems."""

    def __init__(self, prices: list or int, volumes: list = None):
    def __init__(
            self, 
            prices: list or int, 
            volumes: list = None
            ):
        # how do we keep the size of the object as small as possible
        # if the tariff is time-invariant, how can information be stored?
        # - a flag
@@ -207,29 +211,9 @@ class ResourcePrice:
    # *************************************************************************
    # *************************************************************************
    
    def is_equivalent(self, other) -> bool:
        """Returns True if a given ResourcePrice is equivalent to another."""
        # resources are equivalent if:
        # 1) the prices are the same
        # 2) the volume limits are the same

        # the number of segments has to match
        if self.number_segments() != other.number_segments():
            return False  # the number of segments do not match
        # check the prices
        if self.prices != other.prices:
            return False  # prices are different
        # prices match, check the volumes
        if self.volumes != other.volumes:
            return False  # volumes are different
        return True  # all conditions have been met

    # *************************************************************************
    # *************************************************************************
    
    def __eq__(self, o) -> bool:
        """Returns True if a given ResourcePrice is equivalent to another."""
        return self.is_equivalent(o)
        return hash(self) == hash(o)
    
    def __hash__(self):
        return hash(
@@ -260,9 +244,7 @@ def are_prices_time_invariant(resource_prices_qpk: dict) -> bool:
    # check if the tariffs per period and assessment are equivalent
    for qp, qpk_list in qpk_qp.items():
        for i in range(len(qpk_list) - 1):
            if not resource_prices_qpk[qpk_list[0]].is_equivalent(
                resource_prices_qpk[qpk_list[i + 1]]
            ):
            if not resource_prices_qpk[qpk_list[0]] == resource_prices_qpk[qpk_list[i + 1]]:
                return False
    # all tariffs are equivalent per period and assessment: they are invariant
    return True
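
Equality now delegates to __hash__, which the (truncated) class body
suggests is computed over the price and volume data. A minimal standalone
sketch of the pattern using a hypothetical stand-in class; note that
hash-based equality admits rare collisions:

class Tariff:
    # hypothetical stand-in for ResourcePrice's equality-via-hash pattern
    def __init__(self, prices, volumes=None):
        self.prices = list(prices)
        self.volumes = volumes
    def __hash__(self):
        return hash((tuple(self.prices), tuple(self.volumes or ())))
    def __eq__(self, o):
        return hash(self) == hash(o)

assert Tariff([1.0, 2.0], [10, None]) == Tariff([1.0, 2.0], [10, None])
assert Tariff([1.0, 2.0], [10, None]) != Tariff([2.0, 2.0], [10, None])
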
================================ next file ================================
@@ -134,8 +134,13 @@ class TimeFrame:

    def consecutive_qpk(self, qpk_keyed_dict: dict) -> bool:
        "Returns True if all (q,p,k) tuple keys are valid and consecutive."
        # TODO: here
        raise NotImplementedError
        # all k intervals have to be consecutive for each (q,p) pair
        set_qp = set(qpk[0:2] for qpk in qpk_keyed_dict.keys())
        for qp in set_qp:
            for k in range(self.number_time_intervals(qp[0])):
                if (*qp,k) not in qpk_keyed_dict:
                    return False
        return True
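
A standalone sketch of the same check, with the interval count supplied as
a plain mapping (illustrative, not the class API):

def consecutive_qpk(qpk_keyed_dict, number_time_intervals):
    # every k in 0..number_time_intervals[q]-1 must be present per (q, p)
    set_qp = set(qpk[0:2] for qpk in qpk_keyed_dict)
    for qp in set_qp:
        for k in range(number_time_intervals[qp[0]]):
            if (*qp, k) not in qpk_keyed_dict:
                return False
    return True

assert consecutive_qpk({('q', 'p', 0): 1.0, ('q', 'p', 1): 2.0}, {'q': 2})
assert not consecutive_qpk({('q', 'p', 0): 1.0, ('q', 'p', 2): 3.0}, {'q': 3})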

    # *************************************************************************
    # *************************************************************************
@@ -241,6 +246,29 @@ class TimeFrame:
    # *************************************************************************
    # *************************************************************************
    
    def assessments_overlap(self) -> bool:
        "Returns True if any period is covered by more than one assessment."
        # if there is only one assessment, return False
        if self.number_assessments() == 1:
            return False
        else:
            # if there is more than one assessment, check whether two or more
            # cover the same period
            qs = tuple(self.assessments)
            # for each pair of assessments (not just consecutive ones)
            for i, q1 in enumerate(qs):
                for q2 in qs[i + 1:]:
                    # for each period in one assessment (q1)
                    for p in self.reporting_periods[q1]:
                        # check if it is covered by the other assessment (q2)
                        if p in self.reporting_periods[q2]:
                            # p is covered by at least two assessments (q1 and q2)
                            return True
        # if no period is covered by more than one assessment, return False
        return False
    
    # *************************************************************************
    # *************************************************************************

# *****************************************************************************
# *****************************************************************************

@@ -279,7 +307,7 @@ class EconomicTimeFrame(TimeFrame):
            # dict: 1 value per p and q
            self._discount_rates = dict(discount_rates_q)
        else:
            raise ValueError('Unrecognised inputs.')
            raise TypeError('Unrecognised inputs.')
        
        # TODO: validate the discount rate object
        
================================ next file ================================
@@ -3,9 +3,7 @@
# standard

import math
import random
from numbers import Real
from statistics import mean

import geopandas as gpd

@@ -15,7 +13,6 @@ import geopandas as gpd
# local, internal
from src.topupopt.data.gis.utils import read_gdf_file
from src.topupopt.data.buildings.dk import heat
from src.topupopt.data.buildings.dk import bbr

# *****************************************************************************
# *****************************************************************************
@@ -38,9 +35,11 @@ class TestDataBuildingsDK:
            30*24*3600
            for i in range(number_time_intervals)
            ]
        annual_heat_demand_scenario = 1000
        total_area = 1000
        air_temperature_scenario = [10 for i in range(number_time_intervals)]
        total_demand_true = 1000
        total_area_true = 4563  # 5%: 4563 # 100%: 100882
        assessments = ['q']
        annual_heat_demand = {'q': 1000}
        air_temperature = {'q': [5+i for i in range(number_time_intervals)]}
        
        gdf_osm = gpd.read_file(osm_data_filename)
        gdf_osm.set_index(['element_type', 'osmid'], drop=True, inplace=True)
@@ -51,55 +50,259 @@ class TestDataBuildingsDK:
            index='index'
            )
        
        # auxiliary function to verify the outputs
        def verify_result(
                out_dict, 
                out_area, 
                total_demand_true, 
                total_area_true,
                # assessments,
                # number_time_intervals
                ):
            assert type(out_dict) == dict
            assert isinstance(out_area, Real)
            assert len(out_dict) == len(gdf_osm)
            assert math.isclose(out_area, total_area_true, abs_tol=1e-3) # 5%: 4563 # 100%: 100882
            for q in assessments:
                assert math.isclose(
                    sum(sum(v[q]) for k, v in out_dict.items() if len(v[q]) != 0),
                    total_demand_true, 
                    abs_tol=1e-3
                    )
            # output dict must be keyed by entrance id and then by scenario
            for k, v in out_dict.items():
                assert k in gdf_osm.index
                if len(v) == 0:
                    continue
                for q in assessments:
                    assert q in v
                    assert len(v[q]) == number_time_intervals or len(v[q]) == 0
        
        # drop entries to keep things fast
        share_keeper_osm_entries = 0.05 
        number_osm_entries = len(gdf_osm)
        for index in gdf_osm.index:
            if len(gdf_osm) < round(share_keeper_osm_entries*number_osm_entries):
                break
            gdf_osm.drop(index=index, inplace=True)
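        # note: a subsample could also be drawn in one step, e.g. with
        # gdf_osm.sample(frac=share_keeper_osm_entries, random_state=0)
        # (hypothetical alternative; .sample() draws rows at random, whereas
        # the loop above deterministically keeps the last rows)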
        
        # create profiles in accordance with a set of states and a positive gain
        
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            time_interval_durations=intraperiod_time_interval_duration,
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            air_temperature=air_temperature,
            deviation_gain=1
            )
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
            
        # create profiles in accordance with a set of states and a negative gain
        
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            time_interval_durations=intraperiod_time_interval_duration,
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            air_temperature=air_temperature,
            deviation_gain=-1
            )
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
         
        # create profiles in accordance with a sinusoidal function (no phase shift)
        
        heat_demand_dict = heat.heat_demand_dict_by_building_entrance(
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            number_intervals=number_time_intervals,
            time_interval_durations=intraperiod_time_interval_duration,
            bdg_min_to_max_ratio={
                index: min_to_max_ratio for index in gdf_buildings.index
                },
            bdg_specific_demand={
                index: annual_heat_demand_scenario/total_area 
                for index in gdf_buildings.index
                },
            bdg_demand_phase_shift=None,
            avg_state=air_temperature_scenario,
            state_correlates_with_output=False
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            min_max_ratio=min_to_max_ratio,
            # air_temperature=air_temperature,
            # state_correlates_with_output=False
            # deviation_gain=1
            )
        assert type(heat_demand_dict) == dict
        assert len(heat_demand_dict) == len(gdf_osm)
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
        
        # no state preference, use phase shift
        # create profiles in accordance with a sinusoidal function (with phase shift)
        
        heat_demand_dict2 = heat.heat_demand_dict_by_building_entrance(
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            number_intervals=number_time_intervals,
            time_interval_durations=intraperiod_time_interval_duration,
            bdg_min_to_max_ratio={
                index: min_to_max_ratio for index in gdf_buildings.index
                },
            bdg_specific_demand={
                index: annual_heat_demand_scenario/total_area 
                for index in gdf_buildings.index
                },
            bdg_demand_phase_shift={
                index: 2*math.pi*random.random() for index in gdf_buildings.index
                },
            avg_state=None,
            state_correlates_with_output=False
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            min_max_ratio=min_to_max_ratio,
            phase_shift_radians=math.pi/2
            # air_temperature=air_temperature,
            # state_correlates_with_output=False
            # deviation_gain=1
            )
        assert type(heat_demand_dict2) == dict
        assert len(heat_demand_dict2) == len(gdf_osm)
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
            
        # total heating area
        # create profiles in accordance with states but without a predefined gain
    
        # create profile (no optimisation)
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            time_interval_durations=intraperiod_time_interval_duration,
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            air_temperature=air_temperature,
            min_max_ratio=min_to_max_ratio,
            states_correlate_profile=True,
            )
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
            
        # create profiles in accordance with states but without a predefined gain (optimisation)
        
        # remove all but one osm entry (to keep things light)
        for index in gdf_osm.index:
            if len(gdf_osm) <= 1:
                break
            gdf_osm.drop(index=index, inplace=True)
        
        # create profile
        heat_demand_dict, total_area = heat.heat_demand_profiles(
            gdf_osm=gdf_osm,
            gdf_buildings=gdf_buildings,
            time_interval_durations=intraperiod_time_interval_duration,
            assessments=assessments,
            annual_heat_demand=annual_heat_demand,
            air_temperature=air_temperature,
            min_max_ratio=min_to_max_ratio,
            states_correlate_profile=True,
            solver='glpk'
            )
        total_area_true = 200
        verify_result(heat_demand_dict, total_area, total_demand_true, total_area_true)
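        # note: the run above assumes the GLPK solver is installed and on the
        # PATH; other solver names may work if the backend supports them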

    # *************************************************************************
    # *************************************************************************
    
        heating_area = heat.total_heating_area(gdf_osm, gdf_buildings)
        assert isinstance(heating_area, Real)
        assert math.isclose(heating_area, 100882, abs_tol=1e-3)
    # def test_demand_dict3(self):
        
    #     # heat_demand_dict_by_building_entrance
        
    #     osm_data_filename = 'tests/data/gdf_osm.gpkg'
    #     building_data_filename = 'tests/data/gdf_buildings.gpkg'
    #     bdg_gdf_container_columns = ('ejerskaber','koordinater','bygningspunkt')
    #     number_time_intervals = 12
    #     min_to_max_ratio = 0.1
    #     intraperiod_time_interval_duration = [
    #         30*24*3600
    #         for i in range(number_time_intervals)
    #         ]
    #     annual_heat_demand_scenario = 1000
    #     total_area = 1000
    #     states = [10 for i in range(number_time_intervals)]
        
    #     gdf_osm = gpd.read_file(osm_data_filename)
    #     gdf_osm.set_index(['element_type', 'osmid'], drop=True, inplace=True)
    
    #     gdf_buildings = read_gdf_file(
    #         filename=building_data_filename,
    #         packed_columns=bdg_gdf_container_columns,
    #         index='index'
    #         )
        
    #     # sinusoidal
        
    #     heat_demand_dict = heat.heat_demand_dict_by_building_entrance2(
    #         gdf_osm=gdf_osm,
    #         gdf_buildings=gdf_buildings,
    #         number_intervals=number_time_intervals,
    #         time_interval_durations=intraperiod_time_interval_duration,
    #         min_max_ratio=min_to_max_ratio,
    #         specific_demand=annual_heat_demand_scenario/total_area,
    #         )
    #     assert type(heat_demand_dict) == dict
    #     assert len(heat_demand_dict) == len(gdf_osm)
    #     assert math.isclose(
    #         annual_heat_demand_scenario, 
    #         sum(sum(value) for value in heat_demand_dict.values()),
    #         abs_tol=1e-3,
    #         )
        
    #     # sinusoidal with phase shift
    
    #     heat_demand_dict = heat.heat_demand_dict_by_building_entrance2(
    #         gdf_osm=gdf_osm,
    #         gdf_buildings=gdf_buildings,
    #         number_intervals=number_time_intervals,
    #         time_interval_durations=intraperiod_time_interval_duration,
    #         min_max_ratio=min_to_max_ratio,
    #         specific_demand=annual_heat_demand_scenario/total_area,
    #         phase_shift_radians=math.pi,
    #         )
    #     assert type(heat_demand_dict) == dict
    #     assert len(heat_demand_dict) == len(gdf_osm)
    #     assert math.isclose(
    #         annual_heat_demand_scenario, 
    #         sum(sum(value) for value in heat_demand_dict.values()),
    #         abs_tol=1e-3,
    #         )
        
    #     # predefined deviation gain, positive
    
    #     heat_demand_dict = heat.heat_demand_dict_by_building_entrance2(
    #         gdf_osm=gdf_osm,
    #         gdf_buildings=gdf_buildings,
    #         number_intervals=number_time_intervals,
    #         time_interval_durations=intraperiod_time_interval_duration,
    #         states=states,
    #         specific_demand=annual_heat_demand_scenario/total_area,
    #         deviation_gain=3,
    #         )
    #     assert type(heat_demand_dict) == dict
    #     assert len(heat_demand_dict) == len(gdf_osm)
    #     assert math.isclose(
    #         annual_heat_demand_scenario, 
    #         sum(sum(value) for value in heat_demand_dict.values()),
    #         abs_tol=1e-3,
    #         )
        
    #     # predefined deviation gain, negative
    
    #     heat_demand_dict = heat.heat_demand_dict_by_building_entrance2(
    #         gdf_osm=gdf_osm,
    #         gdf_buildings=gdf_buildings,
    #         number_intervals=number_time_intervals,
    #         time_interval_durations=intraperiod_time_interval_duration,
    #         states=states,
    #         specific_demand=annual_heat_demand_scenario/total_area,
    #         deviation_gain=-3,
    #         )
    #     assert type(heat_demand_dict) == dict
    #     assert len(heat_demand_dict) == len(gdf_osm)
    #     assert math.isclose(
    #         annual_heat_demand_scenario, 
    #         sum(sum(value) for value in heat_demand_dict.values()),
    #         abs_tol=1e-3,
    #         )
        
    #     # optimisation
    
    #     heat_demand_dict = heat.heat_demand_dict_by_building_entrance2(
    #         gdf_osm=gdf_osm,
    #         gdf_buildings=gdf_buildings,
    #         number_intervals=number_time_intervals,
    #         time_interval_durations=intraperiod_time_interval_duration,
    #         states=states,
    #         specific_demand=annual_heat_demand_scenario/total_area,
    #         states_correlate_profile=True,
    #         solver='glpk'
    #         )
    #     assert type(heat_demand_dict) == dict
    #     assert len(heat_demand_dict) == len(gdf_osm)
    #     assert math.isclose(
    #         annual_heat_demand_scenario, 
    #         sum(sum(value) for value in heat_demand_dict.values()),
    #         abs_tol=1e-3,
    #         )
    
    # *************************************************************************
    # *************************************************************************
@@ -51,5 +51,8 @@ class TestProblemUtils:
            error_raised = True
        assert error_raised
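        # note: with pytest, the same intent can be expressed more compactly
        # (hypothetical sketch): with pytest.raises(Exception): <failing call>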
        
    # *************************************************************************
    # *************************************************************************

# *****************************************************************************
# *****************************************************************************