Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • pmag/topupopt
1 result
Show changes
Commits on Source (7)
Showing
with 2735 additions and 2811 deletions
# -*- coding: utf-8 -*-
#from . import mvesipp
\ No newline at end of file
# from . import mvesipp
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
......@@ -5,7 +5,7 @@
import json
import urllib.request
from typing import Tuple # not needed with python 3.9
from typing import Tuple # not needed with python 3.9
# local, external
......@@ -17,22 +17,24 @@ from shapely.geometry import Point
# *****************************************************************************
# URLs
# url prefix to find data using the building entrance id
# url_prefix_entrance = 'https://api.dataforsyningen.dk/adgangsadresser/'
url_prefix_entrance = 'https://api.dataforsyningen.dk/bbrlight/opgange?adgangsadresseid='
url_prefix_entrance = (
"https://api.dataforsyningen.dk/bbrlight/opgange?adgangsadresseid="
)
# url prefix to find BBR building data
# url_prefix_buildings = 'https://api.dataforsyningen.dk/bbrlight/bygninger?id='
url_prefix_buildings = 'https://api.dataforsyningen.dk/bbrlight/bygninger/'
url_prefix_buildings = "https://api.dataforsyningen.dk/bbrlight/bygninger/"
# url prefix to find the building location data
url_prefix_building_point = 'https://api.dataforsyningen.dk/bbrlight/bygningspunkter/'
url_prefix_building_point = "https://api.dataforsyningen.dk/bbrlight/bygningspunkter/"
# *****************************************************************************
# *****************************************************************************
......@@ -69,12 +71,10 @@ BBR_BDG_ENTR_LABELS = [
"Aendr_Funk",
"Ophoert_ts",
"Gyldighedsdato",
"href"]
"href",
]
SELECT_BBR_BDG_ENTR_LABELS = [
"Opgang_id",
"AdgAdr_id",
"Bygning_id"]
SELECT_BBR_BDG_ENTR_LABELS = ["Opgang_id", "AdgAdr_id", "Bygning_id"]
BBR_BDG_POINT_LABELS = [
"ois_id",
......@@ -95,15 +95,16 @@ BBR_BDG_POINT_LABELS = [
"Aendr_Funk",
"Ophoert_ts",
"BygPktKilde",
"koordinater", # 'koordinater' returns a list
"href"]
"koordinater", # 'koordinater' returns a list
"href",
]
SELECT_BBR_BDG_POINT_LABELS = [
"KoorOest",
"KoorNord",
"KoorSystem",
"koordinater" # 'koordinater' returns a list
]
"koordinater", # 'koordinater' returns a list
]
BBR_BDG_LABELS = [
"ois_id",
......@@ -196,8 +197,9 @@ BBR_BDG_LABELS = [
"BygSkadeOmfatFors",
"Gyldighedsdato",
"href",
"ejerskaber", # 'ejerskaber' returns a list
"bygningspunkt"] # 'bygningspunkt' returns a dict
"ejerskaber", # 'ejerskaber' returns a list
"bygningspunkt",
] # 'bygningspunkt' returns a dict
SELECT_BBR_BDG_LABELS = [
"BYG_ANVEND_KODE",
......@@ -220,13 +222,14 @@ SELECT_BBR_BDG_LABELS = [
"VARMEINSTAL_KODE",
"OPVARMNING_KODE",
"VARME_SUPPL_KODE",
"BygPkt_id",]
"BygPkt_id",
]
BBR_CONTAINER_LABELS = {
'bygningspunkt': dict,
'ejerskaber': list,
'koordinater': list,
}
"bygningspunkt": dict,
"ejerskaber": list,
"koordinater": list,
}
# *****************************************************************************
# *****************************************************************************
......@@ -238,16 +241,16 @@ BBR_CONTAINER_LABELS = {
# OPVARMNING_KODE: Arten af det opvarmningsmiddel, der anvendes i eget anlæg
# Translation: Nature of the heating agent used in own system
# Codes:
# Codes:
# OPVARMNING_KODE 1 Elektricitet
# OPVARMNING_KODE 2 Gasværksgas
# OPVARMNING_KODE 3 Flydende brændsel
# OPVARMNING_KODE 4 Fast brændsel
# OPVARMNING_KODE 6 Halm
# OPVARMNING_KODE 7 Naturgas
# OPVARMNING_KODE 9 Andet
# VARME_SUPPL_KODE: Angives når der udover den hovedsagelige varmeinstallation
# OPVARMNING_KODE 9 Andet
# VARME_SUPPL_KODE: Angives når der udover den hovedsagelige varmeinstallation
# tillige opvarmes af en supplerende varmekilde
# Translation: To be indicated when, in addition to the main heating installation,
# it is also heated by a supplementary heat source
......@@ -268,7 +271,7 @@ BBR_CONTAINER_LABELS = {
# anføres koden for den installation, der opvarmer den største delen. Supplerende
# omflyttelige ovne registreres ikke
# Translation: If there are several different heating installations, enter the
# code of the installation that heats the largest part. Additional relocating
# code of the installation that heats the largest part. Additional relocating
# ovens are not registered
# VARMEINSTAL_KODE 1 Fjernvarme/blokvarme
......@@ -283,186 +286,187 @@ BBR_CONTAINER_LABELS = {
# fuel type
label_bbr_fuel_type = 'OPVARMNING_KODE'
label_bbr_fuel_type = "OPVARMNING_KODE"
dict_bbr_fuel_type_codes = {
'1': 'Elektricitet',
'2': 'Gasværksgas',
'3': 'Flydende brændsel',
'4': 'Fast brændsel',
'6': 'Halm',
'7': 'Naturgas',
'9': 'Andet'}
"1": "Elektricitet",
"2": "Gasværksgas",
"3": "Flydende brændsel",
"4": "Fast brændsel",
"6": "Halm",
"7": "Naturgas",
"9": "Andet",
}
# supplementary heating system
label_bbr_extra_heating = 'VARME_SUPPL_KODE'
label_bbr_extra_heating = "VARME_SUPPL_KODE"
dict_bbr_extra_heating_codes = {
'0': 'Ikke oplyst',
'1': 'Varmepumpeanlæg',
'10': 'Biogasanlæg',
'2': 'Ovne til fast eller flydende brændsel',
'3': 'Ovne til flydende brændsel',
'4': 'Solpaneler',
'5': 'Pejs',
'6': 'Gasradiator',
'7': 'Elvarme',
'80': 'Andet',
'90': '(UDFASES) Bygningen har ingen supplerende varme'
}
"0": "Ikke oplyst",
"1": "Varmepumpeanlæg",
"10": "Biogasanlæg",
"2": "Ovne til fast eller flydende brændsel",
"3": "Ovne til flydende brændsel",
"4": "Solpaneler",
"5": "Pejs",
"6": "Gasradiator",
"7": "Elvarme",
"80": "Andet",
"90": "(UDFASES) Bygningen har ingen supplerende varme",
}
# main heating system
label_bbr_heating_system = 'VARMEINSTAL_KODE'
label_bbr_heating_system = "VARMEINSTAL_KODE"
dict_bbr_heating_system_codes = {
'1': 'Fjernvarme/blokvarme',
'2': 'Centralvarme med én fyringsenhed',
'3': 'Ovn til fast og flydende brændsel',
'5': 'Varmepumpe',
'6': 'Centralvarme med to fyringsenheder',
'7': 'Elvarme',
'8': 'Gasradiator',
'9': 'Ingen varmeinstallation',
'99': 'Blandet'
}
"1": "Fjernvarme/blokvarme",
"2": "Centralvarme med én fyringsenhed",
"3": "Ovn til fast og flydende brændsel",
"5": "Varmepumpe",
"6": "Centralvarme med to fyringsenheder",
"7": "Elvarme",
"8": "Gasradiator",
"9": "Ingen varmeinstallation",
"99": "Blandet",
}
# coordinate system
label_bbr_bygningpunkt_koorsys = 'KoorSystem'
label_bbr_bygningpunkt_koorsys = "KoorSystem"
label_bbr_bygningpunkt_koorsys_codes = {
'1': 'System 34',
'2': 'System 45',
'3': 'KP2000 (System 2000)',
'4': 'UTM ED50',
'5': 'WGS 84'
}
"1": "System 34",
"2": "System 45",
"3": "KP2000 (System 2000)",
"4": "UTM ED50",
"5": "WGS 84",
}
# building use
label_bbr_building_uses = 'BYG_ANVEND_KODE' # byganvendelse
label_bbr_building_uses = "BYG_ANVEND_KODE" # byganvendelse
dict_bbr_building_use_codes = {
'110': 'Stuehus til landbrugsejendom',
'120': 'Fritliggende enfamiliehus',
'121': 'Sammenbygget enfamiliehus',
'122': 'Fritliggende enfamiliehus i tæt-lav bebyggelse',
'130': '(UDFASES) Række-, kæde-, eller dobbelthus (lodret adskillelse mellem enhederne).',
'131': 'Række-, kæde- og klyngehus',
'132': 'Dobbelthus',
'140': 'Etagebolig-bygning, flerfamiliehus eller to-familiehus',
'150': 'Kollegium',
'160': 'Boligbygning til døgninstitution',
'185': 'Anneks i tilknytning til helårsbolig.',
'190': 'Anden bygning til helårsbeboelse',
'210': '(UDFASES) Bygning til erhvervsmæssig produktion vedrørende landbrug, gartneri, råstofudvinding o. lign',
'211': 'Stald til svin',
'212': 'Stald til kvæg, får mv.',
'213': 'Stald til fjerkræ',
'214': 'Minkhal',
'215': 'Væksthus',
'216': 'Lade til foder, afgrøder mv.',
'217': 'Maskinhus, garage mv.',
'218': 'Lade til halm, hø mv.',
'219': 'Anden bygning til landbrug mv.',
'220': '(UDFASES) Bygning til erhvervsmæssig produktion vedrørende industri, håndværk m.v. (fabrik, værksted o.lign.)',
'221': 'Bygning til industri med integreret produktionsapparat',
'222': 'Bygning til industri uden integreret produktionsapparat',
'223': 'Værksted',
'229': 'Anden bygning til produktion',
'230': '(UDFASES) El-, gas-, vand- eller varmeværk, forbrændingsanstalt m.v.',
'231': 'Bygning til energiproduktion',
'232': 'Bygning til forsyning- og energidistribution',
'233': 'Bygning til vandforsyning',
'234': 'Bygning til håndtering af affald og spildevand',
'239': 'Anden bygning til energiproduktion og -distribution',
'290': '(UDFASES) Anden bygning til landbrug, industri etc.',
'310': '(UDFASES) Transport- og garageanlæg (fragtmandshal, lufthavnsbygning, banegårdsbygning, parkeringshus). Garage med plads til et eller to køretøjer registreres med anvendelseskode 910',
'311': 'Bygning til jernbane- og busdrift',
'312': 'Bygning til luftfart',
'313': 'Bygning til parkering- og transportanlæg',
'314': 'Bygning til parkering af flere end to køretøjer i tilknytning til boliger',
'315': 'Havneanlæg',
'319': 'Andet transportanlæg',
'320': '(UDFASES) Bygning til kontor, handel, lager, herunder offentlig administration',
'321': 'Bygning til kontor',
'322': 'Bygning til detailhandel',
'323': 'Bygning til lager',
'324': 'Butikscenter',
'325': 'Tankstation',
'329': 'Anden bygning til kontor, handel og lager',
'330': '(UDFASES) Bygning til hotel, restaurant, vaskeri, frisør og anden servicevirksomhed',
'331': 'Hotel, kro eller konferencecenter med overnatning',
'332': 'Bed & breakfast mv.',
'333': 'Restaurant, café og konferencecenter uden overnatning',
'334': 'Privat servicevirksomhed som frisør, vaskeri, netcafé mv.',
'339': 'Anden bygning til serviceerhverv',
'390': '(UDFASES) Anden bygning til transport, handel etc',
'410': '(UDFASES) Bygning til biograf, teater, erhvervsmæssig udstilling, bibliotek, museum, kirke o. lign.',
'411': 'Biograf, teater, koncertsted mv.',
'412': 'Museum',
'413': 'Bibliotek',
'414': 'Kirke eller anden bygning til trosudøvelse for statsanerkendte trossamfund',
'415': 'Forsamlingshus',
'416': 'Forlystelsespark',
'419': 'Anden bygning til kulturelle formål',
'420': '(UDFASES) Bygning til undervisning og forskning (skole, gymnasium, forskningslabratorium o.lign.).',
'421': 'Grundskole',
'422': 'Universitet',
'429': 'Anden bygning til undervisning og forskning',
'430': '(UDFASES) Bygning til hospital, sygehjem, fødeklinik o. lign.',
'431': 'Hospital og sygehus',
'432': 'Hospice, behandlingshjem mv.',
'433': 'Sundhedscenter, lægehus, fødeklinik mv.',
'439': 'Anden bygning til sundhedsformål',
'440': '(UDFASES) Bygning til daginstitution',
'441': 'Daginstitution',
'442': 'Servicefunktion på døgninstitution',
'443': 'Kaserne',
'444': 'Fængsel, arresthus mv.',
'449': 'Anden bygning til institutionsformål',
'490': '(UDFASES) Bygning til anden institution, herunder kaserne, fængsel o. lign.',
'510': 'Sommerhus',
'520': '(UDFASES) Bygning til feriekoloni, vandrehjem o.lign. bortset fra sommerhus',
'521': 'Feriecenter, center til campingplads mv.',
'522': 'Bygning med ferielejligheder til erhvervsmæssig udlejning',
'523': 'Bygning med ferielejligheder til eget brug',
'529': 'Anden bygning til ferieformål',
'530': '(UDFASES) Bygning i forbindelse med idrætsudøvelse (klubhus, idrætshal, svømmehal o. lign.)',
'531': 'Klubhus i forbindelse med fritid og idræt',
'532': 'Svømmehal',
'533': 'Idrætshal',
'534': 'Tribune i forbindelse med stadion',
'535': 'Bygning til træning og opstaldning af heste',
'539': 'Anden bygning til idrætformål',
'540': 'Kolonihavehus',
'585': 'Anneks i tilknytning til fritids- og sommerhus',
'590': 'Anden bygning til fritidsformål',
'910': 'Garage (med plads til et eller to køretøjer)',
'920': 'Carport',
'930': 'Udhus',
'940': 'Drivhus',
'950': 'Fritliggende overdækning',
'960': 'Fritliggende udestue',
'970': 'Tiloversbleven landbrugsbygning',
'990': 'Faldefærdig bygning',
'999': 'Ukendt bygning'
}
"110": "Stuehus til landbrugsejendom",
"120": "Fritliggende enfamiliehus",
"121": "Sammenbygget enfamiliehus",
"122": "Fritliggende enfamiliehus i tæt-lav bebyggelse",
"130": "(UDFASES) Række-, kæde-, eller dobbelthus (lodret adskillelse mellem enhederne).",
"131": "Række-, kæde- og klyngehus",
"132": "Dobbelthus",
"140": "Etagebolig-bygning, flerfamiliehus eller to-familiehus",
"150": "Kollegium",
"160": "Boligbygning til døgninstitution",
"185": "Anneks i tilknytning til helårsbolig.",
"190": "Anden bygning til helårsbeboelse",
"210": "(UDFASES) Bygning til erhvervsmæssig produktion vedrørende landbrug, gartneri, råstofudvinding o. lign",
"211": "Stald til svin",
"212": "Stald til kvæg, får mv.",
"213": "Stald til fjerkræ",
"214": "Minkhal",
"215": "Væksthus",
"216": "Lade til foder, afgrøder mv.",
"217": "Maskinhus, garage mv.",
"218": "Lade til halm, hø mv.",
"219": "Anden bygning til landbrug mv.",
"220": "(UDFASES) Bygning til erhvervsmæssig produktion vedrørende industri, håndværk m.v. (fabrik, værksted o.lign.)",
"221": "Bygning til industri med integreret produktionsapparat",
"222": "Bygning til industri uden integreret produktionsapparat",
"223": "Værksted",
"229": "Anden bygning til produktion",
"230": "(UDFASES) El-, gas-, vand- eller varmeværk, forbrændingsanstalt m.v.",
"231": "Bygning til energiproduktion",
"232": "Bygning til forsyning- og energidistribution",
"233": "Bygning til vandforsyning",
"234": "Bygning til håndtering af affald og spildevand",
"239": "Anden bygning til energiproduktion og -distribution",
"290": "(UDFASES) Anden bygning til landbrug, industri etc.",
"310": "(UDFASES) Transport- og garageanlæg (fragtmandshal, lufthavnsbygning, banegårdsbygning, parkeringshus). Garage med plads til et eller to køretøjer registreres med anvendelseskode 910",
"311": "Bygning til jernbane- og busdrift",
"312": "Bygning til luftfart",
"313": "Bygning til parkering- og transportanlæg",
"314": "Bygning til parkering af flere end to køretøjer i tilknytning til boliger",
"315": "Havneanlæg",
"319": "Andet transportanlæg",
"320": "(UDFASES) Bygning til kontor, handel, lager, herunder offentlig administration",
"321": "Bygning til kontor",
"322": "Bygning til detailhandel",
"323": "Bygning til lager",
"324": "Butikscenter",
"325": "Tankstation",
"329": "Anden bygning til kontor, handel og lager",
"330": "(UDFASES) Bygning til hotel, restaurant, vaskeri, frisør og anden servicevirksomhed",
"331": "Hotel, kro eller konferencecenter med overnatning",
"332": "Bed & breakfast mv.",
"333": "Restaurant, café og konferencecenter uden overnatning",
"334": "Privat servicevirksomhed som frisør, vaskeri, netcafé mv.",
"339": "Anden bygning til serviceerhverv",
"390": "(UDFASES) Anden bygning til transport, handel etc",
"410": "(UDFASES) Bygning til biograf, teater, erhvervsmæssig udstilling, bibliotek, museum, kirke o. lign.",
"411": "Biograf, teater, koncertsted mv.",
"412": "Museum",
"413": "Bibliotek",
"414": "Kirke eller anden bygning til trosudøvelse for statsanerkendte trossamfund",
"415": "Forsamlingshus",
"416": "Forlystelsespark",
"419": "Anden bygning til kulturelle formål",
"420": "(UDFASES) Bygning til undervisning og forskning (skole, gymnasium, forskningslabratorium o.lign.).",
"421": "Grundskole",
"422": "Universitet",
"429": "Anden bygning til undervisning og forskning",
"430": "(UDFASES) Bygning til hospital, sygehjem, fødeklinik o. lign.",
"431": "Hospital og sygehus",
"432": "Hospice, behandlingshjem mv.",
"433": "Sundhedscenter, lægehus, fødeklinik mv.",
"439": "Anden bygning til sundhedsformål",
"440": "(UDFASES) Bygning til daginstitution",
"441": "Daginstitution",
"442": "Servicefunktion på døgninstitution",
"443": "Kaserne",
"444": "Fængsel, arresthus mv.",
"449": "Anden bygning til institutionsformål",
"490": "(UDFASES) Bygning til anden institution, herunder kaserne, fængsel o. lign.",
"510": "Sommerhus",
"520": "(UDFASES) Bygning til feriekoloni, vandrehjem o.lign. bortset fra sommerhus",
"521": "Feriecenter, center til campingplads mv.",
"522": "Bygning med ferielejligheder til erhvervsmæssig udlejning",
"523": "Bygning med ferielejligheder til eget brug",
"529": "Anden bygning til ferieformål",
"530": "(UDFASES) Bygning i forbindelse med idrætsudøvelse (klubhus, idrætshal, svømmehal o. lign.)",
"531": "Klubhus i forbindelse med fritid og idræt",
"532": "Svømmehal",
"533": "Idrætshal",
"534": "Tribune i forbindelse med stadion",
"535": "Bygning til træning og opstaldning af heste",
"539": "Anden bygning til idrætformål",
"540": "Kolonihavehus",
"585": "Anneks i tilknytning til fritids- og sommerhus",
"590": "Anden bygning til fritidsformål",
"910": "Garage (med plads til et eller to køretøjer)",
"920": "Carport",
"930": "Udhus",
"940": "Drivhus",
"950": "Fritliggende overdækning",
"960": "Fritliggende udestue",
"970": "Tiloversbleven landbrugsbygning",
"990": "Faldefærdig bygning",
"999": "Ukendt bygning",
}
# floor types
label_bbr_floor_types = 'ETAGER_AFVIG_KODE'
label_bbr_floor_types = "ETAGER_AFVIG_KODE"
dict_bbr_floor_type_codes = {
'0': 'Bygningen har ikke afvigende etager',
'10': 'Bygningen har afvigende etager',
'11': 'Bygningen indeholder hems',
'12': 'Bygningen indeholder dobbelt højt rum',
'13': 'Bygningen indeholder indskudt etage'
}
"0": "Bygningen har ikke afvigende etager",
"10": "Bygningen har afvigende etager",
"11": "Bygningen indeholder hems",
"12": "Bygningen indeholder dobbelt højt rum",
"13": "Bygningen indeholder indskudt etage",
}
# all codes
bbr_codes = {
......@@ -471,34 +475,34 @@ bbr_codes = {
label_bbr_heating_system: dict_bbr_heating_system_codes,
label_bbr_bygningpunkt_koorsys: label_bbr_bygningpunkt_koorsys_codes,
label_bbr_building_uses: dict_bbr_building_use_codes,
label_bbr_floor_types: dict_bbr_floor_type_codes
}
label_bbr_floor_types: dict_bbr_floor_type_codes,
}
# BBR labels
# label under which the building id can be found in the building entrance obj.
label_bbr_opgang_id = 'Opgang_id'
label_bbr_opgang_id = "Opgang_id"
label_bbr_entrance_id = 'AdgAdr_id'
label_bbr_entrance_id = "AdgAdr_id"
label_bbr_building_id = 'Bygning_id'
label_bbr_building_id = "Bygning_id"
label_bbr_bygningpunkt = 'bygningspunkt'
label_bbr_bygningpunkt = "bygningspunkt"
label_bbr_bygningpunkt_coord = 'koordinater'
label_bbr_bygningpunkt_coord = "koordinater"
label_bbr_opgang_id = 'Opgang_id'
label_bbr_opgang_id = "Opgang_id"
label_bbr_entrance_id = 'AdgAdr_id'
label_bbr_entrance_id = "AdgAdr_id"
label_bbr_building_id = 'Bygning_id'
label_bbr_building_id = "Bygning_id"
label_bbr_building_area = 'BYG_BOLIG_ARL_SAML'
label_bbr_building_area = "BYG_BOLIG_ARL_SAML"
label_bbr_housing_area = 'BYG_BEBYG_ARL'
label_bbr_housing_area = "BYG_BEBYG_ARL"
label_bbr_number_floors = 'ETAGER_ANT'
label_bbr_number_floors = "ETAGER_ANT"
list_labels_bbr = [
label_bbr_building_id,
......@@ -508,286 +512,280 @@ list_labels_bbr = [
label_bbr_building_uses,
label_bbr_number_floors,
label_bbr_floor_types,
label_bbr_extra_heating
]
label_bbr_extra_heating,
]
# *****************************************************************************
# *****************************************************************************
def get_bbr_building_data_geodataframe(
building_entrance_ids: list,
selected_bbr_bdg_entrance_labels: list = SELECT_BBR_BDG_ENTR_LABELS,
selected_bbr_building_labels: list = SELECT_BBR_BDG_LABELS,
selected_bbr_building_point_labels: list = SELECT_BBR_BDG_POINT_LABELS
) -> Tuple[GeoDataFrame,list]:
def get_bbr_building_data_geodataframe(
building_entrance_ids: list,
selected_bbr_bdg_entrance_labels: list = SELECT_BBR_BDG_ENTR_LABELS,
selected_bbr_building_labels: list = SELECT_BBR_BDG_LABELS,
selected_bbr_building_point_labels: list = SELECT_BBR_BDG_POINT_LABELS,
) -> Tuple[GeoDataFrame, list]:
# *************************************************************************
# *************************************************************************
# get data about building entrances
dict_building_entrances, list_failures = fetch_building_entrance_data(
building_entrance_ids
)
)
if selected_bbr_bdg_entrance_labels == None:
# includes all labels
selected_bbr_bdg_entrance_labels = BBR_BDG_ENTR_LABELS
list_entries = [
[value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_bdg_entrance_labels]
for key, value in dict_building_entrances.items()]
[
value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_bdg_entrance_labels
]
for key, value in dict_building_entrances.items()
]
df_building_entrances = DataFrame(
data=list_entries,
columns=selected_bbr_bdg_entrance_labels,
index=dict_building_entrances.keys()
)
index=dict_building_entrances.keys(),
)
# *************************************************************************
# *************************************************************************
# get data about buildings
dict_buildings = fetch_building_data(
df_building_entrances.index
)
dict_buildings = fetch_building_data(df_building_entrances.index)
if selected_bbr_building_labels == None:
# includes all labels
selected_bbr_building_labels = BBR_BDG_LABELS
# create dataframe with building data
list_entries = [
[value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_building_labels]
for key, value in dict_buildings.items()]
[value[bbr_key] for bbr_key in value if bbr_key in selected_bbr_building_labels]
for key, value in dict_buildings.items()
]
df_buildings = DataFrame(
data=list_entries,
columns=selected_bbr_building_labels,
index=dict_buildings.keys()
)
index=dict_buildings.keys(),
)
# *************************************************************************
# *************************************************************************
# get building point data
if selected_bbr_building_point_labels == None:
# includes all labels
selected_bbr_building_point_labels = BBR_BDG_POINT_LABELS
dict_buildings_points = {
building_entrance_id: #(
dict_buildings[
building_entrance_id][
label_bbr_bygningpunkt]
#if building_entrance_id in dict_buildings else None)
building_entrance_id: dict_buildings[building_entrance_id][ # (
label_bbr_bygningpunkt
]
# if building_entrance_id in dict_buildings else None)
for building_entrance_id in dict_building_entrances
if building_entrance_id in dict_buildings # excludes failures
}
if building_entrance_id in dict_buildings # excludes failures
}
# create dataframe with building point data
list_entries = [
[value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_building_point_labels]
for key, value in dict_buildings_points.items()]
[
value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_building_point_labels
]
for key, value in dict_buildings_points.items()
]
df_building_points = DataFrame(
data=list_entries,
columns=selected_bbr_building_point_labels,
index=dict_buildings_points.keys()
)
index=dict_buildings_points.keys(),
)
# merge all three, two at a time
df_buildings = merge(df_buildings,
df_building_points,
right_index=True,
left_index=True,
suffixes=(None,"_x")) # adds "_x" to duplicate columns
df_buildings = merge(df_buildings,
df_building_entrances,
right_index=True,
left_index=True,
suffixes=(None,"_y")) # adds "_y" to duplicate columns
df_buildings = merge(
df_buildings,
df_building_points,
right_index=True,
left_index=True,
suffixes=(None, "_x"),
) # adds "_x" to duplicate columns
df_buildings = merge(
df_buildings,
df_building_entrances,
right_index=True,
left_index=True,
suffixes=(None, "_y"),
) # adds "_y" to duplicate columns
# *************************************************************************
# *************************************************************************
# create a geodataframe whose geometry is that of building points
# specify the coordinate system
coordinate_system = "EPSG:4326" # latitude, longitude
key_bbr_coordinate_system = 5 # WGS 84 = EPSG:4326
coordinate_system = "EPSG:4326" # latitude, longitude
key_bbr_coordinate_system = 5 # WGS 84 = EPSG:4326
# raise an error if different coordinates systems are being used
for building_entrance_id in dict_building_entrances:
if dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys] != key_bbr_coordinate_system:
raise NotImplementedError('Only WGS 84 coordinates can be used.')
if (
dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys
]
!= key_bbr_coordinate_system
):
raise NotImplementedError("Only WGS 84 coordinates can be used.")
# create a dictionary with the building point geometries (i.e. points)
dict_building_point_geometry = {
building_entrance_id: Point(
dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_coord]
)
dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_coord
]
)
for building_entrance_id in dict_building_entrances
if dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys] == key_bbr_coordinate_system
}
if dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys
]
== key_bbr_coordinate_system
}
# create geodataframe
gdf_buildings = GeoDataFrame(
data=df_buildings,
geometry=GeoSeries(data=dict_building_point_geometry),
crs=coordinate_system
)
crs=coordinate_system,
)
return gdf_buildings, list_failures
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict,
list]:
def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict, list]:
# *************************************************************************
# *************************************************************************
# retrieve data about each node identified through OSM
dict_building_entrances = {}
list_failures = []
# # determine the number of osm entries
# number_building_entrance_ids = len(building_entrance_ids)
# for each building entrance id
for building_entrance_id in building_entrance_ids:
for building_entrance_id in building_entrance_ids:
# compose the url from which to get bbr data associated with the id
_url = url_prefix_entrance+building_entrance_id
_url = url_prefix_entrance + building_entrance_id
try:
# retrieve the building entrance data
with urllib.request.urlopen(_url) as response:
# parse the data
bbr_entrance_data_json = json.loads(
response.read().decode('utf-8')
)
bbr_entrance_data_json = json.loads(response.read().decode("utf-8"))
# store the data
if len(bbr_entrance_data_json) != 0:
for bbr_entry in bbr_entrance_data_json:
dict_building_entrances[
bbr_entry[label_bbr_building_id]
] = bbr_entry
] = bbr_entry
else:
list_failures.append(building_entrance_id)
response.close()
except Exception:
response.close()
# *************************************************************************
# *************************************************************************
return dict_building_entrances, list_failures
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def fetch_building_data(building_codes: list):
# *************************************************************************
# *************************************************************************
# get data about each specific building
dict_buildings = {}
# for each building id
# for each building id
for building_id in building_codes:
# compose a url with it
_url = url_prefix_buildings+building_id
_url = url_prefix_buildings + building_id
# try statement
try:
# retrieve that data
with urllib.request.urlopen(_url) as response:
# parse the data
bbr_data = json.loads(response.read().decode('utf-8'))
bbr_data = json.loads(response.read().decode("utf-8"))
dict_buildings[building_id] = bbr_data
response.close()
except Exception:
response.close()
# *************************************************************************
# *************************************************************************
return dict_buildings
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
......@@ -12,22 +12,19 @@ from .bbr import label_bbr_entrance_id, label_bbr_housing_area
# labels
selected_bbr_adgang_labels = [
"Opgang_id",
"AdgAdr_id",
"Bygning_id"]
selected_bbr_adgang_labels = ["Opgang_id", "AdgAdr_id", "Bygning_id"]
selected_bbr_building_point_labels = [
"KoorOest",
"KoorNord",
"KoorSystem",
#"koordinater" # corresponds to a list, which cannot be written to a file
]
# "koordinater" # corresponds to a list, which cannot be written to a file
]
selected_bbr_building_labels = [
"BYG_ANVEND_KODE",
"OPFOERELSE_AAR", # new
"OMBYG_AAR", # new
"OPFOERELSE_AAR", # new
"OMBYG_AAR", # new
"BYG_ARL_SAML",
"BYG_BOLIG_ARL_SAML",
"ERHV_ARL_SAML",
......@@ -47,145 +44,138 @@ selected_bbr_building_labels = [
"VARMEINSTAL_KODE",
"OPVARMNING_KODE",
"VARME_SUPPL_KODE",
"BygPkt_id"]
"BygPkt_id",
]
# label under which building entrance ids can be found in OSM
label_osm_entrance_id = 'osak:identifier'
label_osm_entrance_id = "osak:identifier"
# *****************************************************************************
# *****************************************************************************
def heat_demand_dict_by_building_entrance(
gdf_osm: GeoDataFrame,
gdf_buildings: GeoDataFrame,
number_intervals: int,
time_interval_durations: list,
bdg_specific_demand: dict,
bdg_ratio_min_max: dict,
bdg_demand_phase_shift: dict = None,
key_osm_entr_id: str = label_osm_entrance_id,
key_bbr_entr_id: str = label_bbr_entrance_id,
avg_state: list = None,
state_correlates_with_output: bool = False
) -> dict:
gdf_osm: GeoDataFrame,
gdf_buildings: GeoDataFrame,
number_intervals: int,
time_interval_durations: list,
bdg_specific_demand: dict,
bdg_ratio_min_max: dict,
bdg_demand_phase_shift: dict = None,
key_osm_entr_id: str = label_osm_entrance_id,
key_bbr_entr_id: str = label_bbr_entrance_id,
avg_state: list = None,
state_correlates_with_output: bool = False,
) -> dict:
# initialise dict for each building entrance
demand_dict = {}
# for each building entrance
for osm_index in gdf_osm.index:
# initialise dict for each building consumption point
heat_demand_profiles = []
# find the indexes for each building leading to the curr. cons. point
building_indexes = (
gdf_buildings[
gdf_buildings[key_bbr_entr_id] ==
gdf_osm.loc[osm_index][key_osm_entr_id]
].index
)
building_indexes = gdf_buildings[
gdf_buildings[key_bbr_entr_id] == gdf_osm.loc[osm_index][key_osm_entr_id]
].index
# for each building
for building_index in building_indexes:
# get relevant data
# base_load_avg_ratio = 0.3
# specific_demand = 107 # kWh/m2/year
area = gdf_buildings.loc[building_index][label_bbr_housing_area]
# estimate its demand
if type(avg_state) == type(None):
# ignore states
heat_demand_profiles.append(
np.array(
discrete_sinusoid_matching_integral(
bdg_specific_demand[building_index]*area,
time_interval_durations=time_interval_durations,
bdg_ratio_min_max=bdg_ratio_min_max[building_index],
bdg_specific_demand[building_index] * area,
time_interval_durations=time_interval_durations,
min_to_max_ratio=bdg_ratio_min_max[building_index],
phase_shift_radians=(
bdg_demand_phase_shift[building_index]
# bdg_demand_phase_shift_amplitude*np.random.random()
# if (type(bdg_demand_phase_shift_amplitude) ==
# bdg_demand_phase_shift_amplitude*np.random.random()
# if (type(bdg_demand_phase_shift_amplitude) ==
# type(None)) else None
)
)
),
)
)
)
else:
# states matter
heat_demand_profiles.append(
np.array(
create_profile_using_time_weighted_state(
integration_result=(
bdg_specific_demand[building_index]*area
),
avg_state=avg_state,
time_interval_durations=time_interval_durations,
bdg_ratio_min_max=bdg_ratio_min_max[building_index],
state_correlates_with_output=state_correlates_with_output
)
bdg_specific_demand[building_index] * area
),
avg_state=avg_state,
time_interval_durations=time_interval_durations,
min_to_max_ratio=bdg_ratio_min_max[building_index],
state_correlates_with_output=state_correlates_with_output,
)
)
)
# *****************************************************************
# add the profiles, time step by time step
if len(heat_demand_profiles) == 0:
final_profile = []
else:
final_profile = sum(profile
for profile in heat_demand_profiles)
final_profile = sum(profile for profile in heat_demand_profiles)
# *********************************************************************
# store the demand profile
demand_dict[osm_index] = final_profile
# *********************************************************************
# return
return demand_dict
# *****************************************************************************
# *****************************************************************************
def total_heating_area(
    gdf_osm: "GeoDataFrame",
    gdf_buildings: "GeoDataFrame",
    key_osm_entr_id: str = None,
    key_bbr_entr_id: str = None,
    key_bbr_housing_area: str = None,
) -> float:
    """
    Return the total housing area of all buildings served by the network.

    For each consumption point in gdf_osm, every row of gdf_buildings whose
    BBR entrance id matches the point's OSM entrance id contributes its
    housing area to the total.

    Parameters
    ----------
    gdf_osm : GeoDataFrame
        The consumption points; must contain the entrance id column.
    gdf_buildings : GeoDataFrame
        The buildings; must contain the entrance id and housing area columns.
    key_osm_entr_id : str, optional
        Column in gdf_osm holding the entrance id. Defaults to the
        module-level label_osm_entrance_id.
    key_bbr_entr_id : str, optional
        Column in gdf_buildings holding the entrance id. Defaults to the
        module-level label_bbr_entrance_id.
    key_bbr_housing_area : str, optional
        Column in gdf_buildings holding the housing area. Defaults to the
        module-level label_bbr_housing_area.

    Returns
    -------
    float
        The summed housing area over all matched buildings.
    """
    # resolve the defaults lazily so the module-level labels remain the
    # out-of-the-box behaviour; previously the key parameters were
    # accepted but silently ignored in favour of the module constants
    if key_osm_entr_id is None:
        key_osm_entr_id = label_osm_entrance_id
    if key_bbr_entr_id is None:
        key_bbr_entr_id = label_bbr_entrance_id
    if key_bbr_housing_area is None:
        key_bbr_housing_area = label_bbr_housing_area
    area = 0
    for osm_index in gdf_osm.index:
        # find the buildings connected to the current consumption point
        building_indexes = gdf_buildings[
            gdf_buildings[key_bbr_entr_id]
            == gdf_osm.loc[osm_index][key_osm_entr_id]
        ].index
        # accumulate the housing area of each matched building
        for building_index in building_indexes:
            area += gdf_buildings.loc[building_index][key_bbr_housing_area]
    return area
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# -*- coding: utf-8 -*-
......@@ -24,29 +24,29 @@ from ...data.finance.utils import ArcInvestments
# constants
# dictionary keys used to describe district heating trench (DHT) options
KEY_DHT_OPTIONS_OBJ = "trench"  # the trench object itself
KEY_DHT_LENGTH = "length"  # the trench length
KEY_DHT_UCF = "capacity_unit_conversion_factor"
# dictionary keys used to describe heat house transfer (HHT) options
KEY_HHT_DHT_PIPES = "pipes"
KEY_HHT_STD_PIPES = "pipe_tuple"
# *****************************************************************************
# *****************************************************************************
class PipeTrenchOptions(ArcsWithoutProportionalLosses):
"A class for defining investments in district heating trenches."
def __init__(
self,
trench: SupplyReturnPipeTrench,
name: str,
length: float,
specific_capacity_cost: float or list = None,
minimum_cost: list or tuple = None, # default: pipes
capacity_is_instantaneous: bool = False,
unit_conversion_factor: float = 1.0,
):
self,
trench: SupplyReturnPipeTrench,
name: str,
length: float,
specific_capacity_cost: float or list = None,
minimum_cost: list or tuple = None, # default: pipes
capacity_is_instantaneous: bool = False,
unit_conversion_factor: float = 1.0,
):
# store the unit conversion
self.unit_conversion_factor = unit_conversion_factor
# keep the trench object
......@@ -54,36 +54,33 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
# keep the trench length
self.length = (
[length for i in range(trench.number_options())]
if trench.vector_mode else
length
)
if trench.vector_mode
else length
)
# determine the rated heat capacity
rhc = trench.rated_heat_capacity(
unit_conversion_factor=unit_conversion_factor
)
rhc = trench.rated_heat_capacity(unit_conversion_factor=unit_conversion_factor)
# initialise the object using the mother class
ArcsWithoutProportionalLosses.__init__(
self,
name=name,
static_loss=None,
capacity=[rhc] if isinstance(rhc, Real) else rhc,
minimum_cost=minimum_cost,
self,
name=name,
static_loss=None,
capacity=[rhc] if isinstance(rhc, Real) else rhc,
minimum_cost=minimum_cost,
specific_capacity_cost=(
0
if type(specific_capacity_cost) == type(None) else
specific_capacity_cost
),
capacity_is_instantaneous=False
)
if type(specific_capacity_cost) == type(None)
else specific_capacity_cost
),
capacity_is_instantaneous=False,
)
# initialise the minimum cost
if type(minimum_cost) == type(None):
self.set_minimum_cost()
# *************************************************************************
# *************************************************************************
def set_minimum_cost(self, minimum_cost = None):
def set_minimum_cost(self, minimum_cost=None):
# minimum arc cost
# if no external minimum cost list was provided, calculate it
if type(minimum_cost) == type(None):
......@@ -91,22 +88,21 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
if self.trench.vector_mode:
# multiple options
self.minimum_cost = tuple(
(pipe.sp*length # twin pipes: one twin pipe
if self.trench.twin_pipes else
pipe.sp*length*2) # single pipes: two single pipes
for pipe, length in zip(
self.trench.supply_pipe,
self.length
)
)
else: # only one option
self.minimum_cost = (self.trench.supply_pipe.sp*self.length,)
else: # use an external minimum cost
(
pipe.sp * length # twin pipes: one twin pipe
if self.trench.twin_pipes
else pipe.sp * length * 2
) # single pipes: two single pipes
for pipe, length in zip(self.trench.supply_pipe, self.length)
)
else: # only one option
self.minimum_cost = (self.trench.supply_pipe.sp * self.length,)
else: # use an external minimum cost
self.minimum_cost = tuple(minimum_cost)
# *************************************************************************
# *************************************************************************
def set_capacity(self, **kwargs):
# retrieve the rated heat capacity
rhc = self.trench.rated_heat_capacity(**kwargs)
......@@ -116,56 +112,52 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
else:
# one option, rhc is one value
self.capacity = (rhc,)
# *************************************************************************
# *************************************************************************
def set_static_losses(
self,
scenario_key,
ground_thermal_conductivity: float or list,
ground_air_heat_transfer_coefficient: float or list,
time_interval_duration: float or list,
temperature_surroundings: float or list,
length: float or list = None,
unit_conversion_factor: float = None,
**kwargs):
self,
scenario_key,
ground_thermal_conductivity: float or list,
ground_air_heat_transfer_coefficient: float or list,
time_interval_duration: float or list,
temperature_surroundings: float or list,
length: float or list = None,
unit_conversion_factor: float = None,
**kwargs
):
hts = self.trench.heat_transfer_surroundings(
ground_thermal_conductivity=ground_thermal_conductivity,
ground_air_heat_transfer_coefficient=(
ground_air_heat_transfer_coefficient),
ground_air_heat_transfer_coefficient=(ground_air_heat_transfer_coefficient),
time_interval_duration=time_interval_duration,
temperature_surroundings=temperature_surroundings,
length=(
self.length
if type(length) == type(None) else
length
),
length=(self.length if type(length) == type(None) else length),
unit_conversion_factor=(
self.unit_conversion_factor
if type(unit_conversion_factor) == type(None) else
unit_conversion_factor
),
**kwargs)
self.unit_conversion_factor
if type(unit_conversion_factor) == type(None)
else unit_conversion_factor
),
**kwargs
)
if self.trench.vector_mode:
# multiple options: hts is a vector
if (hasattr(self, "static_loss") and
type(self.static_loss) != type(None)):
if hasattr(self, "static_loss") and type(self.static_loss) != type(None):
# update the static loss dictionary
if type(hts[0]) == list:
# multiple time intervals
self.static_loss.update({
(h, scenario_key, k): hts[h][k]
for h, hts_h in enumerate(hts)
for k, hts_hk in enumerate(hts_h)
})
else: # not a list: one time interval
self.static_loss.update({
(h, scenario_key, 0): hts[h]
for h, hts_h in enumerate(hts)
})
self.static_loss.update(
{
(h, scenario_key, k): hts[h][k]
for h, hts_h in enumerate(hts)
for k, hts_hk in enumerate(hts_h)
}
)
else: # not a list: one time interval
self.static_loss.update(
{(h, scenario_key, 0): hts[h] for h, hts_h in enumerate(hts)}
)
else:
# no static loss dictionary, create it
if type(hts[0]) == list:
......@@ -174,59 +166,52 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
(h, scenario_key, k): hts[h][k]
for h, hts_h in enumerate(hts)
for k, hts_hk in enumerate(hts_h)
}
else: # not a list: one time interval
}
else: # not a list: one time interval
self.static_loss = {
(h, scenario_key, 0): hts[h]
for h, hts_h in enumerate(hts)
}
(h, scenario_key, 0): hts[h] for h, hts_h in enumerate(hts)
}
else:
# one option: hts might be a number
if (hasattr(self, "static_loss") and
type(self.static_loss) != type(None)):
if hasattr(self, "static_loss") and type(self.static_loss) != type(None):
# update the static loss dictionary
if not isinstance(hts, Real):
# multiple time intervals
self.static_loss.update({
(0, scenario_key, k): hts[k]
for k, hts_k in enumerate(hts)
})
else: # not a list: one time interval
self.static_loss.update({
(0, scenario_key, 0): hts
})
self.static_loss.update(
{(0, scenario_key, k): hts[k] for k, hts_k in enumerate(hts)}
)
else: # not a list: one time interval
self.static_loss.update({(0, scenario_key, 0): hts})
else:
# no static loss dictionary, create it
if not isinstance(hts, Real):
# multiple time intervals
self.static_loss = {
(0, scenario_key, k): hts_k
for k, hts_k in enumerate(hts)
}
else: # not a list: one time interval
self.static_loss = {
(0, scenario_key, 0): hts
}
(0, scenario_key, k): hts_k for k, hts_k in enumerate(hts)
}
else: # not a list: one time interval
self.static_loss = {(0, scenario_key, 0): hts}
# *****************************************************************************
# *****************************************************************************
class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
"A class for defining investments in district heating trenches."
def __init__(
self,
trench: SupplyReturnPipeTrench,
name: str,
length: float,
investments: tuple,
static_loss: dict = None,
specific_capacity_cost: float or list = None,
capacity_is_instantaneous: bool = False,
unit_conversion_factor: float = 1.0,
**kwargs
):
self,
trench: SupplyReturnPipeTrench,
name: str,
length: float,
investments: tuple,
static_loss: dict = None,
specific_capacity_cost: float or list = None,
capacity_is_instantaneous: bool = False,
unit_conversion_factor: float = 1.0,
**kwargs
):
# store the unit conversion
self.unit_conversion_factor = unit_conversion_factor
# keep the trench object
......@@ -234,36 +219,34 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# keep the trench length
self.length = (
[length for i in range(trench.number_options())]
if trench.vector_mode else
length
)
if trench.vector_mode
else length
)
# determine the rated heat capacity
rhc = trench.rated_heat_capacity(
unit_conversion_factor=unit_conversion_factor
)
rhc = trench.rated_heat_capacity(unit_conversion_factor=unit_conversion_factor)
# initialise the object using the mother class
ArcInvestments.__init__(
self,
investments=investments,
name=name,
efficiency=None,
efficiency_reverse=None,
self,
investments=investments,
name=name,
efficiency=None,
efficiency_reverse=None,
static_loss=static_loss,
capacity=[rhc] if isinstance(rhc, Real) else rhc,
specific_capacity_cost=(
0
if type(specific_capacity_cost) == type(None) else
specific_capacity_cost
),
capacity_is_instantaneous=False,
validate=False
)
if type(specific_capacity_cost) == type(None)
else specific_capacity_cost
),
capacity_is_instantaneous=False,
validate=False,
)
# # *************************************************************************
# # *************************************************************************
# def set_minimum_cost(self, minimum_cost = None):
# # minimum arc cost
# # if no external minimum cost list was provided, calculate it
# if type(minimum_cost) == type(None):
......@@ -272,10 +255,10 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# # multiple options
# self.minimum_cost = tuple(
# (pipe.sp*length # twin pipes: one twin pipe
# if self.trench.twin_pipes else
# if self.trench.twin_pipes else
# pipe.sp*length*2) # single pipes: two single pipes
# for pipe, length in zip(
# self.trench.supply_pipe,
# self.trench.supply_pipe,
# self.length
# )
# )
......@@ -283,10 +266,10 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# self.minimum_cost = (self.trench.supply_pipe.sp*self.length,)
# else: # use an external minimum cost
# self.minimum_cost = tuple(minimum_cost)
# # *************************************************************************
# # *************************************************************************
# def set_capacity(self, **kwargs):
# # retrieve the rated heat capacity
# rhc = self.trench.rated_heat_capacity(**kwargs)
......@@ -296,12 +279,12 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# else:
# # one option, rhc is one value
# self.capacity = (rhc,)
# # *************************************************************************
# # *************************************************************************
# def set_static_losses(
# self,
# self,
# scenario_key,
# ground_thermal_conductivity: float or list,
# ground_air_heat_transfer_coefficient: float or list,
......@@ -310,7 +293,7 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# length: float or list = None,
# unit_conversion_factor: float = None,
# **kwargs):
# hts = self.trench.heat_transfer_surroundings(
# ground_thermal_conductivity=ground_thermal_conductivity,
# ground_air_heat_transfer_coefficient=(
......@@ -318,20 +301,20 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# time_interval_duration=time_interval_duration,
# temperature_surroundings=temperature_surroundings,
# length=(
# self.length
# if type(length) == type(None) else
# self.length
# if type(length) == type(None) else
# length
# ),
# unit_conversion_factor=(
# self.unit_conversion_factor
# if type(unit_conversion_factor) == type(None) else
# self.unit_conversion_factor
# if type(unit_conversion_factor) == type(None) else
# unit_conversion_factor
# ),
# **kwargs)
# if self.trench.vector_mode:
# # multiple options: hts is a vector
# if (hasattr(self, "static_loss") and
# if (hasattr(self, "static_loss") and
# type(self.static_loss) != type(None)):
# # update the static loss dictionary
# if type(hts[0]) == list:
......@@ -362,7 +345,7 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# }
# else:
# # one option: hts might be a number
# if (hasattr(self, "static_loss") and
# if (hasattr(self, "static_loss") and
# type(self.static_loss) != type(None)):
# # update the static loss dictionary
# if not isinstance(hts, Real):
......@@ -387,21 +370,25 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# self.static_loss = {
# (0, scenario_key, 0): hts
# }
# *****************************************************************************
# *****************************************************************************
class ExistingPipeTrench(PipeTrenchOptions):
    "A class for existing pipe trenches."

    def __init__(self, option_selected: int, **kwargs):
        """
        Create an object describing an already-built pipe trench.

        Parameters
        ----------
        option_selected : int
            Index of the trench option that already exists in the ground.
        **kwargs
            Forwarded to PipeTrenchOptions.__init__; must include the
            'trench' keyword (a SupplyReturnPipeTrench object).
        """
        # existing trenches require no new investment: the minimum cost
        # of every option is zero
        PipeTrenchOptions.__init__(
            self,
            minimum_cost=[0 for i in range(kwargs["trench"].number_options())],
            **kwargs
        )
        # mark the option that already exists as selected
        self.options_selected[option_selected] = True
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
......@@ -15,12 +15,12 @@ from ...problems.esipp.network import Network
from .network import PipeTrenchOptions
from topupheat.pipes.trenches import SupplyReturnPipeTrench
from numbers import Real
# *****************************************************************************
# *****************************************************************************
def cost_pipes(trench: SupplyReturnPipeTrench,
length: float or tuple) -> tuple:
def cost_pipes(trench: SupplyReturnPipeTrench, length: float or tuple) -> tuple:
"""
Returns the costs of each trench option for a given trench length.
......@@ -45,84 +45,87 @@ def cost_pipes(trench: SupplyReturnPipeTrench,
# use the specific pipe cost that features in the database
if trench.vector_mode:
# multiple options
if (type(length) == tuple and
len(length) == trench.number_options()):
if type(length) == tuple and len(length) == trench.number_options():
# multiple trench lengths
return tuple(
(pipe.sp*length # twin pipes: one twin pipe
if trench.twin_pipes else
pipe.sp*length*2) # single pipes: two single pipes
(
pipe.sp * length # twin pipes: one twin pipe
if trench.twin_pipes
else pipe.sp * length * 2
) # single pipes: two single pipes
for pipe, length in zip(trench.supply_pipe, length)
)
)
elif isinstance(length, Real):
# one trench length
return tuple(
(pipe.sp*length # twin pipes: one twin pipe
if trench.twin_pipes else
pipe.sp*length*2) # single pipes: two single pipes
(
pipe.sp * length # twin pipes: one twin pipe
if trench.twin_pipes
else pipe.sp * length * 2
) # single pipes: two single pipes
for pipe in trench.supply_pipe
)
)
else:
raise ValueError('Unrecognised input combination.')
elif (not trench.vector_mode and isinstance(length, Real)):
raise ValueError("Unrecognised input combination.")
elif not trench.vector_mode and isinstance(length, Real):
# only one option
return (trench.supply_pipe.sp*length,)
else: # only one option
raise ValueError('Unrecognised input combination.')
return (trench.supply_pipe.sp * length,)
else: # only one option
raise ValueError("Unrecognised input combination.")
# # keep the trench length
# self.length = (
# [length for i in range(trench.number_options())]
# if trench.vector_mode else
# if trench.vector_mode else
# length
# )
# *****************************************************************************
# *****************************************************************************
def summarise_network_by_pipe_technology(
network: Network,
print_output: bool = False
) -> dict:
network: Network, print_output: bool = False
) -> dict:
"A method to summarise a network by pipe technology."
# *************************************************************************
# *************************************************************************
# create a dictionary that compiles the lengths of each arc technology
length_dict = {}
# *************************************************************************
# *************************************************************************
# for each arc
for arc_key in network.edges(keys=True):
# check if it is a PipeTrench object
if not isinstance(
network.edges[arc_key][Network.KEY_ARC_TECH],
PipeTrenchOptions
):
network.edges[arc_key][Network.KEY_ARC_TECH], PipeTrenchOptions
):
# if not, skip arc
continue
# for each arc technology option
for h, tech_option in enumerate(
network.edges[arc_key][Network.KEY_ARC_TECH].options_selected
):
network.edges[arc_key][Network.KEY_ARC_TECH].options_selected
):
# check if the tech option was selected
if tech_option:
# technology option was selected
# get the length of the arc
arc_length = (
network.edges[arc_key][Network.KEY_ARC_TECH].length[h]
if type(network.edges[arc_key][
Network.KEY_ARC_TECH].length) == list else
network.edges[arc_key][Network.KEY_ARC_TECH].length
)
if type(network.edges[arc_key][Network.KEY_ARC_TECH].length) == list
else network.edges[arc_key][Network.KEY_ARC_TECH].length
)
# identify the option
tech_option_label = network.edges[arc_key][
Network.KEY_ARC_TECH].trench.printable_description(h)
Network.KEY_ARC_TECH
].trench.printable_description(h)
# if the arc technology has been previously selected...
if tech_option_label in length_dict:
# ...increment the total length
......@@ -130,158 +133,158 @@ def summarise_network_by_pipe_technology(
else:
# if not, add a new arc technology to the dictionary
length_dict[tech_option_label] = arc_length
# *************************************************************************
# *************************************************************************
if print_output:
print('printing the arc technologies selected by pipe size...')
if print_output:
print("printing the arc technologies selected by pipe size...")
for key, value in sorted(
(tech, length)
for tech, length in length_dict.items()
):
print(str(key)+': '+str(value))
print('total: '+str(sum(length_dict.values())))
(tech, length) for tech, length in length_dict.items()
):
print(str(key) + ": " + str(value))
print("total: " + str(sum(length_dict.values())))
return length_dict
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def plot_network_layout(
    network: Network,
    include_basemap: bool = False,
    figure_size: tuple = (25, 25),
    min_linewidth: float = 1.0,
    max_linewidth: float = 3.0,
    legend_fontsize: float = 20.0,
    basemap_zoom_level: float = 15,
    legend_location: str = "lower left",
    legend_with_brand_model: bool = False,
    legend_transparency: float = None,
):
    """
    Plot the network layout with arc line widths scaled by pipe size.

    Arcs whose technology is a PipeTrenchOptions object and which have a
    selected option are grouped by the selected option's printable
    description; each group is drawn with a line width interpolated
    between min_linewidth and max_linewidth (wider lines for larger DNs).

    Parameters
    ----------
    network : Network
        The network object (an OSMnx-compatible graph) to plot.
    include_basemap : bool, optional
        If True, add an OpenStreetMap basemap via contextily.
    figure_size : tuple, optional
        Figure size in inches (width, height).
    min_linewidth, max_linewidth : float, optional
        Bounds for the arc line widths.
    legend_fontsize : float, optional
        Font size for the legend entries.
    basemap_zoom_level : float, optional
        Zoom level for the basemap tiles.
    legend_location : str, optional
        Matplotlib legend location string.
    legend_with_brand_model : bool, optional
        Currently unused.
    legend_transparency : float, optional
        Legend frame alpha; None keeps the matplotlib default.
    """
    # convert graph object to GDF
    _, my_gdf_arcs = ox.graph_to_gdfs(network)
    # convert to final plot CRS (web mercator, to match the basemap)
    my_gdf = my_gdf_arcs.to_crs(epsg=3857)
    # dict: keys are the printable descriptions, values are lists of arc keys
    arc_tech_summary_dict = {}
    # for each edge
    for arc_key in my_gdf.index:
        # check if it is a PipeTrenchOptions object
        if not isinstance(
            network.edges[arc_key][Network.KEY_ARC_TECH], PipeTrenchOptions
        ):
            # if not, skip arc
            continue
        # find the trench's description, if an option was selected
        try:
            selected_option = (
                my_gdf[Network.KEY_ARC_TECH]
                .loc[arc_key]
                .trench.printable_description(
                    my_gdf[Network.KEY_ARC_TECH]
                    .loc[arc_key]
                    .options_selected.index(True)
                )
            )
        except ValueError:
            # no option selected for this arc
            continue
        # if the description already exists as a key in the dict
        if selected_option in arc_tech_summary_dict:
            # append the arc key to the list obtained via that key
            arc_tech_summary_dict[selected_option].append(arc_key)
        else:  # if not
            # add a new dict entry whose key is the description
            arc_tech_summary_dict[selected_option] = [arc_key]
    # sort the options by nominal diameter (the digits after the prefix)
    list_sorted = sorted(
        (int(printable_description[2:]), printable_description)
        for printable_description in arc_tech_summary_dict.keys()
    )
    (list_sorted_dn, list_sorted_descriptions) = list(map(list, zip(*list_sorted)))
    # interpolate the line widths; use the midpoint if there is one option
    list_arc_widths = (
        [
            min_linewidth
            + (max_linewidth - min_linewidth) * iteration / (len(list_sorted_dn) - 1)
            for iteration, _ in enumerate(list_sorted_dn)
        ]
        if len(list_sorted_dn) != 1
        else [(max_linewidth + min_linewidth) / 2]
    )
    # *************************************************************************
    # *************************************************************************
    fig, ax = plt.subplots(1, 1)
    fig.set_size_inches(*figure_size)
    for description, arc_width in zip(list_sorted_descriptions, list_arc_widths):
        # plot the arcs using this option
        my_gdf.loc[arc_tech_summary_dict[description]].plot(
            edgecolor="k", legend=True, linewidth=arc_width, ax=ax
        )
    # adjust legend labels
    ax.legend(
        list_sorted_descriptions,
        fontsize=legend_fontsize,
        loc=legend_location,
        framealpha=(
            legend_transparency if type(legend_transparency) != type(None) else None
        ),
    )
    # add base map
    if include_basemap:
        cx.add_basemap(
            ax,
            zoom=basemap_zoom_level,
            source=cx.providers.OpenStreetMap.Mapnik,
            # crs=gdf_map.crs,
        )
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def plot_heating_demand(
losses: list,
end_use_demand: list,
labels: list,
ylabel: str = 'Heating demand [MWh]',
title: str = 'Heat demand by month'
):
losses: list,
end_use_demand: list,
labels: list,
ylabel: str = "Heating demand [MWh]",
title: str = "Heat demand by month",
):
energy_totals = {
'Losses (optimised)': np.array(losses),
'End use (estimated)': np.array(end_use_demand),
}
"Losses (optimised)": np.array(losses),
"End use (estimated)": np.array(end_use_demand),
}
colors = {
'Losses (optimised)': 'tab:orange',
'End use (estimated)': 'tab:blue',
}
"Losses (optimised)": "tab:orange",
"End use (estimated)": "tab:blue",
}
# width = 0.8 # the width of the bars: can also be len(x) sequence
# make sure the grid lines are behind the bars
......@@ -290,28 +293,28 @@ def plot_heating_demand(
fig, ax = plt.subplots()
bottom = np.zeros(len(labels))
figure_size = (8,4)
figure_size = (8, 4)
fig.set_size_inches(figure_size[0], figure_size[1])
for energy_category, energy_total in energy_totals.items():
p = ax.bar(
labels,
energy_total,
label=energy_category,
bottom=bottom,
color=colors[energy_category],
zorder=zorder_bars
)
labels,
energy_total,
label=energy_category,
bottom=bottom,
color=colors[energy_category],
zorder=zorder_bars,
)
bottom += energy_total
ax.bar_label(p, fmt='{:,.0f}', label_type='center')
ax.bar_label(p, fmt="{:,.0f}", label_type="center")
# ax.bar_label(p, fmt='{:,.0f}')
ax.grid(zorder=zorder_grid) # zorder=0 to make the grid
ax.grid(zorder=zorder_grid) # zorder=0 to make the grid
ax.set(ylabel=ylabel, title=title)
ax.legend()
plt.show()
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# -*- coding: utf-8 -*-
......@@ -10,19 +10,22 @@ from statistics import mean
# TODO: enable swapping the polarity
class Investment:
"""This class is meant to enable analysis of specific investments."""
# self.discount_rates: N samples
# self.net_cash_flows: N+1 samples
# TODO: consider using dicts to make things more intuitive, time-wise
def __init__(self,
discount_rates: list,
net_cash_flows: list = None,
discount_rate: float = None,
analysis_period_span: int = None):
def __init__(
self,
discount_rates: list,
net_cash_flows: list = None,
discount_rate: float = None,
analysis_period_span: int = None,
):
"""
Create an object for investment analysis using typical information.
......@@ -39,186 +42,161 @@ class Investment:
"""
# validate the inputs
if type(discount_rates) != type(None):
# discount_rates is not None:
# discount_rates is not None:
if type(discount_rates) != tuple:
raise TypeError(
'The discount rates must be provided as a tuple.')
raise TypeError("The discount rates must be provided as a tuple.")
self.discount_rates = tuple(discount_rates)
self.analysis_period_span = len(self.discount_rates)
self.analysis_period_span = len(self.discount_rates)
if self.analysis_period_span <= 0:
raise ValueError(
'The duration of the period under analysis must be '+
'positive.'
)
"The duration of the period under analysis must be " + "positive."
)
else:
# discount_rates is None:
# discount_rates is None:
# discount rate must be positive real under 1
# analysis_period_span must be an int
if type(discount_rate) != float:
raise TypeError(
'The discount rate must be provided as a float.')
raise TypeError("The discount rate must be provided as a float.")
if discount_rate <= 0 or discount_rate >= 1:
raise ValueError(
'The discount rate must be in the open interval between 0'+
' and 1.'
)
"The discount rate must be in the open interval between 0"
+ " and 1."
)
if type(analysis_period_span) != int:
raise TypeError(
'The duration of the period under consideration must be '+
'provided as an integer.')
"The duration of the period under consideration must be "
+ "provided as an integer."
)
if analysis_period_span <= 0:
raise ValueError(
'The duration of the period under analysis must be '+
'positive.'
)
"The duration of the period under analysis must be " + "positive."
)
self.analysis_period_span = analysis_period_span
self.discount_rates = tuple(
discount_rate for i in range(self.analysis_period_span)
)
)
# check the net cash flows
if type(net_cash_flows) != type(None):
if type(net_cash_flows) != list:
raise TypeError(
'The net cash flows must be provided as a list.')
if len(net_cash_flows) != self.analysis_period_span+1:
raise ValueError(
'The inputs are consistent in terms of length.'
)
raise TypeError("The net cash flows must be provided as a list.")
if len(net_cash_flows) != self.analysis_period_span + 1:
raise ValueError("The inputs are consistent in terms of length.")
self.net_cash_flows = list(net_cash_flows)
else:
# net_cash_flows is None: initialise it as a list of zeros
self.net_cash_flows = list(
0 for i in range(self.analysis_period_span+1)
)
self.net_cash_flows = list(0 for i in range(self.analysis_period_span + 1))
# discount factors
self.discount_factors = tuple(
discount_factor(self.discount_rates[:i])
for i in range(self.analysis_period_span+1)
)
for i in range(self.analysis_period_span + 1)
)
# *************************************************************************
# *************************************************************************
def add_investment(self,
investment: float,
investment_period: int,
investment_longevity: int,
commissioning_delay_after_investment: int = 0,
salvage_value_method: str = 'annuity'):
if salvage_value_method == 'annuity':
def add_investment(
self,
investment: float,
investment_period: int,
investment_longevity: int,
commissioning_delay_after_investment: int = 0,
salvage_value_method: str = "annuity",
):
if salvage_value_method == "annuity":
mean_discount_rate = mean(self.discount_rates)
residual_value = salvage_value_annuity(
investment=investment,
investment_longevity=investment_longevity,
investment_period=investment_period,
discount_rate=mean_discount_rate,
analysis_period_span=self.analysis_period_span
)
investment=investment,
investment_longevity=investment_longevity,
investment_period=investment_period,
discount_rate=mean_discount_rate,
analysis_period_span=self.analysis_period_span,
)
self.net_cash_flows[investment_period] += investment
self.net_cash_flows[self.analysis_period_span] += -residual_value
else:
residual_value = salvage_value_linear_depreciation(
investment=investment,
investment_period=investment_period,
investment_longevity=investment_longevity,
investment=investment,
investment_period=investment_period,
investment_longevity=investment_longevity,
analysis_period_span=self.analysis_period_span,
commissioning_delay_after_investment=(
commissioning_delay_after_investment
)
)
),
)
self.net_cash_flows[investment_period] += investment
self.net_cash_flows[self.analysis_period_span] += -residual_value
# *************************************************************************
# *************************************************************************
def add_operational_cash_flows(
    self, cash_flow: float or int, start_period: int, longevity: int = None
):
    """
    Adds a recurring cash flow to the analysis.

    Parameters
    ----------
    cash_flow : float or int
        The cash flow amount added to each affected period.
    start_period : int
        The first period in which the cash flow occurs.
    longevity : int, optional
        The number of periods in which the cash flow occurs. If None, the
        cash flow recurs until the end of the analysis period. The default
        is None.
    """
    # determine the first period beyond the cash flow sequence: it is capped
    # by the analysis horizon in either case
    if longevity is None:
        end_period = self.analysis_period_span + 1
    else:
        end_period = min(start_period + longevity, self.analysis_period_span + 1)
    # add the recurring cash flow to every affected period
    for period in range(start_period, end_period):
        self.net_cash_flows[period] += cash_flow
# *************************************************************************
# *************************************************************************
def net_present_value(self):
    """Return the net present value of the cash flows registered so far."""
    rates = self.discount_rates
    flows = self.net_cash_flows
    return npv(rates, flows)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def npv(
    discount_rates: list, net_cash_flows: list, return_discount_factors: bool = False
) -> float or tuple:
    """
    Calculates the net present value of a sequence of net cash flows.

    Parameters
    ----------
    discount_rates : list
        The discount rate for each period after the present, in order.
    net_cash_flows : list
        The net cash flow for each period, including the present (period 0).
        There must be exactly one more cash flow than discount rates.
    return_discount_factors : bool, optional
        If True, returns the net present value in addition to a list with
        the discount factors used in the calculation. The default is False.

    Returns
    -------
    float or tuple
        The net present value or, if requested, a tuple with the net present
        value and the list of discount factors.

    Raises
    ------
    ValueError
        If the number of cash flows does not exceed the number of discount
        rates by exactly one.
    """
    number_of_rates = len(discount_rates)
    if len(net_cash_flows) != number_of_rates + 1:
        raise ValueError("The inputs are inconsistent.")
    # one discount factor per period, including the present (factor of one)
    factors = [
        discount_factor(discount_rates[:t]) for t in range(number_of_rates + 1)
    ]
    total = sum(flow * factor for flow, factor in zip(net_cash_flows, factors))
    if return_discount_factors:
        return total, factors
    return total
# *****************************************************************************
# *****************************************************************************
def discount_factor(discount_rates: list) -> float:
    """
    Return the discount factor consistent with the discount rates provided.

    The factor is the product of 1/(1+i) over every rate i in the list, and
    quantifies the effect time has on a cash flow between a given period and
    the present.

    Parameters
    ----------
    discount_rates : list
        A list with the discount rates for each time interval between a
        given time interval and the present. The order is irrelevant.

    Returns
    -------
    float
        The discount factor consistent with the discount rates provided. It
        uses all discount rates in the list.
    """
    factor = 1
    for rate in discount_rates:
        factor *= 1 / (1 + rate)
    return factor
# *****************************************************************************
# *****************************************************************************
def salvage_value_linear_depreciation(
    investment: int or float,
    investment_period: int,
    investment_longevity: int,
    analysis_period_span: int,
    commissioning_delay_after_investment: int = 1,
) -> float:
    """
    Determine an asset's salvage value by the end of an analysis period.

    The depreciation is assumed to be linear: the asset is initially rated at
    100% of the investment made and then 0% by the time it is no longer
    usable. The salvage value is the asset's value after the analysis period.

    Parameters
    ----------
    investment : int or float
        The investment sum.
    investment_period : int
        The period in which the investment is made.
    investment_longevity : int
        The investment longevity, in periods.
    analysis_period_span : int
        The span of the analysis period, in periods.
    commissioning_delay_after_investment : int, optional
        The delay between investment and commissioning, in periods. The
        default is 1.

    Returns
    -------
    float
        The salvage value.

    Raises
    ------
    ValueError
        If the investment is not made within the period under analysis.
    """
    if investment_period > analysis_period_span:
        raise ValueError(
            "The investment has to be made within the period being analysed."
        )
    # periods of useful life remaining once the analysis period has elapsed
    # NOTE(review): this can be negative if the asset expires before the end
    # of the analysis period — confirm that callers expect that
    remaining_life = (
        investment_longevity
        + investment_period
        + commissioning_delay_after_investment
        - 1
        - analysis_period_span
    )
    # linear depreciation: value proportional to the remaining useful life
    return remaining_life * investment / investment_longevity
# *****************************************************************************
# *****************************************************************************
def salvage_value_annuity(
    investment: int or float,
    discount_rate: float,
    investment_longevity: int,
    investment_period: int,
    analysis_period_span: int,
) -> float:
    """
    Return the salvage value at the end of the analysis, per the annuity method.

    The present value of the annuities falling beyond the analysis period is
    computed first and then compounded forward to the end of that period.
    """
    # present value of the annuities outside the analysis period
    present_value = present_salvage_value_annuity(
        investment=investment,
        investment_longevity=investment_longevity,
        investment_period=investment_period,
        discount_rate=discount_rate,
        analysis_period_span=analysis_period_span,
        return_annuity=False,
    )
    # compound the present value forward to the end of the analysis period
    end_of_horizon_factor = discount_factor(
        tuple(discount_rate for _ in range(analysis_period_span))
    )
    return present_value / end_of_horizon_factor
# *****************************************************************************
# *****************************************************************************
def annuity(
    investment: int or float, investment_longevity: int, discount_rate: float
) -> float:
    "Returns the annuity value for a given investment sum and longevity."
    # standard capital recovery calculation: P * i / (1 - (1 + i)^-n)
    compounding_term = (1 + discount_rate) ** (-investment_longevity)
    return investment * discount_rate / (1 - compounding_term)
# *****************************************************************************
# *****************************************************************************
def present_salvage_value_annuity(
    investment: int or float,
    investment_longevity: int,
    investment_period: int,
    discount_rate: float,
    analysis_period_span: int,
    return_annuity: bool = False,
) -> float:
    """
    Calculates the present value of an asset after a given analysis period.

    The calculation is based on the annuity method, which assumes that the
    investment could produce a steady revenue stream for a given period after
    the investment is made -- the investment longevity. Only the annuities
    falling beyond the analysis period contribute to the salvage value.

    This method assumes that the investment precedes the annuity payments.

    Parameters
    ----------
    investment : int or float
        The investment sum.
    investment_longevity : int
        The investment longevity, in periods.
    investment_period : int
        The period in which the investment is made.
    discount_rate : float
        The discount rate, assumed constant throughout.
    analysis_period_span : int
        The span of the analysis period, in periods.
    return_annuity : bool, optional
        If True, the annuity is returned as well. The default is False.

    Returns
    -------
    float or tuple
        The present salvage value or, if requested, a tuple with the present
        salvage value and the annuity.

    Raises
    ------
    ValueError
        If the investment is not made within the period under analysis.
    """
    if investment_period > analysis_period_span:
        raise ValueError(
            "The investment has to be made within the period being analysed."
        )
    # the asset only has salvage value if its life extends beyond the horizon
    end_of_life = investment_longevity + investment_period
    if analysis_period_span >= end_of_life:
        if return_annuity:
            return 0, annuity(
                investment=investment,
                investment_longevity=investment_longevity,
                discount_rate=discount_rate,
            )
        return 0
    # the annuity considers the asset longevity and the commissioning delay
    yearly_annuity = annuity(
        investment=investment,
        investment_longevity=investment_longevity,
        discount_rate=discount_rate,
    )
    # annuities within the analysis period do not count: only the tail does
    rates = tuple(discount_rate for _ in range(end_of_life))
    cash_flows = [0] * (analysis_period_span + 1) + [
        yearly_annuity for _ in range(end_of_life - analysis_period_span)
    ]
    present_value = npv(discount_rates=rates, net_cash_flows=cash_flows)
    if return_annuity:
        return present_value, yearly_annuity
    return present_value
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# *****************************************************************************
# *****************************************************************************
from ...problems.esipp.network import Arcs
# *****************************************************************************
# *****************************************************************************
class ArcInvestments(Arcs):
    """A class for defining arcs linked to investments."""

    # *************************************************************************
    # *************************************************************************

    def __init__(self, investments: tuple, **kwargs):
        """
        Create an arc group whose minimum costs track Investment objects.

        Parameters
        ----------
        investments : tuple
            One Investment object per arc option; each object's net present
            value defines the respective arc's minimum cost.
        **kwargs
            Additional keyword arguments forwarded to Arcs.__init__.
        """
        # keep the investment data so the costs can be recomputed later
        self.investments = investments
        # initialise the underlying Arcs object with the current NPVs
        Arcs.__init__(self, minimum_cost=self._current_minimum_cost(), **kwargs)

    # *************************************************************************
    # *************************************************************************

    def _current_minimum_cost(self) -> tuple:
        "Returns the net present value of each investment, as a tuple."
        return tuple(inv.net_present_value() for inv in self.investments)

    def update_minimum_cost(self):
        "Updates the minimum costs using the Investment objects."
        self.minimum_cost = self._current_minimum_cost()
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# -*- coding: utf-8 -*-
# import osm
\ No newline at end of file
# import osm
# imports
from math import inf
......@@ -22,10 +21,11 @@ from ..gis import identify as ident
# *****************************************************************************
# *****************************************************************************
def edge_lengths(network: MultiDiGraph, edge_keys: tuple = None) -> dict:
    """
    Calculate edge lengths in an OSMnx-formatted MultiDiGraph network object.

    The calculation method changes depending on whether the coordinates are
    projected and depending on whether the edges are simplified.

    Parameters
    ----------
    network : MultiDiGraph
        An OSMnx-formatted graph.
    edge_keys : tuple, optional
        The keys of the edges under consideration. If None, all edges are
        considered. The default is None.

    Returns
    -------
    dict
        A dictionary keyed by edge key whose values are the edge lengths.
    """
    # determine if the graph is projected or not
    graph_is_projected = is_projected(network.graph["crs"])
    # check if edge keys were specified
    if edge_keys is None:
        # no particular edge keys were provided: consider all edges (default)
        edge_keys = network.edges(keys=True)
    # initialise length dict
    length_dict = {}
    # for each edge on the graph
    for edge_key in edge_keys:
        edge_data = network.edges[edge_key]
        if graph_is_projected:
            # calculate it using projected coordinates
            if osm.KEY_OSMNX_GEOMETRY in edge_data:
                # use the geometry attribute
                length_dict[edge_key] = length(edge_data[osm.KEY_OSMNX_GEOMETRY])
            else:
                # use the (projected) node coordinates
                start_point = Point(
                    (
                        network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
                        network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
                    )
                )
                end_point = Point(
                    (
                        network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
                        network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
                    )
                )
                length_dict[edge_key] = start_point.distance(end_point)
        else:
            # calculate it using unprojected coordinates (lat/long)
            if osm.KEY_OSMNX_GEOMETRY in edge_data:
                # great circle distance along the geometry
                length_dict[edge_key] = great_circle_distance_along_path(
                    edge_data[osm.KEY_OSMNX_GEOMETRY]
                )
            else:
                # great circle distance between the node coordinates
                length_dict[edge_key] = great_circle(
                    lat1=network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
                    lon1=network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
                    lat2=network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
                    lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
                )
    # return the dict with lengths of each edge
    return length_dict
# *****************************************************************************
# *****************************************************************************
def great_circle_distance_along_path(path: LineString) -> float:
"""
Computes the great circle distance along a given path.
The distance is to be calculated using a shapely LineString object made of
(longitude, latitude) coordinate tuples. The calculation is vectorised.
......@@ -118,16 +124,18 @@ def great_circle_distance_along_path(path: LineString) -> float:
# sum individual distances and return
return sum(
great_circle(
lat[:-1], # latitudes of starting points
lon[:-1], # longitudes of starting points
lat[:-1], # latitudes of starting points
lon[:-1], # longitudes of starting points
lat[1:], # latitudes of ending points
lon[1:] # longitudes of ending points
)
lon[1:], # longitudes of ending points
)
)
# *****************************************************************************
# *****************************************************************************
def update_street_count(network: MultiDiGraph):
    """
    Updates the street count attributes of nodes in a MultiDiGraph object.

    Parameters
    ----------
    network : MultiDiGraph
        An OSMnx-formatted graph whose nodes are updated in place.
    """
    # recompute the street counts and write them back as node attributes
    counts = count_streets_per_node(network)
    network.add_nodes_from(
        (node_key, {osm.KEY_OSMNX_STREET_COUNT: number_of_streets})
        for node_key, number_of_streets in counts.items()
    )
# *****************************************************************************
# *****************************************************************************
def node_path_length(network: MultiDiGraph,
path: list,
return_minimum_length_only: bool = True) -> list or float:
def node_path_length(
network: MultiDiGraph, path: list, return_minimum_length_only: bool = True
) -> list or float:
"""
Returns the length or lengths of a path defined using nodes.
If more than one edge connects adjacent nodes along the path, a length value
will be returned for each possible path combination.
Parameters
----------
network : MultiDiGraph
......@@ -176,15 +188,15 @@ def node_path_length(network: MultiDiGraph,
The path\'s length or all lengths consistent with the path provided.
"""
# direction matters
path_length = len(path)
if path_length == 0:
return inf
# if the path is given as a list of node keys, then it is subjective
# i.e., it may refer to many paths, namely if parallel edges exist
# check if the path object qualifies as such
if not is_path(network, path):
# it does not, exit
......@@ -192,69 +204,64 @@ def node_path_length(network: MultiDiGraph,
return inf
else:
return [inf]
# prepare a list with all possible paths given as lists of edge keys
list_of_edge_key_paths = [[]] # a list of edge key lists
list_of_edge_key_paths = [[]] # a list of edge key lists
# for each pair of nodes in the path
for node_pair in range(path_length-1):
for node_pair in range(path_length - 1):
# get the edges between these two nodes
edge_keys = ident.get_edges_from_a_to_b(
network,
path[node_pair],
path[node_pair+1]
)
network, path[node_pair], path[node_pair + 1]
)
number_edge_keys = len(edge_keys)
if number_edge_keys == 1:
# only one edge exists: append its key to all existing lists/paths
for edge_key_path in list_of_edge_key_paths:
edge_key_path.append(edge_keys[0])
else: # multiple edges exist: each path identified so far has to be
if number_edge_keys == 1:
# only one edge exists: append its key to all existing lists/paths
for edge_key_path in list_of_edge_key_paths:
edge_key_path.append(edge_keys[0])
else: # multiple edges exist: each path identified so far has to be
# replicated a total of number_edge_keys times and then updated
number_paths = len(list_of_edge_key_paths)
# for each parallel edge
for edge_key_index in range(number_edge_keys-1):
# replicate all paths
for path_index in range(number_paths):
number_paths = len(list_of_edge_key_paths)
# for each parallel edge
for edge_key_index in range(number_edge_keys - 1):
# replicate all paths
for path_index in range(number_paths):
list_of_edge_key_paths.append(
list(list_of_edge_key_paths[path_index])
)
# paths have been replicated, now add the edges
for edge_key_index in range(number_edge_keys):
for path_index in range(number_paths):
# add the new edge
)
# paths have been replicated, now add the edges
for edge_key_index in range(number_edge_keys):
for path_index in range(number_paths):
# add the new edge
list_of_edge_key_paths[
path_index+edge_key_index*number_paths
].append(
edge_keys[edge_key_index]
)
path_index + edge_key_index * number_paths
].append(edge_keys[edge_key_index])
# *************************************************************************
path_lenths = [
sum(network.edges[edge_key][osm.KEY_OSMNX_LENGTH]
for edge_key in edge_key_path)
sum(network.edges[edge_key][osm.KEY_OSMNX_LENGTH] for edge_key in edge_key_path)
for edge_key_path in list_of_edge_key_paths
]
if return_minimum_length_only:
return min(path_lenths)
else:
]
if return_minimum_length_only:
return min(path_lenths)
else:
return path_lenths
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def edge_path_length(network: MultiDiGraph, path: list, **kwargs) -> float:
    """
    Returns the total length of a path defined using edge keys.

    If the path does not exist, or if no path is provided, the result will be
    infinity (math.inf).

    Parameters
    ----------
    network : MultiDiGraph
        An OSMnx-formatted graph.
    path : list
        The edge keys making up the path.

    Returns
    -------
    float
        The path's length, or infinity for an empty or invalid path.
    """
    # guard clauses: empty or invalid paths have infinite length
    if len(path) == 0:
        return inf
    if not ident.is_edge_path(network, path, **kwargs):
        return inf
    # sum the length attribute of every edge along the path
    return sum(network.edges[key][osm.KEY_OSMNX_LENGTH] for key in path)
# *****************************************************************************
# *****************************************************************************
def count_ocurrences(gdf: GeoDataFrame,
column: str,
column_entries: list = None) -> dict:
def count_ocurrences(
gdf: GeoDataFrame, column: str, column_entries: list = None
) -> dict:
"""
Counts the number of occurrences per entry in a DataFrame object's column.
If a list is provided, only the entries that match those in the list are
If a list is provided, only the entries that match those in the list are
counted. If no list is provided, all unique entries are counted.
Parameters
......@@ -309,7 +316,7 @@ def count_ocurrences(gdf: GeoDataFrame,
A dictionary with the counts whose keys are the values counted.
"""
if type(column_entries) == list:
# find entries also present in the dict
# initialise dict
......@@ -317,12 +324,12 @@ def count_ocurrences(gdf: GeoDataFrame,
# for each key in the dict
for key in column_entries:
# # store the number of rows
# count_dict[key] = gdf[gdf[column]==key].shape[0]
# count_dict[key] = gdf[gdf[column]==key].shape[0]
# count the number of rows with this key
if isna(key):
count_dict[key] = gdf[gdf[column].isnull()].shape[0]
count_dict[key] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[key] = gdf[gdf[column]==key].shape[0]
count_dict[key] = gdf[gdf[column] == key].shape[0]
else:
# find all unique entries
# initialise dict
......@@ -333,12 +340,13 @@ def count_ocurrences(gdf: GeoDataFrame,
# it is, skip
continue
# it is not, count and store the number of rows with said entry
if isna(entry): #type(entry) == type(None):
count_dict[entry] = gdf[gdf[column].isnull()].shape[0]
if isna(entry): # type(entry) == type(None):
count_dict[entry] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[entry] = gdf[gdf[column]==entry].shape[0]
count_dict[entry] = gdf[gdf[column] == entry].shape[0]
# return statement
return count_dict
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
......@@ -20,11 +20,12 @@ from ..gis import osm
# *****************************************************************************
# *****************************************************************************
def is_edge_consistent_with_geometry(network: nx.MultiDiGraph, edge_key):
    """
    Returns True if a given edge in an OSMnx-formatted graph is declared in the
    same order as its geometry. That is, if the source node corresponds to the
    first position in the geometry attribute and so forth.

    Parameters
    ----------
    network : nx.MultiDiGraph
        The graph object.
    edge_key : tuple
        The key of the edge under consideration.

    Returns
    -------
    bool
        True if the declaration order matches the geometry, False otherwise.

    Raises
    ------
    ValueError
        If the edge does not exist.
    """
    if not network.has_edge(*edge_key):
        # the edge does not exist
        raise ValueError(
            "No edge was found matching the key provided: " + str(edge_key)
        )
    edge_data = network.edges[edge_key]
    if osm.KEY_OSMNX_GEOMETRY not in edge_data:
        # no geometry attribute: the edge is trivially consistent
        return True
    # the first point of the geometry must match the source node coordinates
    first_geometry_point = tuple(edge_data[osm.KEY_OSMNX_GEOMETRY].coords)[0]
    source_node_coordinates = (
        network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
        network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
    )
    return first_geometry_point == source_node_coordinates
# *****************************************************************************
# *****************************************************************************
def find_edges_in_reverse(network: nx.MultiDiGraph) -> dict:
    """
    Finds edges in reverse within an OSMnx-formatted MultiDiGraph object.

    Parameters
    ----------
    network : nx.MultiDiGraph
        The graph object.

    Returns
    -------
    dict
        A dictionary mapping each edge key to the list of edge keys that
        represent the same edge in the reverse direction.
    """
    reverse_edges = {}
    for edge_key in network.edges(keys=True):
        # candidate edges run between the same nodes, in the other direction
        candidate_keys = get_edges_from_a_to_b(
            network, node_start=edge_key[1], node_end=edge_key[0]
        )
        reverse_edges[edge_key] = [
            candidate
            for candidate in candidate_keys
            if edges_are_in_reverse(network, edge_key, candidate)
        ]
    return reverse_edges
# *****************************************************************************
# *****************************************************************************
def is_edge_osmnx_compliant(network: nx.MultiDiGraph, edge_key) -> bool:
    """Returns True if a given edge is osmnx compliant and False otherwise."""
    if not network.has_edge(*edge_key):
        raise ValueError("Edge not found.")
    # delegate the attribute checks to the edge data helper
    edge_data = network.get_edge_data(*edge_key)
    return is_edge_data_osmnx_compliant(edge_data)
# *****************************************************************************
# *****************************************************************************
def is_edge_data_osmnx_compliant(edge_data: dict):
"""Returns True a given MultiDiGraph edge data dict is OSMnx-compliant."""
# check if all the essential attributes are in
......@@ -156,26 +160,28 @@ def is_edge_data_osmnx_compliant(edge_data: dict):
return False
# else: # no action
return True
# *****************************************************************************
# *****************************************************************************
def edges_are_in_reverse(
    network: nx.MultiDiGraph,
    edge_a: tuple,
    edge_b: tuple,
    tolerance: float or int = 1e-3,
) -> bool:
    """
    Returns True if two edges in a graph represent the same one but in reverse.

    The conditions the two edges must observe are the following:
    - the set of each attribute appearing as a list must match;
    - the geometries must be identical but in reverse;
    - the lengths must be within a given tolerance of one another;
    - the reversed flags must be opposite, unless they are a list;
    - every other attribute must be identical.

    The graph should be formatted by OSMnx standards.

    Parameters
    ----------
    network : nx.MultiDiGraph
        The graph object.
    edge_a : tuple
        The key of one edge.
    edge_b : tuple
        The key of the other edge.
    tolerance : float or int, optional
        The maximum length difference tolerated. The default is 1e-3.

    Returns
    -------
    bool
        If True, the edges are in reverse.

    Raises
    ------
    ValueError
        If either edge is missing or not OSMnx-compliant.
    """
    # the edges must link the same nodes, in opposite directions
    if edge_a[0] != edge_b[1] or edge_a[1] != edge_b[0]:
        return False
    # both edges must exist and comply with osmnx conventions
    if not is_edge_osmnx_compliant(network, edge_a) or not is_edge_osmnx_compliant(
        network, edge_b
    ):
        raise ValueError("One or more of the edges is not OSMnx-compliant.")
    data_ab = network.get_edge_data(*edge_a)
    data_ba = network.get_edge_data(*edge_b)
    # the geometry and reversed attributes must be in both dicts or in neither
    for optional_attr in (osm.KEY_OSMNX_GEOMETRY, osm.KEY_OSMNX_REVERSED):
        if (optional_attr in data_ab) != (optional_attr in data_ba):
            return False
    # compare the two attribute dicts, key by key
    for key, value in data_ab.items():
        if type(value) == list:
            # list-valued attributes must hold the same entries, as sets
            counterpart = data_ba[key]
            if type(counterpart) != list or set(value) != set(counterpart):
                return False
        elif key == osm.KEY_OSMNX_GEOMETRY:
            # the geometries must be LineStrings mirroring one another
            counterpart = data_ba[key]
            if type(counterpart) != LineString:
                return False
            if tuple(value.coords) != tuple(counterpart.reverse().coords):
                return False
        elif key == osm.KEY_OSMNX_REVERSED:
            # the reversed flags must be booleans and logical opposites
            counterpart = data_ba[key]
            if type(counterpart) != bool or value == counterpart:
                return False
        elif key == osm.KEY_OSMNX_LENGTH:
            # the lengths must exist in both and agree within the tolerance
            if key not in data_ba or abs(value - data_ba[key]) > tolerance:
                return False
        elif key in data_ba and value != data_ba[key]:
            # any other attribute present in both dicts must be identical
            return False
        # keys absent from the reverse dict (other than the above) are ignored
    # all checks passed
    return True
# *****************************************************************************
# *****************************************************************************
def close_to_extremities(
line: LineString,
points: tuple,
tolerance: float = 7/3-4/3-1,
use_start_point_equidistant: bool = True,
return_distances: bool = False) -> tuple:
line: LineString,
points: tuple,
tolerance: float = 7 / 3 - 4 / 3 - 1,
use_start_point_equidistant: bool = True,
return_distances: bool = False,
) -> tuple:
"""
Determines which points are close to a line\'s start and end points.
Closeness between points is defined by being within a given tolerance of
each other.
......@@ -334,31 +363,32 @@ def close_to_extremities(
points that are closest to the end point.
"""
# calculate the distances to the line
line_distances = line.distance(points)
# identify the start and end points
start_point = Point(line.coords[0])
end_point = Point(line.coords[-1])
# calculate the distances to the start and end points
start_distances = start_point.distance(points)
end_distances = end_point.distance(points)
# for each point
_start = []
_end = []
for i, (line_distance, start_distance, end_distance) in enumerate(
zip(line_distances, start_distances, end_distances)):
for i, (line_distance, start_distance, end_distance) in enumerate(
zip(line_distances, start_distances, end_distances)
):
if start_distance < end_distance:
# the point is closer to the start point than to the end point
if abs(start_distance-line_distance) <= tolerance:
if abs(start_distance - line_distance) <= tolerance:
# the point is within range of the start point
_start.append(i)
elif start_distance > end_distance:
# the point is closer to the end point than to the start point
if abs(end_distance-line_distance) <= tolerance:
if abs(end_distance - line_distance) <= tolerance:
# the point is within range of the end point
_end.append(i)
else:
......@@ -367,44 +397,48 @@ def close_to_extremities(
# the point is closer to the line than to the start/end points
continue
# reach these statements
if use_start_point_equidistant:
if use_start_point_equidistant:
_start.append(i)
else:
_end.append(i)
# return statement
if return_distances:
return _start, _end, line_distances, start_distances, end_distances
else:
return _start, _end
# *****************************************************************************
# *****************************************************************************
def find_roundabouts(network: nx.MultiDiGraph,
maximum_perimeter: float = None,
minimum_perimeter: float = None,
maximum_number_nodes: int = None,
minimum_number_nodes: int = None) -> list:
def find_roundabouts(
network: nx.MultiDiGraph,
maximum_perimeter: float = None,
minimum_perimeter: float = None,
maximum_number_nodes: int = None,
minimum_number_nodes: int = None,
) -> list:
"""
Finds sequences of nodes in a network that constitute roundabouts.
A roundabout is defined as a sequence of nodes connected through one-way
edges to form an endless loop. One-way edges are identified by the presence
of the 'oneway' attribute equal to True in the edge dictionary. The minimum
of the 'oneway' attribute equal to True in the edge dictionary. The minimum
and maximum roundabout perimeter can be used to filter out roundabouts that
are too big or too small, as can minimum and maximum number of nodes.
Parameters
----------
network : nx.MultiDiGraph
The object describing the network.
maximum_perimeter : float, optional
The maximum perimeter for an ordered sequence of nodes to qualify as a
The maximum perimeter for an ordered sequence of nodes to qualify as a
roundabout. The units are those of OSMnx: meters. The default is None,
which leads to the maximum perimeter being ignored.
minimum_perimeter : float, optional
The minimum perimeter for an ordered sequence of nodes to qualify as a
The minimum perimeter for an ordered sequence of nodes to qualify as a
roundabout. The units are those of OSMNX: meters. The default is None,
which leads to the minimum perimeter being ignored.
maximum_number_nodes : int, optional
......@@ -422,33 +456,29 @@ def find_roundabouts(network: nx.MultiDiGraph,
A list of lists with node keys representing a roundabout's nodes. The
nodes are ordered in the manner needed to go around the roundabout.
"""
# copy the network object
new_network = network.copy()
# remove edges that do not qualify:
# remove edges that do not qualify:
# 1) self-loops
# 2) edges that are not oneway
# 3) edges that do not have the necessary attributes
# node number limits
there_are_upper_node_number_limits = (
True if maximum_number_nodes != None else False)
there_are_lower_node_number_limits = (
True if minimum_number_nodes != None else False)
there_are_node_number_limits = (
there_are_upper_node_number_limits or
there_are_lower_node_number_limits)
there_are_upper_node_number_limits = True if maximum_number_nodes != None else False
there_are_lower_node_number_limits = True if minimum_number_nodes != None else False
there_are_node_number_limits = (
there_are_upper_node_number_limits or there_are_lower_node_number_limits
)
# perimeter limits
there_are_upper_perimeter_limits = (
True if maximum_perimeter != None else False)
there_are_lower_perimeter_limits = (
True if minimum_perimeter != None else False)
there_are_perimeter_limits = (
there_are_upper_perimeter_limits or
there_are_lower_perimeter_limits)
there_are_upper_perimeter_limits = True if maximum_perimeter != None else False
there_are_lower_perimeter_limits = True if minimum_perimeter != None else False
there_are_perimeter_limits = (
there_are_upper_perimeter_limits or there_are_lower_perimeter_limits
)
# find edges that are not one way
list_removable_edges = []
......@@ -482,30 +512,28 @@ def find_roundabouts(network: nx.MultiDiGraph,
for node_key in list_selflooping_nodes:
while new_network.has_edge(u=node_key, v=node_key):
new_network.remove_edge(u=node_key, v=node_key)
# *************************************************************************
# *************************************************************************
# find loops
node_paths = nx.simple_cycles(new_network)
# *************************************************************************
# *************************************************************************
# exclude paths based on the perimeter and the number of nodes, if set
if not there_are_node_number_limits and not there_are_perimeter_limits:
return list(node_paths)
else: # each potential candidate needs to be checked
else: # each potential candidate needs to be checked
final_node_paths = []
# for each node group
for node_path in node_paths:
if there_are_perimeter_limits:
# compute the total length for each node
total_length = node_path_length(
network,
node_path,
return_minimum_length_only=True
)
network, node_path, return_minimum_length_only=True
)
if there_are_lower_perimeter_limits:
if total_length < minimum_perimeter:
continue
......@@ -524,19 +552,21 @@ def find_roundabouts(network: nx.MultiDiGraph,
final_node_paths.append(node_path)
# return the final list
return final_node_paths
# *********************************************************************
# *********************************************************************
# *****************************************************************************
# *****************************************************************************
def is_roundabout(network: nx.MultiDiGraph,
path: list,
path_as_node_keys: bool = True) -> bool:
def is_roundabout(
network: nx.MultiDiGraph, path: list, path_as_node_keys: bool = True
) -> bool:
"""
Returns True if a given path constitutes a roundabout in a directed graph.
A roundabout is defined as a sequence of nodes connected through one-way
edges to form an endless loop. One-way edges are identified by the presence
of the 'oneway' attribute equal to True in the edge dictionary.
......@@ -556,123 +586,106 @@ def is_roundabout(network: nx.MultiDiGraph,
True, if the path constitutes a roundabout and False otherwise.
"""
# paths are given as node lists
# roundabouts require at least two nodes
if len(path) <= 1:
raise ValueError('Node paths require at least two nodes.')
raise ValueError("Node paths require at least two nodes.")
# for each node in path
for node_key in path:
# check if it exists in the network
if not network.has_node(node_key):
return False
# there should be no repetitions
if path.count(node_key) > 1:
return False
# check if the last node connects to the first
edge_keys = get_edges_from_a_to_b(network,
path[-1],
path[0])
edge_keys = get_edges_from_a_to_b(network, path[-1], path[0])
if len(edge_keys) == 0:
return False
else:
# among the edges between them, find at least one compatible
compatible_edge_exists = False
for edge_key in edge_keys:
for edge_key in edge_keys:
# get its data
edge_data_dict = network.get_edge_data(u=edge_key[0],
v=edge_key[1],
key=edge_key[2])
edge_data_dict = network.get_edge_data(
u=edge_key[0], v=edge_key[1], key=edge_key[2]
)
# ensure that this edge has the oneway attribute
if osm.KEY_OSMNX_ONEWAY in edge_data_dict:
# ensure that it is true
if edge_data_dict[osm.KEY_OSMNX_ONEWAY]:
compatible_edge_exists = True
break
# check for compatible edges
if not compatible_edge_exists:
# no compatible edges exist between these two nodes
return False
# for each other node pair
for node_pair in range(len(path)-1):
for node_pair in range(len(path) - 1):
# for each edge between them, find at least one compatible edge
compatible_edge_exists = False
for edge_key in get_edges_from_a_to_b(network,
path[node_pair],
path[node_pair+1]):
for edge_key in get_edges_from_a_to_b(
network, path[node_pair], path[node_pair + 1]
):
# get its data
edge_data_dict = network.get_edge_data(u=edge_key[0],
v=edge_key[1],
key=edge_key[2])
edge_data_dict = network.get_edge_data(
u=edge_key[0], v=edge_key[1], key=edge_key[2]
)
# ensure that this edge has the oneway attribute
if osm.KEY_OSMNX_ONEWAY in edge_data_dict:
# ensure that it is true
if edge_data_dict[osm.KEY_OSMNX_ONEWAY]:
compatible_edge_exists = True
break
# check for compatible edges
if not compatible_edge_exists:
# no compatible edges exist between these two nodes
return False
# otherwise, it is a roundabout
return True
# *****************************************************************************
# *****************************************************************************
def get_edges_from_a_to_b(network: nx.MultiDiGraph,
node_start,
node_end) -> list:
def get_edges_from_a_to_b(network: nx.MultiDiGraph, node_start, node_end) -> list:
"""
Retrieve the keys for edges from one node to another.
......@@ -692,18 +705,21 @@ def get_edges_from_a_to_b(network: nx.MultiDiGraph,
"""
if network.has_edge(u=node_start, v=node_end):
return [(node_start, node_end, key)
for key in network._adj[node_start][node_end]]
return [
(node_start, node_end, key) for key in network._adj[node_start][node_end]
]
else:
return []
# *****************************************************************************
# *****************************************************************************
def get_edges_between_two_nodes(network: nx.MultiDiGraph, u, v) -> list:
"""
Retrieve the keys for all edges involving two specific nodes.
The keys concern edges in both directions. For a single direction, consider
using the method get_edges_from_a_to_b instead.
......@@ -722,13 +738,13 @@ def get_edges_between_two_nodes(network: nx.MultiDiGraph, u, v) -> list:
A list of edge keys involving both nodes, in both directions.
"""
if network.has_edge(u, v):
# edges exist from u to v
_out = [(u,v,k) for k in network._adj[u][v]]
_out = [(u, v, k) for k in network._adj[u][v]]
try:
# try finding out if edges exist from v to u
_out.extend([(v,u,k) for k in network._adj[v][u]])
_out.extend([(v, u, k) for k in network._adj[v][u]])
except KeyError:
# edges do not exist from v to u
pass
......@@ -736,22 +752,26 @@ def get_edges_between_two_nodes(network: nx.MultiDiGraph, u, v) -> list:
return _out
elif network.has_edge(v, u):
# edges do not exist from u to v but exist from v to u
return [(v,u,k) for k in network._adj[v][u]]
return [(v, u, k) for k in network._adj[v][u]]
else:
# no edges found
return []
# *****************************************************************************
# *****************************************************************************
def get_edges_involving_node(network: nx.MultiDiGraph,
node_key,
include_outgoing_edges: bool = True,
include_incoming_edges: bool = True,
include_self_loops: bool = True) -> list:
def get_edges_involving_node(
network: nx.MultiDiGraph,
node_key,
include_outgoing_edges: bool = True,
include_incoming_edges: bool = True,
include_self_loops: bool = True,
) -> list:
"""
Retrieve the keys for all edges involving a specific node.
The keys concern incoming and outgoing edges. Optionally, the keys retrieved
can concern only incoming or outgoing edges, self-loops included or not.
......@@ -774,31 +794,39 @@ def get_edges_involving_node(network: nx.MultiDiGraph,
A list of edge keys involving the specified node.
"""
return [
edge_key
for edge_key in network.edges(keys=True)
if node_key in edge_key[0:2]
# outgoing edges
if ((node_key != edge_key[0] and not include_outgoing_edges) or
include_outgoing_edges)
if (
(node_key != edge_key[0] and not include_outgoing_edges)
or include_outgoing_edges
)
# incoming edges
if ((node_key != edge_key[1] and not include_incoming_edges) or
include_incoming_edges)
if (
(node_key != edge_key[1] and not include_incoming_edges)
or include_incoming_edges
)
# self-loops
if ((edge_key[0] != edge_key[1] and not include_self_loops) or
include_self_loops)
]
if (
(edge_key[0] != edge_key[1] and not include_self_loops)
or include_self_loops
)
]
# *****************************************************************************
# *****************************************************************************
def neighbours(network: nx.MultiDiGraph or nx.MultiGraph,
node_key,
ignore_self_loops: bool = True):
def neighbours(
network: nx.MultiDiGraph or nx.MultiGraph, node_key, ignore_self_loops: bool = True
):
"""
Return a given node\'s neighbours.
This method relies on networkx\'s neighbours method but adds the option to
ignore self-loops.
......@@ -819,29 +847,28 @@ def neighbours(network: nx.MultiDiGraph or nx.MultiGraph,
"""
if network.has_edge(node_key, node_key) and ignore_self_loops:
return (
_node_key
_node_key
for _node_key in nx.all_neighbors(network, node_key)
if _node_key != node_key
)
)
else:
return nx.all_neighbors(network, node_key)
# *****************************************************************************
# *****************************************************************************
def is_node_path(
network: nx.MultiDiGraph,
path: list,
consider_reversed_edges: bool = False) -> bool:
network: nx.MultiDiGraph, path: list, consider_reversed_edges: bool = False
) -> bool:
"""
Indicates if a given path qualifies as a node path in a directed network.
A node path consists of a sequence of nodes connected by directed edges.
The sequence must include at least two nodes.
Parameters
......@@ -875,15 +902,19 @@ def is_node_path(
return False
return True
else:
return nx.is_path(network, path)
return nx.is_path(network, path)
# *****************************************************************************
# *****************************************************************************
def is_edge_path(network: nx.MultiDiGraph,
path: list,
ignore_edge_direction: bool = False,
allow_multiple_formats: bool = False) -> bool:
def is_edge_path(
network: nx.MultiDiGraph,
path: list,
ignore_edge_direction: bool = False,
allow_multiple_formats: bool = False,
) -> bool:
"""
Indicates if a given path qualifies as an edge path in a directed network.
......@@ -906,106 +937,104 @@ def is_edge_path(network: nx.MultiDiGraph,
Returns True if the path qualifies as an edge path and False otherwise.
"""
if len(path) == 0:
# empty path
return False
else:
# all the edges have to exist
previous_edge_key_length = len(path[0])
for edge_i, tentative_edge_key in enumerate(path):
edge_key_length = len(tentative_edge_key)
if not allow_multiple_formats:
if previous_edge_key_length != edge_key_length:
# the edge key format changes: not a path
raise ValueError(
'The path must be provided using only one edge format.'
)
"The path must be provided using only one edge format."
)
# find out if the edge exists
if edge_key_length == 3:
# 3-tuple format
if not network.has_edge(u=tentative_edge_key[0],
v=tentative_edge_key[1],
key=tentative_edge_key[2]):
if not network.has_edge(
u=tentative_edge_key[0],
v=tentative_edge_key[1],
key=tentative_edge_key[2],
):
# the edge does not exist as specified
return False
elif edge_key_length == 2:
# 2-tuple format
if not network.has_edge(u=tentative_edge_key[0],
v=tentative_edge_key[1]):
if not network.has_edge(
u=tentative_edge_key[0], v=tentative_edge_key[1]
):
# the edge does not exist as specified
return False
else:
# unknown format
return False
# the edge exists: check if it forms a sequence
if edge_i != 0: # skip the first iteration
if edge_i != 0: # skip the first iteration
# if none of the current edge's nodes is mentioned in the
# previous edge, then no sequence is formed
if (tentative_edge_key[0] not in path[edge_i-1][0:2] and
tentative_edge_key[1] not in path[edge_i-1][0:2] and
ignore_edge_direction):
if (
tentative_edge_key[0] not in path[edge_i - 1][0:2]
and tentative_edge_key[1] not in path[edge_i - 1][0:2]
and ignore_edge_direction
):
return False
# if the previous edge's end node is not the current edge's
# start node, then it is not a valid edge path
if (path[edge_i-1][1] != tentative_edge_key[0] and
not ignore_edge_direction):
if (
path[edge_i - 1][1] != tentative_edge_key[0]
and not ignore_edge_direction
):
return False
# # check the formats, if necessary
# if (not allow_multiple_formats and
# len(path[edge_i-1]) != len(tentative_edge_key)):
# return False
previous_edge_key_length = edge_key_length
return True
# *****************************************************************************
# *****************************************************************************
def is_path_straight(network: nx.MultiDiGraph,
path: list,
consider_reversed_edges: bool = False,
ignore_self_loops: bool = False) -> bool:
def is_path_straight(
network: nx.MultiDiGraph,
path: list,
consider_reversed_edges: bool = False,
ignore_self_loops: bool = False,
) -> bool:
"""
Returns True if the path is straight and False otherwise.
A path is defined to be straight if it presents no options along it.
Parameters
......@@ -1033,33 +1062,42 @@ def is_path_straight(network: nx.MultiDiGraph,
# a straight path requires at least two nodes
path_length = len(path)
if path_length == 2:
return True # path with two nodes is always straight
return True # path with two nodes is always straight
# check if the intermediate nodes have the right number of neighbours
for intermediate_node in path[1:-1]:
if len(set(neighbours(
network,
intermediate_node,
ignore_self_loops=ignore_self_loops))
) != 2:
# the path is not straight if the intermediate nodes do not have
if (
len(
set(
neighbours(
network, intermediate_node, ignore_self_loops=ignore_self_loops
)
)
)
!= 2
):
# the path is not straight if the intermediate nodes do not have
# two distinct neighbours
return False
return False
# if all intermediate nodes have two neighbours, return True
return True
# *****************************************************************************
# *****************************************************************************
def find_simplifiable_paths(network: nx.MultiDiGraph,
excluded_nodes: list,
ignore_self_loops: bool = False,
consider_reversed_edges: bool = False,
include_both_directions: bool = False) -> list:
def find_simplifiable_paths(
network: nx.MultiDiGraph,
excluded_nodes: list,
ignore_self_loops: bool = False,
consider_reversed_edges: bool = False,
include_both_directions: bool = False,
) -> list:
"""
Enumerates the simplifiable paths found in a given graph.
A path is defined to be simplifiable if it presents no options along it
and involves at least three different nodes: two-node paths are straight
but are not simplifiable, with or without reversed edges.
......@@ -1075,7 +1113,7 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
paths including self-loops cannot be straight. The default is False.
consider_reversed_edges : bool, optional
If True, a straight path can include nodes connected in reverse. If
False, only edges in the stated direction will be considered. The
False, only edges in the stated direction will be considered. The
default is False.
include_both_directions : bool, optional
If True, and if reverse edges are allowed, simplifiable paths will
......@@ -1087,107 +1125,102 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
A list of the straight paths in the graph.
"""
# a straight path is a path where the intermediate nodes (all excluding the
# first and the last) have to exist and have exactly 2 distinct neighbours
# *************************************************************************
# *************************************************************************
# locate all the non-excluded nodes that can form straight paths
intermediate_candidate_nodes = set([
node_key
for node_key in network.nodes()
# the node cannot be among those excluded
if node_key not in excluded_nodes
# the node has to be linked to two other nodes other than itself
if len(set(
neighbours(
network,
node_key,
ignore_self_loops=True
)
)) == 2
# exclude nodes with self-loops if desired:
# 1) self-loops are tolerated (no need to check)
# 2) self-loops are not tolerated and they do not exist
if (ignore_self_loops or
(not ignore_self_loops and
not network.has_edge(node_key, node_key)))
])
intermediate_candidate_nodes = set(
[
node_key
for node_key in network.nodes()
# the node cannot be among those excluded
if node_key not in excluded_nodes
# the node has to be linked to two other nodes other than itself
if len(set(neighbours(network, node_key, ignore_self_loops=True))) == 2
# exclude nodes with self-loops if desired:
# 1) self-loops are tolerated (no need to check)
# 2) self-loops are not tolerated and they do not exist
if (
ignore_self_loops
or (not ignore_self_loops and not network.has_edge(node_key, node_key))
)
]
)
# *************************************************************************
# find out paths around the nodes identified
list_paths = []
list_paths_nodes = []
list_nodes_joined = set([])
# try to form paths around the candidate nodes
# try to form paths around the candidate nodes
for candidate_node in intermediate_candidate_nodes:
# skip if the node is already in a path
if candidate_node in list_nodes_joined:
continue
# select method
if consider_reversed_edges:
# reversed edges are accepted
new_sequence = _find_path_direction_insensitive(
network,
list_valid_nodes=intermediate_candidate_nodes-list_nodes_joined,
list_valid_nodes=intermediate_candidate_nodes - list_nodes_joined,
start_node=candidate_node,
ignore_self_loops=ignore_self_loops
)
ignore_self_loops=ignore_self_loops,
)
else:
# reversed edges are not accepted
new_sequence = _find_path_direction_sensitive(
network,
list_valid_nodes=intermediate_candidate_nodes-list_nodes_joined,
list_valid_nodes=intermediate_candidate_nodes - list_nodes_joined,
start_node=candidate_node,
ignore_self_loops=ignore_self_loops
)
ignore_self_loops=ignore_self_loops,
)
# make sure the sequence is not redundant
if (len(new_sequence) <= 2 or
new_sequence in list_paths):
if len(new_sequence) <= 2 or new_sequence in list_paths:
# path is just one edge or has already been included
continue
# add the path
# add the path
list_paths.append(new_sequence)
list_paths_nodes.append(set(new_sequence))
if consider_reversed_edges and include_both_directions:
# directions do not matter:
# directions do not matter:
list_paths.append(new_sequence[::-1])
# update the list of intermediate nodes already on paths
# update the list of intermediate nodes already on paths
list_nodes_joined.update(set(new_sequence[1:-1]))
# *************************************************************************
# *************************************************************************
return list_paths
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def _find_path_direction_sensitive(
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool
) -> list:
def find_path_forward(network: nx.MultiDiGraph,
current_node,
path: list):
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool,
) -> list:
def find_path_forward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
)
)
# check each neighbour
for a_neighbour in current_neighbours:
# check the direction of edge towards the neighbour
......@@ -1206,12 +1239,8 @@ def _find_path_direction_sensitive(
# add the neighbour to the end of the path
path.append(a_neighbour)
# recursive call with extended path
return find_path_forward(
network,
path[-1],
path
)
else: # is not a valid node: path ends
return find_path_forward(network, path[-1], path)
else: # is not a valid node: path ends
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
......@@ -1221,32 +1250,38 @@ def _find_path_direction_sensitive(
# neighbour is already on the path, matches the start,
# and has two neighbours other than itself:
# close the loop and return the path
if (len(set(neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops))) == 2):
if (
len(
set(
neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops,
)
)
)
== 2
):
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
return path
# all neighbours have been visited: return the current path
return path
def find_path_backward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_backward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
)
)
# check each neighbour
# 1) if the neighbour is ahead and is a valid node:
# 1) if the neighbour is ahead and is a valid node:
# >> recursion w/ neighbour and then return the final path
# 2) if the neighbour is ahead and is not a valid node:
# 2) if the neighbour is ahead and is not a valid node:
# >> add it to the path and then return it
# 3) if the neighbour is not ahead and is on the path:
# 3) if the neighbour is not ahead and is on the path:
# >> continue, namely to check the other neighbour
# 4) if the neighbour is not ahead and is not on the path:
# 4) if the neighbour is not ahead and is not on the path:
# >> add it to the beginning of the path and continue
# check each neighbour
for a_neighbour in current_neighbours:
......@@ -1266,12 +1301,8 @@ def _find_path_direction_sensitive(
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# recursive call with extended path
return find_path_backward(
network,
path[0],
path
)
else: # is not a valid node: path ends
return find_path_backward(network, path[0], path)
else: # is not a valid node: path ends
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# return the path
......@@ -1282,8 +1313,8 @@ def _find_path_direction_sensitive(
# and has two neighbours other than itself:
# close the loop and return the path
# if (len(set(neighbours(
# network,
# a_neighbour,
# network,
# a_neighbour,
# ignore_self_loops=ignore_self_loops))) == 2):
# # add the neighbour to the start of the path
# path.insert(0, a_neighbour)
......@@ -1294,47 +1325,37 @@ def _find_path_direction_sensitive(
return path
# all neighbours have been visited: return the current path
return path
# *************************************************************************
# find the path forward, check for cycles and then find the path backwards
# find the forward path segment
path = find_path_forward(
network,
start_node,
[start_node]
)
path = find_path_forward(network, start_node, [start_node])
# cycles have to be detected on the first try
if len(path) >= 3 and path[0] == path[-1]:
if len(path) >= 3 and path[0] == path[-1]:
# it is a cycle: no need to search backwards
return path
# find the backward path segment
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def _find_path_direction_insensitive(
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool
) -> list:
def find_path_forward(network: nx.MultiDiGraph,
current_node,
path: list):
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool,
) -> list:
def find_path_forward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
)
)
# check each neighbour
for a_neighbour in current_neighbours:
# check the direction of edge towards the neighbour:
......@@ -1351,12 +1372,8 @@ def _find_path_direction_insensitive(
# add the neighbour to the end of the path
path.append(a_neighbour)
# recursive call with extended path
return find_path_forward(
network,
path[-1],
path
)
else: # is not a valid node: path ends
return find_path_forward(network, path[-1], path)
else: # is not a valid node: path ends
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
......@@ -1366,35 +1383,40 @@ def _find_path_direction_insensitive(
# neighbour is already on the path, matches the start,
# and has two neighbours other than itself:
# close the loop and return the path
if (len(set(neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops))) == 2):
if (
len(
set(
neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops,
)
)
)
== 2
):
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
return path
# all neighbours have been visited: return the current path
return path
def find_path_backward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_backward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
)
)
# check each neighbour
# 1) if the neighbour is ahead and is a valid node:
# 1) if the neighbour is ahead and is a valid node:
# >> recursion w/ neighbour and then return the final path
# 2) if the neighbour is ahead and is not a valid node:
# 2) if the neighbour is ahead and is not a valid node:
# >> add it to the path and then return it
# 3) if the neighbour is not ahead and is on the path:
# 3) if the neighbour is not ahead and is on the path:
# >> continue, namely to check the other neighbour
# 4) if the neighbour is not ahead and is not on the path:
# 4) if the neighbour is not ahead and is not on the path:
# >> add it to the beginning of the path and continue
# check each neighbour
for a_neighbour in current_neighbours:
# check the direction of edge towards the neighbour
......@@ -1411,12 +1433,8 @@ def _find_path_direction_insensitive(
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# recursive call with extended path
return find_path_backward(
network,
path[0],
path
)
else: # is not a valid node: path ends
return find_path_backward(network, path[0], path)
else: # is not a valid node: path ends
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# return the path
......@@ -1427,8 +1445,8 @@ def _find_path_direction_insensitive(
# and has two neighbours other than itself:
# close the loop and return the path
# if (len(set(neighbours(
# network,
# a_neighbour,
# network,
# a_neighbour,
# ignore_self_loops=ignore_self_loops))) == 2):
# # add the neighbour to the start of the path
# path.insert(0, a_neighbour)
......@@ -1440,33 +1458,27 @@ def _find_path_direction_insensitive(
return path
# all neighbours have been visited: return the current path
return path
# *************************************************************************
# find the path forward, check for cycles and then find the path backwards
# explore paths in the forward sense
path = find_path_forward(
network,
start_node,
[start_node]
)
path = find_path_forward(network, start_node, [start_node])
# check for cycles
if len(path) >= 3 and path[0] == path[-1]:
if len(path) >= 3 and path[0] == path[-1]:
# it is a cycle: no need to search backwards
return path
# explore paths in the backward sense and return the path
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def find_self_loops(network: nx.MultiDiGraph) -> list:
"""
Returns a list with the nodes that connect to themselves.
......@@ -1484,18 +1496,20 @@ def find_self_loops(network: nx.MultiDiGraph) -> list:
"""
return (
node_key
node_key
for node_key in network.nodes()
if network.has_edge(u=node_key, v=node_key)
)
)
# *****************************************************************************
# *****************************************************************************
def find_unconnected_nodes(network: nx.MultiDiGraph) -> list:
"""
Returns a list with the nodes that are not connected whilst in the network.
The method is meant to be used with MultiDiGraph objects.
Parameters
......@@ -1509,22 +1523,24 @@ def find_unconnected_nodes(network: nx.MultiDiGraph) -> list:
The list of unconnected nodes.
"""
return [
node_key
node_key
for node_key in network.nodes()
if len(tuple(network.neighbors(node_key))) == 0
]
]
# *****************************************************************************
# *****************************************************************************
def nearest_nodes_other_than_themselves(network: nx.MultiDiGraph,
node_keys: list,
return_dist: bool = False) -> list:
def nearest_nodes_other_than_themselves(
network: nx.MultiDiGraph, node_keys: list, return_dist: bool = False
) -> list:
"""
Returns a list with the keys of nodes closest to another set of nodes.
The method relies on osmnx\'s nearest_nodes method.
Parameters
......@@ -1540,38 +1556,38 @@ def nearest_nodes_other_than_themselves(network: nx.MultiDiGraph,
Returns
-------
nn or (nn, dist) : int/list or tuple
nearest node IDs or optionally a tuple where dist contains distances
nearest node IDs or optionally a tuple where dist contains distances
between the points and their nearest nodes (note: from nearest_nodes).
"""
# create a copy of the network
network_copy = network.copy()
# remove selected node keys from the copy
network_copy.remove_nodes_from(node_keys)
# use nearest_nodes from osmnx to find the nearest nodes in the copy
nearest_node_keys = nearest_nodes(
network_copy,
[network.nodes[node_key]['x']
for node_key in node_keys],
[network.nodes[node_key]['y']
for node_key in node_keys],
network_copy,
[network.nodes[node_key]["x"] for node_key in node_keys],
[network.nodes[node_key]["y"] for node_key in node_keys],
return_dist=return_dist,
)
)
return nearest_node_keys
# *****************************************************************************
# *****************************************************************************
def is_start_or_end_point_or_close(line: LineString,
point: Point,
tolerance: float = 1e-3) -> bool:
def is_start_or_end_point_or_close(
line: LineString, point: Point, tolerance: float = 1e-3
) -> bool:
"""
Returns True if a given point is near the start or end points of a line.
......@@ -1591,87 +1607,85 @@ def is_start_or_end_point_or_close(line: LineString,
end point.
"""
# get the start and end points
start_coords, *_, end_coords = line.coords
# # compare the coordinates
# if (tuple(point.coords)[0] == start_coords or
# if (tuple(point.coords)[0] == start_coords or
# tuple(point.coords)[0] == end_coords):
# return True
# compare with the start point
start_point = Point(start_coords)
if start_point.distance(point) <= tolerance:
return True
# compare with the end point
end_point = Point(end_coords)
if end_point.distance(point) <= tolerance:
return True
# return statement
return False
# *****************************************************************************
# *****************************************************************************
def is_start_or_end_point(line: LineString, point: Point) -> bool:
    """
    Returns True if a given point is the start or end point of a line.

    The comparison is exact: the point's coordinate tuple must match the
    line's first or last coordinate tuple bit-for-bit. For tolerance-based
    matching, use is_start_or_end_point_or_close instead.

    Parameters
    ----------
    line : LineString
        The object describing the line.
    point : Point
        The point under consideration.

    Returns
    -------
    bool
        A boolean indicating whether the point is the start or end point.
    """
    # get the start and end coordinate tuples of the line
    start_coords, *_, end_coords = line.coords
    # a Point has exactly one coordinate tuple: compare it with both ends
    point_coords = tuple(point.coords)[0]
    return point_coords == start_coords or point_coords == end_coords
# *****************************************************************************
# *****************************************************************************
def identify_edge_closest_to_node(
network: nx.MultiDiGraph,
node_keys: list,
crs: str = None) -> Tuple[list, nx.MultiDiGraph]:
network: nx.MultiDiGraph, node_keys: list, crs: str = None
) -> Tuple[list, nx.MultiDiGraph]:
"""
Identify the edges that are closest to a given set of nodes.
The network object should formatted according to OSMnx standards.
Distances are calculated using projected coordinates, unless a specific
coordinate system is given.
......@@ -1682,7 +1696,7 @@ def identify_edge_closest_to_node(
node_keys : list
A list of keys corresponding to the nodes under consideration.
crs : str, optional
The CRS under which the operation is to be done. The default is None.
The CRS under which the operation is to be done. The default is None.
If None, the CRS is determined automatically via the OSMnx library.
Returns
......@@ -1693,42 +1707,44 @@ def identify_edge_closest_to_node(
The object for the projected network.
"""
# *************************************************************************
# 1) ensure that the network crs is correct and convert if not
# 1) ensure that the network crs is correct and convert if not
# 2) identify the edges that are nearest to the nodes
# 3) revert network crs back to the original, if necessary
# *************************************************************************
# if it is a geographic CRS, convert it to a projected CRS
if not is_projected(network.graph['crs']) or type(crs) != type(None):
if not is_projected(network.graph["crs"]) or type(crs) != type(None):
# convert to a projected CRS (including if crs=None)
projected_network = project_graph(network, to_crs=crs)
else:
projected_network = network
# *************************************************************************
# 2) identify the edges that are nearest to the nodes
nearest_edge_keys = nearest_edges(
projected_network,
X=[projected_network.nodes[node_key][osm.KEY_OSMNX_X]
for node_key in node_keys],
Y=[projected_network.nodes[node_key][osm.KEY_OSMNX_Y]
for node_key in node_keys],
return_dist=False)
projected_network,
X=[
projected_network.nodes[node_key][osm.KEY_OSMNX_X] for node_key in node_keys
],
Y=[
projected_network.nodes[node_key][osm.KEY_OSMNX_Y] for node_key in node_keys
],
return_dist=False,
)
# return statement
return nearest_edge_keys, projected_network
# *****************************************************************************
# *****************************************************************************
......@@ -19,6 +19,7 @@ from ...problems.esipp.utils import unused_node_key
from ..misc.utils import generate_pseudo_unique_key
from ..gis import osm
from ..gis import identify as gis_iden
# from ..gis import identify as gis_calc
from .identify import close_to_extremities
from .calculate import update_street_count, edge_lengths
......@@ -26,6 +27,7 @@ from .calculate import update_street_count, edge_lengths
# *****************************************************************************
# *****************************************************************************
def remove_self_loops(network: nx.MultiDiGraph):
"""
Removes self-loops from a directed graph defined in a MultiDiGraph object.
......@@ -41,22 +43,24 @@ def remove_self_loops(network: nx.MultiDiGraph):
The keys to the nodes whose self-loops were removed.
"""
selflooping_nodes = list(gis_iden.find_self_loops(network))
for node in selflooping_nodes:
while network.has_edge(u=node, v=node):
network.remove_edge(u=node, v=node)
return selflooping_nodes
# *****************************************************************************
# *****************************************************************************
def transform_roundabouts_into_crossroads(
network: nx.MultiDiGraph,
roundabouts: list) -> list:
network: nx.MultiDiGraph, roundabouts: list
) -> list:
"""
Transform roundabouts into crossroads.
If there are roundabouts that encompass other roundabouts, the latter will
be ignored.
......@@ -73,10 +77,10 @@ def transform_roundabouts_into_crossroads(
A list of the node keys for the intersections created.
"""
# declare the output list
list_roundabout_centroids = []
# for each roundabout
for roundabout in roundabouts:
# make sure roundabout qualifies as a roundabout
......@@ -101,52 +105,53 @@ def transform_roundabouts_into_crossroads(
# break out of the loop
break
if roundabout_overlaps:
# the roundabout overlaps with some other one, skip it
list_roundabout_centroids.append(None)
continue
# the roundabout overlaps with some other one, skip it
list_roundabout_centroids.append(None)
continue
# *********************************************************************
# *********************************************************************
# create a new node whose location is the roundabout's centroid
list_point_coordinates = [
(network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y])
(
network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y],
)
for node_key in roundabout
]
]
new_geo = LineString(list_point_coordinates)
roundabout_centroid_key = generate_pseudo_unique_key(network)
network.add_node(
roundabout_centroid_key,
**{osm.KEY_OSMNX_X: new_geo.centroid.coords.xy[0][0],
osm.KEY_OSMNX_Y: new_geo.centroid.coords.xy[1][0]}
)
roundabout_centroid_key,
**{
osm.KEY_OSMNX_X: new_geo.centroid.coords.xy[0][0],
osm.KEY_OSMNX_Y: new_geo.centroid.coords.xy[1][0],
}
)
list_roundabout_centroids.append(roundabout_centroid_key)
# *********************************************************************
# *********************************************************************
# create new edges to link each node leading to the roundabout to the
# create new edges to link each node leading to the roundabout to the
# node just created (new_node_key)
# find the edges leading to the roundabout
list_edges_leading_to_roundabout = [
edge_key
# for each node in the roundabout
for node_key in roundabout
for node_key in roundabout
# for each neighbouring nodes
for other_node_key in gis_iden.neighbours(network, node_key)
for other_node_key in gis_iden.neighbours(network, node_key)
# if it is not in the roundabout itself
if other_node_key not in roundabout
if other_node_key not in roundabout
# for each edge between the two nodes
for edge_key in gis_iden.get_edges_between_two_nodes(
network,
node_key,
other_node_key)
]
network, node_key, other_node_key
)
]
# for each edge leading to the roundabout
for edge_key in list_edges_leading_to_roundabout:
# replace it with a new edge to the new node
# get edge dict
edge_dict = network.get_edge_data(edge_key[0],
edge_key[1],
edge_key[2])
edge_dict = network.get_edge_data(edge_key[0], edge_key[1], edge_key[2])
if osm.KEY_OSMNX_GEOMETRY in edge_dict:
# geometry exists
old_geometry = edge_dict[osm.KEY_OSMNX_GEOMETRY]
......@@ -154,25 +159,31 @@ def transform_roundabouts_into_crossroads(
# geometry does not exist
# create it
old_geometry = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
)
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# if osm.KEY_OSMNX_LENGTH in edge_dict:
# # length exists
# old_length = edge_dict[osm.KEY_OSMNX_LENGTH]
# else:
# # length does not exist
# old_length = edge_lengths(
# network,
# network,
# edge_keys=[edge_key])[edge_key]
# the old length has to exist
old_length = edge_dict[osm.KEY_OSMNX_LENGTH]
# *****************************************************************
# *****************************************************************
# find closest point
if edge_key[0] in roundabout:
# this edge starts from the roundabout
......@@ -181,23 +192,27 @@ def transform_roundabouts_into_crossroads(
# create geometry object between old roundabout point to the
# roundabout's centroid
extra_geometry = LineString(
[(network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y])]
)
if is_projected(network.graph['crs']):
[
(
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
]
)
if is_projected(network.graph["crs"]):
# projected graph: use direct method
extra_length = length(extra_geometry)
else: # unprojected graph: use great circle method
else: # unprojected graph: use great circle method
extra_length = great_circle(
lat1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[0]][osm.KEY_OSMNX_X]
)
lat1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
)
elif edge_key[1] in roundabout:
# this edge ends in the roundabout
new_edge_start_node = edge_key[0]
......@@ -205,41 +220,43 @@ def transform_roundabouts_into_crossroads(
# create geometry object between old roundabout point to the
# roundabout's centroid
extra_geometry = LineString(
[(network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
)
if is_projected(network.graph['crs']):
[
(
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
if is_projected(network.graph["crs"]):
# projected graph, use direct method
extra_length = length(extra_geometry)
else:
# unprojected graph, use great circle method
extra_length = great_circle(
lat1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X]
)
lat1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
)
# *****************************************************************
# *****************************************************************
edge_dict[osm.KEY_OSMNX_GEOMETRY] = linemerge(
[old_geometry,
extra_geometry])
edge_dict[osm.KEY_OSMNX_LENGTH] = old_length+extra_length
network.add_edge(new_edge_start_node,
new_edge_end_node,
**edge_dict)
[old_geometry, extra_geometry]
)
edge_dict[osm.KEY_OSMNX_LENGTH] = old_length + extra_length
network.add_edge(new_edge_start_node, new_edge_end_node, **edge_dict)
# *************************************************************************
# *************************************************************************
# remove the roundabout nodes
for roundabout_index, roundabout in enumerate(roundabouts):
# if the transformation of the roundabout was successful...
if list_roundabout_centroids[roundabout_index] != None:
......@@ -247,21 +264,23 @@ def transform_roundabouts_into_crossroads(
network.remove_nodes_from(roundabout)
# return
return list_roundabout_centroids
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# TODO: develop algorithm to traverse the graph in search of dead ends
def remove_dead_ends(network: nx.MultiDiGraph,
keepers: tuple = None,
max_iterations: int = 1) -> list:
def remove_dead_ends(
network: nx.MultiDiGraph, keepers: tuple = None, max_iterations: int = 1
) -> list:
"""
Removes dead ends (non-cyclical branches) from a directed graph.
The removal process is iterative and nodes that are initially not dead ends
may be removed in subsequent iterations.
......@@ -281,10 +300,10 @@ def remove_dead_ends(network: nx.MultiDiGraph,
A list of the keys for the nodes that were removed.
"""
if type(keepers) == type(None):
keepers = []
# while true
nodes_removed = []
iteration_counter = 0
......@@ -296,12 +315,9 @@ def remove_dead_ends(network: nx.MultiDiGraph,
for node_key in network.nodes()
if node_key not in keepers
# if it has at most one neighbour other than itself
if len(set(gis_iden.neighbours(
network,
node_key,
ignore_self_loops=True)
)) <= 1
]
if len(set(gis_iden.neighbours(network, node_key, ignore_self_loops=True)))
<= 1
]
# if there no nodes meeting those conditions, break out of loop
if len(target_nodes) == 0:
break
......@@ -312,27 +328,26 @@ def remove_dead_ends(network: nx.MultiDiGraph,
iteration_counter += 1
# store the nodes removed
nodes_removed.extend(target_nodes)
# return the list of nodes removed
# return the list of nodes removed
return nodes_removed
# *****************************************************************************
# *****************************************************************************
def replace_path(
network: nx.MultiDiGraph,
path: list
) -> tuple:
def replace_path(network: nx.MultiDiGraph, path: list) -> tuple:
"""
Replaces a simplifiable path with one equivalent edge linking both ends.
If there are parallel or anti-parallel edges along the path, only one will
be used to create the new edge.
There should only be one edge between each of the nodes on the path, since
only one will be used to create the new edge. By default, the edges between
the nodes should be in the forward direction, but this restriction can be
lifted. In that case, the edges between the nodes can be in any direction.
The intermediate nodes on the path will be removed.
Parameters
......@@ -353,69 +368,65 @@ def replace_path(
The key for the new edge.
"""
# *************************************************************************
# make sure path it is a simplifiable path
if not gis_iden.is_path_straight(
network,
path,
consider_reversed_edges=True,
ignore_self_loops=True
):
raise ValueError('The path cannot be simplified.')
network, path, consider_reversed_edges=True, ignore_self_loops=True
):
raise ValueError("The path cannot be simplified.")
# *************************************************************************
# create the new edge
# create the geometry/linestring
list_oneway = []
list_reversed = []
list_osmid = []
list_geometries = []
edge_length = 0
for node_pair_index in range(len(path)-1):
for node_pair_index in range(len(path) - 1):
# get one edge for this node pair
edge_key = list(gis_iden.get_edges_between_two_nodes(
network,
path[node_pair_index],
path[node_pair_index+1]
))
edge_key = list(
gis_iden.get_edges_between_two_nodes(
network, path[node_pair_index], path[node_pair_index + 1]
)
)
edge_key = sorted(
(network.edges[_key][osm.KEY_OSMNX_LENGTH], _key)
for _key in edge_key
)[0][1]
(network.edges[_key][osm.KEY_OSMNX_LENGTH], _key) for _key in edge_key
)[0][1]
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# geometry exists: possibly a composite geometry
# check if the geometry is consistent with the edge declaration
if gis_iden.is_edge_consistent_with_geometry(network, edge_key):
# the geometry is not reversed
list_geometries.append(
network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY]
)
else: # the geometry is reversed
list_geometries.append(network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY])
else: # the geometry is reversed
list_geometries.append(
network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY].reverse()
)
)
else:
# geometry does not exist: direct path
# the edge is not reversed: use it as is
list_geometries.append(
LineString(
[(network.nodes[
path[node_pair_index]][osm.KEY_OSMNX_X],
network.nodes[
path[node_pair_index]][osm.KEY_OSMNX_Y]),
(network.nodes[
path[node_pair_index+1]][osm.KEY_OSMNX_X],
network.nodes[
path[node_pair_index+1]][osm.KEY_OSMNX_Y])]
)
[
(
network.nodes[path[node_pair_index]][osm.KEY_OSMNX_X],
network.nodes[path[node_pair_index]][osm.KEY_OSMNX_Y],
),
(
network.nodes[path[node_pair_index + 1]][osm.KEY_OSMNX_X],
network.nodes[path[node_pair_index + 1]][osm.KEY_OSMNX_Y],
),
]
)
)
# osmid
if type(network.edges[edge_key][osm.KEY_OSMNX_OSMID]) == list:
list_osmid.extend(network.edges[edge_key][osm.KEY_OSMNX_OSMID])
......@@ -423,29 +434,21 @@ def replace_path(
list_osmid.append(network.edges[edge_key][osm.KEY_OSMNX_OSMID])
# reversed
if type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list:
list_reversed.extend(
network.edges[edge_key][osm.KEY_OSMNX_REVERSED]
)
list_reversed.extend(network.edges[edge_key][osm.KEY_OSMNX_REVERSED])
else:
list_reversed.append(
network.edges[edge_key][osm.KEY_OSMNX_REVERSED]
)
list_reversed.append(network.edges[edge_key][osm.KEY_OSMNX_REVERSED])
# oneway
if type(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]) == list:
list_oneway.extend(
network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]
)
list_oneway.extend(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY])
else:
list_oneway.append(
network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]
)
list_oneway.append(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY])
# update the edge length
edge_length += network.edges[edge_key][osm.KEY_OSMNX_LENGTH]
# merge the geometries
new_geo = linemerge(list_geometries)
# verify that it led to the creation of a linestring object
if type(new_geo) != LineString:
# TODO: make sure this is still relevant and add tests
......@@ -454,62 +457,59 @@ def replace_path(
list_geometries = []
# snap each consecutive geometry pair in the MultiLineString object
# since linemerge separates linestrings that are not contiguous
for geo_pair_index in range(len(new_geo.geoms)-1):
for geo_pair_index in range(len(new_geo.geoms) - 1):
list_geometries.append(
snap(
new_geo.geoms[geo_pair_index],
new_geo.geoms[geo_pair_index+1],
tolerance=1e-3
)
new_geo.geoms[geo_pair_index + 1],
tolerance=1e-3,
)
)
new_geo = linemerge(list_geometries)
# prepare edge data dict
list_osmid = list(set(list_osmid))
list_oneway = list(set(list_oneway))
list_reversed = list(set(list_reversed))
# create the dict
edge_dict = {
osm.KEY_OSMNX_LENGTH: edge_length,
osm.KEY_OSMNX_LENGTH: edge_length,
osm.KEY_OSMNX_GEOMETRY: new_geo,
osm.KEY_OSMNX_ONEWAY: (
list_oneway if len(list_oneway) != 1 else list_oneway[0]
),
),
osm.KEY_OSMNX_REVERSED: (
list_reversed if len(list_reversed) != 1 else list_reversed[0]
),
osm.KEY_OSMNX_OSMID: (
list_osmid if len(list_osmid) != 1 else list_osmid[0]
)
}
),
osm.KEY_OSMNX_OSMID: (list_osmid if len(list_osmid) != 1 else list_osmid[0]),
}
# *************************************************************************
# add edges
start_node = path[0]
start_node = path[0]
end_node = path[-1]
# create the forward edge
for_k = network.add_edge(
start_node,
end_node,
**edge_dict
)
for_k = network.add_edge(start_node, end_node, **edge_dict)
# delete all intermediate nodes
network.remove_nodes_from(path[1:-1])
# return the edge key
return (start_node, end_node, for_k)
# *****************************************************************************
# *****************************************************************************
def remove_longer_parallel_edges(network: nx.MultiDiGraph,
ignore_edge_directions: bool = False) -> list:
def remove_longer_parallel_edges(
network: nx.MultiDiGraph, ignore_edge_directions: bool = False
) -> list:
"""
Removes longer parallel edges from the network.
Parallel edges are those connecting the same nodes in the same direction. If
there are parallel edges between any given pair of nodes, the longer ones
will be removed. If desired, edge directions can be ignored. In that case,
......@@ -526,73 +526,69 @@ def remove_longer_parallel_edges(network: nx.MultiDiGraph,
A list of the edges removed.
"""
# redundancy: having more than one edge between two nodes
# solution: remove the longest one, leave the shortest one
# for each node pair
edges_removed = []
for node_one in network.nodes():
for node_one in network.nodes():
for node_two in network.nodes():
# skip self-loops
if node_one == node_two:
if node_one == node_two:
continue
# get the edges between the two nodes
if ignore_edge_directions: # both directions
if ignore_edge_directions: # both directions
list_edges = gis_iden.get_edges_between_two_nodes(
network,
node_one,
node_two
)
else: # one direction
network, node_one, node_two
)
else: # one direction
list_edges = gis_iden.get_edges_from_a_to_b(
network,
node_start=node_one,
node_end=node_two
)
network, node_start=node_one, node_end=node_two
)
# if none exist, skip
if len(list_edges) == 0:
continue
# otherwise, find out which is the shortest one
sorted_edges = sorted(
(network.edges[edge_key][osm.KEY_OSMNX_LENGTH], edge_key)
for edge_key in list_edges
)
network.remove_edges_from(
edge_tuple[1] for edge_tuple in sorted_edges[1:]
)
)
network.remove_edges_from(edge_tuple[1] for edge_tuple in sorted_edges[1:])
edges_removed.extend(edge_tuple[1] for edge_tuple in sorted_edges[1:])
return edges_removed
# *****************************************************************************
# *****************************************************************************
def merge_points_into_linestring(
line: LineString,
points: tuple or list,
tolerance: float = 7./3-4./3-1,
fixed_extremities: bool = True,
use_start_point_equidistant: bool = True
) -> LineString:
line: LineString,
points: tuple or list,
tolerance: float = 7.0 / 3 - 4.0 / 3 - 1,
fixed_extremities: bool = True,
use_start_point_equidistant: bool = True,
) -> LineString:
"""
Merge points into a line where they are closest to it.
The points are merged in succession and the line keeps changing as the
The points are merged in succession and the line keeps changing as the
points are merged but the original points remain on it.
The points added do not need to be close to the line. The tolerance
parameter is only used to determine closeness to the points already on the
line.
If a point is closest to a point on the line shared by two line segments,
then the former is placed after the latter, and not before.
If a point is closest to multiple line segments, then the point is placed
between the start and end points of the first among the closest segments.
......@@ -607,7 +603,7 @@ def merge_points_into_linestring(
The default is 7/3-4/3-1 (2.220446049250313e-16).
fixed_extremities : bool, optional
If False, the line can be extended beyond the original start and end
points. If not, points closest to the extremities are not merged and
points. If not, points closest to the extremities are not merged and
are instead linked to the closest extremities. The default is True.
use_start_point_equidistant : bool, optional
If True, the start point is adopted as the closest point when the start
......@@ -624,128 +620,127 @@ def merge_points_into_linestring(
A list with the indices of the points closest to the end point.
"""
if fixed_extremities:
# the line cannot be extended
# identify which points are close to the start and end points
# note: these points will not be merged
(close_to_start,
close_to_end,
line_distances,
start_distances,
end_distances) = close_to_extremities(
line,
(
close_to_start,
close_to_end,
line_distances,
start_distances,
end_distances,
) = close_to_extremities(
line,
points,
tolerance=tolerance,
use_start_point_equidistant=use_start_point_equidistant,
return_distances=True
)
return_distances=True,
)
# for each mew point
for i in range(len(points)):
if i in close_to_start or i in close_to_end:
# the point is close to the start or end nodes: skip iteration
continue
if points[i].coords[0] in line.coords:
# this point is already on the line: skip iteration
continue
# merge the points that are between the start and end points...
# create line segments for each pair of points on the line
line_segments = [
LineString([line.coords[j],line.coords[j+1]])
for j in range(len(line.coords)-1)
]
LineString([line.coords[j], line.coords[j + 1]])
for j in range(len(line.coords) - 1)
]
# calculate the distances between point i and each point on the li.
line_segment_distances = points[i].distance(line_segments)
# locate the closest pair of points
sorted_distances = sorted(
(line_segment_distance, j)
for j, line_segment_distance in enumerate(
line_segment_distances
)
)
(line_segment_distance, j)
for j, line_segment_distance in enumerate(line_segment_distances)
)
# prepare new line coordinates with the new point
line_coords = list(line.coords)
if (len(sorted_distances) >= 2 and
sorted_distances[0][0] == sorted_distances[1][0]):
line_coords = list(line.coords)
if (
len(sorted_distances) >= 2
and sorted_distances[0][0] == sorted_distances[1][0]
):
# there are 2(+) segments that are equally close to the point
# if the closest points are end/start points of a segment, then
# place the point after the second point of the first segment
if abs(Point(
line_segments[
sorted_distances[0][1]
].coords[-1]
).distance(points[i])-line_distances[i]) <= tolerance:
if (
abs(
Point(
line_segments[sorted_distances[0][1]].coords[-1]
).distance(points[i])
- line_distances[i]
)
<= tolerance
):
line_coords.insert(
# sorted_distances[0][1]+1,
sorted_distances[0][1]+2,
tuple(points[i].coords[0])
)
# sorted_distances[0][1]+1,
sorted_distances[0][1] + 2,
tuple(points[i].coords[0]),
)
else:
line_coords.insert(
sorted_distances[0][1]+1,
# sorted_distances[0][1]+2,
tuple(points[i].coords[0])
)
sorted_distances[0][1] + 1,
# sorted_distances[0][1]+2,
tuple(points[i].coords[0]),
)
else:
# there is only segment with the minimum distance:
# place the new point where the end point of the segment is
line_coords.insert(
sorted_distances[0][1]+1, # i.e., the segment number + 1
tuple(points[i].coords[0])
)
sorted_distances[0][1] + 1, # i.e., the segment number + 1
tuple(points[i].coords[0]),
)
# create new line
line = LineString(line_coords)
else:
# the line can be extended
raise NotImplementedError
# return statement
return line, close_to_start, close_to_end
# *****************************************************************************
# *****************************************************************************
def split_linestring(line: LineString,
points: list,
tolerance: float = 7./3-4./3-1):
def split_linestring(
line: LineString, points: list, tolerance: float = 7.0 / 3 - 4.0 / 3 - 1
):
"""
Split a line into segments according to a set of cutting points.
The cutting points should be close to the original line, since they will be
merged into it before the cutting takes place. If they are not close to the
line, the resulting segments will not resemble the original line.
......@@ -770,21 +765,18 @@ def split_linestring(line: LineString,
A list with the indices of the points closest to the end point.
"""
# add the points to the linestring
new_line, close_to_start, close_to_end = merge_points_into_linestring(
line=line,
points=points,
tolerance=tolerance,
fixed_extremities=True
)
if len(close_to_end)+len(close_to_start) == len(points):
line=line, points=points, tolerance=tolerance, fixed_extremities=True
)
if len(close_to_end) + len(close_to_start) == len(points):
# no changes to the line (assuming no swaps)
return [], close_to_start, close_to_end
# split the linestring object (new_line)
line_segments = []
# split the linestring object (new_line)
line_segments = []
previous_split_index = 0
# for each point on the new line (they should be ordered)
for coords_index, coords in enumerate(new_line.coords):
......@@ -795,57 +787,53 @@ def split_linestring(line: LineString,
# it is a start or end point: skip the iteration
# line_segments.append(None)
continue
# if it is not a start nor an end point, and it is on the original
# line, then
# if not a start nor an end point, build the segment between the
# line, then
# if not a start nor an end point, build the segment between the
# previous split point and the current input point
line_segments.append(
LineString(
new_line.coords[previous_split_index:coords_index+1]
)
)
LineString(new_line.coords[previous_split_index : coords_index + 1])
)
# store new end/start point
previous_split_index = coords_index
# else:
# # the point is not on the original line: split point
# pass
# next iteration
# add the last segment
line_segments.append(
LineString(
new_line.coords[previous_split_index:]
)
)
line_segments.append(LineString(new_line.coords[previous_split_index:]))
# return the geometries for each segment and the relevant points by order
return line_segments, close_to_start, close_to_end
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def recreate_edges(network: nx.MultiDiGraph,
points: dict,
tolerance: float = 7./3-4./3-1) -> tuple:
def recreate_edges(
network: nx.MultiDiGraph, points: dict, tolerance: float = 7.0 / 3 - 4.0 / 3 - 1
) -> tuple:
"""
Recreates OSMnx-type edges by splitting them into multiple smaller edges,
which are defined by points along the original edge.
If the points are closest to the extremities than other parts of an edge,
no changes are introduced and the points become synonymous with the closest
extremity (i.e., the start or end points).
If the points are closest to other parts of an edge, the edge is split there
with new nodes and edges being created to replace the original edge.
Parameters
----------
network : nx.MultiDiGraph
......@@ -864,19 +852,18 @@ def recreate_edges(network: nx.MultiDiGraph,
A dictionary keyed by edge and holding the keys for the edges that were
created to replace the original edge. If a given edge was not recreated,
its key does not appear in the dictionary.
"""
# declare outputs
connection_node_keys_per_edge = {}
recreated_edges = {}
# for each edge that is to be split
for edge_key, points_in_edge in points.items():
# check if there is a geometry already
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# get the geometry
......@@ -888,26 +875,31 @@ def recreate_edges(network: nx.MultiDiGraph,
else:
# there is not geometry, create it
line = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
)
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# split the line into segments using the intermediate points
# note: points that are close to the start and end points are handled
# separately
line_segments, close_to_start, close_to_end = split_linestring(
line=line,
points=points_in_edge
)
line=line, points=points_in_edge
)
# link each point to a node key:
# those closer to the start node: edge_key[0]
# those closer to the end node: edge_key[1]
# intermediate points: new node key via unused_node_key
# _node_keys_by_point = {
# points_in_edge[i]: (
# edge_key[0] if i in close_to_start else edge_key[1]
......@@ -916,136 +908,123 @@ def recreate_edges(network: nx.MultiDiGraph,
# ) else unused_node_key(network)
# for i in range(len(points_in_edge))
# }
_node_keys_by_point = {}
for i in range(len(points_in_edge)):
if i in close_to_start or i in close_to_end:
# point i is close to the extremities: use start/end node key
_node_keys_by_point[points_in_edge[i]] = (
edge_key[0] if i in close_to_start else edge_key[1]
)
)
else:
# point i is not close to the extremities: new node key
_node_keys_by_point[
points_in_edge[i]
] = unused_node_key(network)
_node_keys_by_point[points_in_edge[i]] = unused_node_key(network)
network.add_node(_node_keys_by_point[points_in_edge[i]])
# _node_keys = [
# (edge_key[0] if i in close_to_start else edge_key[1]) if (
# i in close_to_start or i in close_to_end
# ) else unused_node_key(network)
# for i in range(len(points_in_edge))
# ]
# should be the same order as in the inputs
_node_keys = [
_node_keys_by_point[point]
for point in points_in_edge
]
_node_keys = [_node_keys_by_point[point] for point in points_in_edge]
# *********************************************************************
# create new edges between the points to rebuild the edge
segment_keys = []
edge_dict = dict(network.get_edge_data(*edge_key))
for line_index, line_segment in enumerate(line_segments):
edge_dict[osm.KEY_OSMNX_GEOMETRY] = line_segment
if line_index == 0:
# initial segment
v_key = _node_keys_by_point[
Point(line_segment.coords[-1])
]
v_key = _node_keys_by_point[Point(line_segment.coords[-1])]
k_key = network.add_edge(
u_for_edge=edge_key[0],
v_for_edge=v_key,
**edge_dict
)
u_for_edge=edge_key[0], v_for_edge=v_key, **edge_dict
)
network.add_node(
v_key,
**{osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1]}
)
segment_keys.append((edge_key[0],v_key,k_key))
elif line_index == len(line_segments)-1:
v_key,
**{
osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1],
}
)
segment_keys.append((edge_key[0], v_key, k_key))
elif line_index == len(line_segments) - 1:
# final segment
u_key = _node_keys_by_point[
Point(line_segment.coords[0])
]
u_key = _node_keys_by_point[Point(line_segment.coords[0])]
k_key = network.add_edge(
u_for_edge=u_key,
v_for_edge=edge_key[1],
**edge_dict
)
u_for_edge=u_key, v_for_edge=edge_key[1], **edge_dict
)
network.add_node(
u_key,
**{osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1]}
)
segment_keys.append((u_key,edge_key[1],k_key))
else: # intermediate segment
u_key = _node_keys_by_point[
Point(line_segment.coords[0])
]
v_key = _node_keys_by_point[
Point(line_segment.coords[-1])
]
u_key,
**{
osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1],
}
)
segment_keys.append((u_key, edge_key[1], k_key))
else: # intermediate segment
u_key = _node_keys_by_point[Point(line_segment.coords[0])]
v_key = _node_keys_by_point[Point(line_segment.coords[-1])]
k_key = network.add_edge(
u_for_edge=u_key,
v_for_edge=v_key,
**edge_dict
)
u_for_edge=u_key, v_for_edge=v_key, **edge_dict
)
network.add_node(
u_key,
**{osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1]}
)
u_key,
**{
osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1],
}
)
network.add_node(
v_key,
**{osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1]}
)
segment_keys.append((u_key,v_key,k_key))
v_key,
**{
osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1],
}
)
segment_keys.append((u_key, v_key, k_key))
# TODO: use network.add_edges_from() for performance?
# TODO: try to create all the edges (with lengths included) in one go
# calculate the lengths
edge_lengths_by_dict = edge_lengths(network, edge_keys=segment_keys)
network.add_edges_from(
tuple(
(*segment_key,
{osm.KEY_OSMNX_LENGTH: edge_lengths_by_dict[segment_key]})
for segment_key in segment_keys
(
*segment_key,
{osm.KEY_OSMNX_LENGTH: edge_lengths_by_dict[segment_key]},
)
for segment_key in segment_keys
)
)
# update the outputs
if len(line_segments) > 0:
recreated_edges[edge_key] = segment_keys
......@@ -1053,19 +1032,21 @@ def recreate_edges(network: nx.MultiDiGraph,
# return statement
return connection_node_keys_per_edge, recreated_edges
# *****************************************************************************
# *****************************************************************************
def connect_nodes_to_edges(
network: nx.MultiDiGraph,
node_keys: list,
edge_keys: list,
store_unsimplified_geometries: bool = False,
use_one_edge_per_direction: bool = False
) -> tuple:
network: nx.MultiDiGraph,
node_keys: list,
edge_keys: list,
store_unsimplified_geometries: bool = False,
use_one_edge_per_direction: bool = False,
) -> tuple:
"""
Connects nodes to edges using additional edges in an OSMnx-formatted graph.
Parameters
----------
network : nx.MultiDiGraph
......@@ -1079,7 +1060,7 @@ def connect_nodes_to_edges(
If True, straight line geometries that are created are also preserved.
If False, they are not preserved. The default is False.
use_one_edge_per_direction : bool, optional
If True, two edges are used for each new edge created to connect a node.
If True, two edges are used for each new edge created to connect a node.
If False, only one (forward) edge will be created. The default is False.
Returns
......@@ -1088,7 +1069,7 @@ def connect_nodes_to_edges(
A network graph object where the node and edge pairs are connected.
new_edge_keys : list
An ordered list containing the keys for the new edges created to connect
each node.
each node.
connection_node_keys_per_edge : dict
A dictionary keyed by edge and holding the node keys for the points that
were provided initially to split the edge. These node keys are for nodes
......@@ -1099,39 +1080,38 @@ def connect_nodes_to_edges(
its key does not appear in the dictionary.
"""
# *************************************************************************
# 1) group nodes by the edge they are closest to
# 2) for each edge, and node that is to be connected to it, find its closest
# 2) for each edge, and node that is to be connected to it, find its closest
# point on the edge
# 3) recreate each edge after dividing it at the specified points
# 4) connect the nodes to the edges
# 5) delete the original edges, if they have been split
# 6) calculate or update the edge attributes
# *************************************************************************
# *************************************************************************
# 1) group nodes by the edge they are closest to
nodes_to_connect_to_edge = {
edge_key: tuple(
node_key
for other_edge_key, node_key in zip(edge_keys, node_keys)
if other_edge_key == edge_key
)
)
for edge_key in set(edge_keys)
}
}
# *************************************************************************
# *************************************************************************
# 2) for each edge, and node that is to be connected to it, find its closest
# 2) for each edge, and node that is to be connected to it, find its closest
# point on the edge
points_per_edge = {}
for edge_key, _node_keys in nodes_to_connect_to_edge.items():
# check if the geometry exists
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# the geometry object exists, get it
......@@ -1139,58 +1119,66 @@ def connect_nodes_to_edges(
else:
# the geometry object does not exist, make it
edge_geo = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
)
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# store the geometry
if store_unsimplified_geometries:
# update the edge
network.add_edge(*edge_key,
**{osm.KEY_OSMNX_GEOMETRY: edge_geo})
network.add_edge(*edge_key, **{osm.KEY_OSMNX_GEOMETRY: edge_geo})
# use nearest_points to locate the closest points on the edge
points_per_edge[edge_key] = [
nearest_points(
edge_geo,
Point(network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y])
)[0] # [0] to get the point on the edge
edge_geo,
Point(
network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y],
),
)[
0
] # [0] to get the point on the edge
for node_key in _node_keys
]
]
# TIP: exclude the points that can be considered close to the start or end nodes
# TIP: use the shortest line method to obtain the line geometry
# *************************************************************************
# *************************************************************************
# 3) recreate each edge after dividing it at the specified points
connection_node_keys_per_edge, recreated_edges = recreate_edges(
network,
points=points_per_edge
)
network, points=points_per_edge
)
# put the keys for the connection nodes
connection_node_keys = [
connection_node_keys_per_edge[edge_key][
nodes_to_connect_to_edge[edge_key].index(node_key)
]
for node_key, edge_key in zip(node_keys, edge_keys)
]
for node_key, edge_key in zip(node_keys, edge_keys)
]
# delete the original edges, if they have been split
network.remove_edges_from(recreated_edges)
# *************************************************************************
# *************************************************************************
# 4) connect the nodes to the edges
connection_edge_containers = []
for node_key, connection_node_key in zip(node_keys, connection_node_keys):
# skip self-loops
if node_key == connection_node_key:
......@@ -1198,73 +1186,65 @@ def connect_nodes_to_edges(
# proceed with other types of edges
if use_one_edge_per_direction:
# add one directed edge per direction
connection_edge_containers.append(
(node_key, connection_node_key)
)
connection_edge_containers.append(
(connection_node_key, node_key)
)
connection_edge_containers.append((node_key, connection_node_key))
connection_edge_containers.append((connection_node_key, node_key))
else:
# add one directed edge starting from the edge and ending in the node
connection_edge_containers.append(
(connection_node_key, node_key)
)
connection_edge_containers.append((connection_node_key, node_key))
edge_keys = network.add_edges_from(connection_edge_containers)
# *************************************************************************
# *************************************************************************
# 5) calculate or update the edge attributes
# calculate edge lengths and street counts for the new edges
if len(edge_keys) != 0:
# there are new edges: calculate the lengths and add them
new_edge_keys = [
(*edge_tuple[0:2], edge_key) # apply it only to specific edges
for edge_tuple, edge_key in zip(
connection_edge_containers, edge_keys)
]
if is_projected(network.graph['crs']):
(*edge_tuple[0:2], edge_key) # apply it only to specific edges
for edge_tuple, edge_key in zip(connection_edge_containers, edge_keys)
]
if is_projected(network.graph["crs"]):
# projected crs: use own method
lengths_dict = edge_lengths(
network,
edge_keys=new_edge_keys
)
lengths_dict = edge_lengths(network, edge_keys=new_edge_keys)
network.add_edges_from(
tuple(
(*edge_key,
{osm.KEY_OSMNX_LENGTH: lengths_dict[edge_key],
osm.KEY_OSMNX_ONEWAY: False,
osm.KEY_OSMNX_REVERSED: False,
osm.KEY_OSMNX_OSMID: None})
for edge_key in new_edge_keys
(
*edge_key,
{
osm.KEY_OSMNX_LENGTH: lengths_dict[edge_key],
osm.KEY_OSMNX_ONEWAY: False,
osm.KEY_OSMNX_REVERSED: False,
osm.KEY_OSMNX_OSMID: None,
},
)
for edge_key in new_edge_keys
)
)
else:
# unprojected crs: use the osmnx method
network = add_edge_lengths(network, edges=new_edge_keys)
# update the street count
update_street_count(network)
else:
new_edge_keys = []
# *************************************************************************
# *************************************************************************
return network, new_edge_keys, connection_node_keys_per_edge, recreated_edges
# *****************************************************************************
# *****************************************************************************
def remove_reversed_edges(
network: nx.MultiDiGraph,
reversed_attr: bool = True
) -> list:
def remove_reversed_edges(network: nx.MultiDiGraph, reversed_attr: bool = True) -> list:
"""
Removes reversed edges from an OSMnx-formatted multi directed edge graph.
......@@ -1291,11 +1271,13 @@ def remove_reversed_edges(
edge_key
for edge_key, reverse_edges in reversed_edges.items()
# at least one reversed edge
if len(reverse_edges) >= 1
if len(reverse_edges) >= 1
# must satisfy the reversed attribute value
if (network.edges[edge_key][osm.KEY_OSMNX_REVERSED] == reversed_attr
or type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list)
]
if (
network.edges[edge_key][osm.KEY_OSMNX_REVERSED] == reversed_attr
or type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list
)
]
# filter
for edge_key in edges_removed:
for other_edge_key in reversed_edges[edge_key]:
......@@ -1305,17 +1287,19 @@ def remove_reversed_edges(
network.remove_edges_from(edges_removed)
# return
return edges_removed
# *****************************************************************************
# *****************************************************************************
def create_reverse_edges(
network: nx.MultiDiGraph,
edge_keys: list = None,
) -> list:
network: nx.MultiDiGraph,
edge_keys: list = None,
) -> list:
"""
Creates reversed edges for all or select edges in a network.
A reversed edge has the same attributes as the original except that it is
declared in the opposite direction and has reversed geometry and related
attributes. The edges created are compliant with OSMnx.
......@@ -1339,16 +1323,16 @@ def create_reverse_edges(
A list with the keys for the new edges.
"""
edges_created = []
if type(edge_keys) == type(None):
edge_keys = tuple(network.edges(keys=True))
for edge_key in edge_keys:
# make sure the edge exists
if not network.has_edge(*edge_key):
raise ValueError('Unknown edge: '+str(edge_key))
raise ValueError("Unknown edge: " + str(edge_key))
# get its data
edge_dict = network.get_edge_data(*edge_key)
# create a dict for the reversed edge
......@@ -1356,34 +1340,33 @@ def create_reverse_edges(
# check for the reversed keyword
if type(edge_dict[osm.KEY_OSMNX_REVERSED]) == bool:
# boolean: negate it
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = (
not edge_dict[osm.KEY_OSMNX_REVERSED]
)
elif (type(edge_dict[osm.KEY_OSMNX_REVERSED]) == list and
len(edge_dict[osm.KEY_OSMNX_REVERSED]) == 2 and
len(set(edge_dict[osm.KEY_OSMNX_REVERSED])) == 2):
# list:
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = not edge_dict[
osm.KEY_OSMNX_REVERSED
]
elif (
type(edge_dict[osm.KEY_OSMNX_REVERSED]) == list
and len(edge_dict[osm.KEY_OSMNX_REVERSED]) == 2
and len(set(edge_dict[osm.KEY_OSMNX_REVERSED])) == 2
):
# list:
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = [True, False]
else:
raise ValueError(
'The edge '+str(edge_key)+'is not compliant with OSMnx.'
)
"The edge " + str(edge_key) + "is not compliant with OSMnx."
)
# check for the geometry keyword
if osm.KEY_OSMNX_GEOMETRY in edge_dict:
# a geometry exists, reverse it for the reverse edge dict
rev_edge_dict[osm.KEY_OSMNX_GEOMETRY] = (
edge_dict[osm.KEY_OSMNX_GEOMETRY].reverse()
)
rev_edge_dict[osm.KEY_OSMNX_GEOMETRY] = edge_dict[
osm.KEY_OSMNX_GEOMETRY
].reverse()
# add the edge
rev_k = network.add_edge(
edge_key[1],
edge_key[0],
**rev_edge_dict
)
rev_k = network.add_edge(edge_key[1], edge_key[0], **rev_edge_dict)
edges_created.append((edge_key[1], edge_key[0], rev_k))
# return the keys for the edges created
return edges_created
# *****************************************************************************
# *****************************************************************************
......@@ -5,14 +5,14 @@
# general
KEY_OSM_CITY = 'addr:city'
KEY_OSM_COUNTRY = 'addr:country'
KEY_OSM_HOUSE_NUMBER = 'addr:housenumber'
KEY_OSM_MUNICIPALITY = 'addr:municipality'
KEY_OSM_PLACE = 'addr:place'
KEY_OSM_POSTCODE = 'addr:postcode'
KEY_OSM_STREET = 'addr:street'
KEY_OSM_SOURCE = 'source'
KEY_OSM_CITY = "addr:city"
KEY_OSM_COUNTRY = "addr:country"
KEY_OSM_HOUSE_NUMBER = "addr:housenumber"
KEY_OSM_MUNICIPALITY = "addr:municipality"
KEY_OSM_PLACE = "addr:place"
KEY_OSM_POSTCODE = "addr:postcode"
KEY_OSM_STREET = "addr:street"
KEY_OSM_SOURCE = "source"
KEYS_OSM = [
KEY_OSM_CITY,
......@@ -22,41 +22,39 @@ KEYS_OSM = [
KEY_OSM_PLACE,
KEY_OSM_POSTCODE,
KEY_OSM_STREET,
KEY_OSM_SOURCE
]
KEY_OSM_SOURCE,
]
# country specific
KEY_COUNTRY_DK = 'dk'
KEY_COUNTRY_DK = "dk"
KEY_OSM_DK_BUILDING_ENTRANCE_ID = 'osak:identifier'
KEY_OSM_DK_BUILDING_ENTRANCE_ID = "osak:identifier"
KEY_OSM_BUILDING_ENTRANCE_ID = {KEY_COUNTRY_DK: KEY_OSM_DK_BUILDING_ENTRANCE_ID}
KEY_OSM_BUILDING_ENTRANCE_ID = {
KEY_COUNTRY_DK: KEY_OSM_DK_BUILDING_ENTRANCE_ID
}
# *****************************************************************************
# osmnx
KEY_OSMNX_OSMID = 'osmid'
KEY_OSMNX_ELEMENT_TYPE = 'element_type'
KEY_OSMNX_OSMID = "osmid"
KEY_OSMNX_ELEMENT_TYPE = "element_type"
KEY_OSMNX_NAME = 'name'
KEY_OSMNX_GEOMETRY = 'geometry'
KEY_OSMNX_REVERSED = 'reversed'
KEY_OSMNX_LENGTH = 'length'
KEY_OSMNX_ONEWAY = 'oneway'
KEY_OSMNX_X = 'x'
KEY_OSMNX_Y = 'y'
KEY_OSMNX_LON = 'lon'
KEY_OSMNX_LAT = 'lat'
KEY_OSMNX_STREET_COUNT = 'street_count'
KEY_OSMNX_NAME = "name"
KEY_OSMNX_GEOMETRY = "geometry"
KEY_OSMNX_REVERSED = "reversed"
KEY_OSMNX_LENGTH = "length"
KEY_OSMNX_ONEWAY = "oneway"
KEY_OSMNX_X = "x"
KEY_OSMNX_Y = "y"
KEY_OSMNX_LON = "lon"
KEY_OSMNX_LAT = "lat"
KEY_OSMNX_STREET_COUNT = "street_count"
KEYS_OSMNX = [
KEY_OSMNX_OSMID, # one half of multi-index for geodataframes from osmnx
KEY_OSMNX_ELEMENT_TYPE, # the other half of the multi-index from osmnx
KEY_OSMNX_OSMID, # one half of multi-index for geodataframes from osmnx
KEY_OSMNX_ELEMENT_TYPE, # the other half of the multi-index from osmnx
KEY_OSMNX_NAME,
KEY_OSMNX_GEOMETRY,
KEY_OSMNX_REVERSED,
......@@ -66,8 +64,8 @@ KEYS_OSMNX = [
KEY_OSMNX_Y,
KEY_OSMNX_LON,
KEY_OSMNX_LAT,
KEY_OSMNX_STREET_COUNT
]
KEY_OSMNX_STREET_COUNT,
]
KEYS_OSMNX_NODES = {
KEY_OSMNX_OSMID,
......@@ -77,28 +75,24 @@ KEYS_OSMNX_NODES = {
KEY_OSMNX_Y,
KEY_OSMNX_LON,
KEY_OSMNX_LAT,
KEY_OSMNX_STREET_COUNT
}
KEY_OSMNX_STREET_COUNT,
}
KEYS_OSMNX_NODES_ESSENTIAL = {
KEY_OSMNX_OSMID,
KEY_OSMNX_NAME,
KEY_OSMNX_STREET_COUNT
}
KEYS_OSMNX_NODES_ESSENTIAL = {KEY_OSMNX_OSMID, KEY_OSMNX_NAME, KEY_OSMNX_STREET_COUNT}
KEYS_OSMNX_EDGES = {
KEY_OSMNX_OSMID,
KEY_OSMNX_LENGTH,
KEY_OSMNX_ONEWAY,
KEY_OSMNX_GEOMETRY,
KEY_OSMNX_REVERSED
}
KEY_OSMNX_REVERSED,
}
KEYS_OSMNX_EDGES_ESSENTIAL = {
KEY_OSMNX_OSMID,
KEY_OSMNX_LENGTH,
KEY_OSMNX_ONEWAY,
KEY_OSMNX_REVERSED
}
KEY_OSMNX_REVERSED,
}
# *****************************************************************************
\ No newline at end of file
# *****************************************************************************
# imports
from ast import literal_eval
......@@ -27,109 +26,108 @@ from ..gis import calculate as gis_calc
# constants
KEY_GPD_CRS = 'crs'
KEY_GPD_GEOMETRY = 'geometry'
KEY_GPD_CRS = "crs"
KEY_GPD_GEOMETRY = "geometry"
RKW_GPKG = 'packed'
RKW_GPKG = "packed"
# *****************************************************************************
# *****************************************************************************
# TODO: complete method
def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
# columns incompatible with GPKG format:
# 1) columns with equivalent lowercase names
# 2) columns of Nones (fiona 1.9.3; appears to work with fiona 1.8.x)
# 3) columns with lists, dicts (keys become strings), tuples and sets
# 4) columns with other types except 'geometry' types in the geometry col.
# 5) columns with multiple types
# packable columns: 1), 2) and 3)
# *************************************************************************
# 1) columns with equivalent lowercase names
lowercase_columns = tuple(
column.lower()
for column in gdf.columns
)
lowercase_columns = tuple(column.lower() for column in gdf.columns)
set_columns = set(
column
for column, lccolumn in zip(gdf.columns, lowercase_columns)
if lowercase_columns.count(lccolumn) >= 2
)
)
# *************************************************************************
# for each column
for column in gdf.columns:
# if the column has already been identified, or if it is the geometry
# one (identified via KEY_GPD_GEOMETRY), skip the current column
if column == KEY_GPD_GEOMETRY or column in set_columns:
continue
# 2) columns of Nones (fiona 1.9.3; appears to work with fiona 1.8.x)
# 3) columns with lists, dicts (keys become strings), tuples and sets
# identify the type of objects in each row
set_types = set(
type(gdf.loc[(index,column)])
for index in gdf.index
)
set_types = set(type(gdf.loc[(index, column)]) for index in gdf.index)
# allowed types: int, float, numpy floats
if (len(set_types) == 1 and
(str in set_types or float in set_types or int in set_types or
bool in set_types or float64 in set_types or int64 in set_types)):
# if (len(set_types) == 1 and
# (str in set_types or float in set_types or int in set_types or
# bool in set_types or float64 in set_types or int64 in set_types or
# type(None) in set_types)
# ):
if len(set_types) == 1 and (
str in set_types
or float in set_types
or int in set_types
or bool in set_types
or float64 in set_types
or int64 in set_types
):
# if (len(set_types) == 1 and
# (str in set_types or float in set_types or int in set_types or
# bool in set_types or float64 in set_types or int64 in set_types or
# type(None) in set_types)
# ):
# these are allowed
continue
else:
# two or more different types are not allowed
set_columns.add(column)
# *************************************************************************
return set_columns
# *****************************************************************************
# *****************************************************************************
def write_gdf_file(gdf: GeoDataFrame,
filename: str,
columns_to_pack: tuple = None,
preserve_original: bool = True,
**kwargs):
def write_gdf_file(
gdf: GeoDataFrame,
filename: str,
columns_to_pack: tuple = None,
preserve_original: bool = True,
**kwargs
):
"""
Writes the contents of a GeoDataFrame object into a GIS-compatible file.
The method differs from the GeoDataFrame.to_file() method by allowing
The method differs from the GeoDataFrame.to_file() method by allowing
objects with columns whose elements are containers to be written to a file.
For this, it relies on the repr() method. For correctly recognising these
For this, it relies on the repr() method. For correctly recognising these
elements while reading the file, the literal_eval() method should be used.
Note that the literal_eval() is not completely safe (e.g., is vulnerable to
Note that the literal_eval() is not completely safe (e.g., is vulnerable to
denial of service attacks) but does not allow for arbitrary code execution.
Other format rules:
- Missing values in object columns should be specified as None types
......@@ -159,84 +157,79 @@ def write_gdf_file(gdf: GeoDataFrame,
-------
None.
"""
"""
if preserve_original:
# copy the original (slower)
new_gdf = gdf.copy()
else:
# just point to the original (faster)
new_gdf = gdf
if type(columns_to_pack) != tuple:
# no columns identified, find the columns with containers
# TODO: reach this statement
columns_to_pack = tuple(find_gpkg_packable_columns(gdf))
else:
# focus on specific columns
for column in columns_to_pack:
if column not in new_gdf.columns:
# TODO: reach this statement
raise ValueError('Unknown column: '+str(column))
raise ValueError("Unknown column: " + str(column))
# handle NaN and other values
#new_gdf[column].fillna("NaN", inplace=True)
# new_gdf[column].fillna("NaN", inplace=True)
# containers have to be transformed into strings
new_gdf[column] = new_gdf[column].apply(lambda x: repr(x))
# format specific limitations
# GPKG: columns with the same lower case equivalent are not allowed
if '.gpkg' in filename: # solution: use reserved words and numbers
# identify incompatible columns
lowercase_columns = tuple(
column.lower()
for column in gdf.columns
)
if ".gpkg" in filename: # solution: use reserved words and numbers
# identify incompatible columns
lowercase_columns = tuple(column.lower() for column in gdf.columns)
# place all their contents into one new column
pack_columns(
gdf=new_gdf,
gdf=new_gdf,
columns=list(
column
for column, lccolumn in zip(gdf.columns, lowercase_columns)
if lowercase_columns.count(lccolumn) >= 2
)
)
),
)
# the GeoDataFrame object is ready: write it
new_gdf.to_file(filename, **kwargs)
# *****************************************************************************
# *****************************************************************************
def pack_columns(gdf: GeoDataFrame,
columns: list,
packed_column_name: str = RKW_GPKG,
convert_to_string: bool = True):
def pack_columns(
gdf: GeoDataFrame,
columns: list,
packed_column_name: str = RKW_GPKG,
convert_to_string: bool = True,
):
"""
Places the contents of multiple GeoDataFrame columns into a single one.
This method is intended to prepare a GeoDataFrame object for I/O, since so-
me file formats (e.g., GPKG) place restrictions on column names. By placing
the contents of various columns into a single one, these can be correctly
the contents of various columns into a single one, these can be correctly
unpacked later, provided some conditions are met concerning the contents.
Parameters
......@@ -261,54 +254,52 @@ def pack_columns(gdf: GeoDataFrame,
"""
# if only one or no columns are specified, change nothing
if len(columns) <= 1:
return
# if the new column name is pre-existing, raise error
if packed_column_name in gdf.columns:
# TODO: reach this statement
raise ValueError('The desired column name already exists.')
raise ValueError("The desired column name already exists.")
# create a new data dict
data_dict = {
index: {
column: gdf.loc[(index,column)] # gdf[repeated_column].loc[index]
column: gdf.loc[(index, column)] # gdf[repeated_column].loc[index]
# column: repr(gdf.loc[(index,column)])
for column in columns
}
for index in gdf.index
}
for index in gdf.index
}
# add a new column
gdf[packed_column_name] = Series(data=data_dict, index=gdf.index)
# convert it to a string, if needed
if convert_to_string:
gdf[packed_column_name] = gdf[packed_column_name].apply(
lambda x: repr(x)
)
gdf[packed_column_name] = gdf[packed_column_name].apply(lambda x: repr(x))
# drop original columns
gdf.drop(labels=columns, axis=1, inplace=True)
# *****************************************************************************
# *****************************************************************************
def unpack_columns(gdf: GeoDataFrame, packed_column_name: str = RKW_GPKG):
"""
Unpacks a specific GeoDataFrame column into multiple columns.
This method is intended to allow reading GeoDataFrame data from files, sin-
ce the conventional formats (e.g., GPKG) introduce some restrictions.
Parameters
----------
gdf : GeoDataFrame
......@@ -328,45 +319,45 @@ def unpack_columns(gdf: GeoDataFrame, packed_column_name: str = RKW_GPKG):
"""
if packed_column_name not in gdf.columns:
# TODO: reach this statement
raise ValueError('The column specified does not exist.')
raise ValueError("The column specified does not exist.")
# if there are no rows, there is nothing to unpack
if len(gdf) != 0:
# the object is not empty
# create a dict with one dict per merged column
# each dict corresponds to one packed column, to be keyed with index
column_content_dict = {
merged_column: {
index: gdf.loc[(index,packed_column_name)][merged_column]
#index: gdf[packed_column_name].loc[index][merged_column]
index: gdf.loc[(index, packed_column_name)][merged_column]
# index: gdf[packed_column_name].loc[index][merged_column]
for index in gdf.index
}
}
for merged_column in gdf[packed_column_name].iloc[0]
}
# create the columns
}
# create the columns
for name, content in column_content_dict.items():
gdf[name] = Series(data=content, index=gdf.index)
# delete the packed column
gdf.drop(labels=packed_column_name, axis=1, inplace=True)
# *****************************************************************************
# *****************************************************************************
def read_gdf_file(filename: str,
packed_columns: tuple = None,
index: str or list = None) -> GeoDataFrame:
def read_gdf_file(
filename: str, packed_columns: tuple = None, index: str or list = None
) -> GeoDataFrame:
"""
Loads the contents of a file with GIS data into a GeoDataFrame object.
The method differs from the GeoDataFrame.read_file() method by recognising
elements with container data. For this, it relies on the literal_eval() me-
tho, which is not completely safe (e.g., is vulnerable to denial of service
......@@ -396,257 +387,236 @@ def read_gdf_file(filename: str,
GeoDataFrame
The GeoDataFrame object with the data loaded from the file.
"""
"""
gdf = read_file(filename)
# unpack special columns
if '.gpkg' in filename and RKW_GPKG in gdf.columns:
if ".gpkg" in filename and RKW_GPKG in gdf.columns:
# packed column appears to exist: decode column contents
gdf[RKW_GPKG] = gdf[RKW_GPKG].apply(
lambda x: literal_eval(x)
)
gdf[RKW_GPKG] = gdf[RKW_GPKG].apply(lambda x: literal_eval(x))
# unpack it
unpack_columns(gdf=gdf, packed_column_name=RKW_GPKG)
# handle types
if type(index) != type(None):
# a specific index is required, replace existing one
gdf.set_index(index, drop=True, inplace=True)
if type(packed_columns) != tuple:
# figure out which ones need it...
# TODO: reach this statement
raise NotImplementedError
#packed_columns = tuple(find_gpkg_packable_columns(gdf))
# packed_columns = tuple(find_gpkg_packable_columns(gdf))
# focus on specific columns
for column in packed_columns:
if column not in gdf.columns:
# TODO: reach this statement
raise ValueError('Unknown column: '+str(column))
gdf[column] = gdf[column].apply(
lambda x: literal_eval(x)
)
raise ValueError("Unknown column: " + str(column))
gdf[column] = gdf[column].apply(lambda x: literal_eval(x))
return gdf
# *****************************************************************************
# *****************************************************************************
# create osmnx-like geodataframes for nodes
def create_node_geodataframe(longitudes: tuple or list,
latitudes: tuple or list,
osmids: tuple or list = None,
crs: str = "EPSG:4326",
**kwargs) -> GeoDataFrame:
def create_node_geodataframe(
longitudes: tuple or list,
latitudes: tuple or list,
osmids: tuple or list = None,
crs: str = "EPSG:4326",
**kwargs
) -> GeoDataFrame:
if len(longitudes) != len(latitudes):
raise ValueError('The input parameters have mismatched sizes.')
raise ValueError("The input parameters have mismatched sizes.")
if type(osmids) != type(None):
# check sizes
if len(longitudes) != len(osmids):
raise ValueError('The input parameters have mismatched sizes.')
raise ValueError("The input parameters have mismatched sizes.")
else:
# generate node keys
# generate node keys
osmids = (str(uuid4()) for i in range(len(longitudes)))
data_dict = {
osm.KEY_OSMNX_GEOMETRY: [
Point(longitude, latitude)
for longitude, latitude in zip(longitudes, latitudes)
],
}
],
}
for kwarg in kwargs:
data_dict[kwarg] = kwargs[kwarg]
return GeoDataFrame(
data_dict,
index=MultiIndex.from_tuples(
[('node', osmid) for osmid in osmids],
names=[osm.KEY_OSMNX_ELEMENT_TYPE,
osm.KEY_OSMNX_OSMID]
),
crs=crs
)
[("node", osmid) for osmid in osmids],
names=[osm.KEY_OSMNX_ELEMENT_TYPE, osm.KEY_OSMNX_OSMID],
),
crs=crs,
)
# *****************************************************************************
# *****************************************************************************
def prepare_node_data_from_geodataframe(
gdf: GeoDataFrame,
node_key_column: str = None,
include_columns: list = None,
include_geometry: bool = False) -> tuple:
gdf: GeoDataFrame,
node_key_column: str = None,
include_columns: list = None,
include_geometry: bool = False,
) -> tuple:
"""Prepare a container with node data from a GeoDataFrame object."""
node_keys = []
node_data_container = []
node_key_to_gdf_index_dict = {}
# check if the GeoDataFrame has the right type of index
if gdf.index.names != [osm.KEY_OSMNX_ELEMENT_TYPE, osm.KEY_OSMNX_OSMID]:
raise ValueError(
'The GeoDataFrame object does not have the right index.')
raise ValueError("The GeoDataFrame object does not have the right index.")
# for entry in the gdf object
for gdf_entry in range(len(gdf)):
# select key
if type(node_key_column) == str:
# the node_key_column has been specified: use a specific column as key
node_key = gdf.iloc[gdf_entry][node_key_column]
else: # default case: the key is the OSM identifier (should be unique)
else: # default case: the key is the OSM identifier (should be unique)
# use the OSMID as the node key
node_key = gdf.index[gdf_entry][1]
# select node data
geo = gdf.iloc[gdf_entry][KEY_GPD_GEOMETRY]
node_dict = {
osm.KEY_OSMNX_X: geo.x,
osm.KEY_OSMNX_Y: geo.y
}
# add geometry
node_dict = {osm.KEY_OSMNX_X: geo.x, osm.KEY_OSMNX_Y: geo.y}
# add geometry
if include_geometry:
node_dict[osm.KEY_OSMNX_GEOMETRY] = geo
# add extra columns
if type(include_columns) == list:
for other_column in include_columns:
node_dict[other_column] = gdf.iloc[gdf_entry][other_column]
# create new entry in container
node_data_container.append(
(node_key,
node_dict)
)
node_data_container.append((node_key, node_dict))
# store node key
node_keys.append(node_key)
# update the dict
node_key_to_gdf_index_dict[node_key] = gdf.index[gdf_entry]
# *************************************************************************
return node_keys, node_data_container, node_key_to_gdf_index_dict
# *****************************************************************************
# *****************************************************************************
# TODO: simplify the passing of options to the methods relied upon
def plot_discrete_attributes(gdf_buildings: GeoDataFrame,
column: str,
category_to_label: dict,
zoom_level: int = 15,
figsize: tuple = (25,25),
legend_title: str = None,
markersize: int = 50,
edgecolor: str = 'k',
linewidth: float = 0.5,
markeredgewidth: float = 0.5,
markeredgecolor: str = 'k',
include_basemap: bool = False):
def plot_discrete_attributes(
gdf_buildings: GeoDataFrame,
column: str,
category_to_label: dict,
zoom_level: int = 15,
figsize: tuple = (25, 25),
legend_title: str = None,
markersize: int = 50,
edgecolor: str = "k",
linewidth: float = 0.5,
markeredgewidth: float = 0.5,
markeredgecolor: str = "k",
include_basemap: bool = False,
):
"""Plots a map with discrete attributes found in GeoDataFrame column."""
gdf_map = gdf_buildings.to_crs(epsg=3857)
ax = gdf_map.plot(figsize=figsize,
legend=True,
categorical=True,
column=column,
markersize=markersize,
edgecolor=edgecolor,
linewidth=linewidth
)
ax = gdf_map.plot(
figsize=figsize,
legend=True,
categorical=True,
column=column,
markersize=markersize,
edgecolor=edgecolor,
linewidth=linewidth,
)
# adjust legend labels
legend_handles = ax.legend_.legend_handles
for legend_handle in legend_handles:
for legend_handle in legend_handles:
legend_handle.set_markeredgewidth(markeredgewidth)
legend_handle.set_markeredgecolor(markeredgecolor)
# convert keys to string (since that is what the method asks for)
_category_to_label = {
str(key):value for key, value in category_to_label.items()
}
legend_texts = [
_category_to_label[text.get_text()] for text in ax.legend_.texts
]
ax.legend(
legend_handles,
legend_texts,
title=legend_title
)
_category_to_label = {str(key): value for key, value in category_to_label.items()}
legend_texts = [_category_to_label[text.get_text()] for text in ax.legend_.texts]
ax.legend(legend_handles, legend_texts, title=legend_title)
# add base map
if include_basemap:
cx.add_basemap(ax,
#crs="EPSG:4326", # switch to another crs
zoom=zoom_level,
source=cx.providers.OpenStreetMap.Mapnik)
cx.add_basemap(
ax,
# crs="EPSG:4326", # switch to another crs
zoom=zoom_level,
source=cx.providers.OpenStreetMap.Mapnik,
)
# *****************************************************************************
# *****************************************************************************
def count_ocurrences(gdf: GeoDataFrame,
column: str,
column_entries: list = None) -> dict:
def count_ocurrences(
gdf: GeoDataFrame, column: str, column_entries: list = None
) -> dict:
"""
Counts the number of occurrences per entry in a DataFrame object's column.
If a list is provided, only the entries that match those in the list are
If a list is provided, only the entries that match those in the list are
counted. If no list is provided, all unique entries are counted.
Parameters
......@@ -665,70 +635,64 @@ def count_ocurrences(gdf: GeoDataFrame,
A dictionary with the counts whose keys are the values counted.
"""
if type(column_entries) == list:
# find entries also present in the dict
# initialise dict
count_dict = {}
# for each key in the dict
for key in column_entries:
# store the number of rows
count_dict[key] = gdf[gdf[column]==key].shape[0]
count_dict[key] = gdf[gdf[column] == key].shape[0]
# count the number of rows with this key
if type(key) == type(None):
count_dict[key] = gdf[gdf[column].isnull()].shape[0]
count_dict[key] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[key] = gdf[gdf[column]==key].shape[0]
count_dict[key] = gdf[gdf[column] == key].shape[0]
else:
# find all unique entries
# initialise dict
count_dict = {}
for entry in gdf[column]:
# check if it is already in the dict
if entry in count_dict:
# it is, skip
continue
# it is not, count and store the number of rows with said entry
if type(entry) == type(None):
count_dict[entry] = gdf[gdf[column].isnull()].shape[0]
count_dict[entry] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[entry] = gdf[gdf[column]==entry].shape[0]
count_dict[entry] = gdf[gdf[column] == entry].shape[0]
# return statement
return count_dict
# *****************************************************************************
# *****************************************************************************
def get_directed(network: MultiGraph,
drop_unsimplified_geometries: bool = True) -> MultiDiGraph:
def get_directed(
network: MultiGraph, drop_unsimplified_geometries: bool = True
) -> MultiDiGraph:
"""
Converts an OSMnx-formatted MultiGraph object into a MultiDiGraph one.
......@@ -746,41 +710,42 @@ def get_directed(network: MultiGraph,
An object describing the transformed graph.
"""
directed_network = MultiDiGraph()
directed_network.add_nodes_from(network.nodes(data=True))
for edge_key in network.edges(keys=True):
edge_data = dict(network.edges[edge_key])
u = edge_data['from']
v = edge_data['to']
edge_data.pop('from')
edge_data.pop('to')
if (drop_unsimplified_geometries and
osm.KEY_OSMNX_GEOMETRY in edge_data and
len(edge_data[osm.KEY_OSMNX_GEOMETRY].coords) == 2):
u = edge_data["from"]
v = edge_data["to"]
edge_data.pop("from")
edge_data.pop("to")
if (
drop_unsimplified_geometries
and osm.KEY_OSMNX_GEOMETRY in edge_data
and len(edge_data[osm.KEY_OSMNX_GEOMETRY].coords) == 2
):
edge_data.pop(osm.KEY_OSMNX_GEOMETRY)
directed_network.add_edge(
u_for_edge=u,
v_for_edge=v,
**edge_data)
directed_network.add_edge(u_for_edge=u, v_for_edge=v, **edge_data)
return directed_network
# *****************************************************************************
# *****************************************************************************
def simplify_network(network: MultiDiGraph,
protected_nodes: list,
dead_end_probing_depth: int = 5,
remove_opposite_parallel_edges: bool = False,
update_street_count_per_node: bool = True,
**roundabout_conditions):
def simplify_network(
network: MultiDiGraph,
protected_nodes: list,
dead_end_probing_depth: int = 5,
remove_opposite_parallel_edges: bool = False,
update_street_count_per_node: bool = True,
**roundabout_conditions
):
"""
Simplifies a network described in a OSMnx-formatted MultiDiGraph object.
......@@ -793,7 +758,7 @@ def simplify_network(network: MultiDiGraph,
dead_end_probing_depth: int
The maximum number of nodes a dead end can have to be detectable.
remove_opposite_parallel_edges : bool, optional
If True, longer parallel edges in opposite directions are also removed.
If True, longer parallel edges in opposite directions are also removed.
The default is False.
update_street_count_per_node : bool, optional
If True, updates the street count on each node. The default is True.
......@@ -805,59 +770,51 @@ def simplify_network(network: MultiDiGraph,
None.
"""
# 1) remove dead ends (tends to create straight paths)
gis_mod.remove_dead_ends(
network,
protected_nodes,
max_iterations=dead_end_probing_depth
)
network, protected_nodes, max_iterations=dead_end_probing_depth
)
# 2) remove longer parallel edges (tends to create straight paths)
gis_mod.remove_longer_parallel_edges(
network,
ignore_edge_directions=remove_opposite_parallel_edges
)
network, ignore_edge_directions=remove_opposite_parallel_edges
)
# 3) remove self loops (tends to create straight paths and dead ends)
gis_mod.remove_self_loops(network)
# 4) join segments (can create self-loops)
simplifiable_paths = gis_iden.find_simplifiable_paths(
network,
protected_nodes
)
# 4) join segments (can create self-loops)
simplifiable_paths = gis_iden.find_simplifiable_paths(network, protected_nodes)
for path in simplifiable_paths:
gis_mod.replace_path(network, path)
# 4) remove self loops (tends to create straight paths and dead ends)
gis_mod.remove_self_loops(network)
# 5) transform roundabouts into crossroads (can create straight paths)
list_roundabout_nodes = gis_iden.find_roundabouts(
network,
**roundabout_conditions)
gis_mod.transform_roundabouts_into_crossroads(
network,
list_roundabout_nodes
)
list_roundabout_nodes = gis_iden.find_roundabouts(network, **roundabout_conditions)
gis_mod.transform_roundabouts_into_crossroads(network, list_roundabout_nodes)
# 6) update street count
if update_street_count_per_node:
gis_calc.update_street_count(network)
# *****************************************************************************
# *****************************************************************************
def identify_building_entrance_edges(
gdf: GeoDataFrame,
gdf_street_column: str,
network: gis_iden.nx.MultiDiGraph,
node_key_to_gdf_index_dict: dict,
crs: str = None,
revert_to_original_crs: bool = False) -> tuple:
gdf: GeoDataFrame,
gdf_street_column: str,
network: gis_iden.nx.MultiDiGraph,
node_key_to_gdf_index_dict: dict,
crs: str = None,
revert_to_original_crs: bool = False,
) -> tuple:
"""
Identifies the edges that can be linked to special nodes in an OSMnx graph
through a OSMnx-formatted GeoDataFrame object.
The links between nodes and edges are determined by:
- the edge being the closest one to the node;
- the node and edge being associated through a string in the GeoDataFrame.
When a node\'s closest edge cannot be linked to it by a string, the node\'s
string is used to search for suitable alternatives, among which the closest
is selected. If none are available, the closest edge is selected.
......@@ -886,301 +843,277 @@ def identify_building_entrance_edges(
-------
dict
A dictionary keyed by node and holding the selected edge key.
dict
A dictionary keyed by node and holding the key to its closest edge.
dict
A dictionary keyed by node and holding the key to its closest edge.
nx.MultiDiGraph
The object for the network used in the method.
"""
# Notes:
# - Each building is expected to have a street name associated with it;
# - If a building does not have a street name associated with it, then the
# edge corresponding to the street must be determined using distances.
# 1) for each node (building entrance), identify the closest edge
# 2) identify which edges identified before cannot be linked back to their
# respective nodes via street names or via (only) one intermediate edge
# 3) for the nodes whose closest edges that cannot be linked back to the no-
# des, find the edges that can, if any, (via their street names) and select
# des, find the edges that can, if any, (via their street names) and select
# the closest one among them as a substitute for the closest one in general
# 4) for all other cases, use the closest edge among all
# output: a list of edge keys (one per building entrance)
# exceptions: if a building cannot be linked to an edge key, link it to None
# *************************************************************************
if revert_to_original_crs:
original_crs = network.graph['crs']
original_crs = network.graph["crs"]
# *************************************************************************
# 1) for each building (entrance), identify the closest edge
node_keys = list(node_key_to_gdf_index_dict.keys())
node_keys = list(node_key_to_gdf_index_dict.keys())
closest_edge_keys, network = gis_iden.identify_edge_closest_to_node(
network=network,
node_keys=node_keys,
crs=crs) # do not revert back to the original yet
network=network, node_keys=node_keys, crs=crs
) # do not revert back to the original yet
# create a dict for the closest edge keys: {node keys: closest edge keys}
building_entrance_edges = dict(zip(node_keys, closest_edge_keys))
_closest_edge_keys_dict = dict(building_entrance_edges)
# *************************************************************************
# 2) identify the nodes that require additional precautions (i.e., those
# that should not be connected to their closest edges)
# the nodes not requiring additional precautions are the following:
# i) those that do not concern buildings (no address);
# ii) those whose closest edge has the same street name as the node;
# iii) those whose closest edge is a nameless intermediate edge that connects
# with another edge which has the same street name as the node (driveway).
# the nodes that require special precautions are:
# iv) those whose closest edges have names that do not match the node's;
# v) those whose closest edges do not have street names and do not lead to
# an edge whose street name matches that of the building address.
# in both cases, the solution is to find edges whose street names match
# those of the node and connect the one that is closest among them. If not
# possible (no edges), then the solution is to connect to the closest edge.
# 2.1) generate a dict with the correspondence between streets and nodes
node_street_names = {
node_key: gdf.loc[
node_key_to_gdf_index_dict[node_key]][gdf_street_column]
node_key: gdf.loc[node_key_to_gdf_index_dict[node_key]][gdf_street_column]
for node_key in node_keys
}
}
trouble_nodes = []
for node_key, closest_edge_key in zip(node_keys, closest_edge_keys):
# check if the street name is a string
if type(node_street_names[node_key]) != str:
# not a string, this node is not problematic (case i)
continue
# check if the edge has a name attribute
if osm.KEY_OSMNX_NAME in network.edges[closest_edge_key]:
# edge object has name attribute, check if the street names match
if type(network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]) == str:
# the address is a string
if (network.edges[closest_edge_key][osm.KEY_OSMNX_NAME] in
node_street_names[node_key]):
# the address is a string
if (
network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]
in node_street_names[node_key]
):
# the street names match, this is not a problematic node (ii)
continue
else:
else:
# the streets names differ, this is a problematic node (iv)
trouble_nodes.append(node_key)
continue
else: # the address is not a string: it should be a list (osmnx)
else: # the address is not a string: it should be a list (osmnx)
# if the node street is found among the elements
matching_street_name_found_list = tuple(
_name in node_street_names[node_key]
for _name in network.edges[closest_edge_key][
osm.KEY_OSMNX_NAME]
)
for _name in network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]
)
if True in matching_street_name_found_list:
# the street names match, this is not a problematic node (ii)
continue
else:
else:
# the streets names differ, this is a problematic node (iv)
trouble_nodes.append(node_key)
continue
# otherwise, the edge is nameless but may not lead to the right street
# get adjacent/neighbouring edges
other_edges = gis_iden.get_edges_involving_node(
network=network,
node_key=closest_edge_key[0],
include_self_loops=False
)
network=network, node_key=closest_edge_key[0], include_self_loops=False
)
other_edges.extend(
gis_iden.get_edges_involving_node(
network=network,
node_key=closest_edge_key[1],
include_self_loops=False
)
network=network, node_key=closest_edge_key[1], include_self_loops=False
)
)
matching_street_name_found = False
# for each neighbour
for other_edge_key in other_edges:
# check if the current edge is the closest one
if closest_edge_key == other_edge_key:
# it is: skip, since it has already been considered
continue
continue
# check if the current edge has the address/name attribute
if osm.KEY_OSMNX_NAME in network.edges[other_edge_key]:
# it does, now check if it is a string
if type(network.edges[other_edge_key][
osm.KEY_OSMNX_NAME]) == str:
if type(network.edges[other_edge_key][osm.KEY_OSMNX_NAME]) == str:
# it is, now check if the street names match
if (network.edges[other_edge_key][osm.KEY_OSMNX_NAME] in
node_street_names[node_key]):
if (
network.edges[other_edge_key][osm.KEY_OSMNX_NAME]
in node_street_names[node_key]
):
# an edge with a matching street name was found (iii)
matching_street_name_found = True
break
else:
# if the node street is found among the elements
matching_street_name_found_list = tuple(
_name in node_street_names[node_key]
for _name in network.edges[other_edge_key][
osm.KEY_OSMNX_NAME]
)
for _name in network.edges[other_edge_key][osm.KEY_OSMNX_NAME]
)
if True in matching_street_name_found_list:
# the street names match, this node is okay (case iii)
matching_street_name_found = True
break
# check if a matching street name was found among the neighbours
if matching_street_name_found:
# one was, this is not a problematic case (case iii)
continue
# all other cases are problematic: case v
trouble_nodes.append(node_key)
trouble_nodes.append(node_key)
# *************************************************************************
# 3) for the nodes whose closest edges that cannot be linked back to the no-
# des, find the edges that can, if any, (via their street names) and select
# des, find the edges that can, if any, (via their street names) and select
# the closest one among them as a substitute for the closest one in general
# 3.1) generate the list of edge keys per street
unique_street_names = set(
node_street_names[node_key] for node_key in trouble_nodes
)
unique_street_names = set(node_street_names[node_key] for node_key in trouble_nodes)
# edge keys with a given street name
edges_per_street_name = {
street_name: [
edge_key for edge_key in network.edges(keys=True)
edge_key
for edge_key in network.edges(keys=True)
if osm.KEY_OSMNX_NAME in network.edges[edge_key]
if street_name in network.edges[edge_key][osm.KEY_OSMNX_NAME]
]
]
for street_name in unique_street_names
}
}
# 3.2) for each troublesome node, identify the edges that mention the same
# street and pick the closest on
for node_key in trouble_nodes:
# check the edges keys relevant for this node
other_edge_keys = edges_per_street_name[node_street_names[node_key]]
# check if there are no edges mentioning the street
if len(other_edge_keys) == 0:
# no edges mentioning that street, skip
continue
# create a view
new_network = network.edge_subgraph(edges=other_edge_keys)
# pick the one that is closest
other_closest_edge = gis_iden.nearest_edges(
new_network,
X=network.nodes[node_key][osm.KEY_OSMNX_X],
new_network,
X=network.nodes[node_key][osm.KEY_OSMNX_X],
Y=network.nodes[node_key][osm.KEY_OSMNX_Y],
return_dist=False)
return_dist=False,
)
# replace previous entry
building_entrance_edges[node_key] = other_closest_edge
# *************************************************************************
# 4) for all other cases, use the closest edge among all
# *************************************************************************
# revert network crs back to the original, if necessary
if revert_to_original_crs:
network = gis_iden.project_graph(network, to_crs=original_crs)
# return edge keys
return building_entrance_edges, _closest_edge_keys_dict, network
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def convert_edge_path(network: MultiDiGraph,
path: list,
allow_reversed_edges: bool = False) -> list:
def convert_edge_path(
network: MultiDiGraph, path: list, allow_reversed_edges: bool = False
) -> list:
"""
Converts a path of edge keys into a path of node keys.
......@@ -1191,7 +1124,7 @@ def convert_edge_path(network: MultiDiGraph,
path : list
A list of sequential edge keys that form a path.
allow_reversed_edges : bool, optional
If True, edges in the opposite direction also count to form paths, as
If True, edges in the opposite direction also count to form paths, as
long as the same nodes are involved. The default is False.
Returns
......@@ -1200,15 +1133,13 @@ def convert_edge_path(network: MultiDiGraph,
A list of node keys forming a path.
"""
# check if the path corresponds to an edge path
if not gis_iden.is_edge_path(
network,
path,
ignore_edge_direction=allow_reversed_edges
):
raise ValueError('No edge path was provided.')
network, path, ignore_edge_direction=allow_reversed_edges
):
raise ValueError("No edge path was provided.")
# path is a sequence of edge keys: convert to node path
if allow_reversed_edges:
# reverse edges are allowed
......@@ -1216,24 +1147,24 @@ def convert_edge_path(network: MultiDiGraph,
edge_path = [
edge_key
for edge_key in path
if edge_key[0] != edge_key[1] # exclude self loops
]
# if there is only one edge, the node path is straightforward
if edge_key[0] != edge_key[1] # exclude self loops
]
# if there is only one edge, the node path is straightforward
if len(edge_path) == 1:
return [edge_path[0][0], edge_path[0][1]]
node_path = []
for edge_index, edge_key in enumerate(edge_path):
# if there are no nodes yet on the path
if len(node_path) == 0:
# find out which node comes first
if edge_key[0] in edge_path[1]:
# the start node is in the second edge too: reversed
node_path.append(edge_key[1])
# the start node is in the second edge too: reversed
node_path.append(edge_key[1])
node_path.append(edge_key[0])
else: # the edge is not reversed
node_path.append(edge_key[0])
else: # the edge is not reversed
node_path.append(edge_key[0])
node_path.append(edge_key[1])
else:
# find out which node comes after the previous node
......@@ -1248,12 +1179,13 @@ def convert_edge_path(network: MultiDiGraph,
node_path = [
edge_key[0]
for edge_key in path
if edge_key[0] != edge_key[1] # exclude self loops
]
if edge_key[0] != edge_key[1] # exclude self loops
]
# add the last edge's end node
node_path.append(path[-1][1])
# return statement
return node_path
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# -*- coding: utf-8 -*-
# constants
# *****************************************************************************
# *****************************************************************************
......@@ -9,18 +9,18 @@
kWh_DIV_MWh = 1000
GJ_DIV_kWh = 1e9/(3600*1e3)
GJ_DIV_kWh = 1e9 / (3600 * 1e3)
GJ_DIV_MWh = 1000 / 3600
GJ_DIV_MWh = 1000/3600
MWh_DIV_J = 1 / (3600 * 1000 * 1000)
MWh_DIV_J = 1/(3600*1000*1000)
# *****************************************************************************
# *****************************************************************************
# currency conversions
EUR_DIV_DKK = 1/(743.95/100)
EUR_DIV_DKK = 1 / (743.95 / 100)
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file