Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • master
1 result

Target

Select target project
  • pmag/topupopt
1 result
Select Git revision
  • master
1 result
Show changes
Commits on Source (7)
Showing
with 2735 additions and 2811 deletions
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
......@@ -22,17 +22,19 @@ from shapely.geometry import Point
# url_prefix_entrance = 'https://api.dataforsyningen.dk/adgangsadresser/'
url_prefix_entrance = 'https://api.dataforsyningen.dk/bbrlight/opgange?adgangsadresseid='
url_prefix_entrance = (
"https://api.dataforsyningen.dk/bbrlight/opgange?adgangsadresseid="
)
# url prefix to find BBR building data
# url_prefix_buildings = 'https://api.dataforsyningen.dk/bbrlight/bygninger?id='
url_prefix_buildings = 'https://api.dataforsyningen.dk/bbrlight/bygninger/'
url_prefix_buildings = "https://api.dataforsyningen.dk/bbrlight/bygninger/"
# url prefix to find the building location data
url_prefix_building_point = 'https://api.dataforsyningen.dk/bbrlight/bygningspunkter/'
url_prefix_building_point = "https://api.dataforsyningen.dk/bbrlight/bygningspunkter/"
# *****************************************************************************
# *****************************************************************************
......@@ -69,12 +71,10 @@ BBR_BDG_ENTR_LABELS = [
"Aendr_Funk",
"Ophoert_ts",
"Gyldighedsdato",
"href"]
"href",
]
SELECT_BBR_BDG_ENTR_LABELS = [
"Opgang_id",
"AdgAdr_id",
"Bygning_id"]
SELECT_BBR_BDG_ENTR_LABELS = ["Opgang_id", "AdgAdr_id", "Bygning_id"]
BBR_BDG_POINT_LABELS = [
"ois_id",
......@@ -96,13 +96,14 @@ BBR_BDG_POINT_LABELS = [
"Ophoert_ts",
"BygPktKilde",
"koordinater", # 'koordinater' returns a list
"href"]
"href",
]
SELECT_BBR_BDG_POINT_LABELS = [
"KoorOest",
"KoorNord",
"KoorSystem",
"koordinater" # 'koordinater' returns a list
"koordinater", # 'koordinater' returns a list
]
BBR_BDG_LABELS = [
......@@ -197,7 +198,8 @@ BBR_BDG_LABELS = [
"Gyldighedsdato",
"href",
"ejerskaber", # 'ejerskaber' returns a list
"bygningspunkt"] # 'bygningspunkt' returns a dict
"bygningspunkt",
] # 'bygningspunkt' returns a dict
SELECT_BBR_BDG_LABELS = [
"BYG_ANVEND_KODE",
......@@ -220,12 +222,13 @@ SELECT_BBR_BDG_LABELS = [
"VARMEINSTAL_KODE",
"OPVARMNING_KODE",
"VARME_SUPPL_KODE",
"BygPkt_id",]
"BygPkt_id",
]
BBR_CONTAINER_LABELS = {
'bygningspunkt': dict,
'ejerskaber': list,
'koordinater': list,
"bygningspunkt": dict,
"ejerskaber": list,
"koordinater": list,
}
# *****************************************************************************
......@@ -283,184 +286,185 @@ BBR_CONTAINER_LABELS = {
# fuel type
label_bbr_fuel_type = 'OPVARMNING_KODE'
label_bbr_fuel_type = "OPVARMNING_KODE"
dict_bbr_fuel_type_codes = {
'1': 'Elektricitet',
'2': 'Gasværksgas',
'3': 'Flydende brændsel',
'4': 'Fast brændsel',
'6': 'Halm',
'7': 'Naturgas',
'9': 'Andet'}
"1": "Elektricitet",
"2": "Gasværksgas",
"3": "Flydende brændsel",
"4": "Fast brændsel",
"6": "Halm",
"7": "Naturgas",
"9": "Andet",
}
# supplementary heating system
label_bbr_extra_heating = 'VARME_SUPPL_KODE'
label_bbr_extra_heating = "VARME_SUPPL_KODE"
dict_bbr_extra_heating_codes = {
'0': 'Ikke oplyst',
'1': 'Varmepumpeanlæg',
'10': 'Biogasanlæg',
'2': 'Ovne til fast eller flydende brændsel',
'3': 'Ovne til flydende brændsel',
'4': 'Solpaneler',
'5': 'Pejs',
'6': 'Gasradiator',
'7': 'Elvarme',
'80': 'Andet',
'90': '(UDFASES) Bygningen har ingen supplerende varme'
"0": "Ikke oplyst",
"1": "Varmepumpeanlæg",
"10": "Biogasanlæg",
"2": "Ovne til fast eller flydende brændsel",
"3": "Ovne til flydende brændsel",
"4": "Solpaneler",
"5": "Pejs",
"6": "Gasradiator",
"7": "Elvarme",
"80": "Andet",
"90": "(UDFASES) Bygningen har ingen supplerende varme",
}
# main heating system
label_bbr_heating_system = 'VARMEINSTAL_KODE'
label_bbr_heating_system = "VARMEINSTAL_KODE"
dict_bbr_heating_system_codes = {
'1': 'Fjernvarme/blokvarme',
'2': 'Centralvarme med én fyringsenhed',
'3': 'Ovn til fast og flydende brændsel',
'5': 'Varmepumpe',
'6': 'Centralvarme med to fyringsenheder',
'7': 'Elvarme',
'8': 'Gasradiator',
'9': 'Ingen varmeinstallation',
'99': 'Blandet'
"1": "Fjernvarme/blokvarme",
"2": "Centralvarme med én fyringsenhed",
"3": "Ovn til fast og flydende brændsel",
"5": "Varmepumpe",
"6": "Centralvarme med to fyringsenheder",
"7": "Elvarme",
"8": "Gasradiator",
"9": "Ingen varmeinstallation",
"99": "Blandet",
}
# coordinate system
label_bbr_bygningpunkt_koorsys = 'KoorSystem'
label_bbr_bygningpunkt_koorsys = "KoorSystem"
label_bbr_bygningpunkt_koorsys_codes = {
'1': 'System 34',
'2': 'System 45',
'3': 'KP2000 (System 2000)',
'4': 'UTM ED50',
'5': 'WGS 84'
"1": "System 34",
"2": "System 45",
"3": "KP2000 (System 2000)",
"4": "UTM ED50",
"5": "WGS 84",
}
# building use
label_bbr_building_uses = 'BYG_ANVEND_KODE' # byganvendelse
label_bbr_building_uses = "BYG_ANVEND_KODE" # byganvendelse
dict_bbr_building_use_codes = {
'110': 'Stuehus til landbrugsejendom',
'120': 'Fritliggende enfamiliehus',
'121': 'Sammenbygget enfamiliehus',
'122': 'Fritliggende enfamiliehus i tæt-lav bebyggelse',
'130': '(UDFASES) Række-, kæde-, eller dobbelthus (lodret adskillelse mellem enhederne).',
'131': 'Række-, kæde- og klyngehus',
'132': 'Dobbelthus',
'140': 'Etagebolig-bygning, flerfamiliehus eller to-familiehus',
'150': 'Kollegium',
'160': 'Boligbygning til døgninstitution',
'185': 'Anneks i tilknytning til helårsbolig.',
'190': 'Anden bygning til helårsbeboelse',
'210': '(UDFASES) Bygning til erhvervsmæssig produktion vedrørende landbrug, gartneri, råstofudvinding o. lign',
'211': 'Stald til svin',
'212': 'Stald til kvæg, får mv.',
'213': 'Stald til fjerkræ',
'214': 'Minkhal',
'215': 'Væksthus',
'216': 'Lade til foder, afgrøder mv.',
'217': 'Maskinhus, garage mv.',
'218': 'Lade til halm, hø mv.',
'219': 'Anden bygning til landbrug mv.',
'220': '(UDFASES) Bygning til erhvervsmæssig produktion vedrørende industri, håndværk m.v. (fabrik, værksted o.lign.)',
'221': 'Bygning til industri med integreret produktionsapparat',
'222': 'Bygning til industri uden integreret produktionsapparat',
'223': 'Værksted',
'229': 'Anden bygning til produktion',
'230': '(UDFASES) El-, gas-, vand- eller varmeværk, forbrændingsanstalt m.v.',
'231': 'Bygning til energiproduktion',
'232': 'Bygning til forsyning- og energidistribution',
'233': 'Bygning til vandforsyning',
'234': 'Bygning til håndtering af affald og spildevand',
'239': 'Anden bygning til energiproduktion og -distribution',
'290': '(UDFASES) Anden bygning til landbrug, industri etc.',
'310': '(UDFASES) Transport- og garageanlæg (fragtmandshal, lufthavnsbygning, banegårdsbygning, parkeringshus). Garage med plads til et eller to køretøjer registreres med anvendelseskode 910',
'311': 'Bygning til jernbane- og busdrift',
'312': 'Bygning til luftfart',
'313': 'Bygning til parkering- og transportanlæg',
'314': 'Bygning til parkering af flere end to køretøjer i tilknytning til boliger',
'315': 'Havneanlæg',
'319': 'Andet transportanlæg',
'320': '(UDFASES) Bygning til kontor, handel, lager, herunder offentlig administration',
'321': 'Bygning til kontor',
'322': 'Bygning til detailhandel',
'323': 'Bygning til lager',
'324': 'Butikscenter',
'325': 'Tankstation',
'329': 'Anden bygning til kontor, handel og lager',
'330': '(UDFASES) Bygning til hotel, restaurant, vaskeri, frisør og anden servicevirksomhed',
'331': 'Hotel, kro eller konferencecenter med overnatning',
'332': 'Bed & breakfast mv.',
'333': 'Restaurant, café og konferencecenter uden overnatning',
'334': 'Privat servicevirksomhed som frisør, vaskeri, netcafé mv.',
'339': 'Anden bygning til serviceerhverv',
'390': '(UDFASES) Anden bygning til transport, handel etc',
'410': '(UDFASES) Bygning til biograf, teater, erhvervsmæssig udstilling, bibliotek, museum, kirke o. lign.',
'411': 'Biograf, teater, koncertsted mv.',
'412': 'Museum',
'413': 'Bibliotek',
'414': 'Kirke eller anden bygning til trosudøvelse for statsanerkendte trossamfund',
'415': 'Forsamlingshus',
'416': 'Forlystelsespark',
'419': 'Anden bygning til kulturelle formål',
'420': '(UDFASES) Bygning til undervisning og forskning (skole, gymnasium, forskningslabratorium o.lign.).',
'421': 'Grundskole',
'422': 'Universitet',
'429': 'Anden bygning til undervisning og forskning',
'430': '(UDFASES) Bygning til hospital, sygehjem, fødeklinik o. lign.',
'431': 'Hospital og sygehus',
'432': 'Hospice, behandlingshjem mv.',
'433': 'Sundhedscenter, lægehus, fødeklinik mv.',
'439': 'Anden bygning til sundhedsformål',
'440': '(UDFASES) Bygning til daginstitution',
'441': 'Daginstitution',
'442': 'Servicefunktion på døgninstitution',
'443': 'Kaserne',
'444': 'Fængsel, arresthus mv.',
'449': 'Anden bygning til institutionsformål',
'490': '(UDFASES) Bygning til anden institution, herunder kaserne, fængsel o. lign.',
'510': 'Sommerhus',
'520': '(UDFASES) Bygning til feriekoloni, vandrehjem o.lign. bortset fra sommerhus',
'521': 'Feriecenter, center til campingplads mv.',
'522': 'Bygning med ferielejligheder til erhvervsmæssig udlejning',
'523': 'Bygning med ferielejligheder til eget brug',
'529': 'Anden bygning til ferieformål',
'530': '(UDFASES) Bygning i forbindelse med idrætsudøvelse (klubhus, idrætshal, svømmehal o. lign.)',
'531': 'Klubhus i forbindelse med fritid og idræt',
'532': 'Svømmehal',
'533': 'Idrætshal',
'534': 'Tribune i forbindelse med stadion',
'535': 'Bygning til træning og opstaldning af heste',
'539': 'Anden bygning til idrætformål',
'540': 'Kolonihavehus',
'585': 'Anneks i tilknytning til fritids- og sommerhus',
'590': 'Anden bygning til fritidsformål',
'910': 'Garage (med plads til et eller to køretøjer)',
'920': 'Carport',
'930': 'Udhus',
'940': 'Drivhus',
'950': 'Fritliggende overdækning',
'960': 'Fritliggende udestue',
'970': 'Tiloversbleven landbrugsbygning',
'990': 'Faldefærdig bygning',
'999': 'Ukendt bygning'
"110": "Stuehus til landbrugsejendom",
"120": "Fritliggende enfamiliehus",
"121": "Sammenbygget enfamiliehus",
"122": "Fritliggende enfamiliehus i tæt-lav bebyggelse",
"130": "(UDFASES) Række-, kæde-, eller dobbelthus (lodret adskillelse mellem enhederne).",
"131": "Række-, kæde- og klyngehus",
"132": "Dobbelthus",
"140": "Etagebolig-bygning, flerfamiliehus eller to-familiehus",
"150": "Kollegium",
"160": "Boligbygning til døgninstitution",
"185": "Anneks i tilknytning til helårsbolig.",
"190": "Anden bygning til helårsbeboelse",
"210": "(UDFASES) Bygning til erhvervsmæssig produktion vedrørende landbrug, gartneri, råstofudvinding o. lign",
"211": "Stald til svin",
"212": "Stald til kvæg, får mv.",
"213": "Stald til fjerkræ",
"214": "Minkhal",
"215": "Væksthus",
"216": "Lade til foder, afgrøder mv.",
"217": "Maskinhus, garage mv.",
"218": "Lade til halm, hø mv.",
"219": "Anden bygning til landbrug mv.",
"220": "(UDFASES) Bygning til erhvervsmæssig produktion vedrørende industri, håndværk m.v. (fabrik, værksted o.lign.)",
"221": "Bygning til industri med integreret produktionsapparat",
"222": "Bygning til industri uden integreret produktionsapparat",
"223": "Værksted",
"229": "Anden bygning til produktion",
"230": "(UDFASES) El-, gas-, vand- eller varmeværk, forbrændingsanstalt m.v.",
"231": "Bygning til energiproduktion",
"232": "Bygning til forsyning- og energidistribution",
"233": "Bygning til vandforsyning",
"234": "Bygning til håndtering af affald og spildevand",
"239": "Anden bygning til energiproduktion og -distribution",
"290": "(UDFASES) Anden bygning til landbrug, industri etc.",
"310": "(UDFASES) Transport- og garageanlæg (fragtmandshal, lufthavnsbygning, banegårdsbygning, parkeringshus). Garage med plads til et eller to køretøjer registreres med anvendelseskode 910",
"311": "Bygning til jernbane- og busdrift",
"312": "Bygning til luftfart",
"313": "Bygning til parkering- og transportanlæg",
"314": "Bygning til parkering af flere end to køretøjer i tilknytning til boliger",
"315": "Havneanlæg",
"319": "Andet transportanlæg",
"320": "(UDFASES) Bygning til kontor, handel, lager, herunder offentlig administration",
"321": "Bygning til kontor",
"322": "Bygning til detailhandel",
"323": "Bygning til lager",
"324": "Butikscenter",
"325": "Tankstation",
"329": "Anden bygning til kontor, handel og lager",
"330": "(UDFASES) Bygning til hotel, restaurant, vaskeri, frisør og anden servicevirksomhed",
"331": "Hotel, kro eller konferencecenter med overnatning",
"332": "Bed & breakfast mv.",
"333": "Restaurant, café og konferencecenter uden overnatning",
"334": "Privat servicevirksomhed som frisør, vaskeri, netcafé mv.",
"339": "Anden bygning til serviceerhverv",
"390": "(UDFASES) Anden bygning til transport, handel etc",
"410": "(UDFASES) Bygning til biograf, teater, erhvervsmæssig udstilling, bibliotek, museum, kirke o. lign.",
"411": "Biograf, teater, koncertsted mv.",
"412": "Museum",
"413": "Bibliotek",
"414": "Kirke eller anden bygning til trosudøvelse for statsanerkendte trossamfund",
"415": "Forsamlingshus",
"416": "Forlystelsespark",
"419": "Anden bygning til kulturelle formål",
"420": "(UDFASES) Bygning til undervisning og forskning (skole, gymnasium, forskningslabratorium o.lign.).",
"421": "Grundskole",
"422": "Universitet",
"429": "Anden bygning til undervisning og forskning",
"430": "(UDFASES) Bygning til hospital, sygehjem, fødeklinik o. lign.",
"431": "Hospital og sygehus",
"432": "Hospice, behandlingshjem mv.",
"433": "Sundhedscenter, lægehus, fødeklinik mv.",
"439": "Anden bygning til sundhedsformål",
"440": "(UDFASES) Bygning til daginstitution",
"441": "Daginstitution",
"442": "Servicefunktion på døgninstitution",
"443": "Kaserne",
"444": "Fængsel, arresthus mv.",
"449": "Anden bygning til institutionsformål",
"490": "(UDFASES) Bygning til anden institution, herunder kaserne, fængsel o. lign.",
"510": "Sommerhus",
"520": "(UDFASES) Bygning til feriekoloni, vandrehjem o.lign. bortset fra sommerhus",
"521": "Feriecenter, center til campingplads mv.",
"522": "Bygning med ferielejligheder til erhvervsmæssig udlejning",
"523": "Bygning med ferielejligheder til eget brug",
"529": "Anden bygning til ferieformål",
"530": "(UDFASES) Bygning i forbindelse med idrætsudøvelse (klubhus, idrætshal, svømmehal o. lign.)",
"531": "Klubhus i forbindelse med fritid og idræt",
"532": "Svømmehal",
"533": "Idrætshal",
"534": "Tribune i forbindelse med stadion",
"535": "Bygning til træning og opstaldning af heste",
"539": "Anden bygning til idrætformål",
"540": "Kolonihavehus",
"585": "Anneks i tilknytning til fritids- og sommerhus",
"590": "Anden bygning til fritidsformål",
"910": "Garage (med plads til et eller to køretøjer)",
"920": "Carport",
"930": "Udhus",
"940": "Drivhus",
"950": "Fritliggende overdækning",
"960": "Fritliggende udestue",
"970": "Tiloversbleven landbrugsbygning",
"990": "Faldefærdig bygning",
"999": "Ukendt bygning",
}
# floor types
label_bbr_floor_types = 'ETAGER_AFVIG_KODE'
label_bbr_floor_types = "ETAGER_AFVIG_KODE"
dict_bbr_floor_type_codes = {
'0': 'Bygningen har ikke afvigende etager',
'10': 'Bygningen har afvigende etager',
'11': 'Bygningen indeholder hems',
'12': 'Bygningen indeholder dobbelt højt rum',
'13': 'Bygningen indeholder indskudt etage'
"0": "Bygningen har ikke afvigende etager",
"10": "Bygningen har afvigende etager",
"11": "Bygningen indeholder hems",
"12": "Bygningen indeholder dobbelt højt rum",
"13": "Bygningen indeholder indskudt etage",
}
# all codes
......@@ -471,34 +475,34 @@ bbr_codes = {
label_bbr_heating_system: dict_bbr_heating_system_codes,
label_bbr_bygningpunkt_koorsys: label_bbr_bygningpunkt_koorsys_codes,
label_bbr_building_uses: dict_bbr_building_use_codes,
label_bbr_floor_types: dict_bbr_floor_type_codes
label_bbr_floor_types: dict_bbr_floor_type_codes,
}
# BBR labels
# label under which the building id can be found in the building entrance obj.
label_bbr_opgang_id = 'Opgang_id'
label_bbr_opgang_id = "Opgang_id"
label_bbr_entrance_id = 'AdgAdr_id'
label_bbr_entrance_id = "AdgAdr_id"
label_bbr_building_id = 'Bygning_id'
label_bbr_building_id = "Bygning_id"
label_bbr_bygningpunkt = 'bygningspunkt'
label_bbr_bygningpunkt = "bygningspunkt"
label_bbr_bygningpunkt_coord = 'koordinater'
label_bbr_bygningpunkt_coord = "koordinater"
label_bbr_opgang_id = 'Opgang_id'
label_bbr_opgang_id = "Opgang_id"
label_bbr_entrance_id = 'AdgAdr_id'
label_bbr_entrance_id = "AdgAdr_id"
label_bbr_building_id = 'Bygning_id'
label_bbr_building_id = "Bygning_id"
label_bbr_building_area = 'BYG_BOLIG_ARL_SAML'
label_bbr_building_area = "BYG_BOLIG_ARL_SAML"
label_bbr_housing_area = 'BYG_BEBYG_ARL'
label_bbr_housing_area = "BYG_BEBYG_ARL"
label_bbr_number_floors = 'ETAGER_ANT'
label_bbr_number_floors = "ETAGER_ANT"
list_labels_bbr = [
label_bbr_building_id,
......@@ -508,19 +512,19 @@ list_labels_bbr = [
label_bbr_building_uses,
label_bbr_number_floors,
label_bbr_floor_types,
label_bbr_extra_heating
label_bbr_extra_heating,
]
# *****************************************************************************
# *****************************************************************************
def get_bbr_building_data_geodataframe(
building_entrance_ids: list,
selected_bbr_bdg_entrance_labels: list = SELECT_BBR_BDG_ENTR_LABELS,
selected_bbr_building_labels: list = SELECT_BBR_BDG_LABELS,
selected_bbr_building_point_labels: list = SELECT_BBR_BDG_POINT_LABELS
selected_bbr_building_point_labels: list = SELECT_BBR_BDG_POINT_LABELS,
) -> Tuple[GeoDataFrame, list]:
# *************************************************************************
# *************************************************************************
......@@ -531,21 +535,23 @@ def get_bbr_building_data_geodataframe(
)
if selected_bbr_bdg_entrance_labels == None:
# includes all labels
selected_bbr_bdg_entrance_labels = BBR_BDG_ENTR_LABELS
list_entries = [
[value[bbr_key]
[
value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_bdg_entrance_labels]
for key, value in dict_building_entrances.items()]
if bbr_key in selected_bbr_bdg_entrance_labels
]
for key, value in dict_building_entrances.items()
]
df_building_entrances = DataFrame(
data=list_entries,
columns=selected_bbr_bdg_entrance_labels,
index=dict_building_entrances.keys()
index=dict_building_entrances.keys(),
)
# *************************************************************************
......@@ -553,12 +559,9 @@ def get_bbr_building_data_geodataframe(
# get data about buildings
dict_buildings = fetch_building_data(
df_building_entrances.index
)
dict_buildings = fetch_building_data(df_building_entrances.index)
if selected_bbr_building_labels == None:
# includes all labels
selected_bbr_building_labels = BBR_BDG_LABELS
......@@ -566,15 +569,14 @@ def get_bbr_building_data_geodataframe(
# create dataframe with building data
list_entries = [
[value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_building_labels]
for key, value in dict_buildings.items()]
[value[bbr_key] for bbr_key in value if bbr_key in selected_bbr_building_labels]
for key, value in dict_buildings.items()
]
df_buildings = DataFrame(
data=list_entries,
columns=selected_bbr_building_labels,
index=dict_buildings.keys()
index=dict_buildings.keys(),
)
# *************************************************************************
......@@ -583,16 +585,14 @@ def get_bbr_building_data_geodataframe(
# get building point data
if selected_bbr_building_point_labels == None:
# includes all labels
selected_bbr_building_point_labels = BBR_BDG_POINT_LABELS
dict_buildings_points = {
building_entrance_id: #(
dict_buildings[
building_entrance_id][
label_bbr_bygningpunkt]
building_entrance_id: dict_buildings[building_entrance_id][ # (
label_bbr_bygningpunkt
]
# if building_entrance_id in dict_buildings else None)
for building_entrance_id in dict_building_entrances
if building_entrance_id in dict_buildings # excludes failures
......@@ -601,30 +601,37 @@ def get_bbr_building_data_geodataframe(
# create dataframe with building point data
list_entries = [
[value[bbr_key]
[
value[bbr_key]
for bbr_key in value
if bbr_key in selected_bbr_building_point_labels]
for key, value in dict_buildings_points.items()]
if bbr_key in selected_bbr_building_point_labels
]
for key, value in dict_buildings_points.items()
]
df_building_points = DataFrame(
data=list_entries,
columns=selected_bbr_building_point_labels,
index=dict_buildings_points.keys()
index=dict_buildings_points.keys(),
)
# merge all three, two at a time
df_buildings = merge(df_buildings,
df_buildings = merge(
df_buildings,
df_building_points,
right_index=True,
left_index=True,
suffixes=(None,"_x")) # adds "_x" to duplicate columns
suffixes=(None, "_x"),
) # adds "_x" to duplicate columns
df_buildings = merge(df_buildings,
df_buildings = merge(
df_buildings,
df_building_entrances,
right_index=True,
left_index=True,
suffixes=(None,"_y")) # adds "_y" to duplicate columns
suffixes=(None, "_y"),
) # adds "_y" to duplicate columns
# *************************************************************************
# *************************************************************************
......@@ -640,25 +647,27 @@ def get_bbr_building_data_geodataframe(
# raise an error if different coordinates systems are being used
for building_entrance_id in dict_building_entrances:
if dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys] != key_bbr_coordinate_system:
raise NotImplementedError('Only WGS 84 coordinates can be used.')
if (
dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys
]
!= key_bbr_coordinate_system
):
raise NotImplementedError("Only WGS 84 coordinates can be used.")
# create a dictionary with the building point geometries (i.e. points)
dict_building_point_geometry = {
building_entrance_id: Point(
dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_coord]
dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_coord
]
)
for building_entrance_id in dict_building_entrances
if dict_buildings[building_entrance_id][
label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys] == key_bbr_coordinate_system
if dict_buildings[building_entrance_id][label_bbr_bygningpunkt][
label_bbr_bygningpunkt_koorsys
]
== key_bbr_coordinate_system
}
# create geodataframe
......@@ -666,7 +675,7 @@ def get_bbr_building_data_geodataframe(
gdf_buildings = GeoDataFrame(
data=df_buildings,
geometry=GeoSeries(data=dict_building_point_geometry),
crs=coordinate_system
crs=coordinate_system,
)
return gdf_buildings, list_failures
......@@ -674,12 +683,12 @@ def get_bbr_building_data_geodataframe(
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict,
list]:
def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict, list]:
# *************************************************************************
# *************************************************************************
......@@ -696,41 +705,32 @@ def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict,
# for each building entrance id
for building_entrance_id in building_entrance_ids:
# compose the url from which to get bbr data associated with the id
_url = url_prefix_entrance + building_entrance_id
try:
# retrieve the building entrance data
with urllib.request.urlopen(_url) as response:
# parse the data
bbr_entrance_data_json = json.loads(
response.read().decode('utf-8')
)
bbr_entrance_data_json = json.loads(response.read().decode("utf-8"))
# store the data
if len(bbr_entrance_data_json) != 0:
for bbr_entry in bbr_entrance_data_json:
dict_building_entrances[
bbr_entry[label_bbr_building_id]
] = bbr_entry
else:
list_failures.append(building_entrance_id)
response.close()
except Exception:
response.close()
# *************************************************************************
......@@ -741,11 +741,12 @@ def fetch_building_entrance_data(building_entrance_ids: list) -> Tuple[dict,
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def fetch_building_data(building_codes: list):
def fetch_building_data(building_codes: list):
# *************************************************************************
# *************************************************************************
......@@ -756,7 +757,6 @@ def fetch_building_data(building_codes: list):
# for each building id
for building_id in building_codes:
# compose a url with it
_url = url_prefix_buildings + building_id
......@@ -764,21 +764,18 @@ def fetch_building_data(building_codes: list):
# try statement
try:
# retrieve that data
with urllib.request.urlopen(_url) as response:
# parse the data
bbr_data = json.loads(response.read().decode('utf-8'))
bbr_data = json.loads(response.read().decode("utf-8"))
dict_buildings[building_id] = bbr_data
response.close()
except Exception:
response.close()
# *************************************************************************
......@@ -789,5 +786,6 @@ def fetch_building_data(building_codes: list):
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
......@@ -12,10 +12,7 @@ from .bbr import label_bbr_entrance_id, label_bbr_housing_area
# labels
selected_bbr_adgang_labels = [
"Opgang_id",
"AdgAdr_id",
"Bygning_id"]
selected_bbr_adgang_labels = ["Opgang_id", "AdgAdr_id", "Bygning_id"]
selected_bbr_building_point_labels = [
"KoorOest",
......@@ -47,15 +44,17 @@ selected_bbr_building_labels = [
"VARMEINSTAL_KODE",
"OPVARMNING_KODE",
"VARME_SUPPL_KODE",
"BygPkt_id"]
"BygPkt_id",
]
# label under which building entrance ids can be found in OSM
label_osm_entrance_id = 'osak:identifier'
label_osm_entrance_id = "osak:identifier"
# *****************************************************************************
# *****************************************************************************
def heat_demand_dict_by_building_entrance(
gdf_osm: GeoDataFrame,
gdf_buildings: GeoDataFrame,
......@@ -67,9 +66,8 @@ def heat_demand_dict_by_building_entrance(
key_osm_entr_id: str = label_osm_entrance_id,
key_bbr_entr_id: str = label_bbr_entrance_id,
avg_state: list = None,
state_correlates_with_output: bool = False
state_correlates_with_output: bool = False,
) -> dict:
# initialise dict for each building entrance
demand_dict = {}
......@@ -77,24 +75,19 @@ def heat_demand_dict_by_building_entrance(
# for each building entrance
for osm_index in gdf_osm.index:
# initialise dict for each building consumption point
heat_demand_profiles = []
# find the indexes for each building leading to the curr. cons. point
building_indexes = (
gdf_buildings[
gdf_buildings[key_bbr_entr_id] ==
gdf_osm.loc[osm_index][key_osm_entr_id]
building_indexes = gdf_buildings[
gdf_buildings[key_bbr_entr_id] == gdf_osm.loc[osm_index][key_osm_entr_id]
].index
)
# for each building
for building_index in building_indexes:
# get relevant data
# base_load_avg_ratio = 0.3
......@@ -106,7 +99,6 @@ def heat_demand_dict_by_building_entrance(
# estimate its demand
if type(avg_state) == type(None):
# ignore states
heat_demand_profiles.append(
......@@ -114,19 +106,18 @@ def heat_demand_dict_by_building_entrance(
discrete_sinusoid_matching_integral(
bdg_specific_demand[building_index] * area,
time_interval_durations=time_interval_durations,
bdg_ratio_min_max=bdg_ratio_min_max[building_index],
min_to_max_ratio=bdg_ratio_min_max[building_index],
phase_shift_radians=(
bdg_demand_phase_shift[building_index]
# bdg_demand_phase_shift_amplitude*np.random.random()
# if (type(bdg_demand_phase_shift_amplitude) ==
# type(None)) else None
)
),
)
)
)
else:
# states matter
heat_demand_profiles.append(
......@@ -137,8 +128,8 @@ def heat_demand_dict_by_building_entrance(
),
avg_state=avg_state,
time_interval_durations=time_interval_durations,
bdg_ratio_min_max=bdg_ratio_min_max[building_index],
state_correlates_with_output=state_correlates_with_output
min_to_max_ratio=bdg_ratio_min_max[building_index],
state_correlates_with_output=state_correlates_with_output,
)
)
)
......@@ -149,8 +140,7 @@ def heat_demand_dict_by_building_entrance(
if len(heat_demand_profiles) == 0:
final_profile = []
else:
final_profile = sum(profile
for profile in heat_demand_profiles)
final_profile = sum(profile for profile in heat_demand_profiles)
# *********************************************************************
......@@ -162,30 +152,30 @@ def heat_demand_dict_by_building_entrance(
# return
return demand_dict
# *****************************************************************************
# *****************************************************************************
def total_heating_area(
gdf_osm: GeoDataFrame,
gdf_buildings: GeoDataFrame,
key_osm_entr_id: str = label_osm_entrance_id,
key_bbr_entr_id: str = label_bbr_entrance_id
key_bbr_entr_id: str = label_bbr_entrance_id,
) -> float:
area = 0
for osm_index in gdf_osm.index:
# find the indexes for each building leading to the curr. cons. point
building_indexes = (
gdf_buildings[
gdf_buildings[label_bbr_entrance_id] ==
gdf_osm.loc[osm_index][label_osm_entrance_id]
building_indexes = gdf_buildings[
gdf_buildings[label_bbr_entrance_id]
== gdf_osm.loc[osm_index][label_osm_entrance_id]
].index
)
# for each building
for building_index in building_indexes:
# get relevant data
area += gdf_buildings.loc[building_index][label_bbr_housing_area]
return area
# *****************************************************************************
# *****************************************************************************
# -*- coding: utf-8 -*-
......@@ -24,15 +24,16 @@ from ...data.finance.utils import ArcInvestments
# constants
KEY_DHT_OPTIONS_OBJ = 'trench'
KEY_DHT_LENGTH = 'length'
KEY_DHT_UCF = 'capacity_unit_conversion_factor'
KEY_HHT_DHT_PIPES = 'pipes'
KEY_HHT_STD_PIPES = 'pipe_tuple'
KEY_DHT_OPTIONS_OBJ = "trench"
KEY_DHT_LENGTH = "length"
KEY_DHT_UCF = "capacity_unit_conversion_factor"
KEY_HHT_DHT_PIPES = "pipes"
KEY_HHT_STD_PIPES = "pipe_tuple"
# *****************************************************************************
# *****************************************************************************
class PipeTrenchOptions(ArcsWithoutProportionalLosses):
"A class for defining investments in district heating trenches."
......@@ -46,7 +47,6 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
capacity_is_instantaneous: bool = False,
unit_conversion_factor: float = 1.0,
):
# store the unit conversion
self.unit_conversion_factor = unit_conversion_factor
# keep the trench object
......@@ -54,13 +54,11 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
# keep the trench length
self.length = (
[length for i in range(trench.number_options())]
if trench.vector_mode else
length
if trench.vector_mode
else length
)
# determine the rated heat capacity
rhc = trench.rated_heat_capacity(
unit_conversion_factor=unit_conversion_factor
)
rhc = trench.rated_heat_capacity(unit_conversion_factor=unit_conversion_factor)
# initialise the object using the mother class
ArcsWithoutProportionalLosses.__init__(
self,
......@@ -70,10 +68,10 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
minimum_cost=minimum_cost,
specific_capacity_cost=(
0
if type(specific_capacity_cost) == type(None) else
specific_capacity_cost
if type(specific_capacity_cost) == type(None)
else specific_capacity_cost
),
capacity_is_instantaneous=False
capacity_is_instantaneous=False,
)
# initialise the minimum cost
if type(minimum_cost) == type(None):
......@@ -83,7 +81,6 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
# *************************************************************************
def set_minimum_cost(self, minimum_cost=None):
# minimum arc cost
# if no external minimum cost list was provided, calculate it
if type(minimum_cost) == type(None):
......@@ -91,13 +88,12 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
if self.trench.vector_mode:
# multiple options
self.minimum_cost = tuple(
(pipe.sp*length # twin pipes: one twin pipe
if self.trench.twin_pipes else
pipe.sp*length*2) # single pipes: two single pipes
for pipe, length in zip(
self.trench.supply_pipe,
self.length
)
(
pipe.sp * length # twin pipes: one twin pipe
if self.trench.twin_pipes
else pipe.sp * length * 2
) # single pipes: two single pipes
for pipe, length in zip(self.trench.supply_pipe, self.length)
)
else: # only one option
self.minimum_cost = (self.trench.supply_pipe.sp * self.length,)
......@@ -129,43 +125,39 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
temperature_surroundings: float or list,
length: float or list = None,
unit_conversion_factor: float = None,
**kwargs):
**kwargs
):
hts = self.trench.heat_transfer_surroundings(
ground_thermal_conductivity=ground_thermal_conductivity,
ground_air_heat_transfer_coefficient=(
ground_air_heat_transfer_coefficient),
ground_air_heat_transfer_coefficient=(ground_air_heat_transfer_coefficient),
time_interval_duration=time_interval_duration,
temperature_surroundings=temperature_surroundings,
length=(
self.length
if type(length) == type(None) else
length
),
length=(self.length if type(length) == type(None) else length),
unit_conversion_factor=(
self.unit_conversion_factor
if type(unit_conversion_factor) == type(None) else
unit_conversion_factor
if type(unit_conversion_factor) == type(None)
else unit_conversion_factor
),
**kwargs)
**kwargs
)
if self.trench.vector_mode:
# multiple options: hts is a vector
if (hasattr(self, "static_loss") and
type(self.static_loss) != type(None)):
if hasattr(self, "static_loss") and type(self.static_loss) != type(None):
# update the static loss dictionary
if type(hts[0]) == list:
# multiple time intervals
self.static_loss.update({
self.static_loss.update(
{
(h, scenario_key, k): hts[h][k]
for h, hts_h in enumerate(hts)
for k, hts_hk in enumerate(hts_h)
})
}
)
else: # not a list: one time interval
self.static_loss.update({
(h, scenario_key, 0): hts[h]
for h, hts_h in enumerate(hts)
})
self.static_loss.update(
{(h, scenario_key, 0): hts[h] for h, hts_h in enumerate(hts)}
)
else:
# no static loss dictionary, create it
if type(hts[0]) == list:
......@@ -177,40 +169,34 @@ class PipeTrenchOptions(ArcsWithoutProportionalLosses):
}
else: # not a list: one time interval
self.static_loss = {
(h, scenario_key, 0): hts[h]
for h, hts_h in enumerate(hts)
(h, scenario_key, 0): hts[h] for h, hts_h in enumerate(hts)
}
else:
# one option: hts might be a number
if (hasattr(self, "static_loss") and
type(self.static_loss) != type(None)):
if hasattr(self, "static_loss") and type(self.static_loss) != type(None):
# update the static loss dictionary
if not isinstance(hts, Real):
# multiple time intervals
self.static_loss.update({
(0, scenario_key, k): hts[k]
for k, hts_k in enumerate(hts)
})
self.static_loss.update(
{(0, scenario_key, k): hts[k] for k, hts_k in enumerate(hts)}
)
else: # not a list: one time interval
self.static_loss.update({
(0, scenario_key, 0): hts
})
self.static_loss.update({(0, scenario_key, 0): hts})
else:
# no static loss dictionary, create it
if not isinstance(hts, Real):
# multiple time intervals
self.static_loss = {
(0, scenario_key, k): hts_k
for k, hts_k in enumerate(hts)
(0, scenario_key, k): hts_k for k, hts_k in enumerate(hts)
}
else: # not a list: one time interval
self.static_loss = {
(0, scenario_key, 0): hts
}
self.static_loss = {(0, scenario_key, 0): hts}
# *****************************************************************************
# *****************************************************************************
class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
"A class for defining investments in district heating trenches."
......@@ -226,7 +212,6 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
unit_conversion_factor: float = 1.0,
**kwargs
):
# store the unit conversion
self.unit_conversion_factor = unit_conversion_factor
# keep the trench object
......@@ -234,13 +219,11 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# keep the trench length
self.length = (
[length for i in range(trench.number_options())]
if trench.vector_mode else
length
if trench.vector_mode
else length
)
# determine the rated heat capacity
rhc = trench.rated_heat_capacity(
unit_conversion_factor=unit_conversion_factor
)
rhc = trench.rated_heat_capacity(unit_conversion_factor=unit_conversion_factor)
# initialise the object using the mother class
ArcInvestments.__init__(
self,
......@@ -252,11 +235,11 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
capacity=[rhc] if isinstance(rhc, Real) else rhc,
specific_capacity_cost=(
0
if type(specific_capacity_cost) == type(None) else
specific_capacity_cost
if type(specific_capacity_cost) == type(None)
else specific_capacity_cost
),
capacity_is_instantaneous=False,
validate=False
validate=False,
)
# # *************************************************************************
......@@ -388,9 +371,11 @@ class PipeTrenchInvestments(ArcInvestments, PipeTrenchOptions):
# (0, scenario_key, 0): hts
# }
# *****************************************************************************
# *****************************************************************************
class ExistingPipeTrench(PipeTrenchOptions):
"A class for existing pipe trenches."
......@@ -398,10 +383,12 @@ class ExistingPipeTrench(PipeTrenchOptions):
# initialise
PipeTrenchOptions.__init__(
self,
minimum_cost=[0 for i in range(kwargs['trench'].number_options())],
**kwargs)
minimum_cost=[0 for i in range(kwargs["trench"].number_options())],
**kwargs
)
# define the option that already exists
self.options_selected[option_selected] = True
# *****************************************************************************
# *****************************************************************************
......@@ -19,8 +19,8 @@ from numbers import Real
# *****************************************************************************
# *****************************************************************************
def cost_pipes(trench: SupplyReturnPipeTrench, length: float or tuple) -> tuple:
    """
    Returns the costs of each trench option for a given trench length.

    Parameters
    ----------
    trench : SupplyReturnPipeTrench
        The object describing the trench options.
    length : float or tuple
        The trench length, either as one number applying to every option or
        as a tuple with one length per trench option (vector mode only).

    Returns
    -------
    tuple
        The cost of each trench option.

    Raises
    ------
    ValueError
        If the trench object and the length input cannot be combined.
    """
    # use the specific pipe cost that features in the database
    if trench.vector_mode:
        # multiple options
        if type(length) == tuple and len(length) == trench.number_options():
            # multiple trench lengths: pair each option with its own length
            return tuple(
                (
                    pipe.sp * length  # twin pipes: one twin pipe
                    if trench.twin_pipes
                    else pipe.sp * length * 2
                )  # single pipes: two single pipes
                for pipe, length in zip(trench.supply_pipe, length)
            )
        elif isinstance(length, Real):
            # one trench length applying to every option
            return tuple(
                (
                    pipe.sp * length  # twin pipes: one twin pipe
                    if trench.twin_pipes
                    else pipe.sp * length * 2
                )  # single pipes: two single pipes
                for pipe in trench.supply_pipe
            )
        else:
            raise ValueError("Unrecognised input combination.")
    elif not trench.vector_mode and isinstance(length, Real):
        # only one option
        return (trench.supply_pipe.sp * length,)
    else:  # only one option
        raise ValueError("Unrecognised input combination.")
# # keep the trench length
# self.length = (
......@@ -77,12 +80,13 @@ def cost_pipes(trench: SupplyReturnPipeTrench,
# length
# )
# *****************************************************************************
# *****************************************************************************
def summarise_network_by_pipe_technology(
network: Network,
print_output: bool = False
network: Network, print_output: bool = False
) -> dict:
"A method to summarise a network by pipe technology."
......@@ -100,8 +104,7 @@ def summarise_network_by_pipe_technology(
for arc_key in network.edges(keys=True):
# check if it is a PipeTrench object
if not isinstance(
network.edges[arc_key][Network.KEY_ARC_TECH],
PipeTrenchOptions
network.edges[arc_key][Network.KEY_ARC_TECH], PipeTrenchOptions
):
# if not, skip arc
continue
......@@ -116,13 +119,13 @@ def summarise_network_by_pipe_technology(
# get the length of the arc
arc_length = (
network.edges[arc_key][Network.KEY_ARC_TECH].length[h]
if type(network.edges[arc_key][
Network.KEY_ARC_TECH].length) == list else
network.edges[arc_key][Network.KEY_ARC_TECH].length
if type(network.edges[arc_key][Network.KEY_ARC_TECH].length) == list
else network.edges[arc_key][Network.KEY_ARC_TECH].length
)
# identify the option
tech_option_label = network.edges[arc_key][
Network.KEY_ARC_TECH].trench.printable_description(h)
Network.KEY_ARC_TECH
].trench.printable_description(h)
# if the arc technology has been previously selected...
if tech_option_label in length_dict:
# ...increment the total length
......@@ -135,33 +138,35 @@ def summarise_network_by_pipe_technology(
# *************************************************************************
if print_output:
print('printing the arc technologies selected by pipe size...')
print("printing the arc technologies selected by pipe size...")
for key, value in sorted(
(tech, length)
for tech, length in length_dict.items()
(tech, length) for tech, length in length_dict.items()
):
print(str(key)+': '+str(value))
print('total: '+str(sum(length_dict.values())))
print(str(key) + ": " + str(value))
print("total: " + str(sum(length_dict.values())))
return length_dict
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def plot_network_layout(network: Network,
def plot_network_layout(
network: Network,
include_basemap: bool = False,
figure_size: tuple = (25, 25),
min_linewidth: float = 1.0,
max_linewidth: float = 3.0,
legend_fontsize: float = 20.0,
basemap_zoom_level: float = 15,
legend_location: str = 'lower left',
legend_location: str = "lower left",
legend_with_brand_model: bool = False,
legend_transparency: float = None):
legend_transparency: float = None,
):
# convert graph object to GDF
_, my_gdf_arcs = ox.graph_to_gdfs(network)
......@@ -178,8 +183,7 @@ def plot_network_layout(network: Network,
for arc_key in my_gdf.index:
# check if it is a PipeTrenchOptions object
if not isinstance(
network.edges[arc_key][Network.KEY_ARC_TECH],
PipeTrenchOptions
network.edges[arc_key][Network.KEY_ARC_TECH], PipeTrenchOptions
):
# if not, skip arc
continue
......@@ -188,10 +192,12 @@ def plot_network_layout(network: Network,
try:
selected_option = (
my_gdf[Network.KEY_ARC_TECH].loc[
arc_key].trench.printable_description(
my_gdf[Network.KEY_ARC_TECH].loc[
arc_key].options_selected.index(True)
my_gdf[Network.KEY_ARC_TECH]
.loc[arc_key]
.trench.printable_description(
my_gdf[Network.KEY_ARC_TECH]
.loc[arc_key]
.options_selected.index(True)
)
)
except ValueError:
......@@ -209,15 +215,17 @@ def plot_network_layout(network: Network,
(int(printable_description[2:]), printable_description)
for printable_description in arc_tech_summary_dict.keys()
)
(list_sorted_dn,
list_sorted_descriptions) = list(map(list,zip(*list_sorted)))
(list_sorted_dn, list_sorted_descriptions) = list(map(list, zip(*list_sorted)))
list_arc_widths = [
min_linewidth+
(max_linewidth-min_linewidth)*
iteration/(len(list_sorted_dn)-1)
list_arc_widths = (
[
min_linewidth
+ (max_linewidth - min_linewidth) * iteration / (len(list_sorted_dn) - 1)
for iteration, _ in enumerate(list_sorted_dn)
] if len(list_sorted_dn) != 1 else [(max_linewidth+min_linewidth)/2]
]
if len(list_sorted_dn) != 1
else [(max_linewidth + min_linewidth) / 2]
)
# *************************************************************************
# *************************************************************************
......@@ -226,35 +234,29 @@ def plot_network_layout(network: Network,
fig.set_size_inches(*figure_size)
for description, arc_width in zip(
list_sorted_descriptions,
list_arc_widths
):
for description, arc_width in zip(list_sorted_descriptions, list_arc_widths):
# prepare plot
my_gdf.loc[arc_tech_summary_dict[description]].plot(
edgecolor='k',
legend=True,
linewidth=arc_width,
ax=ax)
edgecolor="k", legend=True, linewidth=arc_width, ax=ax
)
# adjust legend labels
ax.legend(list_sorted_descriptions,
ax.legend(
list_sorted_descriptions,
fontsize=legend_fontsize,
loc=legend_location,
framealpha=(
legend_transparency
if type(legend_transparency) != type(None) else None
)
legend_transparency if type(legend_transparency) != type(None) else None
),
)
# add base map
if include_basemap:
cx.add_basemap(ax,
cx.add_basemap(
ax,
zoom=basemap_zoom_level,
source=cx.providers.OpenStreetMap.Mapnik,
# crs=gdf_map.crs,
......@@ -263,24 +265,25 @@ def plot_network_layout(network: Network,
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def plot_heating_demand(
losses: list,
end_use_demand: list,
labels: list,
ylabel: str = 'Heating demand [MWh]',
title: str = 'Heat demand by month'
ylabel: str = "Heating demand [MWh]",
title: str = "Heat demand by month",
):
energy_totals = {
'Losses (optimised)': np.array(losses),
'End use (estimated)': np.array(end_use_demand),
"Losses (optimised)": np.array(losses),
"End use (estimated)": np.array(end_use_demand),
}
colors = {
'Losses (optimised)': 'tab:orange',
'End use (estimated)': 'tab:blue',
"Losses (optimised)": "tab:orange",
"End use (estimated)": "tab:blue",
}
# width = 0.8 # the width of the bars: can also be len(x) sequence
......@@ -293,18 +296,17 @@ def plot_heating_demand(
figure_size = (8, 4)
fig.set_size_inches(figure_size[0], figure_size[1])
for energy_category, energy_total in energy_totals.items():
p = ax.bar(
labels,
energy_total,
label=energy_category,
bottom=bottom,
color=colors[energy_category],
zorder=zorder_bars
zorder=zorder_bars,
)
bottom += energy_total
ax.bar_label(p, fmt='{:,.0f}', label_type='center')
ax.bar_label(p, fmt="{:,.0f}", label_type="center")
# ax.bar_label(p, fmt='{:,.0f}')
ax.grid(zorder=zorder_grid) # zorder=0 to make the grid
......@@ -313,5 +315,6 @@ def plot_heating_demand(
plt.show()
# *****************************************************************************
# *****************************************************************************
# -*- coding: utf-8 -*-
......@@ -10,6 +10,7 @@ from statistics import mean
# TODO: enable swapping the polarity
class Investment:
"""This class is meant to enable analysis of specific investments."""
......@@ -18,11 +19,13 @@ class Investment:
# TODO: consider using dicts to make things more intuitive, time-wise
def __init__(self,
def __init__(
self,
discount_rates: list,
net_cash_flows: list = None,
discount_rate: float = None,
analysis_period_span: int = None):
analysis_period_span: int = None,
):
"""
Create an object for investment analysis using typical information.
......@@ -41,54 +44,43 @@ class Investment:
# validate the inputs
if type(discount_rates) != type(None):
# discount_rates is not None:
if type(discount_rates) != tuple:
raise TypeError(
'The discount rates must be provided as a tuple.')
raise TypeError("The discount rates must be provided as a tuple.")
self.discount_rates = tuple(discount_rates)
self.analysis_period_span = len(self.discount_rates)
if self.analysis_period_span <= 0:
raise ValueError(
'The duration of the period under analysis must be '+
'positive.'
"The duration of the period under analysis must be " + "positive."
)
else:
# discount_rates is None:
# discount rate must be positive real under 1
# analysis_period_span must be an int
if type(discount_rate) != float:
raise TypeError(
'The discount rate must be provided as a float.')
raise TypeError("The discount rate must be provided as a float.")
if discount_rate <= 0 or discount_rate >= 1:
raise ValueError(
'The discount rate must be in the open interval between 0'+
' and 1.'
"The discount rate must be in the open interval between 0"
+ " and 1."
)
if type(analysis_period_span) != int:
raise TypeError(
'The duration of the period under consideration must be '+
'provided as an integer.')
"The duration of the period under consideration must be "
+ "provided as an integer."
)
if analysis_period_span <= 0:
raise ValueError(
'The duration of the period under analysis must be '+
'positive.'
"The duration of the period under analysis must be " + "positive."
)
self.analysis_period_span = analysis_period_span
......@@ -100,27 +92,18 @@ class Investment:
# check the net cash flows
if type(net_cash_flows) != type(None):
if type(net_cash_flows) != list:
raise TypeError(
'The net cash flows must be provided as a list.')
raise TypeError("The net cash flows must be provided as a list.")
if len(net_cash_flows) != self.analysis_period_span + 1:
raise ValueError(
'The inputs are consistent in terms of length.'
)
raise ValueError("The inputs are consistent in terms of length.")
self.net_cash_flows = list(net_cash_flows)
else:
# net_cash_flows is None: initialise it as a list of zeros
self.net_cash_flows = list(
0 for i in range(self.analysis_period_span+1)
)
self.net_cash_flows = list(0 for i in range(self.analysis_period_span + 1))
# discount factors
......@@ -132,15 +115,15 @@ class Investment:
# *************************************************************************
# *************************************************************************
def add_investment(self,
def add_investment(
self,
investment: float,
investment_period: int,
investment_longevity: int,
commissioning_delay_after_investment: int = 0,
salvage_value_method: str = 'annuity'):
if salvage_value_method == 'annuity':
salvage_value_method: str = "annuity",
):
if salvage_value_method == "annuity":
mean_discount_rate = mean(self.discount_rates)
residual_value = salvage_value_annuity(
......@@ -148,14 +131,13 @@ class Investment:
investment_longevity=investment_longevity,
investment_period=investment_period,
discount_rate=mean_discount_rate,
analysis_period_span=self.analysis_period_span
analysis_period_span=self.analysis_period_span,
)
self.net_cash_flows[investment_period] += investment
self.net_cash_flows[self.analysis_period_span] += -residual_value
else:
residual_value = salvage_value_linear_depreciation(
investment=investment,
investment_period=investment_period,
......@@ -163,7 +145,7 @@ class Investment:
analysis_period_span=self.analysis_period_span,
commissioning_delay_after_investment=(
commissioning_delay_after_investment
)
),
)
self.net_cash_flows[investment_period] += investment
......@@ -172,30 +154,24 @@ class Investment:
# *************************************************************************
# *************************************************************************
def add_operational_cash_flows(self,
cash_flow: float or int,
start_period: int,
longevity: int = None):
def add_operational_cash_flows(
self, cash_flow: float or int, start_period: int, longevity: int = None
):
"""Adds a sequence of cash flows to the analysis."""
if type(longevity) == type(None):
# until the planning horizon
for i in range(self.analysis_period_span - start_period + 1):
# add operational cash flows
self.net_cash_flows[i + start_period] += cash_flow
else:
# limited longevity
for i in range(longevity):
if i + start_period >= self.analysis_period_span + 1:
break
# add operational cash flows
......@@ -213,12 +189,14 @@ class Investment:
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def npv(
    discount_rates: list, net_cash_flows: list, return_discount_factors: bool = False
) -> float or tuple:
    """
    Calculates the net present value using the information provided.

    Parameters
    ----------
    discount_rates : list
        The discount rate for each period, as a sequence of t elements.
    net_cash_flows : list
        The net cash flow for each period, as a sequence of t+1 elements
        (one more element than the discount rates, for period zero).
    return_discount_factors : bool, optional
        If True, the discount factors are returned as well. Default: False.

    Returns
    -------
    float or tuple
        The net present value or, if requested, a tuple with the net present
        value and the list of discount factors.

    Raises
    ------
    ValueError
        If the lengths of the inputs are inconsistent.
    """
    # check sizes: there must be exactly one more cash flow than rates
    if len(discount_rates) != len(net_cash_flows) - 1:
        raise ValueError("The inputs are inconsistent.")

    # one discount factor per period, compounding the rates up to that period
    discount_factors = [
        discount_factor(discount_rates[:t]) for t in range(len(discount_rates) + 1)
    ]

    if return_discount_factors:
        return (
            sum(
                ncf_t * df_t for (ncf_t, df_t) in zip(net_cash_flows, discount_factors)
            ),
            discount_factors,
        )
    else:
        return sum(
            ncf_t * df_t for (ncf_t, df_t) in zip(net_cash_flows, discount_factors)
        )
# *****************************************************************************
# *****************************************************************************
def discount_factor(discount_rates: list) -> float:
    """
    Return the discount factor consistent with the discount rates provided.

    The factor compounds one discount rate per period: each rate i
    contributes a multiplicative term 1/(1+i).

    Parameters
    ----------
    discount_rates : list
        The discount rate for each period, as a sequence of numbers.

    Returns
    -------
    float
        The discount factor. An empty sequence yields 1 (no discounting).
    """
    return prod([1 / (1 + i) for i in discount_rates])
# *****************************************************************************
# *****************************************************************************
def salvage_value_linear_depreciation(
    investment: int or float,
    investment_period: int,
    investment_longevity: int,
    analysis_period_span: int,
    commissioning_delay_after_investment: int = 1,
) -> float:
    """
    Determine an asset's salvage value by the end of an analysis period.

    Linear depreciation is assumed: the asset loses the same share of its
    initial value during each period of its operational life.

    Parameters
    ----------
    investment : int or float
        The initial investment sum.
    investment_period : int
        The period during which the investment is made.
    investment_longevity : int
        The number of periods during which the asset retains value.
    analysis_period_span : int
        The number of periods under analysis.
    commissioning_delay_after_investment : int, optional
        The number of periods between the investment and the commissioning
        of the asset. The default is 1.

    Returns
    -------
    float
        The salvage value at the end of the analysis period.

    Raises
    ------
    ValueError
        If the investment is made after the period being analysed.
    """
    if investment_period >= analysis_period_span + 1:
        raise ValueError(
            "The investment has to be made within the period being analysed."
        )
    # calculate the salvage value: the number of value-retaining periods left
    # after the analysis horizon, as a fraction of the asset longevity, times
    # the initial investment
    return (
        (
            investment_longevity
            + investment_period
            + commissioning_delay_after_investment
            - 1
            - analysis_period_span
        )
        * investment
        / investment_longevity
    )
# *****************************************************************************
# *****************************************************************************
def salvage_value_annuity(
    investment: int or float,
    discount_rate: float,
    investment_longevity: int,
    investment_period: int,
    analysis_period_span: int,
) -> float:
    """
    Determine an asset's salvage value at the end of the analysis period,
    representing the investment as a constant annuity over its longevity.

    Parameters
    ----------
    investment : int or float
        The initial investment sum.
    discount_rate : float
        The discount rate per period.
    investment_longevity : int
        The number of periods during which the asset retains value.
    investment_period : int
        The period during which the investment is made.
    analysis_period_span : int
        The number of periods under analysis.

    Returns
    -------
    float
        The salvage value at the end of the analysis period.
    """
    # present value (at the reference period) of the annuities that fall
    # beyond the analysis horizon
    npv_salvage = present_salvage_value_annuity(
        investment=investment,
        investment_longevity=investment_longevity,
        investment_period=investment_period,
        discount_rate=discount_rate,
        analysis_period_span=analysis_period_span,
        return_annuity=False,
    )
    # convert the present value into an end-of-horizon value by dividing by
    # the discount factor for the full analysis period
    return npv_salvage / discount_factor(
        tuple(discount_rate for i in range(analysis_period_span))
    )
# *****************************************************************************
# *****************************************************************************
def annuity(
    investment: int or float, investment_longevity: int, discount_rate: float
) -> float:
    """
    Returns the annuity value for a given investment sum and longevity.

    The annuity is the constant per-period payment whose present value,
    discounted at the given rate over the asset's longevity, equals the
    investment sum.

    Parameters
    ----------
    investment : int or float
        The investment sum.
    investment_longevity : int
        The number of periods over which the annuity is paid.
    discount_rate : float
        The discount rate per period.

    Returns
    -------
    float
        The per-period annuity value.
    """
    # standard capital recovery factor: i / (1 - (1 + i) ** -n)
    return (
        investment
        * discount_rate
        / (1 - (1 + discount_rate) ** (-investment_longevity))
    )
# *****************************************************************************
# *****************************************************************************
def present_salvage_value_annuity(
    investment: int or float,
    investment_longevity: int,
    investment_period: int,
    discount_rate: float,
    analysis_period_span: int,
    return_annuity: bool = False,
) -> float:
    """
    Calculates the present value of an asset after a given analysis period.

    The investment is represented as a constant annuity paid over the asset's
    longevity; the asset's residual worth is the present value of the
    annuities that fall beyond the analysis horizon.

    Parameters
    ----------
    investment : int or float
        The initial investment sum.
    investment_longevity : int
        The number of periods during which the asset retains value.
    investment_period : int
        The period during which the investment is made.
    discount_rate : float
        The discount rate per period.
    analysis_period_span : int
        The number of periods under analysis.
    return_annuity : bool, optional
        If True, the annuity value is returned as well. Default: False.

    Returns
    -------
    float or tuple
        The present salvage value or, if requested, a tuple with the present
        salvage value and the annuity value.

    Raises
    ------
    ValueError
        If the investment is made after the period being analysed.
    """
    if investment_period >= analysis_period_span + 1:
        raise ValueError(
            "The investment has to be made within the period being analysed."
        )
    # the present salvage value requires the lifetime to extend beyond the hor.
    if analysis_period_span >= investment_longevity + investment_period:
        if return_annuity:
            return 0, annuity(
                investment=investment,
                investment_longevity=investment_longevity,
                discount_rate=discount_rate,
            )
        else:
            return 0
    # the annuity has to consider the asset longevity and the commission. delay
    value_annuity = annuity(
        investment=investment,
        investment_longevity=investment_longevity,
        discount_rate=discount_rate,
    )
    discount_rates = tuple(
        discount_rate for i in range(investment_longevity + investment_period)
    )
    net_cash_flows = list(
        value_annuity for i in range(investment_longevity + investment_period + 1)
    )
    # annuities within the analysis period do not count towards salvage value
    for year_index in range(analysis_period_span + 1):
        net_cash_flows[year_index] = 0
    if return_annuity:
        return (
            npv(discount_rates=discount_rates, net_cash_flows=net_cash_flows),
            value_annuity,
        )
    else:
        return npv(discount_rates=discount_rates, net_cash_flows=net_cash_flows)
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
# *****************************************************************************
from ...problems.esipp.network import Arcs
# *****************************************************************************
# *****************************************************************************
class ArcInvestments(Arcs):
    """A class for defining arcs linked to investments."""

    # *************************************************************************
    # *************************************************************************

    def __init__(self, investments: tuple, **kwargs):
        """
        Creates arcs whose minimum costs are tied to Investment objects.

        Parameters
        ----------
        investments : tuple
            One investment object per arc option; each object's
            net_present_value() defines the respective minimum arc cost.
        **kwargs
            Additional keyword arguments passed to the Arcs initialiser.
        """
        # keep investment data
        self.investments = investments
        # initialise object: price each option at its net present value
        Arcs.__init__(
            self,
            minimum_cost=tuple(inv.net_present_value() for inv in self.investments),
            # validate=False,
            **kwargs
        )

    # *************************************************************************
    # *************************************************************************

    def update_minimum_cost(self):
        "Updates the minimum costs using the Investment objects."
        self.minimum_cost = tuple(
            inv.net_present_value() for inv in self.investments
        )
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
\ No newline at end of file
# imports
from math import inf
......@@ -22,6 +21,7 @@ from ..gis import identify as ident
# *****************************************************************************
# *****************************************************************************
def edge_lengths(network: MultiDiGraph, edge_keys: tuple = None) -> dict:
"""
Calculate edge lengths in a OSMnx-formatted MultiDiGraph network object.
......@@ -44,7 +44,7 @@ def edge_lengths(network: MultiDiGraph, edge_keys: tuple = None) -> dict:
"""
# determine if the graph is projected or not
graph_is_projected = is_projected(network.graph['crs'])
graph_is_projected = is_projected(network.graph["crs"])
# check if edge keys were specified
if type(edge_keys) == type(None):
# no particular edge keys were provided: consider all edges (default)
......@@ -64,12 +64,16 @@ def edge_lengths(network: MultiDiGraph, edge_keys: tuple = None) -> dict:
else:
# use (projected) coordinates
start_point = Point(
(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y])
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
)
)
end_point = Point(
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
)
)
length_dict[edge_key] = start_point.distance(end_point)
......@@ -86,14 +90,16 @@ def edge_lengths(network: MultiDiGraph, edge_keys: tuple = None) -> dict:
lat1=network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
lon1=network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X]
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
)
# return the dict with lengths of each edge
return length_dict
# *****************************************************************************
# *****************************************************************************
def great_circle_distance_along_path(path: LineString) -> float:
"""
Computes the great circle distance along a given path.
......@@ -121,13 +127,15 @@ def great_circle_distance_along_path(path: LineString) -> float:
lat[:-1], # latitudes of starting points
lon[:-1], # longitudes of starting points
lat[1:], # latitudes of ending points
lon[1:] # longitudes of ending points
lon[1:], # longitudes of ending points
)
)
# *****************************************************************************
# *****************************************************************************
def update_street_count(network: MultiDiGraph):
"""
Updates the street count attributes of nodes in a MultiDiGraph object.
......@@ -145,16 +153,20 @@ def update_street_count(network: MultiDiGraph):
# update street count
street_count_dict = count_streets_per_node(network)
network.add_nodes_from(
((key, {osm.KEY_OSMNX_STREET_COUNT:value})
for key, value in street_count_dict.items())
(
(key, {osm.KEY_OSMNX_STREET_COUNT: value})
for key, value in street_count_dict.items()
)
)
# *****************************************************************************
# *****************************************************************************
def node_path_length(network: MultiDiGraph,
path: list,
return_minimum_length_only: bool = True) -> list or float:
def node_path_length(
network: MultiDiGraph, path: list, return_minimum_length_only: bool = True
) -> list or float:
"""
Returns the length or lengths of a path defined using nodes.
......@@ -200,9 +212,7 @@ def node_path_length(network: MultiDiGraph,
for node_pair in range(path_length - 1):
# get the edges between these two nodes
edge_keys = ident.get_edges_from_a_to_b(
network,
path[node_pair],
path[node_pair+1]
network, path[node_pair], path[node_pair + 1]
)
number_edge_keys = len(edge_keys)
if number_edge_keys == 1:
......@@ -225,15 +235,12 @@ def node_path_length(network: MultiDiGraph,
# add the new edge
list_of_edge_key_paths[
path_index + edge_key_index * number_paths
].append(
edge_keys[edge_key_index]
)
].append(edge_keys[edge_key_index])
# *************************************************************************
path_lenths = [
sum(network.edges[edge_key][osm.KEY_OSMNX_LENGTH]
for edge_key in edge_key_path)
sum(network.edges[edge_key][osm.KEY_OSMNX_LENGTH] for edge_key in edge_key_path)
for edge_key_path in list_of_edge_key_paths
]
if return_minimum_length_only:
......@@ -243,12 +250,12 @@ def node_path_length(network: MultiDiGraph,
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def edge_path_length(network: MultiDiGraph,
path: list,
**kwargs) -> float:
def edge_path_length(network: MultiDiGraph, path: list, **kwargs) -> float:
"""
Returns the total length of a path defined using edges.
......@@ -274,19 +281,19 @@ def edge_path_length(network: MultiDiGraph,
if path_length == 0:
return inf
if ident.is_edge_path(network, path, **kwargs):
return sum(
network.edges[edge_key][osm.KEY_OSMNX_LENGTH] for edge_key in path
)
return sum(network.edges[edge_key][osm.KEY_OSMNX_LENGTH] for edge_key in path)
else:
# no path provided
return inf
# *****************************************************************************
# *****************************************************************************
def count_ocurrences(gdf: GeoDataFrame,
column: str,
column_entries: list = None) -> dict:
def count_ocurrences(
gdf: GeoDataFrame, column: str, column_entries: list = None
) -> dict:
"""
Counts the number of occurrences per entry in a DataFrame object's column.
......@@ -340,5 +347,6 @@ def count_ocurrences(gdf: GeoDataFrame,
# return statement
return count_dict
# *****************************************************************************
# *****************************************************************************
......@@ -20,6 +20,7 @@ from ..gis import osm
# *****************************************************************************
# *****************************************************************************
def is_edge_consistent_with_geometry(network: nx.MultiDiGraph, edge_key):
"""
Returns True if a given edge in an OSMnx-formatted graph is declared in the
......@@ -51,23 +52,24 @@ def is_edge_consistent_with_geometry(network: nx.MultiDiGraph, edge_key):
if not network.has_edge(*edge_key):
# the edge does not exist
raise ValueError(
'No edge was found matching the key provided: '+str(edge_key)
"No edge was found matching the key provided: " + str(edge_key)
)
elif osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# edge exists and has a geometry attribute: check the geometry
# check if the first point on the geometry matches the first node
return (
tuple(network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY].coords)[0] ==
(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y])
return tuple(network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY].coords)[0] == (
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
)
else:
# edge exists but has no geometry attribute: it is consistent
return True
# *****************************************************************************
# *****************************************************************************
def find_edges_in_reverse(network: nx.MultiDiGraph) -> dict:
"""
Finds edges in reverse within an OSMnx-formatted MultiDiGraph object.
......@@ -87,30 +89,32 @@ def find_edges_in_reverse(network: nx.MultiDiGraph) -> dict:
edge_key: [
other_edge_key
for other_edge_key in get_edges_from_a_to_b(
network,
node_start=edge_key[1],
node_end=edge_key[0]
network, node_start=edge_key[1], node_end=edge_key[0]
)
if edges_are_in_reverse(network, edge_key, other_edge_key)
]
for edge_key in network.edges(keys=True)
}
# *****************************************************************************
# *****************************************************************************
def is_edge_osmnx_compliant(network: nx.MultiDiGraph, edge_key) -> bool:
"""Returns True if a given edge is osmnx compliant and False otherwise."""
# check if the edge exists
if not network.has_edge(*edge_key):
raise ValueError('Edge not found.')
raise ValueError("Edge not found.")
# check compatibility with osmnx
return is_edge_data_osmnx_compliant(network.get_edge_data(*edge_key))
# *****************************************************************************
# *****************************************************************************
def is_edge_data_osmnx_compliant(edge_data: dict):
"""Returns True a given MultiDiGraph edge data dict is OSMnx-compliant."""
# check if all the essential attributes are in
......@@ -157,14 +161,16 @@ def is_edge_data_osmnx_compliant(edge_data: dict):
# else: # no action
return True
# *****************************************************************************
# *****************************************************************************
def edges_are_in_reverse(
network: nx.MultiDiGraph,
edge_a: tuple,
edge_b: tuple,
tolerance: float or int = 1e-3
tolerance: float or int = 1e-3,
) -> bool:
"""
Returns True if two edges in a graph represent the same one but in reverse.
......@@ -211,9 +217,10 @@ def edges_are_in_reverse(
# the nodes do not match
return False
# make sure both edges exist and comply with osmnx
if (not is_edge_osmnx_compliant(network, edge_a) or
not is_edge_osmnx_compliant(network, edge_b)):
raise ValueError('One or more of the edges is not OSMnx-compliant.')
if not is_edge_osmnx_compliant(network, edge_a) or not is_edge_osmnx_compliant(
network, edge_b
):
raise ValueError("One or more of the edges is not OSMnx-compliant.")
fw_dict = network.get_edge_data(*edge_a)
rv_dict = network.get_edge_data(*edge_b)
......@@ -222,67 +229,86 @@ def edges_are_in_reverse(
# the other
key_attr = set([osm.KEY_OSMNX_GEOMETRY, osm.KEY_OSMNX_REVERSED])
for _attr in key_attr:
if ((_attr in fw_dict.keys() and _attr not in rv_dict.keys()) or
(_attr not in fw_dict.keys() and _attr in rv_dict.keys())):
if (_attr in fw_dict.keys() and _attr not in rv_dict.keys()) or (
_attr not in fw_dict.keys() and _attr in rv_dict.keys()
):
# incoherent inputs
return False
# for each key, value pair in the forward edge's dict
for attr_key, attr_value in fw_dict.items():
if (type(attr_value) == list and
((type(rv_dict[attr_key]) == list and
set(attr_value) != set(rv_dict[attr_key])) or
type(rv_dict[attr_key]) != list)):
if type(attr_value) == list and (
(
type(rv_dict[attr_key]) == list
and set(attr_value) != set(rv_dict[attr_key])
)
or type(rv_dict[attr_key]) != list
):
# the sets of list arguments do not match
# or, the arguments are not equivalent
return False
elif (type(attr_value) == list and
type(rv_dict[attr_key]) == list and
set(attr_value) == set(rv_dict[attr_key])):
elif (
type(attr_value) == list
and type(rv_dict[attr_key]) == list
and set(attr_value) == set(rv_dict[attr_key])
):
# the sets of list arguments match
continue
elif (attr_key == osm.KEY_OSMNX_GEOMETRY and
((type(rv_dict[attr_key]) == LineString and
tuple(attr_value.coords) !=
tuple(rv_dict[attr_key].reverse().coords)) or
attr_key not in rv_dict or
type(rv_dict[attr_key]) != LineString)):
elif attr_key == osm.KEY_OSMNX_GEOMETRY and (
(
type(rv_dict[attr_key]) == LineString
and tuple(attr_value.coords)
!= tuple(rv_dict[attr_key].reverse().coords)
)
or attr_key not in rv_dict
or type(rv_dict[attr_key]) != LineString
):
# either the geometries are not reversed
# or, there is no geometry attribute in the reverse dict
# or, the geometry in the reverse edge is not for a LineString
return False
elif (attr_key == osm.KEY_OSMNX_GEOMETRY and
type(rv_dict[attr_key]) == LineString and
tuple(attr_value.coords) ==
tuple(rv_dict[attr_key].reverse().coords)):
elif (
attr_key == osm.KEY_OSMNX_GEOMETRY
and type(rv_dict[attr_key]) == LineString
and tuple(attr_value.coords) == tuple(rv_dict[attr_key].reverse().coords)
):
# the geometries are reversed
continue
elif (attr_key == osm.KEY_OSMNX_REVERSED and
((attr_key in rv_dict and
attr_value == rv_dict[attr_key]) or
attr_key not in rv_dict or
type(rv_dict[attr_key]) != bool)):
elif attr_key == osm.KEY_OSMNX_REVERSED and (
(attr_key in rv_dict and attr_value == rv_dict[attr_key])
or attr_key not in rv_dict
or type(rv_dict[attr_key]) != bool
):
# either the reversed flags match
# or, there is no reversed flag in the reverse dict
return False
elif (attr_key == osm.KEY_OSMNX_REVERSED and
attr_key in rv_dict and
not attr_value == rv_dict[attr_key]):
elif (
attr_key == osm.KEY_OSMNX_REVERSED
and attr_key in rv_dict
and not attr_value == rv_dict[attr_key]
):
# the reversed flags are logical opposites
continue
elif (attr_key == osm.KEY_OSMNX_LENGTH and
((attr_key in rv_dict and
elif attr_key == osm.KEY_OSMNX_LENGTH and (
(
attr_key in rv_dict
and
# isinstance(rv_dict[attr_key], Real) and
abs(attr_value-rv_dict[attr_key]) > tolerance) or
attr_key not in rv_dict)):
abs(attr_value - rv_dict[attr_key]) > tolerance
)
or attr_key not in rv_dict
):
# either the lengths differ too much
# or, there is no length attribute in the reverse dict
# or it is not a numeric type
return False
elif (attr_key == osm.KEY_OSMNX_LENGTH and
attr_key in rv_dict and
elif (
attr_key == osm.KEY_OSMNX_LENGTH
and attr_key in rv_dict
and
# isinstance(rv_dict[attr_key], Real) and
abs(attr_value-rv_dict[attr_key]) <= tolerance):
abs(attr_value - rv_dict[attr_key]) <= tolerance
):
# the lengths are within the tolerance
continue
elif attr_key in rv_dict and attr_value != rv_dict[attr_key]:
......@@ -293,15 +319,18 @@ def edges_are_in_reverse(
# all other possibilities have been exhausted: return True
return True
# *****************************************************************************
# *****************************************************************************
def close_to_extremities(
line: LineString,
points: tuple,
tolerance: float = 7 / 3 - 4 / 3 - 1,
use_start_point_equidistant: bool = True,
return_distances: bool = False) -> tuple:
return_distances: bool = False,
) -> tuple:
"""
Determines which points are close to a line\'s start and end points.
......@@ -350,7 +379,8 @@ def close_to_extremities(
_start = []
_end = []
for i, (line_distance, start_distance, end_distance) in enumerate(
zip(line_distances, start_distances, end_distances)):
zip(line_distances, start_distances, end_distances)
):
if start_distance < end_distance:
# the point is closer to the start point than to the end point
if abs(start_distance - line_distance) <= tolerance:
......@@ -378,14 +408,18 @@ def close_to_extremities(
else:
return _start, _end
# *****************************************************************************
# *****************************************************************************
def find_roundabouts(network: nx.MultiDiGraph,
def find_roundabouts(
network: nx.MultiDiGraph,
maximum_perimeter: float = None,
minimum_perimeter: float = None,
maximum_number_nodes: int = None,
minimum_number_nodes: int = None) -> list:
minimum_number_nodes: int = None,
) -> list:
"""
Finds sequences of nodes in a network that constitute roundabouts.
......@@ -433,22 +467,18 @@ def find_roundabouts(network: nx.MultiDiGraph,
# 3) edges that do not have the necessary attributes
# node number limits
there_are_upper_node_number_limits = (
True if maximum_number_nodes != None else False)
there_are_lower_node_number_limits = (
True if minimum_number_nodes != None else False)
there_are_upper_node_number_limits = True if maximum_number_nodes != None else False
there_are_lower_node_number_limits = True if minimum_number_nodes != None else False
there_are_node_number_limits = (
there_are_upper_node_number_limits or
there_are_lower_node_number_limits)
there_are_upper_node_number_limits or there_are_lower_node_number_limits
)
# perimeter limits
there_are_upper_perimeter_limits = (
True if maximum_perimeter != None else False)
there_are_lower_perimeter_limits = (
True if minimum_perimeter != None else False)
there_are_upper_perimeter_limits = True if maximum_perimeter != None else False
there_are_lower_perimeter_limits = True if minimum_perimeter != None else False
there_are_perimeter_limits = (
there_are_upper_perimeter_limits or
there_are_lower_perimeter_limits)
there_are_upper_perimeter_limits or there_are_lower_perimeter_limits
)
# find edges that are not one way
list_removable_edges = []
......@@ -502,9 +532,7 @@ def find_roundabouts(network: nx.MultiDiGraph,
if there_are_perimeter_limits:
# compute the total length for each node
total_length = node_path_length(
network,
node_path,
return_minimum_length_only=True
network, node_path, return_minimum_length_only=True
)
if there_are_lower_perimeter_limits:
if total_length < minimum_perimeter:
......@@ -528,12 +556,14 @@ def find_roundabouts(network: nx.MultiDiGraph,
# *********************************************************************
# *********************************************************************
# *****************************************************************************
# *****************************************************************************
def is_roundabout(network: nx.MultiDiGraph,
path: list,
path_as_node_keys: bool = True) -> bool:
def is_roundabout(
network: nx.MultiDiGraph, path: list, path_as_node_keys: bool = True
) -> bool:
"""
Returns True if a given path constitutes a roundabout in a directed graph.
......@@ -562,57 +592,46 @@ def is_roundabout(network: nx.MultiDiGraph,
# roundabouts require at least two nodes
if len(path) <= 1:
raise ValueError('Node paths require at least two nodes.')
raise ValueError("Node paths require at least two nodes.")
# for each node in path
for node_key in path:
# check if it exists in the network
if not network.has_node(node_key):
return False
# there should be no repetitions
if path.count(node_key) > 1:
return False
# check if the last node connects to the first
edge_keys = get_edges_from_a_to_b(network,
path[-1],
path[0])
edge_keys = get_edges_from_a_to_b(network, path[-1], path[0])
if len(edge_keys) == 0:
return False
else:
# among the edges between them, find at least one compatible
compatible_edge_exists = False
for edge_key in edge_keys:
# get its data
edge_data_dict = network.get_edge_data(u=edge_key[0],
v=edge_key[1],
key=edge_key[2])
edge_data_dict = network.get_edge_data(
u=edge_key[0], v=edge_key[1], key=edge_key[2]
)
# ensure that this edge has the oneway attribute
if osm.KEY_OSMNX_ONEWAY in edge_data_dict:
# ensure that it is true
if edge_data_dict[osm.KEY_OSMNX_ONEWAY]:
compatible_edge_exists = True
break
......@@ -620,7 +639,6 @@ def is_roundabout(network: nx.MultiDiGraph,
# check for compatible edges
if not compatible_edge_exists:
# no compatible edges exist between these two nodes
return False
......@@ -628,29 +646,25 @@ def is_roundabout(network: nx.MultiDiGraph,
# for each other node pair
for node_pair in range(len(path) - 1):
# for each edge between them, find at least one compatible edge
compatible_edge_exists = False
for edge_key in get_edges_from_a_to_b(network,
path[node_pair],
path[node_pair+1]):
for edge_key in get_edges_from_a_to_b(
network, path[node_pair], path[node_pair + 1]
):
# get its data
edge_data_dict = network.get_edge_data(u=edge_key[0],
v=edge_key[1],
key=edge_key[2])
edge_data_dict = network.get_edge_data(
u=edge_key[0], v=edge_key[1], key=edge_key[2]
)
# ensure that this edge has the oneway attribute
if osm.KEY_OSMNX_ONEWAY in edge_data_dict:
# ensure that it is true
if edge_data_dict[osm.KEY_OSMNX_ONEWAY]:
compatible_edge_exists = True
break
......@@ -658,7 +672,6 @@ def is_roundabout(network: nx.MultiDiGraph,
# check for compatible edges
if not compatible_edge_exists:
# no compatible edges exist between these two nodes
return False
......@@ -667,12 +680,12 @@ def is_roundabout(network: nx.MultiDiGraph,
return True
# *****************************************************************************
# *****************************************************************************
def get_edges_from_a_to_b(network: nx.MultiDiGraph,
node_start,
node_end) -> list:
def get_edges_from_a_to_b(network: nx.MultiDiGraph, node_start, node_end) -> list:
"""
Retrieve the keys for edges from one node to another.
......@@ -692,14 +705,17 @@ def get_edges_from_a_to_b(network: nx.MultiDiGraph,
"""
if network.has_edge(u=node_start, v=node_end):
return [(node_start, node_end, key)
for key in network._adj[node_start][node_end]]
return [
(node_start, node_end, key) for key in network._adj[node_start][node_end]
]
else:
return []
# *****************************************************************************
# *****************************************************************************
def get_edges_between_two_nodes(network: nx.MultiDiGraph, u, v) -> list:
"""
Retrieve the keys for all edges involving two specific nodes.
......@@ -741,14 +757,18 @@ def get_edges_between_two_nodes(network: nx.MultiDiGraph, u, v) -> list:
# no edges found
return []
# *****************************************************************************
# *****************************************************************************
def get_edges_involving_node(network: nx.MultiDiGraph,
def get_edges_involving_node(
network: nx.MultiDiGraph,
node_key,
include_outgoing_edges: bool = True,
include_incoming_edges: bool = True,
include_self_loops: bool = True) -> list:
include_self_loops: bool = True,
) -> list:
"""
Retrieve the keys for all edges involving a specific node.
......@@ -780,22 +800,30 @@ def get_edges_involving_node(network: nx.MultiDiGraph,
for edge_key in network.edges(keys=True)
if node_key in edge_key[0:2]
# outgoing edges
if ((node_key != edge_key[0] and not include_outgoing_edges) or
include_outgoing_edges)
if (
(node_key != edge_key[0] and not include_outgoing_edges)
or include_outgoing_edges
)
# incoming edges
if ((node_key != edge_key[1] and not include_incoming_edges) or
include_incoming_edges)
if (
(node_key != edge_key[1] and not include_incoming_edges)
or include_incoming_edges
)
# self-loops
if ((edge_key[0] != edge_key[1] and not include_self_loops) or
include_self_loops)
if (
(edge_key[0] != edge_key[1] and not include_self_loops)
or include_self_loops
)
]
# *****************************************************************************
# *****************************************************************************
def neighbours(network: nx.MultiDiGraph or nx.MultiGraph,
node_key,
ignore_self_loops: bool = True):
def neighbours(
network: nx.MultiDiGraph or nx.MultiGraph, node_key, ignore_self_loops: bool = True
):
"""
Return a given node\'s neighbours.
......@@ -819,7 +847,6 @@ def neighbours(network: nx.MultiDiGraph or nx.MultiGraph,
"""
if network.has_edge(node_key, node_key) and ignore_self_loops:
return (
_node_key
for _node_key in nx.all_neighbors(network, node_key)
......@@ -827,16 +854,16 @@ def neighbours(network: nx.MultiDiGraph or nx.MultiGraph,
)
else:
return nx.all_neighbors(network, node_key)
# *****************************************************************************
# *****************************************************************************
def is_node_path(
network: nx.MultiDiGraph,
path: list,
consider_reversed_edges: bool = False) -> bool:
network: nx.MultiDiGraph, path: list, consider_reversed_edges: bool = False
) -> bool:
"""
Indicates if a given path qualifies as a node path in a directed network.
......@@ -877,13 +904,17 @@ def is_node_path(
else:
return nx.is_path(network, path)
# *****************************************************************************
# *****************************************************************************
def is_edge_path(network: nx.MultiDiGraph,
def is_edge_path(
network: nx.MultiDiGraph,
path: list,
ignore_edge_direction: bool = False,
allow_multiple_formats: bool = False) -> bool:
allow_multiple_formats: bool = False,
) -> bool:
"""
Indicates if a given path qualifies as an edge path in a directed network.
......@@ -908,58 +939,51 @@ def is_edge_path(network: nx.MultiDiGraph,
"""
if len(path) == 0:
# empty path
return False
else:
# all the edges have to exist
previous_edge_key_length = len(path[0])
for edge_i, tentative_edge_key in enumerate(path):
edge_key_length = len(tentative_edge_key)
if not allow_multiple_formats:
if previous_edge_key_length != edge_key_length:
# the edge key format changes: not a path
raise ValueError(
'The path must be provided using only one edge format.'
"The path must be provided using only one edge format."
)
# find out if the edge exists
if edge_key_length == 3:
# 3-tuple format
if not network.has_edge(u=tentative_edge_key[0],
if not network.has_edge(
u=tentative_edge_key[0],
v=tentative_edge_key[1],
key=tentative_edge_key[2]):
key=tentative_edge_key[2],
):
# the edge does not exist as specified
return False
elif edge_key_length == 2:
# 2-tuple format
if not network.has_edge(u=tentative_edge_key[0],
v=tentative_edge_key[1]):
if not network.has_edge(
u=tentative_edge_key[0], v=tentative_edge_key[1]
):
# the edge does not exist as specified
return False
else:
# unknown format
return False
......@@ -967,22 +991,23 @@ def is_edge_path(network: nx.MultiDiGraph,
# the edge exists: check if it forms a sequence
if edge_i != 0: # skip the first iteration
# if none of the current edge's nodes is mentioned in the
# previous edge, then no sequence is formed
if (tentative_edge_key[0] not in path[edge_i-1][0:2] and
tentative_edge_key[1] not in path[edge_i-1][0:2] and
ignore_edge_direction):
if (
tentative_edge_key[0] not in path[edge_i - 1][0:2]
and tentative_edge_key[1] not in path[edge_i - 1][0:2]
and ignore_edge_direction
):
return False
# if the previous edge's end node is not the current edge's
# start node, then it is not a valid edge path
if (path[edge_i-1][1] != tentative_edge_key[0] and
not ignore_edge_direction):
if (
path[edge_i - 1][1] != tentative_edge_key[0]
and not ignore_edge_direction
):
return False
# # check the formats, if necessary
......@@ -996,13 +1021,17 @@ def is_edge_path(network: nx.MultiDiGraph,
return True
# *****************************************************************************
# *****************************************************************************
def is_path_straight(network: nx.MultiDiGraph,
def is_path_straight(
network: nx.MultiDiGraph,
path: list,
consider_reversed_edges: bool = False,
ignore_self_loops: bool = False) -> bool:
ignore_self_loops: bool = False,
) -> bool:
"""
Returns True if the path is straight and False otherwise.
......@@ -1037,11 +1066,16 @@ def is_path_straight(network: nx.MultiDiGraph,
# check if the intermediate nodes have the right number of neighbours
for intermediate_node in path[1:-1]:
if len(set(neighbours(
network,
intermediate_node,
ignore_self_loops=ignore_self_loops))
) != 2:
if (
len(
set(
neighbours(
network, intermediate_node, ignore_self_loops=ignore_self_loops
)
)
)
!= 2
):
# the path is not straight if the intermediate nodes do not have
# two distinct neighbours
return False
......@@ -1049,14 +1083,18 @@ def is_path_straight(network: nx.MultiDiGraph,
# if all intermediate nodes have two neighbours, return True
return True
# *****************************************************************************
# *****************************************************************************
def find_simplifiable_paths(network: nx.MultiDiGraph,
def find_simplifiable_paths(
network: nx.MultiDiGraph,
excluded_nodes: list,
ignore_self_loops: bool = False,
consider_reversed_edges: bool = False,
include_both_directions: bool = False) -> list:
include_both_directions: bool = False,
) -> list:
"""
Enumerates the simplifiable paths found in a given graph.
......@@ -1096,26 +1134,23 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
# locate all the non-excluded nodes that can form straight paths
intermediate_candidate_nodes = set([
intermediate_candidate_nodes = set(
[
node_key
for node_key in network.nodes()
# the node cannot be among those excluded
if node_key not in excluded_nodes
# the node has to be linked to two other nodes other than itself
if len(set(
neighbours(
network,
node_key,
ignore_self_loops=True
)
)) == 2
if len(set(neighbours(network, node_key, ignore_self_loops=True))) == 2
# exclude nodes with self-loops if desired:
# 1) self-loops are tolerated (no need to check)
# 2) self-loops are not tolerated and they do not exist
if (ignore_self_loops or
(not ignore_self_loops and
not network.has_edge(node_key, node_key)))
])
if (
ignore_self_loops
or (not ignore_self_loops and not network.has_edge(node_key, node_key))
)
]
)
# *************************************************************************
......@@ -1138,7 +1173,7 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
network,
list_valid_nodes=intermediate_candidate_nodes - list_nodes_joined,
start_node=candidate_node,
ignore_self_loops=ignore_self_loops
ignore_self_loops=ignore_self_loops,
)
else:
# reversed edges are not accepted
......@@ -1146,11 +1181,10 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
network,
list_valid_nodes=intermediate_candidate_nodes - list_nodes_joined,
start_node=candidate_node,
ignore_self_loops=ignore_self_loops
ignore_self_loops=ignore_self_loops,
)
# make sure the sequence is not redundant
if (len(new_sequence) <= 2 or
new_sequence in list_paths):
if len(new_sequence) <= 2 or new_sequence in list_paths:
# path is just one edge or has already been included
continue
......@@ -1171,19 +1205,18 @@ def find_simplifiable_paths(network: nx.MultiDiGraph,
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def _find_path_direction_sensitive(
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool
ignore_self_loops: bool,
) -> list:
def find_path_forward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_forward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
......@@ -1206,11 +1239,7 @@ def _find_path_direction_sensitive(
# add the neighbour to the end of the path
path.append(a_neighbour)
# recursive call with extended path
return find_path_forward(
network,
path[-1],
path
)
return find_path_forward(network, path[-1], path)
else: # is not a valid node: path ends
# add the neighbour to the end of the path:
path.append(a_neighbour)
......@@ -1221,10 +1250,18 @@ def _find_path_direction_sensitive(
# neighbour is already on the path, matches the start,
# and has two neighbours other than itself:
# close the loop and return the path
if (len(set(neighbours(
if (
len(
set(
neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops))) == 2):
ignore_self_loops=ignore_self_loops,
)
)
)
== 2
):
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
......@@ -1232,9 +1269,7 @@ def _find_path_direction_sensitive(
# all neighbours have been visited: return the current path
return path
def find_path_backward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_backward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
......@@ -1266,11 +1301,7 @@ def _find_path_direction_sensitive(
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# recursive call with extended path
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
else: # is not a valid node: path ends
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
......@@ -1299,38 +1330,28 @@ def _find_path_direction_sensitive(
# find the path forward, check for cycles and then find the path backwards
# find the forward path segment
path = find_path_forward(
network,
start_node,
[start_node]
)
path = find_path_forward(network, start_node, [start_node])
# cycles have to be detected on the first try
if len(path) >= 3 and path[0] == path[-1]:
# it is a cycle: no need to search backwards
return path
# find the backward path segment
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def _find_path_direction_insensitive(
network: nx.MultiDiGraph,
list_valid_nodes: list,
start_node,
ignore_self_loops: bool
ignore_self_loops: bool,
) -> list:
def find_path_forward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_forward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
......@@ -1351,11 +1372,7 @@ def _find_path_direction_insensitive(
# add the neighbour to the end of the path
path.append(a_neighbour)
# recursive call with extended path
return find_path_forward(
network,
path[-1],
path
)
return find_path_forward(network, path[-1], path)
else: # is not a valid node: path ends
# add the neighbour to the end of the path:
path.append(a_neighbour)
......@@ -1366,10 +1383,18 @@ def _find_path_direction_insensitive(
# neighbour is already on the path, matches the start,
# and has two neighbours other than itself:
# close the loop and return the path
if (len(set(neighbours(
if (
len(
set(
neighbours(
network,
a_neighbour,
ignore_self_loops=ignore_self_loops))) == 2):
ignore_self_loops=ignore_self_loops,
)
)
)
== 2
):
# add the neighbour to the end of the path:
path.append(a_neighbour)
# return the path
......@@ -1377,10 +1402,7 @@ def _find_path_direction_insensitive(
# all neighbours have been visited: return the current path
return path
def find_path_backward(network: nx.MultiDiGraph,
current_node,
path: list):
def find_path_backward(network: nx.MultiDiGraph, current_node, path: list):
# identify the last node's neighbours
current_neighbours = set(
neighbours(network, current_node, ignore_self_loops=True)
......@@ -1411,11 +1433,7 @@ def _find_path_direction_insensitive(
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
# recursive call with extended path
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
else: # is not a valid node: path ends
# add the neighbour to the start of the path
path.insert(0, a_neighbour)
......@@ -1445,28 +1463,22 @@ def _find_path_direction_insensitive(
# find the path forward, check for cycles and then find the path backwards
# explore paths in the forward sense
path = find_path_forward(
network,
start_node,
[start_node]
)
path = find_path_forward(network, start_node, [start_node])
# check for cycles
if len(path) >= 3 and path[0] == path[-1]:
# it is a cycle: no need to search backwards
return path
# explore paths in the backward sense and return the path
return find_path_backward(
network,
path[0],
path
)
return find_path_backward(network, path[0], path)
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def find_self_loops(network: nx.MultiDiGraph) -> list:
"""
Returns a list with the nodes that connect to themselves.
......@@ -1489,9 +1501,11 @@ def find_self_loops(network: nx.MultiDiGraph) -> list:
if network.has_edge(u=node_key, v=node_key)
)
# *****************************************************************************
# *****************************************************************************
def find_unconnected_nodes(network: nx.MultiDiGraph) -> list:
"""
Returns a list with the nodes that are not connected whilst in the network.
......@@ -1516,12 +1530,14 @@ def find_unconnected_nodes(network: nx.MultiDiGraph) -> list:
if len(tuple(network.neighbors(node_key))) == 0
]
# *****************************************************************************
# *****************************************************************************
def nearest_nodes_other_than_themselves(network: nx.MultiDiGraph,
node_keys: list,
return_dist: bool = False) -> list:
def nearest_nodes_other_than_themselves(
network: nx.MultiDiGraph, node_keys: list, return_dist: bool = False
) -> list:
"""
Returns a list with the keys of nodes closest to another set of nodes.
......@@ -1557,21 +1573,21 @@ def nearest_nodes_other_than_themselves(network: nx.MultiDiGraph,
nearest_node_keys = nearest_nodes(
network_copy,
[network.nodes[node_key]['x']
for node_key in node_keys],
[network.nodes[node_key]['y']
for node_key in node_keys],
[network.nodes[node_key]["x"] for node_key in node_keys],
[network.nodes[node_key]["y"] for node_key in node_keys],
return_dist=return_dist,
)
return nearest_node_keys
# *****************************************************************************
# *****************************************************************************
def is_start_or_end_point_or_close(line: LineString,
point: Point,
tolerance: float = 1e-3) -> bool:
def is_start_or_end_point_or_close(
line: LineString, point: Point, tolerance: float = 1e-3
) -> bool:
"""
Returns True if a given point is near the start or end points of a line.
......@@ -1608,7 +1624,6 @@ def is_start_or_end_point_or_close(line: LineString,
start_point = Point(start_coords)
if start_point.distance(point) <= tolerance:
return True
# compare with the end point
......@@ -1616,18 +1631,18 @@ def is_start_or_end_point_or_close(line: LineString,
end_point = Point(end_coords)
if end_point.distance(point) <= tolerance:
return True
# return statement
return False
# *****************************************************************************
# *****************************************************************************
def is_start_or_end_point(line: LineString,
point: Point) -> bool:
def is_start_or_end_point(line: LineString, point: Point) -> bool:
"""
Returns True if a given point is the start or end point of a line.
......@@ -1651,22 +1666,21 @@ def is_start_or_end_point(line: LineString,
# compare the coordinates
if (tuple(point.coords)[0] == start_coords or
tuple(point.coords)[0] == end_coords):
if tuple(point.coords)[0] == start_coords or tuple(point.coords)[0] == end_coords:
return True
# return statement
return False
# *****************************************************************************
# *****************************************************************************
def identify_edge_closest_to_node(
network: nx.MultiDiGraph,
node_keys: list,
crs: str = None) -> Tuple[list, nx.MultiDiGraph]:
network: nx.MultiDiGraph, node_keys: list, crs: str = None
) -> Tuple[list, nx.MultiDiGraph]:
"""
Identify the edges that are closest to a given set of nodes.
......@@ -1704,14 +1718,12 @@ def identify_edge_closest_to_node(
# if it is a geographic CRS, convert it to a projected CRS
if not is_projected(network.graph['crs']) or type(crs) != type(None):
if not is_projected(network.graph["crs"]) or type(crs) != type(None):
# convert to a projected CRS (including if crs=None)
projected_network = project_graph(network, to_crs=crs)
else:
projected_network = network
# *************************************************************************
......@@ -1720,15 +1732,19 @@ def identify_edge_closest_to_node(
nearest_edge_keys = nearest_edges(
projected_network,
X=[projected_network.nodes[node_key][osm.KEY_OSMNX_X]
for node_key in node_keys],
Y=[projected_network.nodes[node_key][osm.KEY_OSMNX_Y]
for node_key in node_keys],
return_dist=False)
X=[
projected_network.nodes[node_key][osm.KEY_OSMNX_X] for node_key in node_keys
],
Y=[
projected_network.nodes[node_key][osm.KEY_OSMNX_Y] for node_key in node_keys
],
return_dist=False,
)
# return statement
return nearest_edge_keys, projected_network
# *****************************************************************************
# *****************************************************************************
......@@ -19,6 +19,7 @@ from ...problems.esipp.utils import unused_node_key
from ..misc.utils import generate_pseudo_unique_key
from ..gis import osm
from ..gis import identify as gis_iden
# from ..gis import identify as gis_calc
from .identify import close_to_extremities
from .calculate import update_street_count, edge_lengths
......@@ -26,6 +27,7 @@ from .calculate import update_street_count, edge_lengths
# *****************************************************************************
# *****************************************************************************
def remove_self_loops(network: nx.MultiDiGraph):
"""
Removes self-loops from a directed graph defined in a MultiDiGraph object.
......@@ -48,12 +50,14 @@ def remove_self_loops(network: nx.MultiDiGraph):
network.remove_edge(u=node, v=node)
return selflooping_nodes
# *****************************************************************************
# *****************************************************************************
def transform_roundabouts_into_crossroads(
network: nx.MultiDiGraph,
roundabouts: list) -> list:
network: nx.MultiDiGraph, roundabouts: list
) -> list:
"""
Transform roundabouts into crossroads.
......@@ -109,16 +113,20 @@ def transform_roundabouts_into_crossroads(
# *********************************************************************
# create a new node whose location is the roundabout's centroid
list_point_coordinates = [
(network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y])
(
network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y],
)
for node_key in roundabout
]
new_geo = LineString(list_point_coordinates)
roundabout_centroid_key = generate_pseudo_unique_key(network)
network.add_node(
roundabout_centroid_key,
**{osm.KEY_OSMNX_X: new_geo.centroid.coords.xy[0][0],
osm.KEY_OSMNX_Y: new_geo.centroid.coords.xy[1][0]}
**{
osm.KEY_OSMNX_X: new_geo.centroid.coords.xy[0][0],
osm.KEY_OSMNX_Y: new_geo.centroid.coords.xy[1][0],
}
)
list_roundabout_centroids.append(roundabout_centroid_key)
# *********************************************************************
......@@ -136,17 +144,14 @@ def transform_roundabouts_into_crossroads(
if other_node_key not in roundabout
# for each edge between the two nodes
for edge_key in gis_iden.get_edges_between_two_nodes(
network,
node_key,
other_node_key)
network, node_key, other_node_key
)
]
# for each edge leading to the roundabout
for edge_key in list_edges_leading_to_roundabout:
# replace it with a new edge to the new node
# get edge dict
edge_dict = network.get_edge_data(edge_key[0],
edge_key[1],
edge_key[2])
edge_dict = network.get_edge_data(edge_key[0], edge_key[1], edge_key[2])
if osm.KEY_OSMNX_GEOMETRY in edge_dict:
# geometry exists
old_geometry = edge_dict[osm.KEY_OSMNX_GEOMETRY]
......@@ -154,10 +159,16 @@ def transform_roundabouts_into_crossroads(
# geometry does not exist
# create it
old_geometry = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# if osm.KEY_OSMNX_LENGTH in edge_dict:
# # length exists
......@@ -181,22 +192,26 @@ def transform_roundabouts_into_crossroads(
# create geometry object between old roundabout point to the
# roundabout's centroid
extra_geometry = LineString(
[(network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
]
)
if is_projected(network.graph['crs']):
if is_projected(network.graph["crs"]):
# projected graph: use direct method
extra_length = length(extra_geometry)
else: # unprojected graph: use great circle method
extra_length = great_circle(
lat1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_X],
lat1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[0]][osm.KEY_OSMNX_X]
lon2=network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
)
elif edge_key[1] in roundabout:
# this edge ends in the roundabout
......@@ -205,35 +220,37 @@ def transform_roundabouts_into_crossroads(
# create geometry object between old roundabout point to the
# roundabout's centroid
extra_geometry = LineString(
[(network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
if is_projected(network.graph['crs']):
if is_projected(network.graph["crs"]):
# projected graph, use direct method
extra_length = length(extra_geometry)
else:
# unprojected graph, use great circle method
extra_length = great_circle(
lat1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[
roundabout_centroid_key][osm.KEY_OSMNX_X],
lat1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_Y],
lon1=network.nodes[roundabout_centroid_key][osm.KEY_OSMNX_X],
lat2=network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X]
lon2=network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
)
# *****************************************************************
# *****************************************************************
edge_dict[osm.KEY_OSMNX_GEOMETRY] = linemerge(
[old_geometry,
extra_geometry])
[old_geometry, extra_geometry]
)
edge_dict[osm.KEY_OSMNX_LENGTH] = old_length + extra_length
network.add_edge(new_edge_start_node,
new_edge_end_node,
**edge_dict)
network.add_edge(new_edge_start_node, new_edge_end_node, **edge_dict)
# *************************************************************************
# *************************************************************************
......@@ -251,14 +268,16 @@ def transform_roundabouts_into_crossroads(
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
# TODO: develop algorithm to traverse the graph in search of dead ends
def remove_dead_ends(network: nx.MultiDiGraph,
keepers: tuple = None,
max_iterations: int = 1) -> list:
def remove_dead_ends(
network: nx.MultiDiGraph, keepers: tuple = None, max_iterations: int = 1
) -> list:
"""
Removes dead ends (non-cyclical branches) from a directed graph.
......@@ -296,11 +315,8 @@ def remove_dead_ends(network: nx.MultiDiGraph,
for node_key in network.nodes()
if node_key not in keepers
# if it has at most one neighbour other than itself
if len(set(gis_iden.neighbours(
network,
node_key,
ignore_self_loops=True)
)) <= 1
if len(set(gis_iden.neighbours(network, node_key, ignore_self_loops=True)))
<= 1
]
# if there no nodes meeting those conditions, break out of loop
if len(target_nodes) == 0:
......@@ -315,13 +331,12 @@ def remove_dead_ends(network: nx.MultiDiGraph,
# return the list of nodes removed
return nodes_removed
# *****************************************************************************
# *****************************************************************************
def replace_path(
network: nx.MultiDiGraph,
path: list
) -> tuple:
def replace_path(network: nx.MultiDiGraph, path: list) -> tuple:
"""
Replaces a simplifiable path with one equivalent edge linking both ends.
......@@ -358,12 +373,9 @@ def replace_path(
# make sure path it is a simplifiable path
if not gis_iden.is_path_straight(
network,
path,
consider_reversed_edges=True,
ignore_self_loops=True
network, path, consider_reversed_edges=True, ignore_self_loops=True
):
raise ValueError('The path cannot be simplified.')
raise ValueError("The path cannot be simplified.")
# *************************************************************************
......@@ -379,23 +391,20 @@ def replace_path(
for node_pair_index in range(len(path) - 1):
# get one edge for this node pair
edge_key = list(gis_iden.get_edges_between_two_nodes(
network,
path[node_pair_index],
path[node_pair_index+1]
))
edge_key = list(
gis_iden.get_edges_between_two_nodes(
network, path[node_pair_index], path[node_pair_index + 1]
)
)
edge_key = sorted(
(network.edges[_key][osm.KEY_OSMNX_LENGTH], _key)
for _key in edge_key
(network.edges[_key][osm.KEY_OSMNX_LENGTH], _key) for _key in edge_key
)[0][1]
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# geometry exists: possibly a composite geometry
# check if the geometry is consistent with the edge declaration
if gis_iden.is_edge_consistent_with_geometry(network, edge_key):
# the geometry is not reversed
list_geometries.append(
network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY]
)
list_geometries.append(network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY])
else: # the geometry is reversed
list_geometries.append(
network.edges[edge_key][osm.KEY_OSMNX_GEOMETRY].reverse()
......@@ -405,14 +414,16 @@ def replace_path(
# the edge is not reversed: use it as is
list_geometries.append(
LineString(
[(network.nodes[
path[node_pair_index]][osm.KEY_OSMNX_X],
network.nodes[
path[node_pair_index]][osm.KEY_OSMNX_Y]),
(network.nodes[
path[node_pair_index+1]][osm.KEY_OSMNX_X],
network.nodes[
path[node_pair_index+1]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[path[node_pair_index]][osm.KEY_OSMNX_X],
network.nodes[path[node_pair_index]][osm.KEY_OSMNX_Y],
),
(
network.nodes[path[node_pair_index + 1]][osm.KEY_OSMNX_X],
network.nodes[path[node_pair_index + 1]][osm.KEY_OSMNX_Y],
),
]
)
)
......@@ -423,22 +434,14 @@ def replace_path(
list_osmid.append(network.edges[edge_key][osm.KEY_OSMNX_OSMID])
# reversed
if type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list:
list_reversed.extend(
network.edges[edge_key][osm.KEY_OSMNX_REVERSED]
)
list_reversed.extend(network.edges[edge_key][osm.KEY_OSMNX_REVERSED])
else:
list_reversed.append(
network.edges[edge_key][osm.KEY_OSMNX_REVERSED]
)
list_reversed.append(network.edges[edge_key][osm.KEY_OSMNX_REVERSED])
# oneway
if type(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]) == list:
list_oneway.extend(
network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]
)
list_oneway.extend(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY])
else:
list_oneway.append(
network.edges[edge_key][osm.KEY_OSMNX_ONEWAY]
)
list_oneway.append(network.edges[edge_key][osm.KEY_OSMNX_ONEWAY])
# update the edge length
edge_length += network.edges[edge_key][osm.KEY_OSMNX_LENGTH]
......@@ -459,7 +462,7 @@ def replace_path(
snap(
new_geo.geoms[geo_pair_index],
new_geo.geoms[geo_pair_index + 1],
tolerance=1e-3
tolerance=1e-3,
)
)
new_geo = linemerge(list_geometries)
......@@ -478,9 +481,7 @@ def replace_path(
osm.KEY_OSMNX_REVERSED: (
list_reversed if len(list_reversed) != 1 else list_reversed[0]
),
osm.KEY_OSMNX_OSMID: (
list_osmid if len(list_osmid) != 1 else list_osmid[0]
)
osm.KEY_OSMNX_OSMID: (list_osmid if len(list_osmid) != 1 else list_osmid[0]),
}
# *************************************************************************
......@@ -490,11 +491,7 @@ def replace_path(
end_node = path[-1]
# create the forward edge
for_k = network.add_edge(
start_node,
end_node,
**edge_dict
)
for_k = network.add_edge(start_node, end_node, **edge_dict)
# delete all intermediate nodes
network.remove_nodes_from(path[1:-1])
......@@ -502,11 +499,14 @@ def replace_path(
# return the edge key
return (start_node, end_node, for_k)
# *****************************************************************************
# *****************************************************************************
def remove_longer_parallel_edges(network: nx.MultiDiGraph,
ignore_edge_directions: bool = False) -> list:
def remove_longer_parallel_edges(
network: nx.MultiDiGraph, ignore_edge_directions: bool = False
) -> list:
"""
Removes longer parallel edges from the network.
......@@ -541,15 +541,11 @@ def remove_longer_parallel_edges(network: nx.MultiDiGraph,
# get the edges between the two nodes
if ignore_edge_directions: # both directions
list_edges = gis_iden.get_edges_between_two_nodes(
network,
node_one,
node_two
network, node_one, node_two
)
else: # one direction
list_edges = gis_iden.get_edges_from_a_to_b(
network,
node_start=node_one,
node_end=node_two
network, node_start=node_one, node_end=node_two
)
# if none exist, skip
......@@ -562,23 +558,23 @@ def remove_longer_parallel_edges(network: nx.MultiDiGraph,
for edge_key in list_edges
)
network.remove_edges_from(
edge_tuple[1] for edge_tuple in sorted_edges[1:]
)
network.remove_edges_from(edge_tuple[1] for edge_tuple in sorted_edges[1:])
edges_removed.extend(edge_tuple[1] for edge_tuple in sorted_edges[1:])
return edges_removed
# *****************************************************************************
# *****************************************************************************
def merge_points_into_linestring(
line: LineString,
points: tuple or list,
tolerance: float = 7./3-4./3-1,
tolerance: float = 7.0 / 3 - 4.0 / 3 - 1,
fixed_extremities: bool = True,
use_start_point_equidistant: bool = True
use_start_point_equidistant: bool = True,
) -> LineString:
"""
Merge points into a line where they are closest to it.
......@@ -626,36 +622,34 @@ def merge_points_into_linestring(
"""
if fixed_extremities:
# the line cannot be extended
# identify which points are close to the start and end points
# note: these points will not be merged
(close_to_start,
(
close_to_start,
close_to_end,
line_distances,
start_distances,
end_distances) = close_to_extremities(
end_distances,
) = close_to_extremities(
line,
points,
tolerance=tolerance,
use_start_point_equidistant=use_start_point_equidistant,
return_distances=True
return_distances=True,
)
# for each mew point
for i in range(len(points)):
if i in close_to_start or i in close_to_end:
# the point is close to the start or end nodes: skip iteration
continue
if points[i].coords[0] in line.coords:
# this point is already on the line: skip iteration
continue
......@@ -677,50 +671,50 @@ def merge_points_into_linestring(
sorted_distances = sorted(
(line_segment_distance, j)
for j, line_segment_distance in enumerate(
line_segment_distances
)
for j, line_segment_distance in enumerate(line_segment_distances)
)
# prepare new line coordinates with the new point
line_coords = list(line.coords)
if (len(sorted_distances) >= 2 and
sorted_distances[0][0] == sorted_distances[1][0]):
if (
len(sorted_distances) >= 2
and sorted_distances[0][0] == sorted_distances[1][0]
):
# there are 2(+) segments that are equally close to the point
# if the closest points are end/start points of a segment, then
# place the point after the second point of the first segment
if abs(Point(
line_segments[
sorted_distances[0][1]
].coords[-1]
).distance(points[i])-line_distances[i]) <= tolerance:
if (
abs(
Point(
line_segments[sorted_distances[0][1]].coords[-1]
).distance(points[i])
- line_distances[i]
)
<= tolerance
):
line_coords.insert(
# sorted_distances[0][1]+1,
sorted_distances[0][1] + 2,
tuple(points[i].coords[0])
tuple(points[i].coords[0]),
)
else:
line_coords.insert(
sorted_distances[0][1] + 1,
# sorted_distances[0][1]+2,
tuple(points[i].coords[0])
tuple(points[i].coords[0]),
)
else:
# there is only segment with the minimum distance:
# place the new point where the end point of the segment is
line_coords.insert(
sorted_distances[0][1] + 1, # i.e., the segment number + 1
tuple(points[i].coords[0])
tuple(points[i].coords[0]),
)
# create new line
......@@ -728,7 +722,6 @@ def merge_points_into_linestring(
line = LineString(line_coords)
else:
# the line can be extended
raise NotImplementedError
......@@ -737,12 +730,14 @@ def merge_points_into_linestring(
return line, close_to_start, close_to_end
# *****************************************************************************
# *****************************************************************************
def split_linestring(line: LineString,
points: list,
tolerance: float = 7./3-4./3-1):
def split_linestring(
line: LineString, points: list, tolerance: float = 7.0 / 3 - 4.0 / 3 - 1
):
"""
Split a line into segments according to a set of cutting points.
......@@ -773,10 +768,7 @@ def split_linestring(line: LineString,
# add the points to the linestring
new_line, close_to_start, close_to_end = merge_points_into_linestring(
line=line,
points=points,
tolerance=tolerance,
fixed_extremities=True
line=line, points=points, tolerance=tolerance, fixed_extremities=True
)
if len(close_to_end) + len(close_to_start) == len(points):
......@@ -801,9 +793,7 @@ def split_linestring(line: LineString,
# if not a start nor an end point, build the segment between the
# previous split point and the current input point
line_segments.append(
LineString(
new_line.coords[previous_split_index:coords_index+1]
)
LineString(new_line.coords[previous_split_index : coords_index + 1])
)
# store new end/start point
......@@ -818,23 +808,21 @@ def split_linestring(line: LineString,
# next iteration
# add the last segment
line_segments.append(
LineString(
new_line.coords[previous_split_index:]
)
)
line_segments.append(LineString(new_line.coords[previous_split_index:]))
# return the geometries for each segment and the relevant points by order
return line_segments, close_to_start, close_to_end
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def recreate_edges(network: nx.MultiDiGraph,
points: dict,
tolerance: float = 7./3-4./3-1) -> tuple:
def recreate_edges(
network: nx.MultiDiGraph, points: dict, tolerance: float = 7.0 / 3 - 4.0 / 3 - 1
) -> tuple:
"""
Recreates OSMnx-type edges by splitting them into multiple smaller edges,
which are defined by points along the original edge.
......@@ -876,7 +864,6 @@ def recreate_edges(network: nx.MultiDiGraph,
# for each edge that is to be split
for edge_key, points_in_edge in points.items():
# check if there is a geometry already
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# get the geometry
......@@ -888,10 +875,16 @@ def recreate_edges(network: nx.MultiDiGraph,
else:
# there is not geometry, create it
line = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# split the line into segments using the intermediate points
......@@ -899,8 +892,7 @@ def recreate_edges(network: nx.MultiDiGraph,
# separately
line_segments, close_to_start, close_to_end = split_linestring(
line=line,
points=points_in_edge
line=line, points=points_in_edge
)
# link each point to a node key:
......@@ -927,9 +919,7 @@ def recreate_edges(network: nx.MultiDiGraph,
)
else:
# point i is not close to the extremities: new node key
_node_keys_by_point[
points_in_edge[i]
] = unused_node_key(network)
_node_keys_by_point[points_in_edge[i]] = unused_node_key(network)
network.add_node(_node_keys_by_point[points_in_edge[i]])
# _node_keys = [
......@@ -941,10 +931,7 @@ def recreate_edges(network: nx.MultiDiGraph,
# should be the same order as in the inputs
_node_keys = [
_node_keys_by_point[point]
for point in points_in_edge
]
_node_keys = [_node_keys_by_point[point] for point in points_in_edge]
# *********************************************************************
......@@ -955,79 +942,69 @@ def recreate_edges(network: nx.MultiDiGraph,
edge_dict = dict(network.get_edge_data(*edge_key))
for line_index, line_segment in enumerate(line_segments):
edge_dict[osm.KEY_OSMNX_GEOMETRY] = line_segment
if line_index == 0:
# initial segment
v_key = _node_keys_by_point[
Point(line_segment.coords[-1])
]
v_key = _node_keys_by_point[Point(line_segment.coords[-1])]
k_key = network.add_edge(
u_for_edge=edge_key[0],
v_for_edge=v_key,
**edge_dict
u_for_edge=edge_key[0], v_for_edge=v_key, **edge_dict
)
network.add_node(
v_key,
**{osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1]}
**{
osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1],
}
)
segment_keys.append((edge_key[0], v_key, k_key))
elif line_index == len(line_segments) - 1:
# final segment
u_key = _node_keys_by_point[
Point(line_segment.coords[0])
]
u_key = _node_keys_by_point[Point(line_segment.coords[0])]
k_key = network.add_edge(
u_for_edge=u_key,
v_for_edge=edge_key[1],
**edge_dict
u_for_edge=u_key, v_for_edge=edge_key[1], **edge_dict
)
network.add_node(
u_key,
**{osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1]}
**{
osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1],
}
)
segment_keys.append((u_key, edge_key[1], k_key))
else: # intermediate segment
u_key = _node_keys_by_point[Point(line_segment.coords[0])]
u_key = _node_keys_by_point[
Point(line_segment.coords[0])
]
v_key = _node_keys_by_point[
Point(line_segment.coords[-1])
]
v_key = _node_keys_by_point[Point(line_segment.coords[-1])]
k_key = network.add_edge(
u_for_edge=u_key,
v_for_edge=v_key,
**edge_dict
u_for_edge=u_key, v_for_edge=v_key, **edge_dict
)
network.add_node(
u_key,
**{osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1]}
**{
osm.KEY_OSMNX_X: line_segment.coords[0][0],
osm.KEY_OSMNX_Y: line_segment.coords[0][1],
}
)
network.add_node(
v_key,
**{osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1]}
**{
osm.KEY_OSMNX_X: line_segment.coords[-1][0],
osm.KEY_OSMNX_Y: line_segment.coords[-1][1],
}
)
segment_keys.append((u_key, v_key, k_key))
......@@ -1040,8 +1017,10 @@ def recreate_edges(network: nx.MultiDiGraph,
edge_lengths_by_dict = edge_lengths(network, edge_keys=segment_keys)
network.add_edges_from(
tuple(
(*segment_key,
{osm.KEY_OSMNX_LENGTH: edge_lengths_by_dict[segment_key]})
(
*segment_key,
{osm.KEY_OSMNX_LENGTH: edge_lengths_by_dict[segment_key]},
)
for segment_key in segment_keys
)
)
......@@ -1053,15 +1032,17 @@ def recreate_edges(network: nx.MultiDiGraph,
# return statement
return connection_node_keys_per_edge, recreated_edges
# *****************************************************************************
# *****************************************************************************
def connect_nodes_to_edges(
network: nx.MultiDiGraph,
node_keys: list,
edge_keys: list,
store_unsimplified_geometries: bool = False,
use_one_edge_per_direction: bool = False
use_one_edge_per_direction: bool = False,
) -> tuple:
"""
Connects nodes to edges using additional edges in an OSMnx-formatted graph.
......@@ -1131,7 +1112,6 @@ def connect_nodes_to_edges(
# point on the edge
points_per_edge = {}
for edge_key, _node_keys in nodes_to_connect_to_edge.items():
# check if the geometry exists
if osm.KEY_OSMNX_GEOMETRY in network.edges[edge_key]:
# the geometry object exists, get it
......@@ -1139,23 +1119,32 @@ def connect_nodes_to_edges(
else:
# the geometry object does not exist, make it
edge_geo = LineString(
[(network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y]),
(network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y])]
[
(
network.nodes[edge_key[0]][osm.KEY_OSMNX_X],
network.nodes[edge_key[0]][osm.KEY_OSMNX_Y],
),
(
network.nodes[edge_key[1]][osm.KEY_OSMNX_X],
network.nodes[edge_key[1]][osm.KEY_OSMNX_Y],
),
]
)
# store the geometry
if store_unsimplified_geometries:
# update the edge
network.add_edge(*edge_key,
**{osm.KEY_OSMNX_GEOMETRY: edge_geo})
network.add_edge(*edge_key, **{osm.KEY_OSMNX_GEOMETRY: edge_geo})
# use nearest_points to locate the closest points on the edge
points_per_edge[edge_key] = [
nearest_points(
edge_geo,
Point(network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y])
)[0] # [0] to get the point on the edge
Point(
network.nodes[node_key][osm.KEY_OSMNX_X],
network.nodes[node_key][osm.KEY_OSMNX_Y],
),
)[
0
] # [0] to get the point on the edge
for node_key in _node_keys
]
# TIP: exclude the points that can be considered close to the start or end nodes
......@@ -1167,8 +1156,7 @@ def connect_nodes_to_edges(
# 3) recreate each edge after dividing it at the specified points
connection_node_keys_per_edge, recreated_edges = recreate_edges(
network,
points=points_per_edge
network, points=points_per_edge
)
# put the keys for the connection nodes
......@@ -1198,17 +1186,11 @@ def connect_nodes_to_edges(
# proceed with other types of edges
if use_one_edge_per_direction:
# add one directed edge per direction
connection_edge_containers.append(
(node_key, connection_node_key)
)
connection_edge_containers.append(
(connection_node_key, node_key)
)
connection_edge_containers.append((node_key, connection_node_key))
connection_edge_containers.append((connection_node_key, node_key))
else:
# add one directed edge starting from the edge and ending in the node
connection_edge_containers.append(
(connection_node_key, node_key)
)
connection_edge_containers.append((connection_node_key, node_key))
edge_keys = network.add_edges_from(connection_edge_containers)
# *************************************************************************
......@@ -1221,24 +1203,24 @@ def connect_nodes_to_edges(
# there are new edges: calculate the lengths and add them
new_edge_keys = [
(*edge_tuple[0:2], edge_key) # apply it only to specific edges
for edge_tuple, edge_key in zip(
connection_edge_containers, edge_keys)
for edge_tuple, edge_key in zip(connection_edge_containers, edge_keys)
]
if is_projected(network.graph['crs']):
if is_projected(network.graph["crs"]):
# projected crs: use own method
lengths_dict = edge_lengths(
network,
edge_keys=new_edge_keys
)
lengths_dict = edge_lengths(network, edge_keys=new_edge_keys)
network.add_edges_from(
tuple(
(*edge_key,
{osm.KEY_OSMNX_LENGTH: lengths_dict[edge_key],
(
*edge_key,
{
osm.KEY_OSMNX_LENGTH: lengths_dict[edge_key],
osm.KEY_OSMNX_ONEWAY: False,
osm.KEY_OSMNX_REVERSED: False,
osm.KEY_OSMNX_OSMID: None})
osm.KEY_OSMNX_OSMID: None,
},
)
for edge_key in new_edge_keys
)
)
......@@ -1250,7 +1232,6 @@ def connect_nodes_to_edges(
# update the street count
update_street_count(network)
else:
new_edge_keys = []
# *************************************************************************
......@@ -1258,13 +1239,12 @@ def connect_nodes_to_edges(
return network, new_edge_keys, connection_node_keys_per_edge, recreated_edges
# *****************************************************************************
# *****************************************************************************
def remove_reversed_edges(
network: nx.MultiDiGraph,
reversed_attr: bool = True
) -> list:
def remove_reversed_edges(network: nx.MultiDiGraph, reversed_attr: bool = True) -> list:
"""
Removes reversed edges from an OSMnx-formatted multi directed edge graph.
......@@ -1293,8 +1273,10 @@ def remove_reversed_edges(
# at least one reversed edge
if len(reverse_edges) >= 1
# must satisfy the reversed attribute value
if (network.edges[edge_key][osm.KEY_OSMNX_REVERSED] == reversed_attr
or type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list)
if (
network.edges[edge_key][osm.KEY_OSMNX_REVERSED] == reversed_attr
or type(network.edges[edge_key][osm.KEY_OSMNX_REVERSED]) == list
)
]
# filter
for edge_key in edges_removed:
......@@ -1306,9 +1288,11 @@ def remove_reversed_edges(
# return
return edges_removed
# *****************************************************************************
# *****************************************************************************
def create_reverse_edges(
network: nx.MultiDiGraph,
edge_keys: list = None,
......@@ -1348,7 +1332,7 @@ def create_reverse_edges(
for edge_key in edge_keys:
# make sure the edge exists
if not network.has_edge(*edge_key):
raise ValueError('Unknown edge: '+str(edge_key))
raise ValueError("Unknown edge: " + str(edge_key))
# get its data
edge_dict = network.get_edge_data(*edge_key)
# create a dict for the reversed edge
......@@ -1356,34 +1340,33 @@ def create_reverse_edges(
# check for the reversed keyword
if type(edge_dict[osm.KEY_OSMNX_REVERSED]) == bool:
# boolean: negate it
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = (
not edge_dict[osm.KEY_OSMNX_REVERSED]
)
elif (type(edge_dict[osm.KEY_OSMNX_REVERSED]) == list and
len(edge_dict[osm.KEY_OSMNX_REVERSED]) == 2 and
len(set(edge_dict[osm.KEY_OSMNX_REVERSED])) == 2):
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = not edge_dict[
osm.KEY_OSMNX_REVERSED
]
elif (
type(edge_dict[osm.KEY_OSMNX_REVERSED]) == list
and len(edge_dict[osm.KEY_OSMNX_REVERSED]) == 2
and len(set(edge_dict[osm.KEY_OSMNX_REVERSED])) == 2
):
# list:
rev_edge_dict[osm.KEY_OSMNX_REVERSED] = [True, False]
else:
raise ValueError(
'The edge '+str(edge_key)+'is not compliant with OSMnx.'
"The edge " + str(edge_key) + "is not compliant with OSMnx."
)
# check for the geometry keyword
if osm.KEY_OSMNX_GEOMETRY in edge_dict:
# a geometry exists, reverse it for the reverse edge dict
rev_edge_dict[osm.KEY_OSMNX_GEOMETRY] = (
edge_dict[osm.KEY_OSMNX_GEOMETRY].reverse()
)
rev_edge_dict[osm.KEY_OSMNX_GEOMETRY] = edge_dict[
osm.KEY_OSMNX_GEOMETRY
].reverse()
# add the edge
rev_k = network.add_edge(
edge_key[1],
edge_key[0],
**rev_edge_dict
)
rev_k = network.add_edge(edge_key[1], edge_key[0], **rev_edge_dict)
edges_created.append((edge_key[1], edge_key[0], rev_k))
# return the keys for the edges created
return edges_created
# *****************************************************************************
# *****************************************************************************
......@@ -5,14 +5,14 @@
# general
KEY_OSM_CITY = 'addr:city'
KEY_OSM_COUNTRY = 'addr:country'
KEY_OSM_HOUSE_NUMBER = 'addr:housenumber'
KEY_OSM_MUNICIPALITY = 'addr:municipality'
KEY_OSM_PLACE = 'addr:place'
KEY_OSM_POSTCODE = 'addr:postcode'
KEY_OSM_STREET = 'addr:street'
KEY_OSM_SOURCE = 'source'
KEY_OSM_CITY = "addr:city"
KEY_OSM_COUNTRY = "addr:country"
KEY_OSM_HOUSE_NUMBER = "addr:housenumber"
KEY_OSM_MUNICIPALITY = "addr:municipality"
KEY_OSM_PLACE = "addr:place"
KEY_OSM_POSTCODE = "addr:postcode"
KEY_OSM_STREET = "addr:street"
KEY_OSM_SOURCE = "source"
KEYS_OSM = [
KEY_OSM_CITY,
......@@ -22,37 +22,35 @@ KEYS_OSM = [
KEY_OSM_PLACE,
KEY_OSM_POSTCODE,
KEY_OSM_STREET,
KEY_OSM_SOURCE
KEY_OSM_SOURCE,
]
# country specific
KEY_COUNTRY_DK = 'dk'
KEY_COUNTRY_DK = "dk"
KEY_OSM_DK_BUILDING_ENTRANCE_ID = 'osak:identifier'
KEY_OSM_DK_BUILDING_ENTRANCE_ID = "osak:identifier"
KEY_OSM_BUILDING_ENTRANCE_ID = {
KEY_COUNTRY_DK: KEY_OSM_DK_BUILDING_ENTRANCE_ID
}
KEY_OSM_BUILDING_ENTRANCE_ID = {KEY_COUNTRY_DK: KEY_OSM_DK_BUILDING_ENTRANCE_ID}
# *****************************************************************************
# osmnx
KEY_OSMNX_OSMID = 'osmid'
KEY_OSMNX_ELEMENT_TYPE = 'element_type'
KEY_OSMNX_OSMID = "osmid"
KEY_OSMNX_ELEMENT_TYPE = "element_type"
KEY_OSMNX_NAME = 'name'
KEY_OSMNX_GEOMETRY = 'geometry'
KEY_OSMNX_REVERSED = 'reversed'
KEY_OSMNX_LENGTH = 'length'
KEY_OSMNX_ONEWAY = 'oneway'
KEY_OSMNX_X = 'x'
KEY_OSMNX_Y = 'y'
KEY_OSMNX_LON = 'lon'
KEY_OSMNX_LAT = 'lat'
KEY_OSMNX_STREET_COUNT = 'street_count'
KEY_OSMNX_NAME = "name"
KEY_OSMNX_GEOMETRY = "geometry"
KEY_OSMNX_REVERSED = "reversed"
KEY_OSMNX_LENGTH = "length"
KEY_OSMNX_ONEWAY = "oneway"
KEY_OSMNX_X = "x"
KEY_OSMNX_Y = "y"
KEY_OSMNX_LON = "lon"
KEY_OSMNX_LAT = "lat"
KEY_OSMNX_STREET_COUNT = "street_count"
KEYS_OSMNX = [
KEY_OSMNX_OSMID, # one half of multi-index for geodataframes from osmnx
......@@ -66,7 +64,7 @@ KEYS_OSMNX = [
KEY_OSMNX_Y,
KEY_OSMNX_LON,
KEY_OSMNX_LAT,
KEY_OSMNX_STREET_COUNT
KEY_OSMNX_STREET_COUNT,
]
KEYS_OSMNX_NODES = {
......@@ -77,28 +75,24 @@ KEYS_OSMNX_NODES = {
KEY_OSMNX_Y,
KEY_OSMNX_LON,
KEY_OSMNX_LAT,
KEY_OSMNX_STREET_COUNT
KEY_OSMNX_STREET_COUNT,
}
KEYS_OSMNX_NODES_ESSENTIAL = {
KEY_OSMNX_OSMID,
KEY_OSMNX_NAME,
KEY_OSMNX_STREET_COUNT
}
KEYS_OSMNX_NODES_ESSENTIAL = {KEY_OSMNX_OSMID, KEY_OSMNX_NAME, KEY_OSMNX_STREET_COUNT}
KEYS_OSMNX_EDGES = {
KEY_OSMNX_OSMID,
KEY_OSMNX_LENGTH,
KEY_OSMNX_ONEWAY,
KEY_OSMNX_GEOMETRY,
KEY_OSMNX_REVERSED
KEY_OSMNX_REVERSED,
}
KEYS_OSMNX_EDGES_ESSENTIAL = {
KEY_OSMNX_OSMID,
KEY_OSMNX_LENGTH,
KEY_OSMNX_ONEWAY,
KEY_OSMNX_REVERSED
KEY_OSMNX_REVERSED,
}
# *****************************************************************************
# imports
from ast import literal_eval
......@@ -27,18 +26,18 @@ from ..gis import calculate as gis_calc
# constants
KEY_GPD_CRS = 'crs'
KEY_GPD_GEOMETRY = 'geometry'
KEY_GPD_CRS = "crs"
KEY_GPD_GEOMETRY = "geometry"
RKW_GPKG = 'packed'
RKW_GPKG = "packed"
# *****************************************************************************
# *****************************************************************************
# TODO: complete method
def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
# columns incompatible with GPKG format:
# 1) columns with equivalent lowercase names
# 2) columns of Nones (fiona 1.9.3; appears to work with fiona 1.8.x)
......@@ -52,10 +51,7 @@ def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
# 1) columns with equivalent lowercase names
lowercase_columns = tuple(
column.lower()
for column in gdf.columns
)
lowercase_columns = tuple(column.lower() for column in gdf.columns)
set_columns = set(
column
......@@ -68,12 +64,10 @@ def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
# for each column
for column in gdf.columns:
# if the column has already been identified, or if it is the geometry
# one (identified via KEY_GPD_GEOMETRY), skip the current column
if column == KEY_GPD_GEOMETRY or column in set_columns:
continue
# 2) columns of Nones (fiona 1.9.3; appears to work with fiona 1.8.x)
......@@ -81,17 +75,18 @@ def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
# identify the type of objects in each row
set_types = set(
type(gdf.loc[(index,column)])
for index in gdf.index
)
set_types = set(type(gdf.loc[(index, column)]) for index in gdf.index)
# allowed types: int, float, numpy floats
if (len(set_types) == 1 and
(str in set_types or float in set_types or int in set_types or
bool in set_types or float64 in set_types or int64 in set_types)):
if len(set_types) == 1 and (
str in set_types
or float in set_types
or int in set_types
or bool in set_types
or float64 in set_types
or int64 in set_types
):
# if (len(set_types) == 1 and
# (str in set_types or float in set_types or int in set_types or
# bool in set_types or float64 in set_types or int64 in set_types or
......@@ -103,7 +98,6 @@ def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
continue
else:
# two or more different types are not allowed
set_columns.add(column)
......@@ -112,14 +106,18 @@ def find_gpkg_packable_columns(gdf: GeoDataFrame) -> set:
return set_columns
# *****************************************************************************
# *****************************************************************************
def write_gdf_file(gdf: GeoDataFrame,
def write_gdf_file(
gdf: GeoDataFrame,
filename: str,
columns_to_pack: tuple = None,
preserve_original: bool = True,
**kwargs):
**kwargs
):
"""
Writes the contents of a GeoDataFrame object into a GIS-compatible file.
......@@ -161,32 +159,27 @@ def write_gdf_file(gdf: GeoDataFrame,
"""
if preserve_original:
# copy the original (slower)
new_gdf = gdf.copy()
else:
# just point to the original (faster)
new_gdf = gdf
if type(columns_to_pack) != tuple:
# no columns identified, find the columns with containers
# TODO: reach this statement
columns_to_pack = tuple(find_gpkg_packable_columns(gdf))
else:
# focus on specific columns
for column in columns_to_pack:
if column not in new_gdf.columns:
# TODO: reach this statement
raise ValueError('Unknown column: '+str(column))
raise ValueError("Unknown column: " + str(column))
# handle NaN and other values
......@@ -200,14 +193,10 @@ def write_gdf_file(gdf: GeoDataFrame,
# GPKG: columns with the same lower case equivalent are not allowed
if '.gpkg' in filename: # solution: use reserved words and numbers
if ".gpkg" in filename: # solution: use reserved words and numbers
# identify incompatible columns
lowercase_columns = tuple(
column.lower()
for column in gdf.columns
)
lowercase_columns = tuple(column.lower() for column in gdf.columns)
# place all their contents into one new column
......@@ -217,20 +206,24 @@ def write_gdf_file(gdf: GeoDataFrame,
column
for column, lccolumn in zip(gdf.columns, lowercase_columns)
if lowercase_columns.count(lccolumn) >= 2
)
),
)
# the GeoDataFrame object is ready: write it
new_gdf.to_file(filename, **kwargs)
# *****************************************************************************
# *****************************************************************************
def pack_columns(gdf: GeoDataFrame,
def pack_columns(
gdf: GeoDataFrame,
columns: list,
packed_column_name: str = RKW_GPKG,
convert_to_string: bool = True):
convert_to_string: bool = True,
):
"""
Places the contents of multiple GeoDataFrame columns into a single one.
......@@ -263,14 +256,13 @@ def pack_columns(gdf: GeoDataFrame,
# if only one or no columns are specified, change nothing
if len(columns) <= 1:
return
# if the new column name is pre-existing, raise error
if packed_column_name in gdf.columns:
# TODO: reach this statement
raise ValueError('The desired column name already exists.')
raise ValueError("The desired column name already exists.")
# create a new data dict
......@@ -290,18 +282,17 @@ def pack_columns(gdf: GeoDataFrame,
# convert it to a string, if needed
if convert_to_string:
gdf[packed_column_name] = gdf[packed_column_name].apply(
lambda x: repr(x)
)
gdf[packed_column_name] = gdf[packed_column_name].apply(lambda x: repr(x))
# drop original columns
gdf.drop(labels=columns, axis=1, inplace=True)
# *****************************************************************************
# *****************************************************************************
def unpack_columns(gdf: GeoDataFrame, packed_column_name: str = RKW_GPKG):
"""
Unpacks a specific GeoDataFrame column into multiple columns.
......@@ -328,12 +319,11 @@ def unpack_columns(gdf: GeoDataFrame, packed_column_name: str = RKW_GPKG):
"""
if packed_column_name not in gdf.columns:
# TODO: reach this statement
raise ValueError('The column specified does not exist.')
raise ValueError("The column specified does not exist.")
# if there are no rows, there is nothing to unpack
if len(gdf) != 0:
# the object is not empty
# create a dict with one dict per merged column
......@@ -351,19 +341,20 @@ def unpack_columns(gdf: GeoDataFrame, packed_column_name: str = RKW_GPKG):
# create the columns
for name, content in column_content_dict.items():
gdf[name] = Series(data=content, index=gdf.index)
# delete the packed column
gdf.drop(labels=packed_column_name, axis=1, inplace=True)
# *****************************************************************************
# *****************************************************************************
def read_gdf_file(filename: str,
packed_columns: tuple = None,
index: str or list = None) -> GeoDataFrame:
def read_gdf_file(
filename: str, packed_columns: tuple = None, index: str or list = None
) -> GeoDataFrame:
"""
Loads the contents of a file with GIS data into a GeoDataFrame object.
......@@ -401,13 +392,10 @@ def read_gdf_file(filename: str,
# unpack special columns
if '.gpkg' in filename and RKW_GPKG in gdf.columns:
if ".gpkg" in filename and RKW_GPKG in gdf.columns:
# packed column appears to exist: decode column contents
gdf[RKW_GPKG] = gdf[RKW_GPKG].apply(
lambda x: literal_eval(x)
)
gdf[RKW_GPKG] = gdf[RKW_GPKG].apply(lambda x: literal_eval(x))
# unpack it
......@@ -416,13 +404,11 @@ def read_gdf_file(filename: str,
# handle types
if type(index) != type(None):
# a specific index is required, replace existing one
gdf.set_index(index, drop=True, inplace=True)
if type(packed_columns) != tuple:
# figure out which ones need it...
# TODO: reach this statement
raise NotImplementedError
......@@ -432,42 +418,38 @@ def read_gdf_file(filename: str,
# focus on specific columns
for column in packed_columns:
if column not in gdf.columns:
# TODO: reach this statement
raise ValueError('Unknown column: '+str(column))
raise ValueError("Unknown column: " + str(column))
gdf[column] = gdf[column].apply(
lambda x: literal_eval(x)
)
gdf[column] = gdf[column].apply(lambda x: literal_eval(x))
return gdf
# *****************************************************************************
# *****************************************************************************
# create osmnx-like geodataframes for nodes
def create_node_geodataframe(longitudes: tuple or list,
def create_node_geodataframe(
longitudes: tuple or list,
latitudes: tuple or list,
osmids: tuple or list = None,
crs: str = "EPSG:4326",
**kwargs) -> GeoDataFrame:
**kwargs
) -> GeoDataFrame:
if len(longitudes) != len(latitudes):
raise ValueError('The input parameters have mismatched sizes.')
raise ValueError("The input parameters have mismatched sizes.")
if type(osmids) != type(None):
# check sizes
if len(longitudes) != len(osmids):
raise ValueError('The input parameters have mismatched sizes.')
raise ValueError("The input parameters have mismatched sizes.")
else:
# generate node keys
osmids = (str(uuid4()) for i in range(len(longitudes)))
......@@ -480,27 +462,28 @@ def create_node_geodataframe(longitudes: tuple or list,
}
for kwarg in kwargs:
data_dict[kwarg] = kwargs[kwarg]
return GeoDataFrame(
data_dict,
index=MultiIndex.from_tuples(
[('node', osmid) for osmid in osmids],
names=[osm.KEY_OSMNX_ELEMENT_TYPE,
osm.KEY_OSMNX_OSMID]
[("node", osmid) for osmid in osmids],
names=[osm.KEY_OSMNX_ELEMENT_TYPE, osm.KEY_OSMNX_OSMID],
),
crs=crs
crs=crs,
)
# *****************************************************************************
# *****************************************************************************
def prepare_node_data_from_geodataframe(
gdf: GeoDataFrame,
node_key_column: str = None,
include_columns: list = None,
include_geometry: bool = False) -> tuple:
include_geometry: bool = False,
) -> tuple:
"""Prepare a container with node data from a GeoDataFrame object."""
node_keys = []
......@@ -512,24 +495,19 @@ def prepare_node_data_from_geodataframe(
# check if the GeoDataFrame has the right type of index
if gdf.index.names != [osm.KEY_OSMNX_ELEMENT_TYPE, osm.KEY_OSMNX_OSMID]:
raise ValueError(
'The GeoDataFrame object does not have the right index.')
raise ValueError("The GeoDataFrame object does not have the right index.")
# for entry in the gdf object
for gdf_entry in range(len(gdf)):
# select key
if type(node_key_column) == str:
# the node_key_column has been specified: use a specific column as key
node_key = gdf.iloc[gdf_entry][node_key_column]
else: # default case: the key is the OSM identifier (should be unique)
# use the OSMID as the node key
node_key = gdf.index[gdf_entry][1]
......@@ -538,31 +516,22 @@ def prepare_node_data_from_geodataframe(
geo = gdf.iloc[gdf_entry][KEY_GPD_GEOMETRY]
node_dict = {
osm.KEY_OSMNX_X: geo.x,
osm.KEY_OSMNX_Y: geo.y
}
node_dict = {osm.KEY_OSMNX_X: geo.x, osm.KEY_OSMNX_Y: geo.y}
# add geometry
if include_geometry:
node_dict[osm.KEY_OSMNX_GEOMETRY] = geo
# add extra columns
if type(include_columns) == list:
for other_column in include_columns:
node_dict[other_column] = gdf.iloc[gdf_entry][other_column]
# create new entry in container
node_data_container.append(
(node_key,
node_dict)
)
node_data_container.append((node_key, node_dict))
# store node key
......@@ -576,34 +545,39 @@ def prepare_node_data_from_geodataframe(
return node_keys, node_data_container, node_key_to_gdf_index_dict
# *****************************************************************************
# *****************************************************************************
# TODO: simplify the passing of options to the methods relied upon
def plot_discrete_attributes(gdf_buildings: GeoDataFrame,
def plot_discrete_attributes(
gdf_buildings: GeoDataFrame,
column: str,
category_to_label: dict,
zoom_level: int = 15,
figsize: tuple = (25, 25),
legend_title: str = None,
markersize: int = 50,
edgecolor: str = 'k',
edgecolor: str = "k",
linewidth: float = 0.5,
markeredgewidth: float = 0.5,
markeredgecolor: str = 'k',
include_basemap: bool = False):
markeredgecolor: str = "k",
include_basemap: bool = False,
):
"""Plots a map with discrete attributes found in GeoDataFrame column."""
gdf_map = gdf_buildings.to_crs(epsg=3857)
ax = gdf_map.plot(figsize=figsize,
ax = gdf_map.plot(
figsize=figsize,
legend=True,
categorical=True,
column=column,
markersize=markersize,
edgecolor=edgecolor,
linewidth=linewidth
linewidth=linewidth,
)
# adjust legend labels
......@@ -616,33 +590,29 @@ def plot_discrete_attributes(gdf_buildings: GeoDataFrame,
# convert keys to string (since that is what the method asks for)
_category_to_label = {
str(key):value for key, value in category_to_label.items()
}
_category_to_label = {str(key): value for key, value in category_to_label.items()}
legend_texts = [
_category_to_label[text.get_text()] for text in ax.legend_.texts
]
legend_texts = [_category_to_label[text.get_text()] for text in ax.legend_.texts]
ax.legend(
legend_handles,
legend_texts,
title=legend_title
)
ax.legend(legend_handles, legend_texts, title=legend_title)
# add base map
if include_basemap:
cx.add_basemap(ax,
cx.add_basemap(
ax,
# crs="EPSG:4326", # switch to another crs
zoom=zoom_level,
source=cx.providers.OpenStreetMap.Mapnik)
source=cx.providers.OpenStreetMap.Mapnik,
)
# *****************************************************************************
# *****************************************************************************
def count_ocurrences(gdf: GeoDataFrame,
column: str,
column_entries: list = None) -> dict:
def count_ocurrences(
gdf: GeoDataFrame, column: str, column_entries: list = None
) -> dict:
"""
Counts the number of occurrences per entry in a DataFrame object's column.
......@@ -667,7 +637,6 @@ def count_ocurrences(gdf: GeoDataFrame,
"""
if type(column_entries) == list:
# find entries also present in the dict
# initialise dict
......@@ -677,7 +646,6 @@ def count_ocurrences(gdf: GeoDataFrame,
# for each key in the dict
for key in column_entries:
# store the number of rows
count_dict[key] = gdf[gdf[column] == key].shape[0]
......@@ -685,15 +653,12 @@ def count_ocurrences(gdf: GeoDataFrame,
# count the number of rows with this key
if type(key) == type(None):
count_dict[key] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[key] = gdf[gdf[column] == key].shape[0]
else:
# find all unique entries
# initialise dict
......@@ -701,11 +666,9 @@ def count_ocurrences(gdf: GeoDataFrame,
count_dict = {}
for entry in gdf[column]:
# check if it is already in the dict
if entry in count_dict:
# it is, skip
continue
......@@ -713,22 +676,23 @@ def count_ocurrences(gdf: GeoDataFrame,
# it is not, count and store the number of rows with said entry
if type(entry) == type(None):
count_dict[entry] = gdf[gdf[column].isnull()].shape[0]
else:
count_dict[entry] = gdf[gdf[column] == entry].shape[0]
# return statement
return count_dict
# *****************************************************************************
# *****************************************************************************
def get_directed(network: MultiGraph,
drop_unsimplified_geometries: bool = True) -> MultiDiGraph:
def get_directed(
network: MultiGraph, drop_unsimplified_geometries: bool = True
) -> MultiDiGraph:
"""
Converts an OSMnx-formatted MultiGraph object into a MultiDiGraph one.
......@@ -752,35 +716,36 @@ def get_directed(network: MultiGraph,
directed_network.add_nodes_from(network.nodes(data=True))
for edge_key in network.edges(keys=True):
edge_data = dict(network.edges[edge_key])
u = edge_data['from']
v = edge_data['to']
edge_data.pop('from')
edge_data.pop('to')
if (drop_unsimplified_geometries and
osm.KEY_OSMNX_GEOMETRY in edge_data and
len(edge_data[osm.KEY_OSMNX_GEOMETRY].coords) == 2):
u = edge_data["from"]
v = edge_data["to"]
edge_data.pop("from")
edge_data.pop("to")
if (
drop_unsimplified_geometries
and osm.KEY_OSMNX_GEOMETRY in edge_data
and len(edge_data[osm.KEY_OSMNX_GEOMETRY].coords) == 2
):
edge_data.pop(osm.KEY_OSMNX_GEOMETRY)
directed_network.add_edge(
u_for_edge=u,
v_for_edge=v,
**edge_data)
directed_network.add_edge(u_for_edge=u, v_for_edge=v, **edge_data)
return directed_network
# *****************************************************************************
# *****************************************************************************
def simplify_network(network: MultiDiGraph,
def simplify_network(
network: MultiDiGraph,
protected_nodes: list,
dead_end_probing_depth: int = 5,
remove_opposite_parallel_edges: bool = False,
update_street_count_per_node: bool = True,
**roundabout_conditions):
**roundabout_conditions
):
"""
Simplifies a network described in a OSMnx-formatted MultiDiGraph object.
......@@ -808,48 +773,40 @@ def simplify_network(network: MultiDiGraph,
# 1) remove dead ends (tends to create straight paths)
gis_mod.remove_dead_ends(
network,
protected_nodes,
max_iterations=dead_end_probing_depth
network, protected_nodes, max_iterations=dead_end_probing_depth
)
# 2) remove longer parallel edges (tends to create straight paths)
gis_mod.remove_longer_parallel_edges(
network,
ignore_edge_directions=remove_opposite_parallel_edges
network, ignore_edge_directions=remove_opposite_parallel_edges
)
# 3) remove self loops (tends to create straight paths and dead ends)
gis_mod.remove_self_loops(network)
# 4) join segments (can create self-loops)
simplifiable_paths = gis_iden.find_simplifiable_paths(
network,
protected_nodes
)
simplifiable_paths = gis_iden.find_simplifiable_paths(network, protected_nodes)
for path in simplifiable_paths:
gis_mod.replace_path(network, path)
# 4) remove self loops (tends to create straight paths and dead ends)
gis_mod.remove_self_loops(network)
# 5) transform roundabouts into crossroads (can create straight paths)
list_roundabout_nodes = gis_iden.find_roundabouts(
network,
**roundabout_conditions)
gis_mod.transform_roundabouts_into_crossroads(
network,
list_roundabout_nodes
)
list_roundabout_nodes = gis_iden.find_roundabouts(network, **roundabout_conditions)
gis_mod.transform_roundabouts_into_crossroads(network, list_roundabout_nodes)
# 6) update street count
if update_street_count_per_node:
gis_calc.update_street_count(network)
# *****************************************************************************
# *****************************************************************************
def identify_building_entrance_edges(
gdf: GeoDataFrame,
gdf_street_column: str,
network: gis_iden.nx.MultiDiGraph,
node_key_to_gdf_index_dict: dict,
crs: str = None,
revert_to_original_crs: bool = False) -> tuple:
revert_to_original_crs: bool = False,
) -> tuple:
"""
Identifies the edges that can be linked to special nodes in an OSMnx graph
through a OSMnx-formatted GeoDataFrame object.
......@@ -912,8 +869,7 @@ def identify_building_entrance_edges(
# *************************************************************************
if revert_to_original_crs:
original_crs = network.graph['crs']
original_crs = network.graph["crs"]
# *************************************************************************
......@@ -921,9 +877,8 @@ def identify_building_entrance_edges(
node_keys = list(node_key_to_gdf_index_dict.keys())
closest_edge_keys, network = gis_iden.identify_edge_closest_to_node(
network=network,
node_keys=node_keys,
crs=crs) # do not revert back to the original yet
network=network, node_keys=node_keys, crs=crs
) # do not revert back to the original yet
# create a dict for the closest edge keys: {node keys: closest edge keys}
......@@ -954,19 +909,16 @@ def identify_building_entrance_edges(
# 2.1) generate a dict with the correspondence between streets and nodes
node_street_names = {
node_key: gdf.loc[
node_key_to_gdf_index_dict[node_key]][gdf_street_column]
node_key: gdf.loc[node_key_to_gdf_index_dict[node_key]][gdf_street_column]
for node_key in node_keys
}
trouble_nodes = []
for node_key, closest_edge_key in zip(node_keys, closest_edge_keys):
# check if the street name is a string
if type(node_street_names[node_key]) != str:
# not a string, this node is not problematic (case i)
continue
......@@ -974,22 +926,20 @@ def identify_building_entrance_edges(
# check if the edge has a name attribute
if osm.KEY_OSMNX_NAME in network.edges[closest_edge_key]:
# edge object has name attribute, check if the street names match
if type(network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]) == str:
# the address is a string
if (network.edges[closest_edge_key][osm.KEY_OSMNX_NAME] in
node_street_names[node_key]):
if (
network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]
in node_street_names[node_key]
):
# the street names match, this is not a problematic node (ii)
continue
else:
# the streets names differ, this is a problematic node (iv)
trouble_nodes.append(node_key)
......@@ -997,23 +947,19 @@ def identify_building_entrance_edges(
continue
else: # the address is not a string: it should be a list (osmnx)
# if the node street is found among the elements
matching_street_name_found_list = tuple(
_name in node_street_names[node_key]
for _name in network.edges[closest_edge_key][
osm.KEY_OSMNX_NAME]
for _name in network.edges[closest_edge_key][osm.KEY_OSMNX_NAME]
)
if True in matching_street_name_found_list:
# the street names match, this is not a problematic node (ii)
continue
else:
# the streets names differ, this is a problematic node (iv)
trouble_nodes.append(node_key)
......@@ -1024,15 +970,11 @@ def identify_building_entrance_edges(
# get adjacent/neighbouring edges
other_edges = gis_iden.get_edges_involving_node(
network=network,
node_key=closest_edge_key[0],
include_self_loops=False
network=network, node_key=closest_edge_key[0], include_self_loops=False
)
other_edges.extend(
gis_iden.get_edges_involving_node(
network=network,
node_key=closest_edge_key[1],
include_self_loops=False
network=network, node_key=closest_edge_key[1], include_self_loops=False
)
)
......@@ -1041,11 +983,9 @@ def identify_building_entrance_edges(
# for each neighbour
for other_edge_key in other_edges:
# check if the current edge is the closest one
if closest_edge_key == other_edge_key:
# it is: skip, since it has already been considered
continue
......@@ -1053,17 +993,15 @@ def identify_building_entrance_edges(
# check if the current edge has the address/name attribute
if osm.KEY_OSMNX_NAME in network.edges[other_edge_key]:
# it does, now check if it is a string
if type(network.edges[other_edge_key][
osm.KEY_OSMNX_NAME]) == str:
if type(network.edges[other_edge_key][osm.KEY_OSMNX_NAME]) == str:
# it is, now check if the street names match
if (network.edges[other_edge_key][osm.KEY_OSMNX_NAME] in
node_street_names[node_key]):
if (
network.edges[other_edge_key][osm.KEY_OSMNX_NAME]
in node_street_names[node_key]
):
# an edge with a matching street name was found (iii)
matching_street_name_found = True
......@@ -1071,17 +1009,14 @@ def identify_building_entrance_edges(
break
else:
# if the node street is found among the elements
matching_street_name_found_list = tuple(
_name in node_street_names[node_key]
for _name in network.edges[other_edge_key][
osm.KEY_OSMNX_NAME]
for _name in network.edges[other_edge_key][osm.KEY_OSMNX_NAME]
)
if True in matching_street_name_found_list:
# the street names match, this node is okay (case iii)
matching_street_name_found = True
......@@ -1091,7 +1026,6 @@ def identify_building_entrance_edges(
# check if a matching street name was found among the neighbours
if matching_street_name_found:
# one was, this is not a problematic case (case iii)
continue
......@@ -1108,15 +1042,14 @@ def identify_building_entrance_edges(
# 3.1) generate the list of edge keys per street
unique_street_names = set(
node_street_names[node_key] for node_key in trouble_nodes
)
unique_street_names = set(node_street_names[node_key] for node_key in trouble_nodes)
# edge keys with a given street name
edges_per_street_name = {
street_name: [
edge_key for edge_key in network.edges(keys=True)
edge_key
for edge_key in network.edges(keys=True)
if osm.KEY_OSMNX_NAME in network.edges[edge_key]
if street_name in network.edges[edge_key][osm.KEY_OSMNX_NAME]
]
......@@ -1127,7 +1060,6 @@ def identify_building_entrance_edges(
# street and pick the closest on
for node_key in trouble_nodes:
# check the edges keys relevant for this node
other_edge_keys = edges_per_street_name[node_street_names[node_key]]
......@@ -1135,7 +1067,6 @@ def identify_building_entrance_edges(
# check if there are no edges mentioning the street
if len(other_edge_keys) == 0:
# no edges mentioning that street, skip
continue
......@@ -1150,7 +1081,8 @@ def identify_building_entrance_edges(
new_network,
X=network.nodes[node_key][osm.KEY_OSMNX_X],
Y=network.nodes[node_key][osm.KEY_OSMNX_Y],
return_dist=False)
return_dist=False,
)
# replace previous entry
......@@ -1165,7 +1097,6 @@ def identify_building_entrance_edges(
# revert network crs back to the original, if necessary
if revert_to_original_crs:
network = gis_iden.project_graph(network, to_crs=original_crs)
# return edge keys
......@@ -1175,12 +1106,14 @@ def identify_building_entrance_edges(
# *************************************************************************
# *************************************************************************
# *****************************************************************************
# *****************************************************************************
def convert_edge_path(network: MultiDiGraph,
path: list,
allow_reversed_edges: bool = False) -> list:
def convert_edge_path(
network: MultiDiGraph, path: list, allow_reversed_edges: bool = False
) -> list:
"""
Converts a path of edge keys into a path of node keys.
......@@ -1203,11 +1136,9 @@ def convert_edge_path(network: MultiDiGraph,
# check if the path corresponds to an edge path
if not gis_iden.is_edge_path(
network,
path,
ignore_edge_direction=allow_reversed_edges
network, path, ignore_edge_direction=allow_reversed_edges
):
raise ValueError('No edge path was provided.')
raise ValueError("No edge path was provided.")
# path is a sequence of edge keys: convert to node path
if allow_reversed_edges:
......@@ -1255,5 +1186,6 @@ def convert_edge_path(network: MultiDiGraph,
# return statement
return node_path
# *****************************************************************************
# *****************************************************************************
# -*- coding: utf-8 -*-