From 03109771ae8000aafd3b73d2ff57a50faa291fc4 Mon Sep 17 00:00:00 2001
From: jenhagg <66005238+jenhagg@users.noreply.github.com>
Date: Fri, 3 Jun 2022 12:40:27 -0700
Subject: [PATCH 01/59] fix: pass grid instead of grid.dcline (#645)

---
 powersimdata/design/investment/investment_costs.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/powersimdata/design/investment/investment_costs.py b/powersimdata/design/investment/investment_costs.py
index cd93480ea..0dc4ce3da 100644
--- a/powersimdata/design/investment/investment_costs.py
+++ b/powersimdata/design/investment/investment_costs.py
@@ -291,9 +291,7 @@ def calculate_dc_inv_costs(scenario, sum_results=True, base_grid=None):
     _check_grid_models_match(base_grid, grid_differences)
 
     # find upgraded DC lines
-    capacity_difference = calculate_dcline_difference(
-        base_grid.dcline, grid_differences.dcline
-    )
+    capacity_difference = calculate_dcline_difference(base_grid, grid_differences)
     grid_differences.dcline = grid_differences.dcline.assign(
         Pmax=capacity_difference["diff"].to_numpy()
     )

From e868b2972afc5545e4231f5deded28a6a33ac49a Mon Sep 17 00:00:00 2001
From: Ben RdO
Date: Thu, 14 Apr 2022 17:41:02 -0700
Subject: [PATCH 02/59] docs: fix docstring in grid module

---
 powersimdata/input/grid.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py
index f4c487482..fbd5e386c 100644
--- a/powersimdata/input/grid.py
+++ b/powersimdata/input/grid.py
@@ -19,12 +19,12 @@ class Grid:
     """Grid
 
-    :param str/iterable interconnect: geographical region covered. Either *'USA'*, one of
-        the three interconnections, i.e., *'Eastern'*, *'Western'* or *'Texas'* or a
-        combination of two interconnections.
-    :param str source: model used to build the network. Can be one of the
-        supported models ("usa_tamu"), or a .mat file that represents a
-        grid.
+    :param str/iterable interconnect: geographical region covered. Either the region
+        (e.g. USA), one of the interconnects in the region or a combination of the
+        interconnects in the region. The full list of interconnects of the grid models
+        is defined in :mod:`powersimdata.network.constants.model.model2interconnect`.
+    :param str source: model used to build the network. Can be one of the supported
+        models, or a .mat file that represents a grid.
     :param str engine: engine used to run scenario, if using ScenarioGrid.
     :raises TypeError: if source and engine are not both strings.
     :raises ValueError: if source or engine does not exist.
From e7d08923b12e08275f4d138bdd0631b1d9281aac Mon Sep 17 00:00:00 2001 From: Daniel Olsen Date: Wed, 14 Apr 2021 12:18:27 -0700 Subject: [PATCH 03/59] feat: add HIFLD grid class --- powersimdata/input/abstract_grid.py | 71 ++++- powersimdata/input/scenario_grid.py | 12 +- powersimdata/network/hifld/__init__.py | 0 .../network/hifld/constants/__init__.py | 0 .../network/hifld/constants/plants.py | 105 ++++++++ .../network/hifld/constants/storage.py | 17 ++ powersimdata/network/hifld/constants/zones.py | 246 ++++++++++++++++++ powersimdata/network/hifld/model.py | 50 ++++ powersimdata/network/usa_tamu/model.py | 75 +----- 9 files changed, 495 insertions(+), 81 deletions(-) create mode 100644 powersimdata/network/hifld/__init__.py create mode 100644 powersimdata/network/hifld/constants/__init__.py create mode 100644 powersimdata/network/hifld/constants/plants.py create mode 100644 powersimdata/network/hifld/constants/storage.py create mode 100644 powersimdata/network/hifld/constants/zones.py create mode 100644 powersimdata/network/hifld/model.py diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 5c6525507..6f00b7105 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -1,15 +1,25 @@ +import os + import pandas as pd from powersimdata.input import const +from powersimdata.input.helpers import ( + add_coord_to_grid_data_frames, + add_zone_to_grid_data_frames, + csv_to_data_frame, +) +from powersimdata.network.csv_reader import CSVReader class AbstractGrid: - """Grid Builder.""" + """Grid Builder. Child classes must assign self.top_dirname and + self.umbrella_interconnect before self.__init__ is called, or re-define the __init__ + and/or methods called within the __init__ to avoid an AttributeError. + """ def __init__(self): """Constructor""" self.data_loc = None - self.interconnect = None self.zone2id = {} self.id2zone = {} self.sub = pd.DataFrame() @@ -20,6 +30,63 @@ def __init__(self): self.bus = pd.DataFrame() self.branch = pd.DataFrame() self.storage = storage_template() + self._set_data_loc() + self._build_network() + + def _set_data_loc(self): + """Sets data location. + + :raises IOError: if directory does not exist. + """ + data_loc = os.path.join(self.top_dirname, "data") + if os.path.isdir(data_loc) is False: + raise IOError("%s directory not found" % data_loc) + else: + self.data_loc = data_loc + + def _build_network(self): + """Build network.""" + reader = CSVReader(self.data_loc) + self.bus = reader.bus + self.plant = reader.plant + self.branch = reader.branch + self.dcline = reader.dcline + self.gencost["after"] = self.gencost["before"] = reader.gencost + + self._add_information_to_model() + + if self.umbrella_interconnect not in self.interconnect: + self._drop_interconnect() + + def _add_information_to_model(self): + self.sub = csv_to_data_frame(self.data_loc, "sub.csv") + self.bus2sub = csv_to_data_frame(self.data_loc, "bus2sub.csv") + self.id2zone = csv_to_data_frame(self.data_loc, "zone.csv").zone_name.to_dict() + self.zone2id = {v: k for k, v in self.id2zone.items()} + + add_zone_to_grid_data_frames(self) + add_coord_to_grid_data_frames(self) + + def _drop_interconnect(self): + """Trim data frames to only keep information pertaining to the user + defined interconnect(s). 
+ + """ + for key, value in self.__dict__.items(): + if key in ["sub", "bus2sub", "bus", "plant", "branch"]: + value.query("interconnect == @self.interconnect", inplace=True) + elif key == "gencost": + value["before"].query( + "interconnect == @self.interconnect", inplace=True + ) + elif key == "dcline": + value.query( + "from_interconnect == @self.interconnect &" + "to_interconnect == @self.interconnect", + inplace=True, + ) + self.id2zone = {k: self.id2zone[k] for k in self.bus.zone_id.unique()} + self.zone2id = {value: key for key, value in self.id2zone.items()} def storage_template(): diff --git a/powersimdata/input/scenario_grid.py b/powersimdata/input/scenario_grid.py index 449638d9e..b1e0ed488 100644 --- a/powersimdata/input/scenario_grid.py +++ b/powersimdata/input/scenario_grid.py @@ -21,21 +21,19 @@ def __init__(self, filename): :param str filename: path to file. """ + self.filename = filename super().__init__() - self._set_data_loc(filename) - self._build_network() - - def _set_data_loc(self, filename): + def _set_data_loc(self): """Sets data location. :param str filename: path to file :raises FileNotFoundError: if file does not exist. """ - if os.path.isfile(filename) is False: - raise FileNotFoundError("%s file not found" % filename) + if os.path.isfile(self.filename) is False: + raise FileNotFoundError("%s file not found" % self.filename) else: - self.data_loc = filename + self.data_loc = self.filename def _read_network(self): data = loadmat(self.data_loc, squeeze_me=True, struct_as_record=False) diff --git a/powersimdata/network/hifld/__init__.py b/powersimdata/network/hifld/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/network/hifld/constants/__init__.py b/powersimdata/network/hifld/constants/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/network/hifld/constants/plants.py b/powersimdata/network/hifld/constants/plants.py new file mode 100644 index 000000000..ef950613f --- /dev/null +++ b/powersimdata/network/hifld/constants/plants.py @@ -0,0 +1,105 @@ +_exports = [ + "all_resources", + "carbon_per_mmbtu", + "carbon_per_mwh", + "carbon_resources", + "clean_resources", + "label2type", + "nox_per_mwh", + "renewable_resources", + "so2_per_mwh", + "type2color", + "type2hatchcolor", + "type2label", +] + +type2color = { + "wind": "xkcd:green", + "solar": "xkcd:amber", + "hydro": "xkcd:light blue", + "ng": "xkcd:orchid", + "nuclear": "xkcd:silver", + "coal": "xkcd:light brown", + "geothermal": "xkcd:hot pink", + "dfo": "xkcd:royal blue", + "biomass": "xkcd:dark green", + "other": "xkcd:melon", + "storage": "xkcd:orange", + "wind_offshore": "xkcd:teal", + "solar_curtailment": "xkcd:amber", + "wind_curtailment": "xkcd:green", + "wind_offshore_curtailment": "xkcd:teal", +} + +type2label = { + "nuclear": "Nuclear", + "geothermal": "Geo-thermal", + "coal": "Coal", + "dfo": "DFO", + "hydro": "Hydro", + "ng": "Natural Gas", + "solar": "Solar", + "wind": "Wind", + "wind_offshore": "Wind Offshore", + "biomass": "Biomass", + "other": "Other", + "storage": "Storage", + "solar_curtailment": "Solar Curtailment", + "wind_curtailment": "Wind Curtailment", + "wind_offshore_curtailment": "Offshore Wind Curtailment", +} + +type2hatchcolor = { + "solar_curtailment": "xkcd:grey", + "wind_curtailment": "xkcd:grey", + "wind_offshore_curtailment": "xkcd:grey", +} + +label2type = {value: key for key, value in type2label.items()} + +renewable_resources = {"solar", "wind", "wind_offshore"} +carbon_resources = {"coal", "ng", "dfo"} 
+clean_resources = renewable_resources | {"geothermal", "hydro", "nuclear"} +all_resources = carbon_resources | {"other"} | clean_resources + + +# MWh to kilograms of CO2 +# Source: IPCC Special Report on Renewable Energy Sources and Climate Change +# Mitigation (2011), Annex II: Methodology, Table A.II.4, 50th percentile +# http://www.ipcc-wg3.de/report/IPCC_SRREN_Annex_II.pdf +carbon_per_mwh = { + "coal": 1001, + "dfo": 840, + "ng": 469, +} + +# MMBTu of fuel per hour to kilograms of CO2 per hour +# Source: https://www.epa.gov/energy/greenhouse-gases-equivalencies-calculator-calculations-and-references +# = (Heat rate MMBTu/h) * (kg C/mmbtu) * (mass ratio CO2/C) +carbon_per_mmbtu = { + "coal": 26.05, + "dfo": 20.31, + "ng": 14.46, +} + +# MWh to kilograms of NOx +# Source: EPA eGrid 2018, tab 'US18' (U.S. summary), columns AN to AP +# https://www.epa.gov/egrid/egrid-questions-and-answers +nox_per_mwh = { + "coal": 0.658, + "dfo": 1.537, + "ng": 0.179, +} + +# MWh to kilograms of SO2 +# Source: EPA eGrid 2018, tab 'US18' (U.S. summary), columns AV to AX +# https://www.epa.gov/egrid/egrid-questions-and-answers +so2_per_mwh = { + "coal": 0.965, + "dfo": 2.189, + "ng": 0.010, +} + + +def __dir__(): + return sorted(_exports) diff --git a/powersimdata/network/hifld/constants/storage.py b/powersimdata/network/hifld/constants/storage.py new file mode 100644 index 000000000..d3b667e63 --- /dev/null +++ b/powersimdata/network/hifld/constants/storage.py @@ -0,0 +1,17 @@ +_exports = ["defaults"] + +defaults = { + "duration": 4, + "min_stor": 0.05, + "max_stor": 0.95, + "InEff": 0.9, + "OutEff": 0.9, + "energy_value": 20, + "LossFactor": 0, + "terminal_min": 0, + "terminal_max": 1, +} + + +def __dir__(): + return sorted(_exports) diff --git a/powersimdata/network/hifld/constants/zones.py b/powersimdata/network/hifld/constants/zones.py new file mode 100644 index 000000000..cde476e0d --- /dev/null +++ b/powersimdata/network/hifld/constants/zones.py @@ -0,0 +1,246 @@ +import os + +import pandas as pd + +_exports = [ + "abv", + "abv2interconnect", + "abv2loadzone", + "abv2state", + "id2abv", + "id2loadzone", + "id2timezone", + "interconnect", + "interconnect2abv", + "interconnect2id", + "interconnect2loadzone", + "interconnect2timezone", + "interconnect_combinations", + "loadzone", + "loadzone2id", + "loadzone2interconnect", + "loadzone2state", + "mappings", + "state", + "state2abv", + "state2loadzone", + "timezone2id", +] + +mappings = {"loadzone", "state", "state_abbr", "interconnect"} + +# Define combinations of interconnects +interconnect_combinations = { + "USA": {"Eastern", "Western", "ERCOT"}, +} + + +# Map state abbreviations to state name +abv2state = { + "AK": "Alaska", + "AL": "Alabama", + "AR": "Arkansas", + "AZ": "Arizona", + "CA": "California", + "CO": "Colorado", + "CT": "Connecticut", + "DE": "Delaware", + "FL": "Florida", + "GA": "Georgia", + "HI": "Hawaii", + "IA": "Iowa", + "ID": "Idaho", + "IL": "Illinois", + "IN": "Indiana", + "KS": "Kansas", + "KY": "Kentucky", + "LA": "Louisiana", + "MA": "Massachusetts", + "MD": "Maryland", + "ME": "Maine", + "MI": "Michigan", + "MN": "Minnesota", + "MO": "Missouri", + "MS": "Mississippi", + "MT": "Montana", + "NC": "North Carolina", + "ND": "North Dakota", + "NE": "Nebraska", + "NH": "New Hampshire", + "NJ": "New Jersey", + "NM": "New Mexico", + "NV": "Nevada", + "NY": "New York", + "OH": "Ohio", + "OK": "Oklahoma", + "OR": "Oregon", + "PA": "Pennsylvania", + "RI": "Rhode Island", + "SC": "South Carolina", + "SD": "South Dakota", + "TN": 
"Tennessee", + "TX": "Texas", + "UT": "Utah", + "VA": "Virginia", + "VT": "Vermont", + "WA": "Washington", + "WI": "Wisconsin", + "WV": "West Virginia", + "WY": "Wyoming", +} + + +# Map state name to state abbreviations +state2abv = {value: key for key, value in abv2state.items()} + + +# Map zones to higher-level aggregations using the information in zone.csv +zone_csv_path = os.path.join(os.path.dirname(__file__), "..", "data", "zone.csv") +zone_df = pd.read_csv(zone_csv_path, index_col=0) + +# load zone id to load zone name +id2loadzone = zone_df["zone_name"].to_dict() +# load zone name to load zone id +loadzone2id = {v: k for k, v in id2loadzone.items()} +# Map state name to load zone name +state2loadzone = { + k: set(v) for k, v in zone_df.groupby("state").zone_name.unique().to_dict().items() +} +# Map interconnect name to load zone name +interconnect2loadzone = { + k: set(v) + for k, v in zone_df.groupby("interconnect").zone_name.unique().to_dict().items() +} +interconnect2loadzone["USA"] = ( + interconnect2loadzone["Eastern"] + | interconnect2loadzone["Western"] + | interconnect2loadzone["ERCOT"] +) +# Map interconnect to load zone id +interconnect2id = { + k: set(zone_df.isin(v).query("zone_name == True").index) + for k, v in interconnect2loadzone.items() +} + +# Map load zone id to state abbreviations +id2abv = {k: state2abv[v] for k, v in zone_df.state.to_dict().items()} + + +# Map state abbreviations to load zone name +abv2loadzone = { + state2abv[state]: loadzone for state, loadzone in state2loadzone.items() +} + + +# Map load zone name to state name +loadzone2state = {} +for state, zone_set in state2loadzone.items(): + loadzone2state.update({zone: state for zone in zone_set}) + + +# Map load zone name to interconnect name +loadzone2interconnect = { + zone: interconnect + for interconnect, zone_set in interconnect2loadzone.items() + for zone in zone_set + if interconnect not in interconnect_combinations +} + + +# Map interconnect name to state abbreviations +# Note: states which span interconnects are assigned to the one they're 'most' in. 
+interconnect2abv = { + "Eastern": { + "ME", + "NH", + "VT", + "MA", + "RI", + "CT", + "NY", + "NJ", + "PA", + "DE", + "MD", + "VA", + "NC", + "SC", + "GA", + "FL", + "AL", + "MS", + "TN", + "KY", + "WV", + "OH", + "MI", + "IN", + "IL", + "WI", + "MN", + "IA", + "MO", + "AR", + "LA", + "OK", + "KS", + "NE", + "SD", + "ND", + }, + "ERCOT": {"TX"}, + "Western": {"WA", "OR", "CA", "NV", "AZ", "UT", "NM", "CO", "WY", "ID", "MT"}, +} +interconnect2abv["USA"] = ( + interconnect2abv["Eastern"] + | interconnect2abv["Western"] + | interconnect2abv["ERCOT"] +) + + +# Map state abbreviations to interconnect name +abv2interconnect = {} +for k, v in interconnect2abv.items(): + if k in interconnect_combinations: + continue + for s in v: + abv2interconnect[s] = k + + +# List of interconnect name +interconnect = set(interconnect2abv.keys()) + + +# List of state name +state = set(state2abv.keys()) + + +# List of state abbreviations +abv = set(abv2state.keys()) + + +# List of load zone name +loadzone = set(loadzone2interconnect.keys()) + +# Map interconnect name to time zone +interconnect2timezone = { + "USA": "ETC/GMT+6", + "Eastern": "ETC/GMT+5", + "ERCOT": "ETC/GMT+6", + "Western": "ETC/GMT+8", + "Eastern_ERCOT": "ETC/GMT+5", + "Eastern_Western": "ETC/GMT+6", + "ERCOT_Western": "ETC/GMT+7", +} + + +# Map load zone IDs to time zones +# Note: load zones in > 1 time zone are put in the one where most load centers reside +id2timezone = zone_df["time_zone"].to_dict() + +# Map time zones to load zone IDs +timezone2id = {k: set(v) for k, v in zone_df.groupby("time_zone").groups.items()} + + +def __dir__(): + return sorted(_exports) diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py new file mode 100644 index 000000000..225a25ac0 --- /dev/null +++ b/powersimdata/network/hifld/model.py @@ -0,0 +1,50 @@ +import os + +from powersimdata.input.abstract_grid import AbstractGrid +from powersimdata.network.hifld.constants.storage import defaults + + +class HIFLD(AbstractGrid): + def __init__(self, interconnect): + """Constructor.""" + self.top_dirname = os.path.dirname(__file__) + self.interconnect = check_and_format_interconnect(interconnect) + self.umbrella_interconnect = "USA" + super().__init__() + self.storage.update(defaults) + + +def check_and_format_interconnect(interconnect): + """Checks interconnect. + + :param str/iterable interconnect: interconnect name(s). + :return: (*list*) -- interconnect(s) + :raises TypeError: if parameter has wrong type. + :raises ValueError: if interconnect not found or combination of interconnect is not + appropriate. + """ + if isinstance(interconnect, str): + interconnect = [interconnect] + try: + interconnect = sorted(set(interconnect)) + except: # noqa + raise TypeError("interconnect must be either str or an iterable of str") + + possible = ["Eastern", "Western", "ERCOT", "USA"] + if any(i for i in interconnect if i not in possible): + raise ValueError("Wrong interconnect. Choose from %s" % " | ".join(possible)) + n = len(interconnect) + if "USA" in interconnect and n > 1: + raise ValueError("'USA' cannot be paired") + if n == 3: + raise ValueError("Use 'USA' instead") + + return interconnect + + +def interconnect_to_name(interconnect): + """Return name of interconnect or collection of interconnects. + + :param iterable interconnect: interconnect name(s). 
+ """ + return "_".join(sorted(check_and_format_interconnect(interconnect))) diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index 903578682..5d4509f88 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -1,12 +1,6 @@ import os from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.input.helpers import ( - add_coord_to_grid_data_frames, - add_zone_to_grid_data_frames, - csv_to_data_frame, -) -from powersimdata.network.csv_reader import CSVReader from powersimdata.network.usa_tamu.constants.storage import defaults @@ -18,61 +12,12 @@ class TAMU(AbstractGrid): def __init__(self, interconnect): """Constructor.""" - super().__init__() - self._set_data_loc() - + self.top_dirname = os.path.dirname(__file__) self.interconnect = check_and_format_interconnect(interconnect) - self._build_network() - - def _set_data_loc(self): - """Sets data location. - - :raises IOError: if directory does not exist. - """ - top_dirname = os.path.dirname(__file__) - data_loc = os.path.join(top_dirname, "data") - if os.path.isdir(data_loc) is False: - raise IOError("%s directory not found" % data_loc) - else: - self.data_loc = data_loc - - def _build_network(self): - """Build network.""" - reader = CSVReader(self.data_loc) - self.bus = reader.bus - self.plant = reader.plant - self.branch = reader.branch - self.dcline = reader.dcline - self.gencost["after"] = self.gencost["before"] = reader.gencost - + self.umbrella_interconnect = "USA" + super().__init__() self.storage.update(defaults) - add_information_to_model(self) - - if "USA" not in self.interconnect: - self._drop_interconnect() - - def _drop_interconnect(self): - """Trim data frames to only keep information pertaining to the user - defined interconnect(s). - - """ - for key, value in self.__dict__.items(): - if key in ["sub", "bus2sub", "bus", "plant", "branch"]: - value.query("interconnect == @self.interconnect", inplace=True) - elif key == "gencost": - value["before"].query( - "interconnect == @self.interconnect", inplace=True - ) - elif key == "dcline": - value.query( - "from_interconnect == @self.interconnect &" - "to_interconnect == @self.interconnect", - inplace=True, - ) - self.id2zone = {k: self.id2zone[k] for k in self.bus.zone_id.unique()} - self.zone2id = {value: key for key, value in self.id2zone.items()} - def check_and_format_interconnect(interconnect): """Checks interconnect. @@ -108,17 +53,3 @@ def interconnect_to_name(interconnect): :param list interconnect: interconnect name(s). """ return "_".join(sorted(check_and_format_interconnect(interconnect))) - - -def add_information_to_model(model): - """Adds information to TAMU model. This is done inplace. - - :param powersimdata.input.TAMU model: TAMU instance. 
- """ - model.sub = csv_to_data_frame(model.data_loc, "sub.csv") - model.bus2sub = csv_to_data_frame(model.data_loc, "bus2sub.csv") - model.id2zone = csv_to_data_frame(model.data_loc, "zone.csv").zone_name.to_dict() - model.zone2id = {v: k for k, v in model.id2zone.items()} - - add_zone_to_grid_data_frames(model) - add_coord_to_grid_data_frames(model) From 1d3752c3f2c75ce8514f8ab89aa4a1d162e02196 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 14 Apr 2022 17:42:24 -0700 Subject: [PATCH 04/59] feat: factor out check_and_format_interconnect and interconnect_to_name --- powersimdata/input/abstract_grid.py | 26 +++++----- powersimdata/network/constants/model.py | 5 ++ powersimdata/network/helpers.py | 48 ++++++++++++++++++ powersimdata/network/hifld/model.py | 45 +++-------------- powersimdata/network/model.py | 18 ++++--- .../test_model.py => tests/test_helpers.py} | 49 +++++++++++-------- powersimdata/network/usa_tamu/model.py | 44 ++--------------- powersimdata/scenario/create.py | 2 +- 8 files changed, 118 insertions(+), 119 deletions(-) create mode 100644 powersimdata/network/constants/model.py create mode 100644 powersimdata/network/helpers.py rename powersimdata/network/{usa_tamu/tests/test_model.py => tests/test_helpers.py} (57%) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 6f00b7105..41cef42a6 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -8,18 +8,18 @@ add_zone_to_grid_data_frames, csv_to_data_frame, ) +from powersimdata.network.constants.model import model2region from powersimdata.network.csv_reader import CSVReader +from powersimdata.network.helpers import check_and_format_interconnect class AbstractGrid: - """Grid Builder. Child classes must assign self.top_dirname and - self.umbrella_interconnect before self.__init__ is called, or re-define the __init__ - and/or methods called within the __init__ to avoid an AttributeError. - """ + """Grid Builder.""" def __init__(self): """Constructor""" self.data_loc = None + self.interconnect = None self.zone2id = {} self.id2zone = {} self.sub = pd.DataFrame() @@ -30,22 +30,25 @@ def __init__(self): self.bus = pd.DataFrame() self.branch = pd.DataFrame() self.storage = storage_template() - self._set_data_loc() - self._build_network() - def _set_data_loc(self): + def _set_data_loc(self, top_dirname): """Sets data location. + :param str top_dirname: name of directory enclosing data. :raises IOError: if directory does not exist. """ - data_loc = os.path.join(self.top_dirname, "data") + data_loc = os.path.join(top_dirname, "data") if os.path.isdir(data_loc) is False: raise IOError("%s directory not found" % data_loc) else: self.data_loc = data_loc - def _build_network(self): - """Build network.""" + def _build_network(self, interconnect, grid_model): + """Build network. + + :param str/iterable interconnect: interconnect name(s). + :param str model: the grid model. 
+ """ reader = CSVReader(self.data_loc) self.bus = reader.bus self.plant = reader.plant @@ -53,9 +56,10 @@ def _build_network(self): self.dcline = reader.dcline self.gencost["after"] = self.gencost["before"] = reader.gencost + self.interconnect = check_and_format_interconnect(interconnect, grid_model) self._add_information_to_model() - if self.umbrella_interconnect not in self.interconnect: + if model2region[grid_model] not in self.interconnect: self._drop_interconnect() def _add_information_to_model(self): diff --git a/powersimdata/network/constants/model.py b/powersimdata/network/constants/model.py new file mode 100644 index 000000000..b2f159fbe --- /dev/null +++ b/powersimdata/network/constants/model.py @@ -0,0 +1,5 @@ +model2region = {"usa_tamu": "USA", "hifld": "USA"} +model2interconnect = { + "usa_tamu": ["Eastern", "Texas", "Western"], + "hifld": ["Eastern", "ERCOT", "Western"], +} diff --git a/powersimdata/network/helpers.py b/powersimdata/network/helpers.py new file mode 100644 index 000000000..c60a8de77 --- /dev/null +++ b/powersimdata/network/helpers.py @@ -0,0 +1,48 @@ +from powersimdata.network.constants.model import model2interconnect, model2region + + +def check_and_format_interconnect(interconnect, model="hifld"): + """Checks interconnect in a grid model. + + :param str/iterable interconnect: interconnect name(s). + :param str model: the grid model. + :return: (*set*) -- interconnect(s) + :raises TypeError: if ``interconnect`` and ``model`` are not str. + :raises ValueError: + if ``model`` does not exist. + if ``interconnect`` is not in the model. + if combination of interconnect is incorrect. + """ + if not isinstance(model, str): + raise TypeError("model must be a str") + if model not in model2region: + raise ValueError(f"Invalid model. Choose among {' | '.join(model2region)}") + + if isinstance(interconnect, str): + interconnect = [interconnect] + try: + interconnect = sorted(set(interconnect)) + except TypeError: + raise TypeError("interconnect must be either str or an iterable of str") + + region = model2region[model] + possible = model2interconnect[model] + if len(set(interconnect) - ({region} | set(possible))) != 0: + raise ValueError( + f"Invalid interconnect(s). Choose from {' | '.join(set(possible) | {region})}" + ) + if region in interconnect and len(interconnect) > 1: + raise ValueError(f"{region} cannot be paired") + if len(set(possible) - set(interconnect)) == 0: + raise ValueError(f"Use {region} instead") + + return interconnect + + +def interconnect_to_name(interconnect, model="hifld"): + """Return name of interconnect or collection of interconnects for a grid model. + + :param list interconnect: interconnect name(s). + :param str model: the grid model. + """ + return "_".join(sorted(check_and_format_interconnect(interconnect, model))) diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index 225a25ac0..f6a74b027 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -5,46 +5,15 @@ class HIFLD(AbstractGrid): - def __init__(self, interconnect): - """Constructor.""" - self.top_dirname = os.path.dirname(__file__) - self.interconnect = check_and_format_interconnect(interconnect) - self.umbrella_interconnect = "USA" - super().__init__() - self.storage.update(defaults) - - -def check_and_format_interconnect(interconnect): - """Checks interconnect. + """HIFLD network. :param str/iterable interconnect: interconnect name(s). 
- :return: (*list*) -- interconnect(s) - :raises TypeError: if parameter has wrong type. - :raises ValueError: if interconnect not found or combination of interconnect is not - appropriate. """ - if isinstance(interconnect, str): - interconnect = [interconnect] - try: - interconnect = sorted(set(interconnect)) - except: # noqa - raise TypeError("interconnect must be either str or an iterable of str") - - possible = ["Eastern", "Western", "ERCOT", "USA"] - if any(i for i in interconnect if i not in possible): - raise ValueError("Wrong interconnect. Choose from %s" % " | ".join(possible)) - n = len(interconnect) - if "USA" in interconnect and n > 1: - raise ValueError("'USA' cannot be paired") - if n == 3: - raise ValueError("Use 'USA' instead") - return interconnect - - -def interconnect_to_name(interconnect): - """Return name of interconnect or collection of interconnects. + def __init__(self, interconnect): + """Constructor.""" + super().__init__() - :param iterable interconnect: interconnect name(s). - """ - return "_".join(sorted(check_and_format_interconnect(interconnect))) + self._set_data_loc(os.path.dirname(__file__)) + self._build_network(interconnect, "hifld") + self.storage.update(defaults) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 19a1d4940..8da96eda7 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,5 +1,11 @@ from importlib import import_module +from powersimdata.network.constants.model import model2region +from powersimdata.network.helpers import ( + check_and_format_interconnect, + interconnect_to_name, +) + class ModelImmutables: """Immutables for a grid model. @@ -16,11 +22,8 @@ def __init__(self, model): self.storage = self._import_constants("storage") self.zones = self._import_constants("zones") - mod = import_module(f"powersimdata.network.{self.model}.model") - self.check_and_format_interconnect = getattr( - mod, "check_and_format_interconnect" - ) - self.interconnect_to_name = getattr(mod, "interconnect_to_name") + self.check_and_format_interconnect = check_and_format_interconnect + self.interconnect_to_name = interconnect_to_name @staticmethod def _check_model(model): @@ -29,9 +32,8 @@ def _check_model(model): :param str model: grid model name :raises ValueError: if grid model does not exist. """ - possible = {"usa_tamu", "hifld"} - if model not in possible: - raise ValueError("model must be one of %s" % " | ".join(possible)) + if model not in model2region: + raise ValueError(f"Invalid model. Choose among {' | '.join(model2region)}") def _import_constants(self, kind): """Import constants related to the grid model. 
diff --git a/powersimdata/network/usa_tamu/tests/test_model.py b/powersimdata/network/tests/test_helpers.py similarity index 57% rename from powersimdata/network/usa_tamu/tests/test_model.py rename to powersimdata/network/tests/test_helpers.py index 91abb9a22..3d7658382 100644 --- a/powersimdata/network/usa_tamu/tests/test_model.py +++ b/powersimdata/network/tests/test_helpers.py @@ -1,45 +1,52 @@ import pytest -from powersimdata.network.usa_tamu.model import TAMU, check_and_format_interconnect +from powersimdata.network.helpers import check_and_format_interconnect +from powersimdata.network.usa_tamu.model import TAMU def _assert_lists_equal(a, b): assert sorted(a) == sorted(b) -def test_interconnect_type(): - interconnect = 42 - with pytest.raises(TypeError): - check_and_format_interconnect(interconnect) +def test_check_and_format_interconnect_argument_type(): + with pytest.raises( + TypeError, match="interconnect must be either str or an iterable of str" + ): + check_and_format_interconnect(42) + with pytest.raises( + TypeError, match="interconnect must be either str or an iterable of str" + ): + check_and_format_interconnect([42, "Western"]) -def test_interconnect_value(): - interconnect = ["Canada"] - with pytest.raises(ValueError): - check_and_format_interconnect(interconnect) + with pytest.raises(TypeError, match="model must be a str"): + check_and_format_interconnect("Eastern", model=1) -def test_interconnect_duplicate_value(): - interconnect = ["Western", "Western", "Texas"] - result = check_and_format_interconnect(interconnect) - _assert_lists_equal(["Western", "Texas"], result) +def test_check_and_format_interconnect_argument_value(): + with pytest.raises(ValueError): + check_and_format_interconnect("Eastern", model="tamu") + interconnect = "Canada" + with pytest.raises(ValueError): + check_and_format_interconnect(interconnect) -def test_interconnect_usa_is_unique(): interconnect = ["Western", "USA"] with pytest.raises(ValueError, match="USA cannot be paired"): - check_and_format_interconnect(interconnect) + check_and_format_interconnect(interconnect, model="usa_tamu") -def test_interconnect_iterable(): - result = check_and_format_interconnect({"Texas", "Eastern"}) - _assert_lists_equal(["Eastern", "Texas"], result) +def test_check_and_format_interconnect(): + result = check_and_format_interconnect({"ERCOT", "Eastern"}) + _assert_lists_equal(["Eastern", "ERCOT"], result) - result = check_and_format_interconnect(("Texas", "Eastern")) - _assert_lists_equal(["Eastern", "Texas"], result) + result = check_and_format_interconnect(("ERCOT", "Eastern")) + _assert_lists_equal(["Eastern", "ERCOT"], result) + interconnect = ["Western", "Western", "Texas"] + result = check_and_format_interconnect(interconnect, model="usa_tamu") + _assert_lists_equal(["Western", "Texas"], result) -def test_interconnect(): arg = ("Western", ["Eastern", "Western"]) expected = (["Western"], ["Eastern", "Western"]) for a, e in zip(arg, expected): diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index 5d4509f88..bb86f0b98 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -7,49 +7,13 @@ class TAMU(AbstractGrid): """TAMU network. - :param str/list interconnect: interconnect name(s). + :param str/iterable interconnect: interconnect name(s). 
""" def __init__(self, interconnect): """Constructor.""" - self.top_dirname = os.path.dirname(__file__) - self.interconnect = check_and_format_interconnect(interconnect) - self.umbrella_interconnect = "USA" super().__init__() - self.storage.update(defaults) - - -def check_and_format_interconnect(interconnect): - """Checks interconnect. - - :param str/iterable interconnect: interconnect name(s). - :return: (*list*) -- interconnect(s) - :raises TypeError: if parameter has wrong type. - :raises ValueError: if interconnect not found or combination of interconnect is not - appropriate. - """ - if isinstance(interconnect, str): - interconnect = [interconnect] - try: - interconnect = sorted(set(interconnect)) - except: # noqa - raise TypeError("interconnect must be either str or an iterable of str") - possible = ["Eastern", "Texas", "Western", "USA"] - if any(i for i in interconnect if i not in possible): - raise ValueError("Wrong interconnect. Choose from %s" % " | ".join(possible)) - n = len(interconnect) - if "USA" in interconnect and n > 1: - raise ValueError("USA cannot be paired") - if n == 3: - raise ValueError("Use USA instead") - - return interconnect - - -def interconnect_to_name(interconnect): - """Return name of interconnect or collection of interconnects.. - - :param list interconnect: interconnect name(s). - """ - return "_".join(sorted(check_and_format_interconnect(interconnect))) + self._set_data_loc(os.path.dirname(__file__)) + self._build_network(interconnect, "usa_tamu") + self.storage.update(defaults) diff --git a/powersimdata/scenario/create.py b/powersimdata/scenario/create.py index 6f3a30a22..26bc14cb7 100644 --- a/powersimdata/scenario/create.py +++ b/powersimdata/scenario/create.py @@ -208,7 +208,7 @@ def __init__(self, grid_model, interconnect, table): mi = ModelImmutables(grid_model) self.grid_model = mi.model - self.interconnect = mi.interconnect_to_name(interconnect) + self.interconnect = mi.interconnect_to_name(interconnect, self.grid_model) self.base_grid = Grid(interconnect, source=grid_model) self.change_table = ChangeTable(self.base_grid) From dd529cf4ef20696774b2ba9580ac25692e8253c6 Mon Sep 17 00:00:00 2001 From: Daniel Olsen Date: Wed, 14 Apr 2021 12:04:56 -0700 Subject: [PATCH 05/59] chore: add 'hifld' to list of acceptable models for ModelImmutables --- powersimdata/network/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 4be574ce1..19a1d4940 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -29,7 +29,7 @@ def _check_model(model): :param str model: grid model name :raises ValueError: if grid model does not exist. 
""" - possible = {"usa_tamu"} + possible = {"usa_tamu", "hifld"} if model not in possible: raise ValueError("model must be one of %s" % " | ".join(possible)) From 2c950057e9cda88c6eb604d76dc07725bcce2356 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 18 Apr 2022 13:48:38 -0700 Subject: [PATCH 06/59] feat: factor out storage constants --- powersimdata/input/grid.py | 4 ++-- powersimdata/network/constants/storage.py | 14 ++++++++++++++ powersimdata/network/hifld/constants/storage.py | 17 ----------------- powersimdata/network/hifld/model.py | 7 ++++--- powersimdata/network/model.py | 3 ++- .../network/usa_tamu/constants/storage.py | 17 ----------------- powersimdata/network/usa_tamu/model.py | 7 ++++--- 7 files changed, 26 insertions(+), 43 deletions(-) create mode 100644 powersimdata/network/constants/storage.py delete mode 100644 powersimdata/network/hifld/constants/storage.py delete mode 100644 powersimdata/network/usa_tamu/constants/storage.py diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py index fbd5e386c..ebe468d37 100644 --- a/powersimdata/input/grid.py +++ b/powersimdata/input/grid.py @@ -3,9 +3,9 @@ from powersimdata.data_access.context import Context from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.input.scenario_grid import FromREISE, FromREISEjl +from powersimdata.network.constants.storage import storage from powersimdata.network.hifld.model import HIFLD from powersimdata.network.model import ModelImmutables -from powersimdata.network.usa_tamu.constants import storage as tamu_storage from powersimdata.network.usa_tamu.model import TAMU from powersimdata.utility.helpers import MemoryCache, cache_key @@ -132,7 +132,7 @@ def _univ_eq(ref, test, failure_flag=None): # compare storage _univ_eq(len(self.storage["gen"]), len(other.storage["gen"]), "storage") _univ_eq(self.storage.keys(), other.storage.keys(), "storage") - ignored_subkeys = {"gencost"} | set(tamu_storage.defaults.keys()) + ignored_subkeys = {"gencost"} | set(storage.keys()) for subkey in set(self.storage.keys()) - ignored_subkeys: # REISE will modify some gen columns self_data = self.storage[subkey] diff --git a/powersimdata/network/constants/storage.py b/powersimdata/network/constants/storage.py new file mode 100644 index 000000000..0064f24a5 --- /dev/null +++ b/powersimdata/network/constants/storage.py @@ -0,0 +1,14 @@ +storage = { + "usa_tamu": { + "duration": 4, + "min_stor": 0.05, + "max_stor": 0.95, + "InEff": 0.9, + "OutEff": 0.9, + "energy_value": 20, + "LossFactor": 0, + "terminal_min": 0, + "terminal_max": 1, + } +} +storage.update({"hifld": storage["usa_tamu"]}) diff --git a/powersimdata/network/hifld/constants/storage.py b/powersimdata/network/hifld/constants/storage.py deleted file mode 100644 index d3b667e63..000000000 --- a/powersimdata/network/hifld/constants/storage.py +++ /dev/null @@ -1,17 +0,0 @@ -_exports = ["defaults"] - -defaults = { - "duration": 4, - "min_stor": 0.05, - "max_stor": 0.95, - "InEff": 0.9, - "OutEff": 0.9, - "energy_value": 20, - "LossFactor": 0, - "terminal_min": 0, - "terminal_max": 1, -} - - -def __dir__(): - return sorted(_exports) diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index f6a74b027..3b0730286 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -1,7 +1,7 @@ import os from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.network.hifld.constants.storage import defaults +from 
powersimdata.network.constants.storage import storage class HIFLD(AbstractGrid): @@ -12,8 +12,9 @@ class HIFLD(AbstractGrid): def __init__(self, interconnect): """Constructor.""" + model = "hifld" super().__init__() self._set_data_loc(os.path.dirname(__file__)) - self._build_network(interconnect, "hifld") - self.storage.update(defaults) + self._build_network(interconnect, model) + self.storage.update(storage[model]) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 8da96eda7..c1542bfce 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,6 +1,7 @@ from importlib import import_module from powersimdata.network.constants.model import model2region +from powersimdata.network.constants.storage import storage from powersimdata.network.helpers import ( check_and_format_interconnect, interconnect_to_name, @@ -19,7 +20,7 @@ def __init__(self, model): self.model = model self.plants = self._import_constants("plants") - self.storage = self._import_constants("storage") + self.storage = storage[model] self.zones = self._import_constants("zones") self.check_and_format_interconnect = check_and_format_interconnect diff --git a/powersimdata/network/usa_tamu/constants/storage.py b/powersimdata/network/usa_tamu/constants/storage.py deleted file mode 100644 index d3b667e63..000000000 --- a/powersimdata/network/usa_tamu/constants/storage.py +++ /dev/null @@ -1,17 +0,0 @@ -_exports = ["defaults"] - -defaults = { - "duration": 4, - "min_stor": 0.05, - "max_stor": 0.95, - "InEff": 0.9, - "OutEff": 0.9, - "energy_value": 20, - "LossFactor": 0, - "terminal_min": 0, - "terminal_max": 1, -} - - -def __dir__(): - return sorted(_exports) diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index bb86f0b98..a4527abbc 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -1,7 +1,7 @@ import os from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.network.usa_tamu.constants.storage import defaults +from powersimdata.network.constants.storage import storage class TAMU(AbstractGrid): @@ -12,8 +12,9 @@ class TAMU(AbstractGrid): def __init__(self, interconnect): """Constructor.""" + model = "usa_tamu" super().__init__() self._set_data_loc(os.path.dirname(__file__)) - self._build_network(interconnect, "usa_tamu") - self.storage.update(defaults) + self._build_network(interconnect, model) + self.storage.update(storage[model]) From fa4433887bee2c6aa125cdf59b1b81567a485052 Mon Sep 17 00:00:00 2001 From: Daniel Olsen Date: Wed, 14 Apr 2021 12:22:40 -0700 Subject: [PATCH 07/59] feat: update Grid class to enable HIFLD grids --- powersimdata/input/grid.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py index 2f8c9d114..f4c487482 100644 --- a/powersimdata/input/grid.py +++ b/powersimdata/input/grid.py @@ -3,6 +3,7 @@ from powersimdata.data_access.context import Context from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.input.scenario_grid import FromREISE, FromREISEjl +from powersimdata.network.hifld.model import HIFLD from powersimdata.network.model import ModelImmutables from powersimdata.network.usa_tamu.constants import storage as tamu_storage from powersimdata.network.usa_tamu.model import TAMU @@ -13,7 +14,7 @@ class Grid: - SUPPORTED_MODELS = {"usa_tamu"} + SUPPORTED_MODELS = {"hifld", "usa_tamu"} SUPPORTED_ENGINES = {"REISE", "REISE.jl"} """Grid 
@@ -49,11 +50,15 @@ def __init__(self, interconnect, source="usa_tamu", engine="REISE"): data = cached elif source == "usa_tamu": data = TAMU(interconnect) + elif source == "hifld": + data = HIFLD(interconnect) elif os.path.splitext(source)[1] == ".mat": if engine == "REISE": data = FromREISE(source) elif engine == "REISE.jl": data = FromREISEjl(source) + else: + raise ValueError(f"Unknown source: {source}") self.data_loc = data.data_loc self.interconnect = data.interconnect From 4c483b2897390e82f0b26d86c4bd0e441e4e0517 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 26 Apr 2022 12:58:08 -0700 Subject: [PATCH 08/59] feat: access model immutables for Europe --- powersimdata/network/constants/model.py | 9 +- powersimdata/network/constants/plants.py | 7 +- .../network/constants/region/europe.py | 257 ++++++++++++++++++ powersimdata/network/helpers.py | 2 + powersimdata/network/model.py | 105 +++---- 5 files changed, 316 insertions(+), 64 deletions(-) create mode 100644 powersimdata/network/constants/region/europe.py diff --git a/powersimdata/network/constants/model.py b/powersimdata/network/constants/model.py index b2f159fbe..fdf1da7d2 100644 --- a/powersimdata/network/constants/model.py +++ b/powersimdata/network/constants/model.py @@ -1,5 +1,12 @@ -model2region = {"usa_tamu": "USA", "hifld": "USA"} +model2region = {"usa_tamu": "USA", "hifld": "USA", "europe_tub": "Europe"} model2interconnect = { "usa_tamu": ["Eastern", "Texas", "Western"], "hifld": ["Eastern", "ERCOT", "Western"], + "europe_tub": [ + "ContinentalEurope", + "Nordic", + "GreatBritain", + "Ireland", + "Baltic", + ], } diff --git a/powersimdata/network/constants/plants.py b/powersimdata/network/constants/plants.py index e131fddd2..71cf647ad 100644 --- a/powersimdata/network/constants/plants.py +++ b/powersimdata/network/constants/plants.py @@ -1,3 +1,5 @@ +from powersimdata.network.helpers import check_model + type2color = { "wind": "xkcd:green", "solar": "xkcd:amber", @@ -92,6 +94,8 @@ def get_plants(model): :param str model: grid model :return: (*dict*) -- plants information. 
""" + check_model(model) + exports = [ "all_resources", "carbon_resources", @@ -106,5 +110,4 @@ def get_plants(model): "type2hatchcolor", "type2label", ] - if model in ["usa_tamu", "hifld"]: - return {a: eval(a) for a in exports} + return {a: eval(a) for a in exports} diff --git a/powersimdata/network/constants/region/europe.py b/powersimdata/network/constants/region/europe.py new file mode 100644 index 000000000..a535838ca --- /dev/null +++ b/powersimdata/network/constants/region/europe.py @@ -0,0 +1,257 @@ +from itertools import combinations + +import pandas as pd + +from powersimdata.network.constants.model import model2interconnect +from powersimdata.network.helpers import interconnect_to_name + +abv2country = { + "AL": "Albania", + "AT": "Austria", + "BA": "Bosnia And Herzegovina", + "BE": "Belgium", + "BG": "Bulgaria", + "CH": "Switzerland", + "CZ": "Czech Republic", + "DE": "Germany", + "DK": "Danemark", + "EE": "Estonia", + "ES": "Spain", + "FI": "Finland", + "FR": "France", + "GB": "Great Britain", + "GR": "Greece", + "HR": "Croatia", + "HU": "Hungary", + "IE": "Ireland", + "IT": "Italy", + "LT": "Lithuania", + "LU": "Luxembourg", + "LV": "Latvia", + "ME": "Montenegro", + "MK": "Macedonia", + "NL": "Netherlands", + "NO": "Norway", + "PL": "Poland", + "PT": "Portugal", + "RO": "Romania", + "RS": "Serbia", + "SE": "Sweden", + "SI": "Slovenia", + "SK": "Slovakia", +} + +abv2timezone = { + "AL": "ETC/GMT-1", + "AT": "ETC/GMT-1", + "BA": "ETC/GMT-1", + "BE": "ETC/GMT-1", + "BG": "ETC/GMT-2", + "CH": "ETC/GMT-1", + "CZ": "ETC/GMT-1", + "DE": "ETC/GMT-1", + "DK": "ETC/GMT-1", + "EE": "ETC/GMT-2", + "ES": "ETC/GMT-1", + "FI": "ETC/GMT-2", + "FR": "ETC/GMT-1", + "GB": "ETC/GMT", + "GR": "ETC/GMT-2", + "HR": "ETC/GMT-1", + "HU": "ETC/GMT-1", + "IE": "ETC/GMT", + "IT": "ETC/GMT-1", + "LT": "ETC/GMT-2", + "LU": "ETC/GMT-1", + "LV": "ETC/GMT-2", + "ME": "ETC/GMT-1", + "MK": "ETC/GMT-1", + "NL": "ETC/GMT-1", + "NO": "ETC/GMT-1", + "PL": "ETC/GMT-1", + "PT": "ETC/GMT", + "RO": "ETC/GMT-2", + "RS": "ETC/GMT-1", + "SE": "ETC/GMT-1", + "SI": "ETC/GMT-1", + "SK": "ETC/GMT-1", +} + + +interconnect2abv = { + "ContinentalEurope": { + "AL", + "AT", + "BA", + "BE", + "BG", + "CH", + "CZ", + "DE", + "DK", + "ES", + "FR", + "GR", + "HR", + "HU", + "IT", + "LU", + "ME", + "MK", + "NL", + "PL", + "PT", + "RO", + "RS", + "SI", + "SK", + }, + "Nordic": {"FI", "NO", "SE"}, + "GreatBritain": {"GB"}, + "Ireland": {"IE"}, + "Baltic": {"EE", "LT", "LV"}, +} + +cb = [i for j in range(2, 6) for i in combinations(model2interconnect["europe_tub"], j)] +for c in cb: + interconnect2abv[interconnect_to_name(c, model="europe_tub")] = { + a for i in c for a in interconnect2abv[i] + } + +name2interconnect = { + interconnect_to_name(i, model="europe_tub"): set(i) + for c in range(1, 6) + for i in combinations(model2interconnect["europe_tub"], c) +} + +name2component = name2interconnect.copy() +name2component.update({"Europe": set(name2interconnect) - {"Europe"}}) + + +interconnect2timezone = { + interconnect_to_name(i, model="europe_tub"): "ETC/GMT-1" + for c in range(1, 6) + for i in combinations(model2interconnect["europe_tub"], c) +} +interconnect2timezone.update( + { + interconnect_to_name("GreatBritain", model="europe_tub"): "ETC/GMT", + interconnect_to_name("Ireland", model="europe_tub"): "ETC/GMT", + interconnect_to_name( + ["GreatBritain", "Ireland"], model="europe_tub" + ): "ETC/GMT", + interconnect_to_name("Baltic", model="europe_tub"): "ETC/GMT-2", + interconnect_to_name(["Nordic", "Baltic"], model="europe_tub"): 
"ETC/GMT-2", + } +) + + +def get_interconnect_mapping(zone, model): + """Return interconnect mapping. + + :param pandas.DataFrame zone: information on zones of a grid model. + :param str model: the grid model. + :return: (*dict*) -- mappings of interconnect to other areas. + """ + mapping = dict() + + name = interconnect_to_name(zone["interconnect"], model=model) + + mapping["interconnect"] = name2component[name] | {name} + mapping["name2interconnect"] = { + i: name2interconnect[i] for i in mapping["interconnect"] + } + mapping["name2component"] = {i: name2component[i] for i in mapping["interconnect"]} + mapping["interconnect2timezone"] = { + i: interconnect2timezone[i] for i in mapping["interconnect"] + } + mapping["interconnect2abv"] = { + i: interconnect2abv[i] for i in mapping["interconnect"] + } + if model == "europe_tub": + mapping["interconnect2loadzone"] = {i: set() for i in mapping["interconnect"]} + mapping["interconnect2id"] = {i: set() for i in mapping["interconnect"]} + + return mapping + + +def get_country_mapping(zone, model): + """Return country mapping. + + :param pandas.DataFrame zone: information on zones of a grid model. + :param str model: the grid model. + :return: (*dict*) -- mappings of countries to other areas. + """ + mapping = dict() + + mapping["country"] = set(zone["country"]) + mapping["abv"] = set(zone["abv"]) + mapping["country_abbr"] = set(zone["abv"]) + mapping["country2abv"] = dict(zip(zone["country"], zone["abv"])) + mapping["abv2country"] = dict(zip(zone["abv"], zone["country"])) + mapping["abv2interconnect"] = dict(zip(zone["abv"], zone["interconnect"])) + + if model == "europe_tub": + mapping["country2loadzone"] = {c: set() for c in set(zone["country"])} + mapping["abv2loadzone"] = {a: set() for a in set(zone["abv"])} + mapping["abv2id"] = {a: set() for a in set(zone["abv"])} + mapping["id2abv"] = dict() + + return mapping + + +def get_loadzone_mapping(zone, model): + """Return loadzone mapping + + :param pandas.DataFrame zone: information on zones of a grid model. + :param str model: the grid model. + :return: (*dict*) -- mappings of loadzones to other areas. + """ + mapping = dict() + + if model == "europe_tub": + mapping["loadzone"] = set() + mapping["id2timezone"] = dict() + mapping["id2loadzone"] = dict() + mapping["timezone2id"] = dict() + mapping["loadzone2id"] = dict() + mapping["loadzone2country"] = dict() + mapping["loadzone2abv"] = dict() + mapping["loadzone2interconnect"] = dict() + + return mapping + + +def get_zones(interconnect, model): + """Return zone constants. + + :para list interconnect: interconnect(s). + :param str model: the grid model. + :return: (*dict*) -- zones information. 
+ """ + zones = dict() + zones["mappings"] = {"loadzone", "country", "country_abbr", "interconnect"} + + interconnect = ( + model2interconnect[model] if "Europe" in interconnect else interconnect + ) + if model == "europe_tub": + # geographical information will be enclosed in the PyPSA Network object + zone_info = pd.DataFrame( + {"abv": [a for i in interconnect for a in interconnect2abv[i]]} + ) + zone_info["country"] = zone_info["abv"].map(abv2country) + zone_info["time_zone"] = zone_info["abv"].map( + {a: t for a, t in abv2timezone.items()} + ) + zone_info["interconnect"] = zone_info["abv"].map( + {a: i for i in interconnect for a in interconnect2abv[i]} + ) + else: + raise ValueError("Invalid model") + + zones.update(get_loadzone_mapping(zone_info, model)) + zones.update(get_country_mapping(zone_info, model)) + zones.update(get_interconnect_mapping(zone_info, model)) + + return zones diff --git a/powersimdata/network/helpers.py b/powersimdata/network/helpers.py index 1f713b67f..b8b2938a5 100644 --- a/powersimdata/network/helpers.py +++ b/powersimdata/network/helpers.py @@ -36,6 +36,8 @@ def check_and_format_interconnect(interconnect, model="hifld"): except TypeError: raise TypeError("interconnect must be either str or an iterable of str") + interconnect = [i.replace(" ", "") for i in interconnect] + region = model2region[model] possible = model2interconnect[model] if len(set(interconnect) - ({region} | set(possible))) != 0: diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index a88e919ca..9f06ec254 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,3 +1,4 @@ +from powersimdata.network.constants.model import model2region from powersimdata.network.constants.plants import get_plants from powersimdata.network.constants.storage import get_storage from powersimdata.network.constants.zones import get_zones @@ -20,7 +21,7 @@ def __init__(self, model, interconnect=None): check_model(model) self.model = model interconnect = ( - ["USA"] + [model2region[model]] if interconnect is None else check_and_format_interconnect(interconnect, model=model) ) @@ -34,75 +35,57 @@ def __init__(self, model, interconnect=None): def area_to_loadzone(self, *args, **kwargs): """Map the query area to a list of loadzones, using the known grid model.""" - return area_to_loadzone( - self.model, *args, mappings=self.zones["mappings"], **kwargs - ) + return area_to_loadzone(self.model, *args, **kwargs) -def area_to_loadzone(grid_model, area, area_type=None, mappings=None): +def area_to_loadzone(model, area, area_type=None): """Map the query area to a list of loadzones. - :param str grid_model: the grid model to use to look up constants for mapping. - :param str area: one of: *loadzone*, *state*, *state abbreviation*, + :param str model: grid model to use to look up constants for mapping. + :param str area: one of *loadzone*, *state*, *state abbreviation*, *interconnect*, *'all'*. - :param str area_type: one of: *'loadzone'*, *'state'*, *'state_abbr'*, - *'interconnect'*. - :param iterable mappings: a set of strings, representing area types to use to map. - If None, all mappings are tried. - :return: (*set*) -- set of loadzone names associated with the query area. - :raises TypeError: if area is not None or str. - :raises ValueError: if area is invalid or the combination of area and area_type is - invalid. - :raises KeyError: if a mapping is provided which isn't present for a grid_model. - - .. 
note:: if area_type is not specified, the function will check the area in the - order of 'state', 'loadzone', 'state abbreviation', 'interconnect' and 'all'. + :param str area_type: one of *'loadzone'*, *'state'*/*'country'*, + *'state_abbr'*/'*country_abbr*', *'interconnect'*. If None, ``area`` will be + searched successively into *'state'*/*'country'*, *'loadzone'*, + *'state abbreviation'*/*'country abbreviation'*, *'interconnect'* and *'all'*. + :return: (*set*) -- set of loadzone names located in the query area. + :raises TypeError: + if ``area`` is not a str. + if ``area_type`` is not None or str. + :raises ValueError: + if ``area`` is invalid + if combination of ``area`` and ``area_type`` is invalid. """ + zones = ModelImmutables(model).zones + mappings = zones["mappings"] - def raise_invalid_area(area_type): - raise ValueError("Invalid area for area_type=%s" % area_type) - - zones = ModelImmutables(grid_model).zones - mappings = {"loadzone", "state", "state_abbr", "interconnect"} + if not isinstance(area, str): + raise TypeError("area must be a str") if area_type is not None and not isinstance(area_type, str): - raise TypeError("'area_type' should be either None or str.") + raise TypeError("area_type must be either None or str") + + division = [a for a in mappings if "abbr" in a][0].split("_")[0] + area2loadzone = { + f"{division}": lambda x: zones[f"{division}2loadzone"][x], + "loadzone": lambda x: zones["loadzone"].intersection({x}), + f"{division}_abbr": lambda x: zones["abv2loadzone"][x], + "interconnect": lambda x: zones["interconnect2loadzone"][x], + "all": lambda _: zones["loadzone"], + } + if area_type: - if area_type == "loadzone" and "loadzone" in mappings: - if area in zones["loadzone"]: - loadzone_set = {area} - else: - raise_invalid_area(area_type) - elif area_type == "state" and "state" in mappings: - if area in zones["abv2state"].values(): - loadzone_set = zones["state2loadzone"][area] - else: - raise_invalid_area(area_type) - elif area_type == "state_abbr" and "state_abbr" in mappings: - if area in zones.abv2state: - loadzone_set = zones["state2loadzone"][zones["abv2state"][area]] - else: - raise_invalid_area(area_type) - elif area_type == "interconnect" and "interconnect" in mappings: - if area in zones["interconnect2loadzone"]: - loadzone_set = zones["interconnect2loadzone"][area] - else: - raise_invalid_area(area_type) - else: - print(f"{area_type} is incorrect. Available area_types are: {mappings}.") - raise ValueError("Invalid area_type") + if area_type not in mappings: + raise ValueError(f"Invalid area type. Choose among {' | '.join(mappings)}") + if area not in zones[area_type]: + raise ValueError("Invalid area / area_type combination") + loadzone = area2loadzone[area_type](area) else: - if "state" in mappings and area in zones["abv2state"].values(): - loadzone_set = zones["state2loadzone"][area] - elif "loadzone" in mappings and area in zones["loadzone"]: - loadzone_set = {area} - elif "state" in mappings and area in zones["abv2state"]: - loadzone_set = zones["state2loadzone"][zones["abv2state"][area]] - elif "interconnect" in mappings and area in zones["interconnect2loadzone"]: - loadzone_set = zones["interconnect2loadzone"][area] - elif area == "all": - loadzone_set = zones["loadzone"] - else: - print("%s is incorrect." 
% area) + zones["all"] = "all" + loadzone = set().union( + *(area2loadzone[a](area) for a in area2loadzone if area in zones[a]) + ) + if len(loadzone) == 0: raise ValueError("Invalid area") - return loadzone_set + + return loadzone From cd5db672a8cf3ae2ac03096d331e0467b9fe2783 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 23 May 2022 14:46:20 -0700 Subject: [PATCH 09/59] test: add tests for interconnect_to_name --- powersimdata/network/tests/test_helpers.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/powersimdata/network/tests/test_helpers.py b/powersimdata/network/tests/test_helpers.py index b144bab0c..3d1c11da8 100644 --- a/powersimdata/network/tests/test_helpers.py +++ b/powersimdata/network/tests/test_helpers.py @@ -1,6 +1,10 @@ import pytest -from powersimdata.network.helpers import check_and_format_interconnect, check_model +from powersimdata.network.helpers import ( + check_and_format_interconnect, + check_model, + interconnect_to_name, +) from powersimdata.network.usa_tamu.model import TAMU @@ -18,6 +22,18 @@ def test_check_model_argument_value(): check_model("tamu") +def test_interconnect_to_name(): + assert ( + interconnect_to_name("ContinentalEurope", model="europe_tub") + == interconnect_to_name("Continental Europe", model="europe_tub") + == "ContinentalEurope" + ) + assert ( + interconnect_to_name(["Nordic", "ContinentalEurope"], model="europe_tub") + == "ContinentalEurope_Nordic" + ) + + def test_check_and_format_interconnect_argument_type(): with pytest.raises( TypeError, match="interconnect must be either str or an iterable of str" From ad1d161d85acb4e283fee75e302b22148024d629 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 5 May 2022 15:29:07 -0700 Subject: [PATCH 10/59] chore: add zenodo_get to list of dependencies --- Pipfile | 1 + Pipfile.lock | 468 ++++++++++++++++++++++++++++----------------------- 2 files changed, 259 insertions(+), 210 deletions(-) diff --git a/Pipfile b/Pipfile index b5b233c2c..53a9ca3c9 100644 --- a/Pipfile +++ b/Pipfile @@ -9,6 +9,7 @@ pytest = "*" coverage = "*" pytest-cov = "*" pypsa = "*" +zenodo_get = "*" [packages] networkx = "~=2.5" diff --git a/Pipfile.lock b/Pipfile.lock index 85916ad8b..fea416cc8 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a67c6723a40f49a1b5583825b2a9421be4285893bb4d3ccc39f96a385a9937e8" + "sha256": "29c8b39573ea573de449402e5c9e687ecacbf39fe479188b19df88c86ea009a7" }, "pipfile-spec": 6, "requires": {}, @@ -23,11 +23,11 @@ }, "azure-core": { "hashes": [ - "sha256:345b1b041faad7d0205b20d5697f1d0df344302e7aaa8501905580ff87bd0be5", - "sha256:923e492e72d103c768a643dfad331ce6b8ec1669575c7d0832fed19bffd119f7" + "sha256:28a01dfbaf0a6812c4e2a82d1642ea30956a9739f25bc77c9b23b91f4ea68f0f", + "sha256:c3e8a9a3ec9d89f59b5d5b2f19d19a30d76a5b5c0cee3788ecad3cb72b9bd028" ], "markers": "python_version >= '3.6'", - "version": "==1.24.0" + "version": "==1.23.1" }, "azure-storage-blob": { "hashes": [ @@ -55,11 +55,10 @@ }, "certifi": { "hashes": [ - "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", - "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" + "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", + "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" ], - "markers": "python_version >= '3.6'", - "version": "==2022.5.18.1" + "version": "==2021.10.8" }, "cffi": { "hashes": [ @@ -200,39 +199,37 @@ }, "networkx": { "hashes": [ - 
"sha256:51d6ae63c24dcd33901357688a2ad20d6bcd38f9a4c5307720048d3a8081059c", - "sha256:ae99c9b0d35e5b4a62cf1cfea01e5b3633d8d02f4a0ead69685b6e7de5b85eab" + "sha256:1a1e8fe052cc1b4e0339b998f6795099562a264a13a5af7a32cad45ab9d4e126", + "sha256:4a52cf66aed221955420e11b3e2e05ca44196b4829aab9576d4d439212b0a14f" ], "index": "pypi", - "version": "==2.8.2" + "version": "==2.8" }, "numpy": { "hashes": [ - "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207", - "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887", - "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e", - "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802", - "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077", - "sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af", - "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74", - "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5", - "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1", - "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0", - "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0", - "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e", - "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c", - "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c", - "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3", - "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72", - "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd", - "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6", - "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76", - "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32", - "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa", - "sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba" + "sha256:07a8c89a04997625236c5ecb7afe35a02af3896c8aa01890a849913a2309c676", + "sha256:08d9b008d0156c70dc392bb3ab3abb6e7a711383c3247b410b39962263576cd4", + "sha256:201b4d0552831f7250a08d3b38de0d989d6f6e4658b709a02a73c524ccc6ffce", + "sha256:2c10a93606e0b4b95c9b04b77dc349b398fdfbda382d2a39ba5a822f669a0123", + "sha256:3ca688e1b9b95d80250bca34b11a05e389b1420d00e87a0d12dc45f131f704a1", + "sha256:48a3aecd3b997bf452a2dedb11f4e79bc5bfd21a1d4cc760e703c31d57c84b3e", + "sha256:568dfd16224abddafb1cbcce2ff14f522abe037268514dd7e42c6776a1c3f8e5", + "sha256:5bfb1bb598e8229c2d5d48db1860bcf4311337864ea3efdbe1171fb0c5da515d", + "sha256:639b54cdf6aa4f82fe37ebf70401bbb74b8508fddcf4797f9fe59615b8c5813a", + "sha256:8251ed96f38b47b4295b1ae51631de7ffa8260b5b087808ef09a39a9d66c97ab", + "sha256:92bfa69cfbdf7dfc3040978ad09a48091143cffb778ec3b03fa170c494118d75", + "sha256:97098b95aa4e418529099c26558eeb8486e66bd1e53a6b606d684d0c3616b168", + "sha256:a3bae1a2ed00e90b3ba5f7bd0a7c7999b55d609e0c54ceb2b076a25e345fa9f4", + "sha256:c34ea7e9d13a70bf2ab64a2532fe149a9aced424cd05a2c4ba662fd989e3e45f", + "sha256:dbc7601a3b7472d559dc7b933b18b4b66f9aa7452c120e87dfb33d02008c8a18", + "sha256:e7927a589df200c5e23c57970bafbd0cd322459aa7b1ff73b7c2e84d6e3eae62", + "sha256:f8c1f39caad2c896bc0018f699882b345b2a63708008be29b1f355ebf6f933fe", + "sha256:f950f8845b480cffe522913d35567e29dd381b0dc7e4ce6a4a9f9156417d2430", + "sha256:fade0d4f4d292b6f39951b6836d7a3c7ef5b2347f3c420cd9820a1d90d794802", + 
"sha256:fdf3c08bce27132395d3c3ba1503cac12e17282358cb4bddc25cc46b0aca07aa" ], "index": "pypi", - "version": "==1.22.4" + "version": "==1.22.3" }, "oauthlib": { "hashes": [ @@ -342,40 +339,40 @@ }, "scipy": { "hashes": [ - "sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125", - "sha256:1166514aa3bbf04cb5941027c6e294a000bba0cf00f5cdac6c77f2dad479b434", - "sha256:1da52b45ce1a24a4a22db6c157c38b39885a990a566748fc904ec9f03ed8c6ba", - "sha256:23b22fbeef3807966ea42d8163322366dd89da9bebdc075da7034cee3a1441ca", - "sha256:28d2cab0c6ac5aa131cc5071a3a1d8e1366dad82288d9ec2ca44df78fb50e649", - "sha256:2ef0fbc8bcf102c1998c1f16f15befe7cffba90895d6e84861cd6c6a33fb54f6", - "sha256:3b69b90c9419884efeffaac2c38376d6ef566e6e730a231e15722b0ab58f0328", - "sha256:4b93ec6f4c3c4d041b26b5f179a6aab8f5045423117ae7a45ba9710301d7e462", - "sha256:4e53a55f6a4f22de01ffe1d2f016e30adedb67a699a310cdcac312806807ca81", - "sha256:6311e3ae9cc75f77c33076cb2794fb0606f14c8f1b1c9ff8ce6005ba2c283621", - "sha256:65b77f20202599c51eb2771d11a6b899b97989159b7975e9b5259594f1d35ef4", - "sha256:6cc6b33139eb63f30725d5f7fa175763dc2df6a8f38ddf8df971f7c345b652dc", - "sha256:70de2f11bf64ca9921fda018864c78af7147025e467ce9f4a11bc877266900a6", - "sha256:70ebc84134cf0c504ce6a5f12d6db92cb2a8a53a49437a6bb4edca0bc101f11c", - "sha256:83606129247e7610b58d0e1e93d2c5133959e9cf93555d3c27e536892f1ba1f2", - "sha256:93d07494a8900d55492401917a119948ed330b8c3f1d700e0b904a578f10ead4", - "sha256:9c4e3ae8a716c8b3151e16c05edb1daf4cb4d866caa385e861556aff41300c14", - "sha256:9dd4012ac599a1e7eb63c114d1eee1bcfc6dc75a29b589ff0ad0bb3d9412034f", - "sha256:9e3fb1b0e896f14a85aa9a28d5f755daaeeb54c897b746df7a55ccb02b340f33", - "sha256:a0aa8220b89b2e3748a2836fbfa116194378910f1a6e78e4675a095bcd2c762d", - "sha256:d3b3c8924252caaffc54d4a99f1360aeec001e61267595561089f8b5900821bb", - "sha256:e013aed00ed776d790be4cb32826adb72799c61e318676172495383ba4570aa4", - "sha256:f3e7a8867f307e3359cc0ed2c63b61a1e33a19080f92fe377bc7d49f646f2ec1" + "sha256:011d4386b53b933142f58a652aa0f149c9b9242abd4f900b9f4ea5fbafc86b89", + "sha256:16e09ef68b352d73befa8bcaf3ebe25d3941fe1a58c82909d5589856e6bc8174", + "sha256:31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd", + "sha256:38aa39b6724cb65271e469013aeb6f2ce66fd44f093e241c28a9c6bc64fd79ed", + "sha256:3d573228c10a3a8c32b9037be982e6440e411b443a6267b067cac72f690b8d56", + "sha256:3d9dd6c8b93a22bf9a3a52d1327aca7e092b1299fb3afc4f89e8eba381be7b59", + "sha256:559a8a4c03a5ba9fe3232f39ed24f86457e4f3f6c0abbeae1fb945029f092720", + "sha256:5e73343c5e0d413c1f937302b2e04fb07872f5843041bcfd50699aef6e95e399", + "sha256:723b9f878095ed994756fa4ee3060c450e2db0139c5ba248ee3f9628bd64e735", + "sha256:87b01c7d5761e8a266a0fbdb9d88dcba0910d63c1c671bdb4d99d29f469e9e03", + "sha256:8f4d059a97b29c91afad46b1737274cb282357a305a80bdd9e8adf3b0ca6a3f0", + "sha256:92b2c2af4183ed09afb595709a8ef5783b2baf7f41e26ece24e1329c109691a7", + "sha256:937d28722f13302febde29847bbe554b89073fbb924a30475e5ed7b028898b5f", + "sha256:a279e27c7f4566ef18bab1b1e2c37d168e365080974758d107e7d237d3f0f484", + "sha256:ad5be4039147c808e64f99c0e8a9641eb5d2fa079ff5894dcd8240e94e347af4", + "sha256:ae3e327da323d82e918e593460e23babdce40d7ab21490ddf9fc06dec6b91a18", + "sha256:bb7088e89cd751acf66195d2f00cf009a1ea113f3019664032d9075b1e727b6c", + "sha256:c17a1878d00a5dd2797ccd73623ceca9d02375328f6218ee6d921e1325e61aff", + "sha256:c2bae431d127bf0b1da81fc24e4bba0a84d058e3a96b9dd6475dfcb3c5e8761e", + "sha256:de2e80ee1d925984c2504812a310841c241791c5279352be4707cdcd7c255039", + 
"sha256:e6f0cd9c0bd374ef834ee1e0f0999678d49dcc400ea6209113d81528958f97c7", + "sha256:f3720d0124aced49f6f2198a6900304411dbbeed12f56951d7c66ebef05e3df6", + "sha256:f4a6d3b9f9797eb2d43938ac2c5d96d02aed17ef170c8b38f11798717523ddba" ], "index": "pypi", - "version": "==1.8.1" + "version": "==1.8.0" }, "setuptools": { "hashes": [ - "sha256:68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", - "sha256:a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7" + "sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8", + "sha256:47c7b0c0f8fc10eec4cf1e71c6fdadf8decaa74ffa087e68cd1c20db7ad6a592" ], "markers": "python_version >= '3.7'", - "version": "==62.3.2" + "version": "==62.1.0" }, "six": { "hashes": [ @@ -448,6 +445,13 @@ "index": "pypi", "version": "==22.3.0" }, + "certifi": { + "hashes": [ + "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", + "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + ], + "version": "==2021.10.8" + }, "cftime": { "hashes": [ "sha256:003abc800933a39d6f4ecec113f401aef9961642a4769aa1099ed34e0c1c3d46", @@ -484,6 +488,14 @@ ], "version": "==1.6.0" }, + "charset-normalizer": { + "hashes": [ + "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", + "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" + ], + "markers": "python_version >= '3'", + "version": "==2.0.12" + }, "click": { "hashes": [ "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", @@ -494,50 +506,50 @@ }, "coverage": { "hashes": [ - "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a", - "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6", - "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383", - "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f", - "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f", - "sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f", - "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c", - "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018", - "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720", - "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3", - "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf", - "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211", - "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39", - "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95", - "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41", - "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c", - "sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166", - "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49", - "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce", - "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088", - "sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6", - "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426", - "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df", - "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632", - "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3", - 
"sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08", - "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65", - "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea", - "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701", - "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5", - "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311", - "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7", - "sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d", - "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61", - "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c", - "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a", - "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055", - "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740", - "sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45", - "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052", - "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f" + "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9", + "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d", + "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf", + "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7", + "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6", + "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4", + "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059", + "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39", + "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536", + "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac", + "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c", + "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903", + "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d", + "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05", + "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684", + "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1", + "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f", + "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7", + "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca", + "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad", + "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca", + "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d", + "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92", + "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4", + "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf", + "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6", + "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1", + "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4", + "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359", + "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3", + 
"sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620", + "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512", + "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69", + "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2", + "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518", + "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0", + "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa", + "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4", + "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e", + "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1", + "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2" ], "index": "pypi", - "version": "==6.4" + "version": "==6.3.2" }, "cycler": { "hashes": [ @@ -562,6 +574,14 @@ "markers": "python_version >= '3.7'", "version": "==4.33.3" }, + "idna": { + "hashes": [ + "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", + "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" + ], + "markers": "python_version >= '3'", + "version": "==3.3" + }, "iniconfig": { "hashes": [ "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", @@ -706,11 +726,11 @@ }, "networkx": { "hashes": [ - "sha256:51d6ae63c24dcd33901357688a2ad20d6bcd38f9a4c5307720048d3a8081059c", - "sha256:ae99c9b0d35e5b4a62cf1cfea01e5b3633d8d02f4a0ead69685b6e7de5b85eab" + "sha256:1a1e8fe052cc1b4e0339b998f6795099562a264a13a5af7a32cad45ab9d4e126", + "sha256:4a52cf66aed221955420e11b3e2e05ca44196b4829aab9576d4d439212b0a14f" ], "index": "pypi", - "version": "==2.8.2" + "version": "==2.8" }, "numexpr": { "hashes": [ @@ -745,31 +765,29 @@ }, "numpy": { "hashes": [ - "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207", - "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887", - "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e", - "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802", - "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077", - "sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af", - "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74", - "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5", - "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1", - "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0", - "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0", - "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e", - "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c", - "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c", - "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3", - "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72", - "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd", - "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6", - "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76", - "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32", - "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa", - 
"sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba" + "sha256:07a8c89a04997625236c5ecb7afe35a02af3896c8aa01890a849913a2309c676", + "sha256:08d9b008d0156c70dc392bb3ab3abb6e7a711383c3247b410b39962263576cd4", + "sha256:201b4d0552831f7250a08d3b38de0d989d6f6e4658b709a02a73c524ccc6ffce", + "sha256:2c10a93606e0b4b95c9b04b77dc349b398fdfbda382d2a39ba5a822f669a0123", + "sha256:3ca688e1b9b95d80250bca34b11a05e389b1420d00e87a0d12dc45f131f704a1", + "sha256:48a3aecd3b997bf452a2dedb11f4e79bc5bfd21a1d4cc760e703c31d57c84b3e", + "sha256:568dfd16224abddafb1cbcce2ff14f522abe037268514dd7e42c6776a1c3f8e5", + "sha256:5bfb1bb598e8229c2d5d48db1860bcf4311337864ea3efdbe1171fb0c5da515d", + "sha256:639b54cdf6aa4f82fe37ebf70401bbb74b8508fddcf4797f9fe59615b8c5813a", + "sha256:8251ed96f38b47b4295b1ae51631de7ffa8260b5b087808ef09a39a9d66c97ab", + "sha256:92bfa69cfbdf7dfc3040978ad09a48091143cffb778ec3b03fa170c494118d75", + "sha256:97098b95aa4e418529099c26558eeb8486e66bd1e53a6b606d684d0c3616b168", + "sha256:a3bae1a2ed00e90b3ba5f7bd0a7c7999b55d609e0c54ceb2b076a25e345fa9f4", + "sha256:c34ea7e9d13a70bf2ab64a2532fe149a9aced424cd05a2c4ba662fd989e3e45f", + "sha256:dbc7601a3b7472d559dc7b933b18b4b66f9aa7452c120e87dfb33d02008c8a18", + "sha256:e7927a589df200c5e23c57970bafbd0cd322459aa7b1ff73b7c2e84d6e3eae62", + "sha256:f8c1f39caad2c896bc0018f699882b345b2a63708008be29b1f355ebf6f933fe", + "sha256:f950f8845b480cffe522913d35567e29dd381b0dc7e4ce6a4a9f9156417d2430", + "sha256:fade0d4f4d292b6f39951b6836d7a3c7ef5b2347f3c420cd9820a1d90d794802", + "sha256:fdf3c08bce27132395d3c3ba1503cac12e17282358cb4bddc25cc46b0aca07aa" ], "index": "pypi", - "version": "==1.22.4" + "version": "==1.22.3" }, "packaging": { "hashes": [ @@ -815,47 +833,47 @@ }, "pillow": { "hashes": [ - "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f", - "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d", - "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b", - "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c", - "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9", - "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546", - "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578", - "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1", - "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe", - "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098", - "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2", - "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a", - "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45", - "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530", - "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108", - "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1", - "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd", - "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0", - "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6", - "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c", - "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf", - "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4", - "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d", - 
"sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765", - "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602", - "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340", - "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c", - "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b", - "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84", - "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8", - "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92", - "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54", - "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601", - "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a", - "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf", - "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251", - "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a", - "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e" + "sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e", + "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595", + "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512", + "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c", + "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477", + "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a", + "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4", + "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e", + "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5", + "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378", + "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a", + "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652", + "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7", + "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a", + "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a", + "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6", + "sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165", + "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160", + "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331", + "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b", + "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458", + "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033", + "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8", + "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481", + "sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58", + "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7", + "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3", + "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea", + "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34", + "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3", + "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8", + 
"sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581", + "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244", + "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef", + "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0", + "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2", + "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97", + "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717" ], "markers": "python_version >= '3.7'", - "version": "==9.1.1" + "version": "==9.1.0" }, "platformdirs": { "hashes": [ @@ -890,30 +908,30 @@ }, "pyomo": { "hashes": [ - "sha256:1c3cc9d96e43baa72507a4cb5d52f779d541c42dac999759c289efe7f2a5ea15", - "sha256:3440bd5268e2c8193e9557d8d0a5b0c99d6eada909da1e17cb718a25805483b8", - "sha256:394ed6962064ebb745b8bf98a38692c9d49176b04482ea8d9c612ddcd45aa99b", - "sha256:45f8225faa53ac3c8b6551921276028b885d5ed9d96347021324ed2bdd9be2ef", - "sha256:56632731f8415cb1584201282e54d0b0fbaa218b3c498899beee12b2373edaa4", - "sha256:5e385c13549912f0f0d7c768ba32d3b242d66df9064cfd9e499cfaeacfd4ea6e", - "sha256:91b6f9a349b41c5e3a335692ed4ea00ec0ebcc27e7d23575e5f832289f8c214c", - "sha256:9662ed2e046a1edccc351510ef288893f5044a27fb214d632a7311e6fd5e04b4", - "sha256:a636a3a1c8314b8be85899cb6fac5d6a9a78fc75c6d58b74d3ec106ae5ed8f59", - "sha256:b5d8ec269515071a8c1d07966624f9bf79f4cb5a134e26d48292424c38e8da85", - "sha256:cf81b7b44403df4bbdd0b5023f859252476071f92a8166e43206758e51524ff3", - "sha256:d30138cbfe37507545bb270763e9eed7425a3d32a2e6c3b92a8f32eefa5159ce", - "sha256:d593dcbe22e7e8f7c434398b34d8b02d83e8cb83aef4390a2289f566cb2eb293" + "sha256:12b9c04c12775b4203de7902793ec64bc031fe14d622147fdd6c81e806e8c952", + "sha256:1b4e8cc4a5cb93ee9e5f7090052904339ff318cf9b9fb1463b26fd1bbd5c0f25", + "sha256:218011a6aef94fc4f5b353b1d1623df465c92e29870a1597a29caf66a001db71", + "sha256:2517f54f4f47eae329a129516de2a194797ca1c7e7f52bc1bfdc04942ff7374f", + "sha256:533fbf865b64b0c8ef3bb2f935a3e24ecedafb6f95801b631867f1d848ac846a", + "sha256:575bef8447561579f798c625606fa3c74a259a2f0e271e1442bcf3da5bfd379f", + "sha256:7d4aab869a5d412e607c5b1df3452aef72de077b2895d4276409ea87a59c25d2", + "sha256:90bd36d11b4f217e6362e6ca08e88a8d62c5cabb64da1776e1def08ca8c18ef3", + "sha256:929ad3e1c405767313f6a5b57e2d73815372f74b13cb590a6f724379aceb04a6", + "sha256:b548825301b6bd4073a0620a8265d956153d53c12fca37cc7184fa54fce96222", + "sha256:c5ca678afb12ebf42e4361ad5d8318d15b11375d3e26f14025ba9270cb226dcb", + "sha256:d5d593c32beb760a2bb766b6e701c7f0e17e247b1c464a91f9140a61ba379009", + "sha256:f5e155b159381d85f572cdcf4775fada5e4106ae94ef3d93a7c4de5a78deb0ca" ], "markers": "python_version >= '3.7'", - "version": "==6.4.1" + "version": "==6.4.0" }, "pyparsing": { "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" + "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954", + "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06" ], "markers": "python_full_version >= '3.6.8'", - "version": "==3.0.9" + "version": "==3.0.8" }, "pypsa": { "hashes": [ @@ -953,34 +971,42 @@ ], "version": "==2022.1" }, + "requests": { + "hashes": [ + "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", + "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + ], + "index": "pypi", + "version": "==2.27.1" + }, "scipy": { "hashes": 
[ - "sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125", - "sha256:1166514aa3bbf04cb5941027c6e294a000bba0cf00f5cdac6c77f2dad479b434", - "sha256:1da52b45ce1a24a4a22db6c157c38b39885a990a566748fc904ec9f03ed8c6ba", - "sha256:23b22fbeef3807966ea42d8163322366dd89da9bebdc075da7034cee3a1441ca", - "sha256:28d2cab0c6ac5aa131cc5071a3a1d8e1366dad82288d9ec2ca44df78fb50e649", - "sha256:2ef0fbc8bcf102c1998c1f16f15befe7cffba90895d6e84861cd6c6a33fb54f6", - "sha256:3b69b90c9419884efeffaac2c38376d6ef566e6e730a231e15722b0ab58f0328", - "sha256:4b93ec6f4c3c4d041b26b5f179a6aab8f5045423117ae7a45ba9710301d7e462", - "sha256:4e53a55f6a4f22de01ffe1d2f016e30adedb67a699a310cdcac312806807ca81", - "sha256:6311e3ae9cc75f77c33076cb2794fb0606f14c8f1b1c9ff8ce6005ba2c283621", - "sha256:65b77f20202599c51eb2771d11a6b899b97989159b7975e9b5259594f1d35ef4", - "sha256:6cc6b33139eb63f30725d5f7fa175763dc2df6a8f38ddf8df971f7c345b652dc", - "sha256:70de2f11bf64ca9921fda018864c78af7147025e467ce9f4a11bc877266900a6", - "sha256:70ebc84134cf0c504ce6a5f12d6db92cb2a8a53a49437a6bb4edca0bc101f11c", - "sha256:83606129247e7610b58d0e1e93d2c5133959e9cf93555d3c27e536892f1ba1f2", - "sha256:93d07494a8900d55492401917a119948ed330b8c3f1d700e0b904a578f10ead4", - "sha256:9c4e3ae8a716c8b3151e16c05edb1daf4cb4d866caa385e861556aff41300c14", - "sha256:9dd4012ac599a1e7eb63c114d1eee1bcfc6dc75a29b589ff0ad0bb3d9412034f", - "sha256:9e3fb1b0e896f14a85aa9a28d5f755daaeeb54c897b746df7a55ccb02b340f33", - "sha256:a0aa8220b89b2e3748a2836fbfa116194378910f1a6e78e4675a095bcd2c762d", - "sha256:d3b3c8924252caaffc54d4a99f1360aeec001e61267595561089f8b5900821bb", - "sha256:e013aed00ed776d790be4cb32826adb72799c61e318676172495383ba4570aa4", - "sha256:f3e7a8867f307e3359cc0ed2c63b61a1e33a19080f92fe377bc7d49f646f2ec1" + "sha256:011d4386b53b933142f58a652aa0f149c9b9242abd4f900b9f4ea5fbafc86b89", + "sha256:16e09ef68b352d73befa8bcaf3ebe25d3941fe1a58c82909d5589856e6bc8174", + "sha256:31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd", + "sha256:38aa39b6724cb65271e469013aeb6f2ce66fd44f093e241c28a9c6bc64fd79ed", + "sha256:3d573228c10a3a8c32b9037be982e6440e411b443a6267b067cac72f690b8d56", + "sha256:3d9dd6c8b93a22bf9a3a52d1327aca7e092b1299fb3afc4f89e8eba381be7b59", + "sha256:559a8a4c03a5ba9fe3232f39ed24f86457e4f3f6c0abbeae1fb945029f092720", + "sha256:5e73343c5e0d413c1f937302b2e04fb07872f5843041bcfd50699aef6e95e399", + "sha256:723b9f878095ed994756fa4ee3060c450e2db0139c5ba248ee3f9628bd64e735", + "sha256:87b01c7d5761e8a266a0fbdb9d88dcba0910d63c1c671bdb4d99d29f469e9e03", + "sha256:8f4d059a97b29c91afad46b1737274cb282357a305a80bdd9e8adf3b0ca6a3f0", + "sha256:92b2c2af4183ed09afb595709a8ef5783b2baf7f41e26ece24e1329c109691a7", + "sha256:937d28722f13302febde29847bbe554b89073fbb924a30475e5ed7b028898b5f", + "sha256:a279e27c7f4566ef18bab1b1e2c37d168e365080974758d107e7d237d3f0f484", + "sha256:ad5be4039147c808e64f99c0e8a9641eb5d2fa079ff5894dcd8240e94e347af4", + "sha256:ae3e327da323d82e918e593460e23babdce40d7ab21490ddf9fc06dec6b91a18", + "sha256:bb7088e89cd751acf66195d2f00cf009a1ea113f3019664032d9075b1e727b6c", + "sha256:c17a1878d00a5dd2797ccd73623ceca9d02375328f6218ee6d921e1325e61aff", + "sha256:c2bae431d127bf0b1da81fc24e4bba0a84d058e3a96b9dd6475dfcb3c5e8761e", + "sha256:de2e80ee1d925984c2504812a310841c241791c5279352be4707cdcd7c255039", + "sha256:e6f0cd9c0bd374ef834ee1e0f0999678d49dcc400ea6209113d81528958f97c7", + "sha256:f3720d0124aced49f6f2198a6900304411dbbeed12f56951d7c66ebef05e3df6", + "sha256:f4a6d3b9f9797eb2d43938ac2c5d96d02aed17ef170c8b38f11798717523ddba" ], "index": 
"pypi", - "version": "==1.8.1" + "version": "==1.8.0" }, "six": { "hashes": [ @@ -1027,7 +1053,7 @@ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_version < '3.11'", + "markers": "python_version >= '3.7'", "version": "==2.0.1" }, "typing-extensions": { @@ -1038,6 +1064,20 @@ "markers": "python_version >= '3.7'", "version": "==4.2.0" }, + "urllib3": { + "hashes": [ + "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", + "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.9" + }, + "wget": { + "hashes": [ + "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061" + ], + "version": "==3.2" + }, "xarray": { "hashes": [ "sha256:398344bf7d170477aaceff70210e11ebd69af6b156fe13978054d25c48729440", @@ -1045,6 +1085,14 @@ ], "markers": "python_version >= '3.8'", "version": "==2022.3.0" + }, + "zenodo-get": { + "hashes": [ + "sha256:b71cd784df7877749a7bdb00513964ce0c9f6b62aad3311a8725a3b279d6f4f5", + "sha256:d595ebfe7bbad2217bf7f3b7923a0daa9d36751248f1ebeab0f425143061a033" + ], + "index": "pypi", + "version": "==1.3.4" } } } From f7eb58ec7873a69287912cf917feb8824e763596 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Wed, 4 May 2022 16:27:21 -0700 Subject: [PATCH 11/59] fix: swap ERCOT and Texas in interconnect mapping --- powersimdata/network/constants/region/usa.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index e68a55e40..44db19e9a 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -144,12 +144,10 @@ def _substitute(entry): mapping = dict() sub = "Texas" if model == "usa_tamu" else "ERCOT" - name = interconnect_to_name(zone["interconnect"].replace(sub, "ERCOT").unique()) - + name = interconnect_to_name(zone["interconnect"].unique(), model=model) mapping["interconnect"] = ast.literal_eval( - repr(name2component[name]).replace("ERCOT", sub) - ) | {name} - + repr(name2component).replace("ERCOT", sub) + )[name] | {name} mapping["name2interconnect"] = _substitute(name2interconnect) mapping["name2component"] = _substitute(name2component) mapping["interconnect2timezone"] = _substitute(interconnect2timezone) From 7d1fcd02c909785fc1e9d5c987b1519085eda6d8 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 18 Apr 2022 14:47:15 -0700 Subject: [PATCH 12/59] feat: factor out check_model --- powersimdata/network/helpers.py | 21 ++++++++++++++------- powersimdata/network/model.py | 14 ++------------ powersimdata/network/tests/test_helpers.py | 18 +++++++++++------- 3 files changed, 27 insertions(+), 26 deletions(-) diff --git a/powersimdata/network/helpers.py b/powersimdata/network/helpers.py index c60a8de77..2a7ef458e 100644 --- a/powersimdata/network/helpers.py +++ b/powersimdata/network/helpers.py @@ -1,23 +1,30 @@ from powersimdata.network.constants.model import model2interconnect, model2region +def check_model(model): + """Check that a grid model exists. + + :param str model: grid model name + :raises TypeError: if ``model`` is not a str. + :raises ValueError: if grid model does not exist. 
+ """ + if not isinstance(model, str): + raise TypeError("model must be a str") + if model not in model2region: + raise ValueError(f"Invalid model. Choose among {' | '.join(model2region)}") + + def check_and_format_interconnect(interconnect, model="hifld"): """Checks interconnect in a grid model. :param str/iterable interconnect: interconnect name(s). :param str model: the grid model. :return: (*set*) -- interconnect(s) - :raises TypeError: if ``interconnect`` and ``model`` are not str. + :raises TypeError: if ``interconnect`` is not a str. :raises ValueError: - if ``model`` does not exist. if ``interconnect`` is not in the model. if combination of interconnect is incorrect. """ - if not isinstance(model, str): - raise TypeError("model must be a str") - if model not in model2region: - raise ValueError(f"Invalid model. Choose among {' | '.join(model2region)}") - if isinstance(interconnect, str): interconnect = [interconnect] try: diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index c1542bfce..0b0e93fa8 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,9 +1,9 @@ from importlib import import_module -from powersimdata.network.constants.model import model2region from powersimdata.network.constants.storage import storage from powersimdata.network.helpers import ( check_and_format_interconnect, + check_model, interconnect_to_name, ) @@ -16,7 +16,7 @@ class ModelImmutables: def __init__(self, model): """Constructor.""" - self._check_model(model) + check_model(model) self.model = model self.plants = self._import_constants("plants") @@ -26,16 +26,6 @@ def __init__(self, model): self.check_and_format_interconnect = check_and_format_interconnect self.interconnect_to_name = interconnect_to_name - @staticmethod - def _check_model(model): - """Check that a grid model exists. - - :param str model: grid model name - :raises ValueError: if grid model does not exist. - """ - if model not in model2region: - raise ValueError(f"Invalid model. Choose among {' | '.join(model2region)}") - def _import_constants(self, kind): """Import constants related to the grid model. 
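
A minimal, illustrative sketch (not part of the committed diff) of how the factored-out
helper behaves, based on the implementation added to powersimdata/network/helpers.py in
this patch; the tests added below exercise the same error paths:

    from powersimdata.network.helpers import check_model

    check_model("usa_tamu")  # known grid model: returns silently
    check_model("tamu")      # unknown model: raises ValueError listing the valid models
    check_model(1)           # non-string: raises TypeError("model must be a str")
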
diff --git a/powersimdata/network/tests/test_helpers.py b/powersimdata/network/tests/test_helpers.py index 3d7658382..4d2900bfe 100644 --- a/powersimdata/network/tests/test_helpers.py +++ b/powersimdata/network/tests/test_helpers.py @@ -1,6 +1,6 @@ import pytest -from powersimdata.network.helpers import check_and_format_interconnect +from powersimdata.network.helpers import check_and_format_interconnect, check_model from powersimdata.network.usa_tamu.model import TAMU @@ -8,6 +8,16 @@ def _assert_lists_equal(a, b): assert sorted(a) == sorted(b) +def test_check_model_argument_type(): + with pytest.raises(TypeError, match="model must be a str"): + check_model(1) + + +def test_check_model_argument_value(): + with pytest.raises(ValueError): + check_model("tamu") + + def test_check_and_format_interconnect_argument_type(): with pytest.raises( TypeError, match="interconnect must be either str or an iterable of str" @@ -19,14 +29,8 @@ def test_check_and_format_interconnect_argument_type(): ): check_and_format_interconnect([42, "Western"]) - with pytest.raises(TypeError, match="model must be a str"): - check_and_format_interconnect("Eastern", model=1) - def test_check_and_format_interconnect_argument_value(): - with pytest.raises(ValueError): - check_and_format_interconnect("Eastern", model="tamu") - interconnect = "Canada" with pytest.raises(ValueError): check_and_format_interconnect(interconnect) From af672296ffca452db76a6389d4a2144bfd16e963 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Wed, 4 May 2022 13:10:11 -0700 Subject: [PATCH 13/59] feat: add pypsa-eur grid model --- .gitignore | 1 + powersimdata/network/europe_tub/__init__.py | 0 powersimdata/network/europe_tub/model.py | 139 ++++++++++++++++++++ 3 files changed, 140 insertions(+) create mode 100644 powersimdata/network/europe_tub/__init__.py create mode 100644 powersimdata/network/europe_tub/model.py diff --git a/.gitignore b/.gitignore index 01ba1505f..b74b162a7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # This is specific to this package powersimdata/utility/.server_user config.ini +powersimdata/network/europe_tub/data/* # The remainder of this file taken from github/gitignore # https://github.com/github/gitignore/blob/master/Python.gitignore diff --git a/powersimdata/network/europe_tub/__init__.py b/powersimdata/network/europe_tub/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/network/europe_tub/model.py b/powersimdata/network/europe_tub/model.py new file mode 100644 index 000000000..7a3a5cd1e --- /dev/null +++ b/powersimdata/network/europe_tub/model.py @@ -0,0 +1,139 @@ +import os +import shutil +from zipfile import ZipFile + +from powersimdata.network.constants.region.europe import ( + abv2country, + abv2timezone, + interconnect2abv, +) +from powersimdata.network.helpers import ( + check_and_format_interconnect, + interconnect_to_name, +) +from powersimdata.network.model import ModelImmutables +from powersimdata.utility.helpers import _check_import + +pypsa = _check_import("pypsa") +zenodo_get = _check_import("zenodo_get") + + +class TUB: + """PyPSA Europe network. + + :param str/iterable interconnect: interconnect name(s). + :param int reduction: reduction parameter (number of nodes in network). If None, + the full network is loaded. + :param bool overwrite: the existing dataset is deleted and a new dataset is + downloaded from zenodo. 
+ """ + + def __init__(self, interconnect, reduction=None, overwrite=False): + """Constructor.""" + self.grid_model = "europe_tub" + self.interconnect = check_and_format_interconnect( + interconnect, model=self.grid_model + ) + self.data_loc = os.path.join(os.path.dirname(__file__), "data") + self.zenodo_record_id = "3601881" + self.reduction = reduction + + if overwrite: + self.remove_data() + + self.retrieve_data() + + def remove_data(self): + """Remove data stored on disk""" + print("Removing PyPSA-Eur dataset") + shutil.rmtree(self.data_loc) + + def retrieve_data(self): + """Fetch data""" + zenodo_get.zenodo_get([self.zenodo_record_id, "-o", f"{self.data_loc}"]) + with ZipFile(os.path.join(self.data_loc, "networks.zip"), "r") as zip_network: + zip_network.extractall(self.data_loc) + + def build(self): + """Build network""" + path = os.path.join(self.data_loc, "networks") + if self.reduction is None: + network = pypsa.Network(os.path.join(path, "elec_s.nc")) + elif os.path.exists(os.path.join(path, f"elec_s_{self.reduction}.nc")): + network = pypsa.Network(os.path.join(path, f"elec_s_{self.reduction}.nc")) + else: + raise ValueError( + "Invalid Resolution. Choose among: None | 1024 | 512 | 256 | 128 | 37" + ) + id2zone = {i: l for i, l in enumerate(network.buses.index)} + zone2id = {l: i for i, l in id2zone.items()} + + if self.interconnect == ["Europe"]: + self.network = network + self.id2zone = id2zone + self.zone2id = zone2id + else: + filter = list( # noqa: F841 + interconnect2abv[ + interconnect_to_name(self.interconnect, model=self.grid_model) + ] + ) + self.network = network[network.buses.query("country == @filter").index] + self.zone2id = {l: zone2id[l] for l in self.network.buses.index} + self.id2zone = {i: l for l, i in self.zone2id.items()} + + self.model_immutables = self._generate_model_immutables() + + def _generate_model_immutables(self): + """Generate the model immutables""" + mapping = ModelImmutables(self.grid_model, interconnect=self.interconnect) + + # loadzone + mapping.zones["loadzone"] = set(self.zone2id) + mapping.zones["id2loadzone"] = self.id2zone + mapping.zones["loadzone2id"] = self.zone2id + mapping.zones["loadzone2abv"] = self.network.buses["country"].to_dict() + mapping.zones["loadzone2country"] = ( + self.network.buses["country"].map(abv2country).to_dict() + ) + mapping.zones["loadzone2interconnect"] = { + l: mapping.zones["abv2interconnect"][a] + for l, a in mapping.zones["loadzone2abv"].items() + } + mapping.zones["id2timezone"] = { + self.zone2id[l]: abv2timezone[a] + for l, a in mapping.zones["loadzone2abv"].items() + } + mapping.zones["timezone2id"] = { + t: i for i, t in mapping.zones["id2timezone"].items() + } + + # country + mapping.zones["country2loadzone"] = { + abv2country[a]: set(l) + for a, l in self.network.buses.groupby("country").groups.items() + } + mapping.zones["abv2loadzone"] = { + a: set(l) for a, l in self.network.buses.groupby("country").groups.items() + } + mapping.zones["abv2id"] = { + a: {self.zone2id[l] for l in l_in_country} + for a, l_in_country in mapping.zones["abv2loadzone"].items() + } + mapping.zones["id2abv"] = { + i: mapping.zones["loadzone2abv"][l] for i, l in self.id2zone.items() + } + + # interconnect + mapping.zones["interconnect2loadzone"] = { + i: set().union( + *(mapping.zones["abv2loadzone"][a] for a in a_in_interconnect) + ) + for i, a_in_interconnect in mapping.zones["interconnect2abv"].items() + } + mapping.zones["interconnect2id"] = { + i: set().union(*({self.zone2id[l]} for l in l_in_interconnect)) + for i, 
l_in_interconnect in mapping.zones["interconnect2loadzone"].items() + } + + return mapping From eac9ebd2e640c3819a3ef4882c10f4e5f605a0c5 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Fri, 6 May 2022 15:17:30 -0700 Subject: [PATCH 14/59] fix: cast Int64Index single element to int --- powersimdata/network/constants/region/usa.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index 44db19e9a..4f63962f6 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -204,7 +204,9 @@ def get_loadzone_mapping(zone): mapping["timezone2id"] = { t: set(i) for t, i in zone.groupby("time_zone").groups.items() } - mapping["loadzone2id"] = {l: i for l, i in zone.groupby("zone_name").groups.items()} + mapping["loadzone2id"] = { + l: i[0] for l, i in zone.groupby("zone_name").groups.items() + } mapping["loadzone2state"] = dict(zip(zone["zone_name"], zone["state"])) mapping["loadzone2abv"] = dict(zip(zone["zone_name"], zone["abv"])) mapping["loadzone2interconnect"] = dict( From 5b7b0047615cbd1cc64d1067618afc618f409d5f Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 18 Apr 2022 15:11:58 -0700 Subject: [PATCH 15/59] feat: factor out plant constants --- .../network/{hifld => }/constants/plants.py | 0 powersimdata/network/model.py | 8 +- .../network/usa_tamu/constants/plants.py | 105 ------------------ 3 files changed, 7 insertions(+), 106 deletions(-) rename powersimdata/network/{hifld => }/constants/plants.py (100%) delete mode 100644 powersimdata/network/usa_tamu/constants/plants.py diff --git a/powersimdata/network/hifld/constants/plants.py b/powersimdata/network/constants/plants.py similarity index 100% rename from powersimdata/network/hifld/constants/plants.py rename to powersimdata/network/constants/plants.py diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 0b0e93fa8..edee8bc04 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -19,13 +19,19 @@ def __init__(self, model): check_model(model) self.model = model - self.plants = self._import_constants("plants") + self.plants = self._import_generator_related_constants() self.storage = storage[model] self.zones = self._import_constants("zones") self.check_and_format_interconnect = check_and_format_interconnect self.interconnect_to_name = interconnect_to_name + @staticmethod + def _import_generator_related_constants(): + """Import generator related constants.""" + mod = import_module("powersimdata.network.constants.plants") + return {a: getattr(mod, a) for a in dir(mod)} + def _import_constants(self, kind): """Import constants related to the grid model. 
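
A minimal, illustrative sketch (not part of the committed diff) of how the relocated
plant constants are reached once this patch is applied, assuming the ModelImmutables
constructor shown in the model.py diff above; the example values come from the
constants module being moved:

    from powersimdata.network.model import ModelImmutables

    mi = ModelImmutables("usa_tamu")
    mi.plants["renewable_resources"]   # {"solar", "wind", "wind_offshore"}
    mi.plants["carbon_per_mwh"]["ng"]  # 469 kg of CO2 per MWh of natural gas
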
diff --git a/powersimdata/network/usa_tamu/constants/plants.py b/powersimdata/network/usa_tamu/constants/plants.py deleted file mode 100644 index ef950613f..000000000 --- a/powersimdata/network/usa_tamu/constants/plants.py +++ /dev/null @@ -1,105 +0,0 @@ -_exports = [ - "all_resources", - "carbon_per_mmbtu", - "carbon_per_mwh", - "carbon_resources", - "clean_resources", - "label2type", - "nox_per_mwh", - "renewable_resources", - "so2_per_mwh", - "type2color", - "type2hatchcolor", - "type2label", -] - -type2color = { - "wind": "xkcd:green", - "solar": "xkcd:amber", - "hydro": "xkcd:light blue", - "ng": "xkcd:orchid", - "nuclear": "xkcd:silver", - "coal": "xkcd:light brown", - "geothermal": "xkcd:hot pink", - "dfo": "xkcd:royal blue", - "biomass": "xkcd:dark green", - "other": "xkcd:melon", - "storage": "xkcd:orange", - "wind_offshore": "xkcd:teal", - "solar_curtailment": "xkcd:amber", - "wind_curtailment": "xkcd:green", - "wind_offshore_curtailment": "xkcd:teal", -} - -type2label = { - "nuclear": "Nuclear", - "geothermal": "Geo-thermal", - "coal": "Coal", - "dfo": "DFO", - "hydro": "Hydro", - "ng": "Natural Gas", - "solar": "Solar", - "wind": "Wind", - "wind_offshore": "Wind Offshore", - "biomass": "Biomass", - "other": "Other", - "storage": "Storage", - "solar_curtailment": "Solar Curtailment", - "wind_curtailment": "Wind Curtailment", - "wind_offshore_curtailment": "Offshore Wind Curtailment", -} - -type2hatchcolor = { - "solar_curtailment": "xkcd:grey", - "wind_curtailment": "xkcd:grey", - "wind_offshore_curtailment": "xkcd:grey", -} - -label2type = {value: key for key, value in type2label.items()} - -renewable_resources = {"solar", "wind", "wind_offshore"} -carbon_resources = {"coal", "ng", "dfo"} -clean_resources = renewable_resources | {"geothermal", "hydro", "nuclear"} -all_resources = carbon_resources | {"other"} | clean_resources - - -# MWh to kilograms of CO2 -# Source: IPCC Special Report on Renewable Energy Sources and Climate Change -# Mitigation (2011), Annex II: Methodology, Table A.II.4, 50th percentile -# http://www.ipcc-wg3.de/report/IPCC_SRREN_Annex_II.pdf -carbon_per_mwh = { - "coal": 1001, - "dfo": 840, - "ng": 469, -} - -# MMBTu of fuel per hour to kilograms of CO2 per hour -# Source: https://www.epa.gov/energy/greenhouse-gases-equivalencies-calculator-calculations-and-references -# = (Heat rate MMBTu/h) * (kg C/mmbtu) * (mass ratio CO2/C) -carbon_per_mmbtu = { - "coal": 26.05, - "dfo": 20.31, - "ng": 14.46, -} - -# MWh to kilograms of NOx -# Source: EPA eGrid 2018, tab 'US18' (U.S. summary), columns AN to AP -# https://www.epa.gov/egrid/egrid-questions-and-answers -nox_per_mwh = { - "coal": 0.658, - "dfo": 1.537, - "ng": 0.179, -} - -# MWh to kilograms of SO2 -# Source: EPA eGrid 2018, tab 'US18' (U.S. 
summary), columns AV to AX -# https://www.epa.gov/egrid/egrid-questions-and-answers -so2_per_mwh = { - "coal": 0.965, - "dfo": 2.189, - "ng": 0.010, -} - - -def __dir__(): - return sorted(_exports) From 839046aa57a55b4618b22f3301849b7a3d71710c Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 10 May 2022 12:53:33 -0700 Subject: [PATCH 16/59] test: add tests for area_to_loadzone --- powersimdata/network/tests/test_model.py | 57 ++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 powersimdata/network/tests/test_model.py diff --git a/powersimdata/network/tests/test_model.py b/powersimdata/network/tests/test_model.py new file mode 100644 index 000000000..8d119f80d --- /dev/null +++ b/powersimdata/network/tests/test_model.py @@ -0,0 +1,57 @@ +import pytest + +from powersimdata.network.model import area_to_loadzone + + +def test_area_to_loadzone_argument_type(): + with pytest.raises(TypeError, match="area must be a str"): + area_to_loadzone("europe_tub", 3) + + with pytest.raises(TypeError, match="area_type must be either None or str"): + area_to_loadzone("europe_tub", "all", area_type=["interconnect"]) + + +def test_area_to_loadzone_argument_value(): + with pytest.raises(ValueError): + area_to_loadzone("usa_tamu", "all", area_type="province") + + with pytest.raises(ValueError, match="Invalid area / area_type combination"): + area_to_loadzone("usa_tamu", "California", area_type="loadzone") + + with pytest.raises(ValueError, match="Invalid area / area_type combination"): + area_to_loadzone("usa_tamu", "WA", area_type="interconnect") + + with pytest.raises(ValueError, match="Invalid area / area_type combination"): + area_to_loadzone("europe_tub", "France", area_type="country_abbr") + + +def test_area_to_loadzone(): + assert area_to_loadzone("usa_tamu", "El Paso") == {"El Paso"} + assert area_to_loadzone("usa_tamu", "Texas", area_type="state") == area_to_loadzone( + "usa_tamu", "Texas" + ) + assert area_to_loadzone("usa_tamu", "Texas", area_type="state") == { + "East Texas", + "South Central", + "Far West", + "North Central", + "West", + "North", + "Texas Panhandle", + "South", + "East", + "Coast", + "El Paso", + } + + assert area_to_loadzone("usa_tamu", "Texas", area_type="interconnect") == { + "South Central", + "Far West", + "North Central", + "West", + "North", + "South", + "East", + "Coast", + } + assert area_to_loadzone("usa_tamu", "MT") == {"Montana Eastern", "Montana Western"} From b8607a84283ff7e6f23252cd8bd9b3377afb03b8 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 9 May 2022 12:03:58 -0700 Subject: [PATCH 17/59] feat: add state_abbr mapping --- powersimdata/network/constants/region/usa.py | 1 + 1 file changed, 1 insertion(+) diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index 4f63962f6..e78785031 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -173,6 +173,7 @@ def get_state_mapping(zone): mapping["state"] = set(zone["state"]) mapping["abv"] = set(zone["abv"]) + mapping["state_abbr"] = set(zone["abv"]) mapping["state2loadzone"] = { k: set(v) for k, v in zone.groupby("state")["zone_name"].unique().to_dict().items() From e766abd8fd13463a971d2b3bd478b34388f380a7 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 19 Apr 2022 15:00:10 -0700 Subject: [PATCH 18/59] refactor: change access to storage and plant constants --- powersimdata/network/constants/__init__.py | 0 powersimdata/network/constants/plants.py | 39 ++++++++++++---------- 
powersimdata/network/constants/storage.py | 34 ++++++++++++------- powersimdata/network/hifld/model.py | 4 +-- powersimdata/network/model.py | 13 +++----- powersimdata/network/usa_tamu/model.py | 4 +-- 6 files changed, 52 insertions(+), 42 deletions(-) create mode 100644 powersimdata/network/constants/__init__.py diff --git a/powersimdata/network/constants/__init__.py b/powersimdata/network/constants/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/network/constants/plants.py b/powersimdata/network/constants/plants.py index ef950613f..e131fddd2 100644 --- a/powersimdata/network/constants/plants.py +++ b/powersimdata/network/constants/plants.py @@ -1,18 +1,3 @@ -_exports = [ - "all_resources", - "carbon_per_mmbtu", - "carbon_per_mwh", - "carbon_resources", - "clean_resources", - "label2type", - "nox_per_mwh", - "renewable_resources", - "so2_per_mwh", - "type2color", - "type2hatchcolor", - "type2label", -] - type2color = { "wind": "xkcd:green", "solar": "xkcd:amber", @@ -101,5 +86,25 @@ } -def __dir__(): - return sorted(_exports) +def get_plants(model): + """Return plant constants. + + :param str model: grid model + :return: (*dict*) -- plants information. + """ + exports = [ + "all_resources", + "carbon_resources", + "renewable_resources", + "clean_resources", + "carbon_per_mwh", + "carbon_per_mmbtu", + "nox_per_mwh", + "so2_per_mwh", + "label2type", + "type2color", + "type2hatchcolor", + "type2label", + ] + if model in ["usa_tamu", "hifld"]: + return {a: eval(a) for a in exports} diff --git a/powersimdata/network/constants/storage.py b/powersimdata/network/constants/storage.py index 0064f24a5..cb176c8d2 100644 --- a/powersimdata/network/constants/storage.py +++ b/powersimdata/network/constants/storage.py @@ -1,14 +1,24 @@ +from powersimdata.network.helpers import check_model + storage = { - "usa_tamu": { - "duration": 4, - "min_stor": 0.05, - "max_stor": 0.95, - "InEff": 0.9, - "OutEff": 0.9, - "energy_value": 20, - "LossFactor": 0, - "terminal_min": 0, - "terminal_max": 1, - } + "duration": 4, + "min_stor": 0.05, + "max_stor": 0.95, + "InEff": 0.9, + "OutEff": 0.9, + "energy_value": 20, + "LossFactor": 0, + "terminal_min": 0, + "terminal_max": 1, } -storage.update({"hifld": storage["usa_tamu"]}) + + +def get_storage(model): + """Return storage constants. + + :param str model: grid model + :return: (*dict*) -- storage information. 
+ """ + check_model(model) + + return storage diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index 3b0730286..c11b14ed8 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -1,7 +1,7 @@ import os from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.network.constants.storage import storage +from powersimdata.network.constants.storage import get_storage class HIFLD(AbstractGrid): @@ -17,4 +17,4 @@ def __init__(self, interconnect): self._set_data_loc(os.path.dirname(__file__)) self._build_network(interconnect, model) - self.storage.update(storage[model]) + self.storage.update(get_storage(model)) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index edee8bc04..8d778084a 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,6 +1,7 @@ from importlib import import_module -from powersimdata.network.constants.storage import storage +from powersimdata.network.constants.plants import get_plants +from powersimdata.network.constants.storage import get_storage from powersimdata.network.helpers import ( check_and_format_interconnect, check_model, @@ -19,19 +20,13 @@ def __init__(self, model): check_model(model) self.model = model - self.plants = self._import_generator_related_constants() - self.storage = storage[model] + self.plants = get_plants(model) + self.storage = get_storage(model) self.zones = self._import_constants("zones") self.check_and_format_interconnect = check_and_format_interconnect self.interconnect_to_name = interconnect_to_name - @staticmethod - def _import_generator_related_constants(): - """Import generator related constants.""" - mod = import_module("powersimdata.network.constants.plants") - return {a: getattr(mod, a) for a in dir(mod)} - def _import_constants(self, kind): """Import constants related to the grid model. diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index a4527abbc..d2e53018a 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -1,7 +1,7 @@ import os from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.network.constants.storage import storage +from powersimdata.network.constants.storage import get_storage class TAMU(AbstractGrid): @@ -17,4 +17,4 @@ def __init__(self, interconnect): self._set_data_loc(os.path.dirname(__file__)) self._build_network(interconnect, model) - self.storage.update(storage[model]) + self.storage.update(get_storage(model)) From 199a3a0ad184fee9b05b730df354beeab533a453 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 10 May 2022 15:17:26 -0700 Subject: [PATCH 19/59] refactor: enable european grid model in check functions --- powersimdata/input/check.py | 35 +++++++++++++++-------------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/powersimdata/input/check.py b/powersimdata/input/check.py index bc792ed17..46c48faeb 100644 --- a/powersimdata/input/check.py +++ b/powersimdata/input/check.py @@ -299,19 +299,20 @@ def _check_grid_type(grid): raise TypeError(f"grid must be a {_grid.Grid} object") -def _check_areas_and_format(areas, grid_model="usa_tamu"): +def _check_areas_and_format(areas, mi=ModelImmutables("usa_tamu")): """Ensure that areas are valid. Duplicates are removed and state abbreviations are converted to their actual name. :param str/list/tuple/set areas: areas(s) to check. 
Could be load zone name(s), state name(s)/abbreviation(s) or interconnect(s). - :param str grid_model: grid model. - :raises TypeError: if areas is not a list/tuple/set of str. - :raises ValueError: if areas is empty or not valid. + :param powersimdata.network.model.ModelImmutables mi: immutables of a grid model. + :raises TypeError: if ``areas`` is not a list/tuple/set of str. + :raises ValueError: if ``areas`` is empty or not valid. :return: (*set*) -- areas as a set. State abbreviations are converted to state names. """ - mi = ModelImmutables(grid_model) + division = [a for a in mi.zones["mappings"] if "abbr" in a][0].split("_")[0] + if isinstance(areas, str): areas = {areas} elif isinstance(areas, (list, set, tuple)): @@ -322,12 +323,7 @@ def _check_areas_and_format(areas, grid_model="usa_tamu"): raise TypeError("areas must be a str or a list/tuple/set of str") if len(areas) == 0: raise ValueError("areas must be non-empty") - all_areas = ( - mi.zones["loadzone"] - | mi.zones["abv"] - | mi.zones["state"] - | mi.zones["interconnect"] - ) + all_areas = set().union(*(mi.zones[z] for z in mi.zones["mappings"])) if not areas <= all_areas: diff = areas - all_areas raise ValueError("invalid area(s): %s" % " | ".join(diff)) @@ -335,21 +331,20 @@ def _check_areas_and_format(areas, grid_model="usa_tamu"): abv_in_areas = [z for z in areas if z in mi.zones["abv"]] for a in abv_in_areas: areas.remove(a) - areas.add(mi.zones["abv2state"][a]) + areas.add(mi.zones[f"abv2{division}"][a]) return areas -def _check_resources_and_format(resources, grid_model="usa_tamu"): +def _check_resources_and_format(resources, mi=ModelImmutables("usa_tamu")): """Ensure that resources are valid and convert variable to a set. :param str/list/tuple/set resources: resource(s) to check. - :param str grid_model: grid model. + :param powersimdata.network.model.ModelImmutables mi: immutables of a grid model. :raises TypeError: if resources is not a list/tuple/set of str. :raises ValueError: if resources is empty or not valid. :return: (*set*) -- resources as a set. """ - mi = ModelImmutables(grid_model) if isinstance(resources, str): resources = {resources} elif isinstance(resources, (list, set, tuple)): @@ -366,17 +361,17 @@ def _check_resources_and_format(resources, grid_model="usa_tamu"): return resources -def _check_resources_are_renewable_and_format(resources, grid_model="usa_tamu"): +def _check_resources_are_renewable_and_format( + resources, mi=ModelImmutables("usa_tamu") +): """Ensure that resources are valid renewable resources and convert variable to a set. - + :param powersimdata.network.model.ModelImmutables mi: immutables of a grid model. :param str/list/tuple/set resources: resource(s) to analyze. - :param str grid_model: grid model. :raises ValueError: if resources are not renewables. 
return: (*set*) -- resources as a set """ - mi = ModelImmutables(grid_model) - resources = _check_resources_and_format(resources, grid_model=grid_model) + resources = _check_resources_and_format(resources, mi=mi) if not resources <= mi.plants["renewable_resources"]: diff = resources - mi.plants["all_resources"] raise ValueError("invalid renewable resource(s): %s" % " | ".join(diff)) From e147c62fa6ed7cec7b0803279489be1cc8733eca Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 10 May 2022 12:45:04 -0700 Subject: [PATCH 20/59] fix: correct list of loadzones for Texas as state or interconnect --- powersimdata/network/constants/region/usa.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index e78785031..afc06f008 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -153,13 +153,13 @@ def _substitute(entry): mapping["interconnect2timezone"] = _substitute(interconnect2timezone) mapping["interconnect2abv"] = _substitute(interconnect2abv) mapping["interconnect2loadzone"] = { - i: set(zone.set_index("abv")["zone_name"].loc[list(a)]) - for i, a in mapping["interconnect2abv"].items() + i: set(l) + for i, l in zone.set_index("zone_name").groupby("interconnect").groups.items() } mapping["interconnect2id"] = { - i: set(zone.reset_index().set_index("abv")["zone_id"].loc[list(a)]) - for i, a in mapping["interconnect2abv"].items() + i: set(id) for i, id in zone.groupby("interconnect").groups.items() } + return mapping From 3e6307f918cccafd81451334e7761182fecf7b17 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Fri, 22 Apr 2022 23:17:15 -0700 Subject: [PATCH 21/59] fix: correct interconnects in test_helpers --- powersimdata/input/tests/test_helpers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/powersimdata/input/tests/test_helpers.py b/powersimdata/input/tests/test_helpers.py index 333e4735b..70aade44b 100644 --- a/powersimdata/input/tests/test_helpers.py +++ b/powersimdata/input/tests/test_helpers.py @@ -217,7 +217,7 @@ def test_get_plant_id_in_interconnects_argument_value(grid): def test_get_plant_id_in_interconnects(grid): - arg = (("Western", grid), (["Texas_Western", "Eastern"], grid)) + arg = (("Western", grid), (["Texas", "Western", "Eastern"], grid)) expected = (["Western"], ["Texas", "Western", "Eastern"]) for a, e in zip(arg, expected): plant_id = get_plant_id_in_interconnects(a[0], a[1]) @@ -286,7 +286,7 @@ def test_get_plant_id_for_resources_in_interconnects_argument_type(grid): arg = ( (1, 1, grid), ([1, 2, 3], {4, 5, 5}, grid), - (["solar", "ng", "gothermal"], "Texas_Western", 1), + (["solar", "ng", "gothermal"], "Texas", 1), ) for a in arg: with pytest.raises(TypeError): @@ -306,7 +306,7 @@ def test_get_plant_id_for_resources_in_interconnects_argument_value(grid): def test_get_plant_id_for_resources_in_interconnects(grid): arg = ( ("solar", ["Western"], grid), - (["nuclear", "wind"], ["Texas_Western"], grid), + (["nuclear", "wind"], ["Texas", "Western"], grid), (["geothermal"], ["Western", "Eastern"], grid), ) expected = ( From b839c9efd204f0b6c05cbc0364bab2f00cfa3596 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 10 May 2022 14:07:21 -0700 Subject: [PATCH 22/59] test: add tests for check functions --- powersimdata/input/tests/test_check.py | 32 ++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/powersimdata/input/tests/test_check.py 
b/powersimdata/input/tests/test_check.py index 3adb79ed2..d2da7b83f 100644 --- a/powersimdata/input/tests/test_check.py +++ b/powersimdata/input/tests/test_check.py @@ -24,9 +24,18 @@ _check_time_series, check_grid, ) +from powersimdata.network.europe_tub.model import TUB +from powersimdata.network.model import ModelImmutables from powersimdata.tests.mock_scenario import MockScenario +@pytest.fixture +def europe(): + tub = TUB("Europe", reduction=128) + tub.build() + return tub + + @pytest.fixture def mock_plant(): return { @@ -218,16 +227,25 @@ def test_check_areas_and_format_argument_value(): _check_areas_and_format(a) -def test_check_areas_and_format(): +def test_check_areas_and_format(europe): _check_areas_and_format(["Western", "NY", "El Paso", "Arizona"]) - areas = _check_areas_and_format(["California", "CA", "NY", "TX", "MT", "WA"]) - assert areas == {"Washington", "Texas", "Montana", "California", "New York"} + assert _check_areas_and_format(["California", "CA", "NY", "TX", "MT", "WA"]) == { + "Washington", + "Texas", + "Montana", + "California", + "New York", + } + assert _check_areas_and_format( + ["FR", "Germany", "Slovakia", "IT", "Italy", "NO", "Norway", "Nordic", "GB5 9"], + mi=europe.model_immutables, + ) == {"France", "Germany", "Slovakia", "Italy", "Norway", "Nordic", "GB5 9"} def test_check_resources_and_format_argument_type(): arg = ( 1, - {"coal": [1, 2, 3], "htdro": [4, 5, 6]}, + {"coal": [1, 2, 3], "hydro": [4, 5, 6]}, [1, 2, 3, 4], (1, 2, 3, 4), {1, 2, 3, 4}, @@ -247,8 +265,9 @@ def test_check_resources_and_format_argument_value(): def test_check_resources_and_format(): _check_resources_and_format(["dfo", "wind", "solar", "ng"]) - _check_resources_and_format("wind_offshore") + _check_resources_and_format("wind_offshore", mi=ModelImmutables("europe_tub")) _check_resources_and_format({"nuclear"}) + _check_resources_and_format("geothermal", mi=ModelImmutables("europe_tub")) def test_check_resources_are_renewable_and_format_argument_value(): @@ -260,6 +279,9 @@ def test_check_resources_are_renewable_and_format(): _check_resources_are_renewable_and_format(["wind_offshore", "wind"]) _check_resources_are_renewable_and_format("solar") _check_resources_are_renewable_and_format({"wind"}) + _check_resources_are_renewable_and_format( + {"solar"}, mi=ModelImmutables("europe_tub") + ) def test_check_areas_are_in_grid_and_format_argument_type(mock_grid): From c19b96233e21a98318e841bf1514320ede68d2c1 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 5 May 2022 09:26:49 -0700 Subject: [PATCH 23/59] refactor: remove dependency on ModelImmutables object in create state --- powersimdata/scenario/create.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/powersimdata/scenario/create.py b/powersimdata/scenario/create.py index 26bc14cb7..a56d39920 100644 --- a/powersimdata/scenario/create.py +++ b/powersimdata/scenario/create.py @@ -13,7 +13,7 @@ from powersimdata.input.profile_input import ProfileInput from powersimdata.input.transform_grid import TransformGrid from powersimdata.input.transform_profile import TransformProfile -from powersimdata.network.model import ModelImmutables +from powersimdata.network.helpers import check_model, interconnect_to_name from powersimdata.scenario.execute import Execute from powersimdata.scenario.state import State @@ -205,14 +205,12 @@ class _Builder: def __init__(self, grid_model, interconnect, table): """Constructor.""" - mi = ModelImmutables(grid_model) - - self.grid_model = mi.model - self.interconnect = 
mi.interconnect_to_name(interconnect, self.grid_model) + check_model(grid_model) + self.grid_model = grid_model self.base_grid = Grid(interconnect, source=grid_model) self.change_table = ChangeTable(self.base_grid) - + self.interconnect = interconnect_to_name(interconnect, grid_model) self.existing = table[table.interconnect == self.interconnect] def get_ct(self): From c2a825b69fe67dfb086a1d72cad784d27168df86 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Fri, 22 Apr 2022 23:20:38 -0700 Subject: [PATCH 24/59] feat: factor out zone constants --- .../design/transmission/statelines.py | 4 +- powersimdata/input/check.py | 2 +- powersimdata/input/helpers.py | 8 +- .../region}/__init__.py | 0 powersimdata/network/constants/region/usa.py | 237 ++++++++++++++++ powersimdata/network/constants/zones.py | 18 ++ powersimdata/network/helpers.py | 32 ++- powersimdata/network/hifld/constants/zones.py | 246 ----------------- powersimdata/network/model.py | 22 +- .../network/usa_tamu/constants/__init__.py | 0 .../network/usa_tamu/constants/zones.py | 258 ------------------ 11 files changed, 295 insertions(+), 532 deletions(-) rename powersimdata/network/{hifld/constants => constants/region}/__init__.py (100%) create mode 100644 powersimdata/network/constants/region/usa.py create mode 100644 powersimdata/network/constants/zones.py delete mode 100644 powersimdata/network/hifld/constants/zones.py delete mode 100644 powersimdata/network/usa_tamu/constants/__init__.py delete mode 100644 powersimdata/network/usa_tamu/constants/zones.py diff --git a/powersimdata/design/transmission/statelines.py b/powersimdata/design/transmission/statelines.py index 685fdb468..6230ad117 100644 --- a/powersimdata/design/transmission/statelines.py +++ b/powersimdata/design/transmission/statelines.py @@ -1,6 +1,3 @@ -from powersimdata.network.usa_tamu.constants.zones import id2abv - - def classify_interstate_intrastate(scenario): """Classifies branches in a change_table as interstate or intrastate. @@ -27,6 +24,7 @@ def _classify_interstate_intrastate(ct, grid): """ branch = grid.branch + id2abv = grid.model_immutables.zones["id2abv"] upgraded_branches = {"interstate": [], "intrastate": []} if "branch" not in ct or "branch_id" not in ct["branch"]: diff --git a/powersimdata/input/check.py b/powersimdata/input/check.py index 939408af6..bc792ed17 100644 --- a/powersimdata/input/check.py +++ b/powersimdata/input/check.py @@ -211,7 +211,7 @@ def _check_connected_components(grid, error_messages): num_connected_components = len([c for c in nx.connected_components(g)]) if len(grid.interconnect) == 1: # Check for e.g. 
['USA'] interconnect, which is really three interconnects - interconnect_aliases = grid.model_immutables.zones["interconnect_combinations"] + interconnect_aliases = grid.model_immutables.zones["name2interconnect"] if grid.interconnect[0] in interconnect_aliases: num_interconnects = len(interconnect_aliases[grid.interconnect[0]]) else: diff --git a/powersimdata/input/helpers.py b/powersimdata/input/helpers.py index a4b52bddb..ff19a77f9 100644 --- a/powersimdata/input/helpers.py +++ b/powersimdata/input/helpers.py @@ -187,17 +187,13 @@ def get_plant_id_in_interconnects(interconnects, grid): :return: (*set*) -- list of plant id """ areas = _check_areas_are_in_grid_and_format({"interconnect": interconnects}, grid) - loadzones = set.union( + return set.union( *( - grid.model_immutables.zones["interconnect2loadzone"][i] + set(grid.plant.groupby("interconnect").groups[i]) for i in areas["interconnect"] ) ) - plant = grid.plant - plant_id = plant[(plant.zone_name.isin(loadzones))].index - return set(plant_id) - def get_plant_id_in_states(states, grid): """Get plant id for plants in state(s). diff --git a/powersimdata/network/hifld/constants/__init__.py b/powersimdata/network/constants/region/__init__.py similarity index 100% rename from powersimdata/network/hifld/constants/__init__.py rename to powersimdata/network/constants/region/__init__.py diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py new file mode 100644 index 000000000..e68a55e40 --- /dev/null +++ b/powersimdata/network/constants/region/usa.py @@ -0,0 +1,237 @@ +import ast +from itertools import combinations + +from powersimdata.network.constants.model import model2interconnect +from powersimdata.network.helpers import get_zone_info, interconnect_to_name + +abv2state = { + "AK": "Alaska", + "AL": "Alabama", + "AR": "Arkansas", + "AZ": "Arizona", + "CA": "California", + "CO": "Colorado", + "CT": "Connecticut", + "DE": "Delaware", + "FL": "Florida", + "GA": "Georgia", + "HI": "Hawaii", + "IA": "Iowa", + "ID": "Idaho", + "IL": "Illinois", + "IN": "Indiana", + "KS": "Kansas", + "KY": "Kentucky", + "LA": "Louisiana", + "MA": "Massachusetts", + "MD": "Maryland", + "ME": "Maine", + "MI": "Michigan", + "MN": "Minnesota", + "MO": "Missouri", + "MS": "Mississippi", + "MT": "Montana", + "NC": "North Carolina", + "ND": "North Dakota", + "NE": "Nebraska", + "NH": "New Hampshire", + "NJ": "New Jersey", + "NM": "New Mexico", + "NV": "Nevada", + "NY": "New York", + "OH": "Ohio", + "OK": "Oklahoma", + "OR": "Oregon", + "PA": "Pennsylvania", + "RI": "Rhode Island", + "SC": "South Carolina", + "SD": "South Dakota", + "TN": "Tennessee", + "TX": "Texas", + "UT": "Utah", + "VA": "Virginia", + "VT": "Vermont", + "WA": "Washington", + "WI": "Wisconsin", + "WV": "West Virginia", + "WY": "Wyoming", +} + + +name2interconnect = { + interconnect_to_name(i): set(i) + for c in range(1, 4) + for i in combinations(["ERCOT", "Eastern", "Western"], c) +} + +name2component = name2interconnect.copy() +name2component.update({"USA": set(name2interconnect) - {"USA"}}) + +interconnect2timezone = { + interconnect_to_name("USA"): "ETC/GMT+6", + interconnect_to_name("Eastern"): "ETC/GMT+5", + interconnect_to_name("ERCOT"): "ETC/GMT+6", + interconnect_to_name("Western"): "ETC/GMT+8", + interconnect_to_name(["ERCOT", "Western"]): "ETC/GMT+7", + interconnect_to_name(["ERCOT", "Eastern"]): "ETC/GMT+5", + interconnect_to_name(["Eastern", "Western"]): "ETC/GMT+6", +} + +interconnect2abv = { + "Eastern": { + "ME", + "NH", + "VT", + 
"MA", + "RI", + "CT", + "NY", + "NJ", + "PA", + "DE", + "MD", + "VA", + "NC", + "SC", + "GA", + "FL", + "AL", + "MS", + "TN", + "KY", + "WV", + "OH", + "MI", + "IN", + "IL", + "WI", + "MN", + "IA", + "MO", + "AR", + "LA", + "OK", + "KS", + "NE", + "SD", + "ND", + }, + "ERCOT": {"TX"}, + "Western": {"WA", "OR", "CA", "NV", "AZ", "UT", "NM", "CO", "WY", "ID", "MT"}, +} + +cb = [i for j in range(2, 4) for i in combinations(["ERCOT", "Eastern", "Western"], j)] +for c in cb: + interconnect2abv[interconnect_to_name(c)] = { + a for i in c for a in interconnect2abv[i] + } + + +def get_interconnect_mapping(zone, model): + """Return interconnect mapping. + + :param pandas.DataFrame zone: information on zones of a grid model. + :param str model: the grid model. + :return: (*dict*) -- mappings of interconnect to other areas. + """ + + def _substitute(entry): + return { + i: ast.literal_eval(repr(entry).replace("ERCOT", sub))[i] + for i in mapping["interconnect"] + } + + mapping = dict() + sub = "Texas" if model == "usa_tamu" else "ERCOT" + + name = interconnect_to_name(zone["interconnect"].replace(sub, "ERCOT").unique()) + + mapping["interconnect"] = ast.literal_eval( + repr(name2component[name]).replace("ERCOT", sub) + ) | {name} + + mapping["name2interconnect"] = _substitute(name2interconnect) + mapping["name2component"] = _substitute(name2component) + mapping["interconnect2timezone"] = _substitute(interconnect2timezone) + mapping["interconnect2abv"] = _substitute(interconnect2abv) + mapping["interconnect2loadzone"] = { + i: set(zone.set_index("abv")["zone_name"].loc[list(a)]) + for i, a in mapping["interconnect2abv"].items() + } + mapping["interconnect2id"] = { + i: set(zone.reset_index().set_index("abv")["zone_id"].loc[list(a)]) + for i, a in mapping["interconnect2abv"].items() + } + return mapping + + +def get_state_mapping(zone): + """Return state mapping. + + :param pandas.DataFrame zone: information on zones of a grid model. + :return: (*dict*) -- mappings of states to other areas. + """ + mapping = dict() + + mapping["state"] = set(zone["state"]) + mapping["abv"] = set(zone["abv"]) + mapping["state2loadzone"] = { + k: set(v) + for k, v in zone.groupby("state")["zone_name"].unique().to_dict().items() + } + mapping["abv2loadzone"] = { + k: set(v) + for k, v in zone.groupby("abv")["zone_name"].unique().to_dict().items() + } + mapping["abv2id"] = {k: set(v) for k, v in zone.groupby("abv").groups.items()} + mapping["id2abv"] = {k: v for k, v in zone["abv"].to_dict().items()} + mapping["state2abv"] = dict(zip(zone["state"], zone["abv"])) + mapping["abv2state"] = dict(zip(zone["abv"], zone["state"])) + mapping["abv2interconnect"] = dict(zip(zone["abv"], zone["interconnect"])) + + return mapping + + +def get_loadzone_mapping(zone): + """Return loadzone mapping + + :param pandas.DataFrame zone: information on zones of a grid model. 
+ :return: (*dict*) -- mappings of loadzones to other areas + """ + mapping = dict() + + mapping["loadzone"] = set(zone["zone_name"]) + mapping["id2timezone"] = zone["time_zone"].to_dict() + mapping["id2loadzone"] = zone["zone_name"].to_dict() + mapping["timezone2id"] = { + t: set(i) for t, i in zone.groupby("time_zone").groups.items() + } + mapping["loadzone2id"] = {l: i for l, i in zone.groupby("zone_name").groups.items()} + mapping["loadzone2state"] = dict(zip(zone["zone_name"], zone["state"])) + mapping["loadzone2abv"] = dict(zip(zone["zone_name"], zone["abv"])) + mapping["loadzone2interconnect"] = dict( + zip(zone["zone_name"], zone["interconnect"]) + ) + + return mapping + + +def get_zones(interconnect, model): + """Return zone constants. + + :param list interconnect: interconnect(s). + :param str model: the grid model. + :return: (*dict*) -- zones information. + """ + query = model2interconnect[model] if "USA" in interconnect else interconnect # noqa + zone_info = get_zone_info(model=model).query("interconnect == @query") + zone_info["abv"] = zone_info["state"].map({s: a for a, s in abv2state.items()}) + + zones = dict() + zones["mappings"] = {"loadzone", "state", "state_abbr", "interconnect"} + + zones.update(get_loadzone_mapping(zone_info)) + zones.update(get_state_mapping(zone_info)) + zones.update(get_interconnect_mapping(zone_info, model)) + + return zones diff --git a/powersimdata/network/constants/zones.py b/powersimdata/network/constants/zones.py new file mode 100644 index 000000000..64588003a --- /dev/null +++ b/powersimdata/network/constants/zones.py @@ -0,0 +1,18 @@ +from importlib import import_module + +from powersimdata.network.constants.model import model2region + + +def get_zones(interconnect, model): + """Return zone constants. + + :param list interconnect: interconnect(s). + :param str model: the grid model. + :return: (*dict*) -- zones information. + """ + mod = import_module( + f"powersimdata.network.constants.region.{model2region[model].lower()}" + ) + zones = getattr(mod, "get_zones") + + return zones(interconnect, model) diff --git a/powersimdata/network/helpers.py b/powersimdata/network/helpers.py index 2a7ef458e..1f713b67f 100644 --- a/powersimdata/network/helpers.py +++ b/powersimdata/network/helpers.py @@ -1,3 +1,7 @@ +import os + +import pandas as pd + from powersimdata.network.constants.model import model2interconnect, model2region @@ -19,11 +23,11 @@ def check_and_format_interconnect(interconnect, model="hifld"): :param str/iterable interconnect: interconnect name(s). :param str model: the grid model. - :return: (*set*) -- interconnect(s) + :return: (*list*) -- interconnect(s) :raises TypeError: if ``interconnect`` is not a str. :raises ValueError: if ``interconnect`` is not in the model. - if combination of interconnect is incorrect. + if combination of interconnects is incorrect. """ if isinstance(interconnect, str): interconnect = [interconnect] @@ -41,7 +45,7 @@ def check_and_format_interconnect(interconnect, model="hifld"): if region in interconnect and len(interconnect) > 1: raise ValueError(f"{region} cannot be paired") if len(set(possible) - set(interconnect)) == 0: - raise ValueError(f"Use {region} instead") + interconnect = [region] return interconnect @@ -49,7 +53,25 @@ def interconnect_to_name(interconnect, model="hifld"): """Return name of interconnect or collection of interconnects for a grid model.
- :param list interconnect: interconnect name(s). + :param str/iterable interconnect: interconnect name(s). :param str model: the grid model. + :return: (*str*): name of grid model. """ - return "_".join(sorted(check_and_format_interconnect(interconnect, model))) + return "_".join(sorted(check_and_format_interconnect(interconnect, model=model))) + + +def get_zone_info(model="hifld"): + """Return information located in the zone CSV file of the model. + + :param str model: the grid model. + :return: (*pandas.DataFrame*) -- information on the zones of the model. + :raises FileNotFoundError: if file enclosing the geographical information of the + grid model can't be found. + """ + check_model(model) + + path = os.path.join(os.path.dirname(__file__), model, "data", "zone.csv") + if os.path.exists(path): + return pd.read_csv(path, index_col=0) + else: + raise FileNotFoundError(f"File {path} cannot be found") diff --git a/powersimdata/network/hifld/constants/zones.py b/powersimdata/network/hifld/constants/zones.py deleted file mode 100644 index cde476e0d..000000000 --- a/powersimdata/network/hifld/constants/zones.py +++ /dev/null @@ -1,246 +0,0 @@ -import os - -import pandas as pd - -_exports = [ - "abv", - "abv2interconnect", - "abv2loadzone", - "abv2state", - "id2abv", - "id2loadzone", - "id2timezone", - "interconnect", - "interconnect2abv", - "interconnect2id", - "interconnect2loadzone", - "interconnect2timezone", - "interconnect_combinations", - "loadzone", - "loadzone2id", - "loadzone2interconnect", - "loadzone2state", - "mappings", - "state", - "state2abv", - "state2loadzone", - "timezone2id", -] - -mappings = {"loadzone", "state", "state_abbr", "interconnect"} - -# Define combinations of interconnects -interconnect_combinations = { - "USA": {"Eastern", "Western", "ERCOT"}, -} - - -# Map state abbreviations to state name -abv2state = { - "AK": "Alaska", - "AL": "Alabama", - "AR": "Arkansas", - "AZ": "Arizona", - "CA": "California", - "CO": "Colorado", - "CT": "Connecticut", - "DE": "Delaware", - "FL": "Florida", - "GA": "Georgia", - "HI": "Hawaii", - "IA": "Iowa", - "ID": "Idaho", - "IL": "Illinois", - "IN": "Indiana", - "KS": "Kansas", - "KY": "Kentucky", - "LA": "Louisiana", - "MA": "Massachusetts", - "MD": "Maryland", - "ME": "Maine", - "MI": "Michigan", - "MN": "Minnesota", - "MO": "Missouri", - "MS": "Mississippi", - "MT": "Montana", - "NC": "North Carolina", - "ND": "North Dakota", - "NE": "Nebraska", - "NH": "New Hampshire", - "NJ": "New Jersey", - "NM": "New Mexico", - "NV": "Nevada", - "NY": "New York", - "OH": "Ohio", - "OK": "Oklahoma", - "OR": "Oregon", - "PA": "Pennsylvania", - "RI": "Rhode Island", - "SC": "South Carolina", - "SD": "South Dakota", - "TN": "Tennessee", - "TX": "Texas", - "UT": "Utah", - "VA": "Virginia", - "VT": "Vermont", - "WA": "Washington", - "WI": "Wisconsin", - "WV": "West Virginia", - "WY": "Wyoming", -} - - -# Map state name to state abbreviations -state2abv = {value: key for key, value in abv2state.items()} - - -# Map zones to higher-level aggregations using the information in zone.csv -zone_csv_path = os.path.join(os.path.dirname(__file__), "..", "data", "zone.csv") -zone_df = pd.read_csv(zone_csv_path, index_col=0) - -# load zone id to load zone name -id2loadzone = zone_df["zone_name"].to_dict() -# load zone name to load zone id -loadzone2id = {v: k for k, v in id2loadzone.items()} -# Map state name to load zone name -state2loadzone = { - k: set(v) for k, v in zone_df.groupby("state").zone_name.unique().to_dict().items() -} -# Map interconnect name
to load zone name -interconnect2loadzone = { - k: set(v) - for k, v in zone_df.groupby("interconnect").zone_name.unique().to_dict().items() -} -interconnect2loadzone["USA"] = ( - interconnect2loadzone["Eastern"] - | interconnect2loadzone["Western"] - | interconnect2loadzone["ERCOT"] -) -# Map interconnect to load zone id -interconnect2id = { - k: set(zone_df.isin(v).query("zone_name == True").index) - for k, v in interconnect2loadzone.items() -} - -# Map load zone id to state abbreviations -id2abv = {k: state2abv[v] for k, v in zone_df.state.to_dict().items()} - - -# Map state abbreviations to load zone name -abv2loadzone = { - state2abv[state]: loadzone for state, loadzone in state2loadzone.items() -} - - -# Map load zone name to state name -loadzone2state = {} -for state, zone_set in state2loadzone.items(): - loadzone2state.update({zone: state for zone in zone_set}) - - -# Map load zone name to interconnect name -loadzone2interconnect = { - zone: interconnect - for interconnect, zone_set in interconnect2loadzone.items() - for zone in zone_set - if interconnect not in interconnect_combinations -} - - -# Map interconnect name to state abbreviations -# Note: states which span interconnects are assigned to the one they're 'most' in. -interconnect2abv = { - "Eastern": { - "ME", - "NH", - "VT", - "MA", - "RI", - "CT", - "NY", - "NJ", - "PA", - "DE", - "MD", - "VA", - "NC", - "SC", - "GA", - "FL", - "AL", - "MS", - "TN", - "KY", - "WV", - "OH", - "MI", - "IN", - "IL", - "WI", - "MN", - "IA", - "MO", - "AR", - "LA", - "OK", - "KS", - "NE", - "SD", - "ND", - }, - "ERCOT": {"TX"}, - "Western": {"WA", "OR", "CA", "NV", "AZ", "UT", "NM", "CO", "WY", "ID", "MT"}, -} -interconnect2abv["USA"] = ( - interconnect2abv["Eastern"] - | interconnect2abv["Western"] - | interconnect2abv["ERCOT"] -) - - -# Map state abbreviations to interconnect name -abv2interconnect = {} -for k, v in interconnect2abv.items(): - if k in interconnect_combinations: - continue - for s in v: - abv2interconnect[s] = k - - -# List of interconnect name -interconnect = set(interconnect2abv.keys()) - - -# List of state name -state = set(state2abv.keys()) - - -# List of state abbreviations -abv = set(abv2state.keys()) - - -# List of load zone name -loadzone = set(loadzone2interconnect.keys()) - -# Map interconnect name to time zone -interconnect2timezone = { - "USA": "ETC/GMT+6", - "Eastern": "ETC/GMT+5", - "ERCOT": "ETC/GMT+6", - "Western": "ETC/GMT+8", - "Eastern_ERCOT": "ETC/GMT+5", - "Eastern_Western": "ETC/GMT+6", - "ERCOT_Western": "ETC/GMT+7", -} - - -# Map load zone IDs to time zones -# Note: load zones in > 1 time zone are put in the one where most load centers reside -id2timezone = zone_df["time_zone"].to_dict() - -# Map time zones to load zone IDs -timezone2id = {k: set(v) for k, v in zone_df.groupby("time_zone").groups.items()} - - -def __dir__(): - return sorted(_exports) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 8d778084a..a88e919ca 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -1,7 +1,6 @@ -from importlib import import_module - from powersimdata.network.constants.plants import get_plants from powersimdata.network.constants.storage import get_storage +from powersimdata.network.constants.zones import get_zones from powersimdata.network.helpers import ( check_and_format_interconnect, check_model, @@ -13,29 +12,26 @@ class ModelImmutables: """Immutables for a grid model. :param str model: grid model name. 
+ :param str interconnect: interconnect of grid model. """ - def __init__(self, model): + def __init__(self, model, interconnect=None): """Constructor.""" check_model(model) self.model = model + interconnect = ( + ["USA"] + if interconnect is None + else check_and_format_interconnect(interconnect, model=model) + ) self.plants = get_plants(model) self.storage = get_storage(model) - self.zones = self._import_constants("zones") + self.zones = get_zones(interconnect, model) self.check_and_format_interconnect = check_and_format_interconnect self.interconnect_to_name = interconnect_to_name - def _import_constants(self, kind): - """Import constants related to the grid model. - - :param str kind: either *'plants'*, *'storage'* or *'zones'*. - :return: (*dict*) -- constants of the grid model - """ - mod = import_module(f"powersimdata.network.{self.model}.constants.{kind}") - return {a: getattr(mod, a) for a in dir(mod)} - def area_to_loadzone(self, *args, **kwargs): """Map the query area to a list of loadzones, using the known grid model.""" return area_to_loadzone( diff --git a/powersimdata/network/usa_tamu/constants/__init__.py b/powersimdata/network/usa_tamu/constants/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/powersimdata/network/usa_tamu/constants/zones.py b/powersimdata/network/usa_tamu/constants/zones.py deleted file mode 100644 index 8b8338bca..000000000 --- a/powersimdata/network/usa_tamu/constants/zones.py +++ /dev/null @@ -1,258 +0,0 @@ -import os -from collections import defaultdict - -import pandas as pd - -_exports = [ - "abv", - "abv2id", - "abv2interconnect", - "abv2loadzone", - "abv2state", - "id2abv", - "id2loadzone", - "id2timezone", - "interconnect", - "interconnect2abv", - "interconnect2id", - "interconnect2loadzone", - "interconnect2timezone", - "interconnect_combinations", - "loadzone", - "loadzone2id", - "loadzone2interconnect", - "loadzone2state", - "mappings", - "state", - "state2abv", - "state2loadzone", - "timezone2id", -] - -mappings = {"loadzone", "state", "state_abbr", "interconnect"} - -# Define combinations of interconnects -interconnect_combinations = { - "USA": {"Eastern", "Western", "Texas"}, - "Texas_Western": {"Western", "Texas"}, -} - - -# Map state abbreviations to state name -abv2state = { - "AK": "Alaska", - "AL": "Alabama", - "AR": "Arkansas", - "AZ": "Arizona", - "CA": "California", - "CO": "Colorado", - "CT": "Connecticut", - "DE": "Delaware", - "FL": "Florida", - "GA": "Georgia", - "HI": "Hawaii", - "IA": "Iowa", - "ID": "Idaho", - "IL": "Illinois", - "IN": "Indiana", - "KS": "Kansas", - "KY": "Kentucky", - "LA": "Louisiana", - "MA": "Massachusetts", - "MD": "Maryland", - "ME": "Maine", - "MI": "Michigan", - "MN": "Minnesota", - "MO": "Missouri", - "MS": "Mississippi", - "MT": "Montana", - "NC": "North Carolina", - "ND": "North Dakota", - "NE": "Nebraska", - "NH": "New Hampshire", - "NJ": "New Jersey", - "NM": "New Mexico", - "NV": "Nevada", - "NY": "New York", - "OH": "Ohio", - "OK": "Oklahoma", - "OR": "Oregon", - "PA": "Pennsylvania", - "RI": "Rhode Island", - "SC": "South Carolina", - "SD": "South Dakota", - "TN": "Tennessee", - "TX": "Texas", - "UT": "Utah", - "VA": "Virginia", - "VT": "Vermont", - "WA": "Washington", - "WI": "Wisconsin", - "WV": "West Virginia", - "WY": "Wyoming", -} - - -# Map state name to state abbreviations -state2abv = {value: key for key, value in abv2state.items()} - - -# Map zones to higher-level aggregations using the information in zone.csv -zone_csv_path = 
os.path.join(os.path.dirname(__file__), "..", "data", "zone.csv") -zone_df = pd.read_csv(zone_csv_path, index_col=0) - -# load zone id to load zone name -id2loadzone = zone_df["zone_name"].to_dict() -# load zone name to load zone id -loadzone2id = {v: k for k, v in id2loadzone.items()} -# Map state name to load zone name -state2loadzone = { - k: set(v) for k, v in zone_df.groupby("state").zone_name.unique().to_dict().items() -} -# Map interconnect name to load zone name -interconnect2loadzone = { - k: set(v) - for k, v in zone_df.groupby("interconnect").zone_name.unique().to_dict().items() -} -interconnect2loadzone["Texas_Western"] = ( - interconnect2loadzone["Texas"] | interconnect2loadzone["Western"] -) -interconnect2loadzone["USA"] = ( - interconnect2loadzone["Eastern"] - | interconnect2loadzone["Western"] - | interconnect2loadzone["Texas"] -) -# Map interconnect to load zone id -interconnect2id = { - k: set(zone_df.isin(v).query("zone_name == True").index) - for k, v in interconnect2loadzone.items() -} - -# Map load zone id to state abbreviations -id2abv = {k: state2abv[v] for k, v in zone_df.state.to_dict().items()} - -# Map state abbreviations to load zone IDs -abv2id = defaultdict(set) -for k, v in id2abv.items(): - abv2id[v].add(k) - -# Map state abbreviations to load zone name -abv2loadzone = { - state2abv[state]: loadzone for state, loadzone in state2loadzone.items() -} - - -# Map load zone name to state name -loadzone2state = {} -for state, zone_set in state2loadzone.items(): - loadzone2state.update({zone: state for zone in zone_set}) - - -# Map load zone name to interconnect name -loadzone2interconnect = { - zone: interconnect - for interconnect, zone_set in interconnect2loadzone.items() - for zone in zone_set - if interconnect not in interconnect_combinations -} - - -# Map interconnect name to state abbreviations -# Note: states which span interconnects are assigned to the one they're 'most' in. 
-interconnect2abv = { - "Eastern": { - "ME", - "NH", - "VT", - "MA", - "RI", - "CT", - "NY", - "NJ", - "PA", - "DE", - "MD", - "VA", - "NC", - "SC", - "GA", - "FL", - "AL", - "MS", - "TN", - "KY", - "WV", - "OH", - "MI", - "IN", - "IL", - "WI", - "MN", - "IA", - "MO", - "AR", - "LA", - "OK", - "KS", - "NE", - "SD", - "ND", - }, - "Texas": {"TX"}, - "Western": {"WA", "OR", "CA", "NV", "AZ", "UT", "NM", "CO", "WY", "ID", "MT"}, -} -interconnect2abv["USA"] = ( - interconnect2abv["Eastern"] - | interconnect2abv["Western"] - | interconnect2abv["Texas"] -) -interconnect2abv["Texas_Western"] = ( - interconnect2abv["Texas"] | interconnect2abv["Western"] -) - - -# Map state abbreviations to interconnect name -abv2interconnect = {} -for k, v in interconnect2abv.items(): - if k in interconnect_combinations: - continue - for s in v: - abv2interconnect[s] = k - -# List of interconnect name -interconnect = set(interconnect2abv.keys()) - - -# List of state name -state = set(state2abv.keys()) - - -# List of state abbreviations -abv = set(abv2state.keys()) - - -# List of load zone name -loadzone = set(loadzone2interconnect.keys()) - -# Map interconnect name to time zone -interconnect2timezone = { - "USA": "ETC/GMT+6", - "Eastern": "ETC/GMT+5", - "Texas": "ETC/GMT+6", - "Western": "ETC/GMT+8", - "Texas_Western": "ETC/GMT+7", - "Texas_Eastern": "ETC/GMT+5", - "Eastern_Western": "ETC/GMT+6", -} - - -# Map load zone IDs to time zones -# Note: load zones in > 1 time zone are put in the one where most load centers reside -id2timezone = zone_df["time_zone"].to_dict() - -# Map time zones to load zone IDs -timezone2id = {k: set(v) for k, v in zone_df.groupby("time_zone").groups.items()} - - -def __dir__(): - return sorted(_exports) From f86a53ecbd31f1c6042c679c7d2527eded1df1e1 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Mon, 23 May 2022 16:56:24 -0700 Subject: [PATCH 25/59] feat: add division attribute in model immutables --- powersimdata/input/check.py | 8 +++----- powersimdata/network/constants/region/europe.py | 1 + powersimdata/network/constants/region/usa.py | 7 +++++-- powersimdata/network/model.py | 2 +- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/powersimdata/input/check.py b/powersimdata/input/check.py index 46c48faeb..257d07812 100644 --- a/powersimdata/input/check.py +++ b/powersimdata/input/check.py @@ -308,11 +308,9 @@ def _check_areas_and_format(areas, mi=ModelImmutables("usa_tamu")): :param powersimdata.network.model.ModelImmutables mi: immutables of a grid model. :raises TypeError: if ``areas`` is not a list/tuple/set of str. :raises ValueError: if ``areas`` is empty or not valid. - :return: (*set*) -- areas as a set. State abbreviations are converted to state - names. + :return: (*set*) -- areas as a set. State/Country abbreviations are converted to + state/country names. 
""" - division = [a for a in mi.zones["mappings"] if "abbr" in a][0].split("_")[0] - if isinstance(areas, str): areas = {areas} elif isinstance(areas, (list, set, tuple)): @@ -331,7 +329,7 @@ def _check_areas_and_format(areas, mi=ModelImmutables("usa_tamu")): abv_in_areas = [z for z in areas if z in mi.zones["abv"]] for a in abv_in_areas: areas.remove(a) - areas.add(mi.zones[f"abv2{division}"][a]) + areas.add(mi.zones[f"abv2{mi.zones['division']}"][a]) return areas diff --git a/powersimdata/network/constants/region/europe.py b/powersimdata/network/constants/region/europe.py index a535838ca..1956e5362 100644 --- a/powersimdata/network/constants/region/europe.py +++ b/powersimdata/network/constants/region/europe.py @@ -231,6 +231,7 @@ def get_zones(interconnect, model): """ zones = dict() zones["mappings"] = {"loadzone", "country", "country_abbr", "interconnect"} + zones["division"] = "country" interconnect = ( model2interconnect[model] if "Europe" in interconnect else interconnect diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index afc06f008..a70071571 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -224,12 +224,15 @@ def get_zones(interconnect, model): :param str model: the grid model. :return: (*dict*) -- zones information. """ - query = model2interconnect[model] if "USA" in interconnect else interconnect # noqa - zone_info = get_zone_info(model=model).query("interconnect == @query") + interconnect = ( # noqa + model2interconnect[model] if "USA" in interconnect else interconnect + ) + zone_info = get_zone_info(model=model).query("interconnect == @interconnect") zone_info["abv"] = zone_info["state"].map({s: a for a, s in abv2state.items()}) zones = dict() zones["mappings"] = {"loadzone", "state", "state_abbr", "interconnect"} + zones["division"] = "state" zones.update(get_loadzone_mapping(zone_info)) zones.update(get_state_mapping(zone_info)) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index 9f06ec254..e9b0616ff 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -58,6 +58,7 @@ def area_to_loadzone(model, area, area_type=None): """ zones = ModelImmutables(model).zones mappings = zones["mappings"] + division = zones["division"] if not isinstance(area, str): raise TypeError("area must be a str") @@ -65,7 +66,6 @@ def area_to_loadzone(model, area, area_type=None): if area_type is not None and not isinstance(area_type, str): raise TypeError("area_type must be either None or str") - division = [a for a in mappings if "abbr" in a][0].split("_")[0] area2loadzone = { f"{division}": lambda x: zones[f"{division}2loadzone"][x], "loadzone": lambda x: zones["loadzone"].intersection({x}), From e2beffb28b25ebdb93d750fbbfc49c58f57f4da4 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 5 May 2022 09:28:01 -0700 Subject: [PATCH 26/59] feat: add attributes to AbstractGrid class --- powersimdata/input/abstract_grid.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 8b5514a9a..85eba8d5b 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -30,6 +30,8 @@ def __init__(self): self.bus = pd.DataFrame() self.branch = pd.DataFrame() self.storage = storage_template() + self.grid_model = "" + self.model_immutables = None class AbstractGridCSV(AbstractGrid): From 3a57eee0271c0d9c257d2efa4ff9fa2ecd54126f Mon Sep 17 
00:00:00 2001 From: Ben RdO Date: Thu, 26 May 2022 09:33:56 -0700 Subject: [PATCH 27/59] refactor: use powerset function --- .../network/constants/region/europe.py | 26 ++++------ powersimdata/network/constants/region/usa.py | 50 +++++++++---------- powersimdata/network/helpers.py | 5 ++ 3 files changed, 38 insertions(+), 43 deletions(-) diff --git a/powersimdata/network/constants/region/europe.py b/powersimdata/network/constants/region/europe.py index 1956e5362..4ab4aafb3 100644 --- a/powersimdata/network/constants/region/europe.py +++ b/powersimdata/network/constants/region/europe.py @@ -1,9 +1,9 @@ -from itertools import combinations +from itertools import chain import pandas as pd from powersimdata.network.constants.model import model2interconnect -from powersimdata.network.helpers import interconnect_to_name +from powersimdata.network.helpers import interconnect_to_name, powerset abv2country = { "AL": "Albania", @@ -77,7 +77,6 @@ "SK": "ETC/GMT-1", } - interconnect2abv = { "ContinentalEurope": { "AL", @@ -111,27 +110,22 @@ "Ireland": {"IE"}, "Baltic": {"EE", "LT", "LV"}, } - -cb = [i for j in range(2, 6) for i in combinations(model2interconnect["europe_tub"], j)] -for c in cb: - interconnect2abv[interconnect_to_name(c, model="europe_tub")] = { - a for i in c for a in interconnect2abv[i] - } +for c in powerset(model2interconnect["europe_tub"], 2): + interconnect2abv[interconnect_to_name(c, model="europe_tub")] = set( + chain(*[interconnect2abv[i] for i in c]) + ) name2interconnect = { - interconnect_to_name(i, model="europe_tub"): set(i) - for c in range(1, 6) - for i in combinations(model2interconnect["europe_tub"], c) + interconnect_to_name(c, model="europe_tub"): set(c) + for c in powerset(model2interconnect["europe_tub"], 1) } name2component = name2interconnect.copy() name2component.update({"Europe": set(name2interconnect) - {"Europe"}}) - interconnect2timezone = { - interconnect_to_name(i, model="europe_tub"): "ETC/GMT-1" - for c in range(1, 6) - for i in combinations(model2interconnect["europe_tub"], c) + interconnect_to_name(c, model="europe_tub"): "ETC/GMT-1" + for c in powerset(model2interconnect["europe_tub"], 1) } interconnect2timezone.update( { diff --git a/powersimdata/network/constants/region/usa.py b/powersimdata/network/constants/region/usa.py index a70071571..30f8964b4 100644 --- a/powersimdata/network/constants/region/usa.py +++ b/powersimdata/network/constants/region/usa.py @@ -1,8 +1,8 @@ import ast -from itertools import combinations +from itertools import chain from powersimdata.network.constants.model import model2interconnect -from powersimdata.network.helpers import get_zone_info, interconnect_to_name +from powersimdata.network.helpers import get_zone_info, interconnect_to_name, powerset abv2state = { "AK": "Alaska", @@ -57,26 +57,6 @@ "WY": "Wyoming", } - -name2interconnect = { - interconnect_to_name(i): set(i) - for c in range(1, 4) - for i in combinations(["ERCOT", "Eastern", "Western"], c) -} - -name2component = name2interconnect.copy() -name2component.update({"USA": set(name2interconnect) - {"USA"}}) - -interconnect2timezone = { - interconnect_to_name("USA"): "ETC/GMT+6", - interconnect_to_name("Eastern"): "ETC/GMT+5", - interconnect_to_name("ERCOT"): "ETC/GMT+6", - interconnect_to_name("Western"): "ETC/GMT+8", - interconnect_to_name(["ERCOT", "Western"]): "ETC/GMT+7", - interconnect_to_name(["ERCOT", "Eastern"]): "ETC/GMT+5", - interconnect_to_name(["Eastern", "Western"]): "ETC/GMT+6", -} - interconnect2abv = { "Eastern": { "ME", @@ -119,12 +99,28 @@ 
"ERCOT": {"TX"}, "Western": {"WA", "OR", "CA", "NV", "AZ", "UT", "NM", "CO", "WY", "ID", "MT"}, } +for c in powerset(model2interconnect["hifld"], 2): + interconnect2abv[interconnect_to_name(c, model="hifld")] = set( + chain(*[interconnect2abv[i] for i in c]) + ) -cb = [i for j in range(2, 4) for i in combinations(["ERCOT", "Eastern", "Western"], j)] -for c in cb: - interconnect2abv[interconnect_to_name(c)] = { - a for i in c for a in interconnect2abv[i] - } +name2interconnect = { + interconnect_to_name(c, model="hifld"): set(c) + for c in powerset(model2interconnect["hifld"], 1) +} + +name2component = name2interconnect.copy() +name2component.update({"USA": set(name2interconnect) - {"USA"}}) + +interconnect2timezone = { + interconnect_to_name("USA"): "ETC/GMT+6", + interconnect_to_name("Eastern"): "ETC/GMT+5", + interconnect_to_name("ERCOT"): "ETC/GMT+6", + interconnect_to_name("Western"): "ETC/GMT+8", + interconnect_to_name(["ERCOT", "Western"]): "ETC/GMT+7", + interconnect_to_name(["ERCOT", "Eastern"]): "ETC/GMT+5", + interconnect_to_name(["Eastern", "Western"]): "ETC/GMT+6", +} def get_interconnect_mapping(zone, model): diff --git a/powersimdata/network/helpers.py b/powersimdata/network/helpers.py index b8b2938a5..bdf07f87b 100644 --- a/powersimdata/network/helpers.py +++ b/powersimdata/network/helpers.py @@ -1,4 +1,5 @@ import os +from itertools import chain, combinations import pandas as pd @@ -77,3 +78,7 @@ def get_zone_info(model="hifld"): return pd.read_csv(path, index_col=0) else: raise FileNotFoundError(f"File {path} cannot be found") + + +def powerset(l, r): + return list(chain.from_iterable(combinations(l, i) for i in range(r, len(l) + 1))) From 335e89b8b26b3bfa574a110aca47d44ac27d7416 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 5 May 2022 11:50:58 -0700 Subject: [PATCH 28/59] refactor: move grid model and model immutables setting in their respective model --- powersimdata/input/abstract_grid.py | 23 ++++---- powersimdata/input/grid.py | 69 ++++++++-------------- powersimdata/input/scenario_grid.py | 45 +++++++++----- powersimdata/network/hifld/model.py | 17 +++++- powersimdata/network/tests/test_helpers.py | 2 + powersimdata/network/usa_tamu/model.py | 17 +++++- 6 files changed, 93 insertions(+), 80 deletions(-) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 85eba8d5b..9ba3876fd 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -10,7 +10,6 @@ ) from powersimdata.network.constants.model import model2region from powersimdata.network.csv_reader import CSVReader -from powersimdata.network.helpers import check_and_format_interconnect class AbstractGrid: @@ -49,10 +48,10 @@ def _set_data_loc(self, top_dirname): else: self.data_loc = data_loc - def _build_network(self, interconnect, grid_model): + def _build(self, interconnect, grid_model): """Build network. - :param str/iterable interconnect: interconnect name(s). + :param list interconnect: interconnect name(s). :param str model: the grid model. 
""" reader = CSVReader(self.data_loc) @@ -62,11 +61,10 @@ def _build_network(self, interconnect, grid_model): self.dcline = reader.dcline self.gencost["after"] = self.gencost["before"] = reader.gencost - self.interconnect = check_and_format_interconnect(interconnect, grid_model) self._add_information_to_model() - if model2region[grid_model] not in self.interconnect: - self._drop_interconnect() + if model2region[grid_model] not in interconnect: + self._drop_interconnect(interconnect) def _add_information_to_model(self): self.sub = csv_to_data_frame(self.data_loc, "sub.csv") @@ -77,22 +75,21 @@ def _add_information_to_model(self): add_zone_to_grid_data_frames(self) add_coord_to_grid_data_frames(self) - def _drop_interconnect(self): + def _drop_interconnect(self, interconnect): """Trim data frames to only keep information pertaining to the user defined interconnect(s). + :param list interconnect: interconnect name(s). """ for key, value in self.__dict__.items(): if key in ["sub", "bus2sub", "bus", "plant", "branch"]: - value.query("interconnect == @self.interconnect", inplace=True) + value.query("interconnect == @interconnect", inplace=True) elif key == "gencost": - value["before"].query( - "interconnect == @self.interconnect", inplace=True - ) + value["before"].query("interconnect == @interconnect", inplace=True) elif key == "dcline": value.query( - "from_interconnect == @self.interconnect &" - "to_interconnect == @self.interconnect", + "from_interconnect == @interconnect &" + "to_interconnect == @interconnect", inplace=True, ) self.id2zone = {k: self.id2zone[k] for k in self.bus.zone_id.unique()} diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py index ebe468d37..593649887 100644 --- a/powersimdata/input/grid.py +++ b/powersimdata/input/grid.py @@ -1,11 +1,8 @@ import os -from powersimdata.data_access.context import Context -from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.input.scenario_grid import FromREISE, FromREISEjl from powersimdata.network.constants.storage import storage from powersimdata.network.hifld.model import HIFLD -from powersimdata.network.model import ModelImmutables from powersimdata.network.usa_tamu.model import TAMU from powersimdata.utility.helpers import MemoryCache, cache_key @@ -47,46 +44,37 @@ def __init__(self, interconnect, source="usa_tamu", engine="REISE"): key = cache_key(interconnect, source) cached = _cache.get(key) if cached is not None: - data = cached + network = cached elif source == "usa_tamu": - data = TAMU(interconnect) + network = TAMU(interconnect) elif source == "hifld": - data = HIFLD(interconnect) + network = HIFLD(interconnect) elif os.path.splitext(source)[1] == ".mat": if engine == "REISE": - data = FromREISE(source) + network = FromREISE(source) elif engine == "REISE.jl": - data = FromREISEjl(source) + network = FromREISEjl(source) else: raise ValueError(f"Unknown source: {source}") - self.data_loc = data.data_loc - self.interconnect = data.interconnect - self.zone2id = data.zone2id - self.id2zone = data.id2zone - self.sub = data.sub - self.plant = data.plant - self.gencost = data.gencost - self.dcline = data.dcline - self.bus2sub = data.bus2sub - self.bus = data.bus - self.branch = data.branch - self.storage = data.storage - - _cache.put(key, self) - - self.grid_model = self._get_grid_model() - self.model_immutables = ModelImmutables(self.grid_model) - - def _get_grid_model(self): - """Get the grid model. - - :return: (*str*). 
- """ - if os.path.isfile(self.data_loc): - return _get_grid_model_from_scenario_list(self.data_loc) - elif os.path.isdir(self.data_loc): - return self.data_loc.split(os.sep)[-2] + network.build() + + self.data_loc = network.data_loc + self.interconnect = network.interconnect + self.zone2id = network.zone2id + self.id2zone = network.id2zone + self.sub = network.sub + self.plant = network.plant + self.gencost = network.gencost + self.dcline = network.dcline + self.bus2sub = network.bus2sub + self.bus = network.bus + self.branch = network.branch + self.storage = network.storage + self.grid_model = network.grid_model + self.model_immutables = network.model_immutables + + _cache.put(key, network) def __eq__(self, other): """Used when 'self == other' is evaluated. @@ -167,14 +155,3 @@ def _univ_eq(ref, test, failure_flag=None): print(f"non-matching entries: {', '.join(sorted(nonmatching_entries))}") return False return True - - -def _get_grid_model_from_scenario_list(source): - """Get grid model for a scenario listed in the scenario list. - - :param str source: path to MAT-file enclosing the grid data. - :return: (*str*) -- the grid model. - """ - scenario_number = int(os.path.basename(source).split("_")[0]) - slm = ScenarioListManager(Context.get_data_access()) - return slm.get_scenario(scenario_number)["grid_model"] diff --git a/powersimdata/input/scenario_grid.py b/powersimdata/input/scenario_grid.py index b1e0ed488..cd64437bf 100644 --- a/powersimdata/input/scenario_grid.py +++ b/powersimdata/input/scenario_grid.py @@ -4,6 +4,8 @@ import pandas as pd from scipy.io import loadmat +from powersimdata.data_access.context import Context +from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.input import const from powersimdata.input.abstract_grid import AbstractGrid from powersimdata.input.helpers import ( @@ -11,6 +13,7 @@ add_interconnect_to_grid_data_frames, add_zone_to_grid_data_frames, ) +from powersimdata.network.model import ModelImmutables class ScenarioGrid(AbstractGrid): @@ -21,21 +24,22 @@ def __init__(self, filename): :param str filename: path to file. """ - self.filename = filename super().__init__() + self._set_data_loc(filename) + self.grid_model = _get_grid_model_from_scenario_list(filename) - def _set_data_loc(self): + def _set_data_loc(self, filename): """Sets data location. :param str filename: path to file :raises FileNotFoundError: if file does not exist. """ - if os.path.isfile(self.filename) is False: - raise FileNotFoundError("%s file not found" % self.filename) + if os.path.isfile(filename) is False: + raise FileNotFoundError("%s file not found" % filename) else: - self.data_loc = self.filename + self.data_loc = filename - def _read_network(self): + def _read(self): data = loadmat(self.data_loc, squeeze_me=True, struct_as_record=False) mpc = data["mdi"].mpc try: @@ -118,18 +122,29 @@ def _read_network(self): # interconnect self.interconnect = self.sub.interconnect.unique().tolist() - def _build_network(self): - """Defines how to interpret the MAT file data to build a network. - Not implemented for ScenarioGrid, but must be defined for subclasses. - """ - pass + # model immutables + if self.grid_model in ["usa_tamu", "hifld"]: + self.model_immutables = ModelImmutables( + self.grid_model, interconnect=self.interconnect + ) + + +def _get_grid_model_from_scenario_list(source): + """Get grid model for a scenario listed in the scenario list. + + :param str source: path to MAT-file enclosing the grid data. + :return: (*str*) -- the grid model. 
+ """ + scenario_number = int(os.path.basename(source).split("_")[0]) + slm = ScenarioListManager(Context.get_data_access()) + return slm.get_scenario(scenario_number)["grid_model"] class FromREISE(ScenarioGrid): """MATLAB file reader, for MAT files created by REISE/MATPOWER""" - def _build_network(self): - self._read_network() + def build(self): + self._read() reindex_model(self) add_information_to_model(self) @@ -137,8 +152,8 @@ def _build_network(self): class FromREISEjl(ScenarioGrid): """MATLAB file reader, for MAT files created (& converted) by REISE.jl""" - def _build_network(self): - self._read_network() + def build(self): + self._read() add_information_to_model(self) diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index 8f6e3b567..237c34286 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -2,6 +2,8 @@ from powersimdata.input.abstract_grid import AbstractGridCSV from powersimdata.network.constants.storage import get_storage +from powersimdata.network.helpers import check_and_format_interconnect +from powersimdata.network.model import ModelImmutables class HIFLD(AbstractGridCSV): @@ -12,9 +14,18 @@ class HIFLD(AbstractGridCSV): def __init__(self, interconnect): """Constructor.""" - model = "hifld" super().__init__() + self.grid_model = "hifld" + self.interconnect = check_and_format_interconnect( + interconnect, model=self.grid_model + ) + self.model_immutables = ModelImmutables( + self.grid_model, interconnect=interconnect + ) self._set_data_loc(os.path.dirname(__file__)) - self._build_network(interconnect, model) - self.storage.update(get_storage(model)) + + def build(self): + """Build network""" + self._build(self.interconnect, self.grid_model) + self.storage.update(get_storage(self.grid_model)) diff --git a/powersimdata/network/tests/test_helpers.py b/powersimdata/network/tests/test_helpers.py index 4d2900bfe..b144bab0c 100644 --- a/powersimdata/network/tests/test_helpers.py +++ b/powersimdata/network/tests/test_helpers.py @@ -71,12 +71,14 @@ def _assert_interconnect_missing(interconnect, model): def test_drop_one_interconnect(): model = TAMU(["Western", "Texas"]) + model.build() _assert_lists_equal(["Western", "Texas"], model.interconnect) _assert_interconnect_missing("Eastern", model) def test_drop_two_interconnect(): model = TAMU(["Western"]) + model.build() _assert_lists_equal(["Western"], model.interconnect) for interconnect in ["Eastern", "Texas"]: _assert_interconnect_missing(interconnect, model) diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index 177507463..00ea3fcc4 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -2,6 +2,8 @@ from powersimdata.input.abstract_grid import AbstractGridCSV from powersimdata.network.constants.storage import get_storage +from powersimdata.network.helpers import check_and_format_interconnect +from powersimdata.network.model import ModelImmutables class TAMU(AbstractGridCSV): @@ -12,9 +14,18 @@ class TAMU(AbstractGridCSV): def __init__(self, interconnect): """Constructor.""" - model = "usa_tamu" super().__init__() + self.grid_model = "usa_tamu" + self.interconnect = check_and_format_interconnect( + interconnect, model=self.grid_model + ) + self.model_immutables = ModelImmutables( + self.grid_model, interconnect=interconnect + ) self._set_data_loc(os.path.dirname(__file__)) - self._build_network(interconnect, model) - self.storage.update(get_storage(model)) + + def 
build(self): + """Build network""" + self._build(self.interconnect, self.grid_model) + self.storage.update(get_storage(self.grid_model)) From 3a014f51d4350b0afae74bee69d9d0d18db5446f Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 3 May 2022 12:54:35 -0700 Subject: [PATCH 29/59] feat: create an abstract grid class for model enclosed in CSV files (#627) --- powersimdata/input/abstract_grid.py | 4 ++++ powersimdata/network/hifld/model.py | 4 ++-- powersimdata/network/usa_tamu/model.py | 4 ++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 41cef42a6..8b5514a9a 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -31,6 +31,10 @@ def __init__(self): self.branch = pd.DataFrame() self.storage = storage_template() + +class AbstractGridCSV(AbstractGrid): + """Grid Builder.""" + def _set_data_loc(self, top_dirname): """Sets data location. diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index c11b14ed8..8f6e3b567 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -1,10 +1,10 @@ import os -from powersimdata.input.abstract_grid import AbstractGrid +from powersimdata.input.abstract_grid import AbstractGridCSV from powersimdata.network.constants.storage import get_storage -class HIFLD(AbstractGrid): +class HIFLD(AbstractGridCSV): """HIFLD network. :param str/iterable interconnect: interconnect name(s). diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index d2e53018a..177507463 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -1,10 +1,10 @@ import os -from powersimdata.input.abstract_grid import AbstractGrid +from powersimdata.input.abstract_grid import AbstractGridCSV from powersimdata.network.constants.storage import get_storage -class TAMU(AbstractGrid): +class TAMU(AbstractGridCSV): """TAMU network. :param str/iterable interconnect: interconnect name(s). 
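Illustrative sketch of the new build flow, based on the updated tests above (a minimal usage example, assuming only what the two patches above introduce): a network object is constructed first, and its data frames are only populated by an explicit build() call.

    from powersimdata.network.usa_tamu.model import TAMU

    network = TAMU(["Western", "Texas"])  # constructor sets grid_model, interconnect and model_immutables
    network.build()                       # reads the CSV files and keeps only the requested interconnects
    print(network.interconnect)           # ['Western', 'Texas'], as asserted in the updated tests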
From 54d1947ab4ae40182a3e2513d81cad4ff3251230 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 26 May 2022 11:27:48 -0700 Subject: [PATCH 30/59] refactor: use get to access values in dictionary --- powersimdata/network/model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/powersimdata/network/model.py b/powersimdata/network/model.py index e9b0616ff..a95ea1019 100644 --- a/powersimdata/network/model.py +++ b/powersimdata/network/model.py @@ -67,10 +67,10 @@ def area_to_loadzone(model, area, area_type=None): raise TypeError("area_type must be either None or str") area2loadzone = { - f"{division}": lambda x: zones[f"{division}2loadzone"][x], + f"{division}": zones[f"{division}2loadzone"].get, "loadzone": lambda x: zones["loadzone"].intersection({x}), - f"{division}_abbr": lambda x: zones["abv2loadzone"][x], - "interconnect": lambda x: zones["interconnect2loadzone"][x], + f"{division}_abbr": zones["abv2loadzone"].get, + "interconnect": zones["interconnect2loadzone"].get, "all": lambda _: zones["loadzone"], } From f83d107fd9c69ccbb82f8916b35d8ee460f840cb Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 2 Jun 2022 12:40:28 -0700 Subject: [PATCH 31/59] feat: use factory pattern to instantiate the builder from grid model (#635) --- powersimdata/scenario/create.py | 199 +++++++++++++++++++++++--------- 1 file changed, 143 insertions(+), 56 deletions(-) diff --git a/powersimdata/scenario/create.py b/powersimdata/scenario/create.py index a56d39920..2f86fa2c0 100644 --- a/powersimdata/scenario/create.py +++ b/powersimdata/scenario/create.py @@ -1,5 +1,4 @@ import copy -import warnings import numpy as np import pandas as pd @@ -29,7 +28,6 @@ class Create(State): default_exported_methods = { "create_scenario", "get_bus_demand", - "set_builder", "set_grid", } | State.exported_methods @@ -130,37 +128,23 @@ def create_scenario(self): ) self.switch(Execute) - def set_builder(self, *args, **kwargs): - """Alias to :func:`~powersimdata.scenario.create.Create.set_grid`""" - warnings.warn( - "set_builder is deprecated, use set_grid instead", DeprecationWarning - ) - self.set_grid(*args, **kwargs) - - def set_grid(self, grid_model="usa_tamu", interconnect="USA"): + def set_grid(self, grid_model="usa_tamu", interconnect="USA", **kwargs): """Sets grid builder. :param str grid_model: name of grid model. Default is *'usa_tamu'*. :param str/list interconnect: name of interconnect(s). Default is *'USA'*. + :param \\*\\*kwargs: optional parameters used to instantiate a Grid object. 
""" - self.builder = _Builder( - grid_model, interconnect, self._scenario_list_manager.get_scenario_table() + scenario_table = self._scenario_list_manager.get_scenario_table() + + self.builder = get_builder( + grid_model, + interconnect, + scenario_table, + **kwargs, ) - self.exported_methods |= _Builder.exported_methods - print("--> Summary") - print("# Existing study") - if self.builder.existing.empty: - print("Nothing yet") - else: - plan = [p for p in self.builder.existing.plan.unique()] - print("%s" % " | ".join(plan)) - - print("# Available profiles") - for p in ["demand", "hydro", "solar", "wind"]: - possible = self.builder.get_base_profile(p) - if len(possible) != 0: - print("%s: %s" % (p, " | ".join(possible))) + self.exported_methods |= self.builder.exported_methods self._scenario_info["grid_model"] = self.builder.grid_model self._scenario_info["interconnect"] = self.builder.interconnect @@ -189,7 +173,6 @@ class _Builder: wind = "" engine = "REISE.jl" exported_methods = { - "set_base_profile", "set_engine", "set_name", "set_time", @@ -205,14 +188,34 @@ class _Builder: def __init__(self, grid_model, interconnect, table): """Constructor.""" - check_model(grid_model) - self.grid_model = grid_model - self.base_grid = Grid(interconnect, source=grid_model) - self.change_table = ChangeTable(self.base_grid) self.interconnect = interconnect_to_name(interconnect, grid_model) self.existing = table[table.interconnect == self.interconnect] + def print_existing_study(self): + """Print existing study""" + + print("--> Begin: Existing Study") + if self.existing.empty: + print("Nothing yet") + else: + print(" | ".join(self.existing.plant.unique())) + print("<-- End: Existing Study") + + def set_base_grid(self): + """Set base grid + + :raises NotImplementedError: always - implemented in child classes. + """ + raise NotImplementedError("Implemented in the child classes") + + def set_change_table(self): + """Set change table + + :raises NotImplementedError: always - implemented in child classes. + """ + raise NotImplementedError("Implemented in the child classes") + def get_ct(self): """Returns change table. @@ -314,8 +317,75 @@ def set_time(self, start_date, end_date, interval): self.end_date = end_date self.interval = interval + def set_engine(self, engine): + """Sets simulation engine to be used for scenarion. + + :param str engine: simulation engine + """ + possible = ["REISE.jl"] + if engine not in possible: + print("Available engines: %s" % " | ".join(possible)) + return + else: + self.engine = engine + + def get_grid(self): + """Returns a transformed grid. + + :return: (*powersimdata.input.grid.Grid*) -- a Grid object. + """ + return TransformGrid(self.base_grid, self.change_table.ct).get_grid() + + def get_base_grid(self): + """Returns original grid. + + :return: (*powersimdata.input.grid.Grid*) -- a Grid object. + """ + return copy.deepcopy(self.base_grid) + + def __str__(self): + return self.name + + +class FromCSV(_Builder): + """Build scenario using grid model and associated profiles enclosed in CSV files + + :param str grid model: the grid model + :param list interconnect: list of interconnect(s) to build. + :param pandas.DataFrame table: scenario list table. + :param \\*\\*kwargs: optional parameters used to instantiate a Grid object. 
+ """ + + def __init__(self, grid_model, interconnect, table, **kwargs): + super().__init__(grid_model, interconnect, table) + + self.exported_methods |= {"set_base_profile", "get_base_profile"} + + self.print_existing_study() + self.print_available_profile() + + self.set_base_grid() + self.set_change_table() + + def print_available_profile(self): + """Print available profiles for the grid model""" + print("--> Begin: Available profiles") + for p in ["demand", "hydro", "solar", "wind"]: + possible = self.get_base_profile(p) + if len(possible) != 0: + print("%s: %s" % (p, " | ".join(possible))) + print("<-- End: Available profiles") + + def set_base_grid(self): + """Set base grid""" + self.base_grid = Grid(self.interconnect, source=self.grid_model) + + def set_change_table(self): + """Set change table""" + self.change_table = ChangeTable(self.base_grid) + def get_base_profile(self, kind): - """Returns available base profiles. + """Return available base profiles. :param str kind: one of *'demand'*, *'hydro'*, *'solar'*, *'wind'*. :return: (*list*) -- available version for selected profile kind. @@ -323,10 +393,10 @@ def get_base_profile(self, kind): return ProfileInput().get_profile_version(self.grid_model, kind) def set_base_profile(self, kind, version): - """Sets demand profile. + """Set base profile. :param str kind: one of *'demand'*, *'hydro'*, *'solar'*, *'wind'*. - :param str version: demand profile version. + :param str version: base profile version. :raises ValueError: if no profiles are available or version is not available. """ possible = self.get_base_profile(kind) @@ -344,31 +414,48 @@ def set_base_profile(self, kind, version): else: raise ValueError("Available %s profiles: %s" % (kind, " | ".join(possible))) - def set_engine(self, engine): - """Sets simulation engine to be used for scenarion. - :param str engine: simulation engine - """ - possible = ["REISE.jl"] - if engine not in possible: - print("Available engines: %s" % " | ".join(possible)) - return - else: - self.engine = engine +class FromPyPSA(_Builder): + """Build scenario from a PyPSA Network object - def get_grid(self): - """Returns a transformed grid. + :param str grid model: the grid model + :param list interconnect: list of interconnect(s) to build. + :param pandas.DataFrame table: scenario list table. + :param \\*\\*kwargs: optional parameters used to instantiate a Grid object: + *'reduction'*: number of nodes in the network. If None, the full resolution + PyPSA Network object will be used. Available reductions are specified in the + :mod:`powersimdata.network.europe_tub.model` module. + """ - :return: (*powersimdata.input.grid.Grid*) -- a Grid object. - """ - return TransformGrid(self.base_grid, self.change_table.ct).get_grid() + def __init__(self, grid_model, interconnect, table, **kwargs): + super().__init__(grid_model, interconnect, table) - def get_base_grid(self): - """Returns original grid. + self.reduction = None if "reduction" not in kwargs else kwargs["reduction"] - :return: (*powersimdata.input.grid.Grid*) -- a Grid object. 
- """ - return copy.deepcopy(self.base_grid) + self.print_existing_study() - def __str__(self): - return self.name + self.set_base_grid() + self.set_change_table() + + def set_base_grid(self): + """Set base grid""" + raise NotImplementedError() + + def set_change_table(self): + """Set change table""" + raise NotImplementedError() + + +def get_builder(grid_model, interconnect, table, **kwargs): + """Returns a Builder instance + + :param str grid model: the grid model + :param list interconnect: list of interconnect(s) to build. + :param pandas.DataFrame table: scenario list table + :param \\*\\*kwargs: optional parameters used to instantiate a Grid object. + :return: (*object*) -- builder instance associated with the grid model. + """ + check_model(grid_model) + model2builder = {"usa_tamu": FromCSV, "hifld": FromCSV, "europe_tub": FromPyPSA} + + return model2builder[grid_model](grid_model, interconnect, table, **kwargs) From b965495e9d20c4247a4bac12fd8ac3d68b250121 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Fri, 3 Jun 2022 12:05:14 -0700 Subject: [PATCH 32/59] chore: remove hifld from list of supported model (#644) --- powersimdata/input/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py index 593649887..9ada86359 100644 --- a/powersimdata/input/grid.py +++ b/powersimdata/input/grid.py @@ -11,7 +11,7 @@ class Grid: - SUPPORTED_MODELS = {"hifld", "usa_tamu"} + SUPPORTED_MODELS = {"usa_tamu"} SUPPORTED_ENGINES = {"REISE", "REISE.jl"} """Grid From e930df8e9292c074aa29fa88ae86ee999886bbd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 09:56:40 -0700 Subject: [PATCH 33/59] chore(deps): bump pillow from 9.1.0 to 9.1.1 (#647) Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.1.0 to 9.1.1. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.1.0...9.1.1) --- updated-dependencies: - dependency-name: pillow dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Pipfile.lock | 148 ++++++++++++++++++++++++--------------------------- 1 file changed, 71 insertions(+), 77 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index fea416cc8..0f70dfdee 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -23,11 +23,11 @@ }, "azure-core": { "hashes": [ - "sha256:28a01dfbaf0a6812c4e2a82d1642ea30956a9739f25bc77c9b23b91f4ea68f0f", - "sha256:c3e8a9a3ec9d89f59b5d5b2f19d19a30d76a5b5c0cee3788ecad3cb72b9bd028" + "sha256:39c5d59d04209bb70a1a7ee879cef05d07bc76472cd3fb5eaa2e607a90d312bb", + "sha256:f48a640affa59fa45ac770565b3bead4c4f834242d16983c1ae2bb173a4b8a6d" ], "markers": "python_version >= '3.6'", - "version": "==1.23.1" + "version": "==1.24.1" }, "azure-storage-blob": { "hashes": [ @@ -55,10 +55,11 @@ }, "certifi": { "hashes": [ - "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", - "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", + "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" ], - "version": "==2021.10.8" + "markers": "python_version >= '3.6'", + "version": "==2022.5.18.1" }, "cffi": { "hashes": [ @@ -368,11 +369,11 @@ }, "setuptools": { "hashes": [ - "sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8", - "sha256:47c7b0c0f8fc10eec4cf1e71c6fdadf8decaa74ffa087e68cd1c20db7ad6a592" + "sha256:68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", + "sha256:a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7" ], "markers": "python_version >= '3.7'", - "version": "==62.1.0" + "version": "==62.3.2" }, "six": { "hashes": [ @@ -447,10 +448,11 @@ }, "certifi": { "hashes": [ - "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", - "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", + "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" ], - "version": "==2021.10.8" + "markers": "python_version >= '3.6'", + "version": "==2022.5.18.1" }, "cftime": { "hashes": [ @@ -833,47 +835,47 @@ }, "pillow": { "hashes": [ - "sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e", - "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595", - "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512", - "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c", - "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477", - "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a", - "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4", - "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e", - "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5", - "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378", - "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a", - "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652", - "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7", - "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a", - "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a", - "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6", - 
"sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165", - "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160", - "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331", - "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b", - "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458", - "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033", - "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8", - "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481", - "sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58", - "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7", - "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3", - "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea", - "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34", - "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3", - "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8", - "sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581", - "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244", - "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef", - "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0", - "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2", - "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97", - "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717" + "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f", + "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d", + "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b", + "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c", + "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9", + "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546", + "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578", + "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1", + "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe", + "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098", + "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2", + "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a", + "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45", + "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530", + "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108", + "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1", + "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd", + "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0", + "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6", + "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c", + "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf", + "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4", + "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d", + "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765", + 
"sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602", + "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340", + "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c", + "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b", + "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84", + "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8", + "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92", + "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54", + "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601", + "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a", + "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf", + "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251", + "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a", + "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e" ], - "markers": "python_version >= '3.7'", - "version": "==9.1.0" + "index": "pypi", + "version": "==9.1.1" }, "platformdirs": { "hashes": [ @@ -908,30 +910,30 @@ }, "pyomo": { "hashes": [ - "sha256:12b9c04c12775b4203de7902793ec64bc031fe14d622147fdd6c81e806e8c952", - "sha256:1b4e8cc4a5cb93ee9e5f7090052904339ff318cf9b9fb1463b26fd1bbd5c0f25", - "sha256:218011a6aef94fc4f5b353b1d1623df465c92e29870a1597a29caf66a001db71", - "sha256:2517f54f4f47eae329a129516de2a194797ca1c7e7f52bc1bfdc04942ff7374f", - "sha256:533fbf865b64b0c8ef3bb2f935a3e24ecedafb6f95801b631867f1d848ac846a", - "sha256:575bef8447561579f798c625606fa3c74a259a2f0e271e1442bcf3da5bfd379f", - "sha256:7d4aab869a5d412e607c5b1df3452aef72de077b2895d4276409ea87a59c25d2", - "sha256:90bd36d11b4f217e6362e6ca08e88a8d62c5cabb64da1776e1def08ca8c18ef3", - "sha256:929ad3e1c405767313f6a5b57e2d73815372f74b13cb590a6f724379aceb04a6", - "sha256:b548825301b6bd4073a0620a8265d956153d53c12fca37cc7184fa54fce96222", - "sha256:c5ca678afb12ebf42e4361ad5d8318d15b11375d3e26f14025ba9270cb226dcb", - "sha256:d5d593c32beb760a2bb766b6e701c7f0e17e247b1c464a91f9140a61ba379009", - "sha256:f5e155b159381d85f572cdcf4775fada5e4106ae94ef3d93a7c4de5a78deb0ca" + "sha256:1c3cc9d96e43baa72507a4cb5d52f779d541c42dac999759c289efe7f2a5ea15", + "sha256:3440bd5268e2c8193e9557d8d0a5b0c99d6eada909da1e17cb718a25805483b8", + "sha256:394ed6962064ebb745b8bf98a38692c9d49176b04482ea8d9c612ddcd45aa99b", + "sha256:45f8225faa53ac3c8b6551921276028b885d5ed9d96347021324ed2bdd9be2ef", + "sha256:56632731f8415cb1584201282e54d0b0fbaa218b3c498899beee12b2373edaa4", + "sha256:5e385c13549912f0f0d7c768ba32d3b242d66df9064cfd9e499cfaeacfd4ea6e", + "sha256:91b6f9a349b41c5e3a335692ed4ea00ec0ebcc27e7d23575e5f832289f8c214c", + "sha256:9662ed2e046a1edccc351510ef288893f5044a27fb214d632a7311e6fd5e04b4", + "sha256:a636a3a1c8314b8be85899cb6fac5d6a9a78fc75c6d58b74d3ec106ae5ed8f59", + "sha256:b5d8ec269515071a8c1d07966624f9bf79f4cb5a134e26d48292424c38e8da85", + "sha256:cf81b7b44403df4bbdd0b5023f859252476071f92a8166e43206758e51524ff3", + "sha256:d30138cbfe37507545bb270763e9eed7425a3d32a2e6c3b92a8f32eefa5159ce", + "sha256:d593dcbe22e7e8f7c434398b34d8b02d83e8cb83aef4390a2289f566cb2eb293" ], "markers": "python_version >= '3.7'", - "version": "==6.4.0" + "version": "==6.4.1" }, "pyparsing": { "hashes": [ - "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954", - "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06" + 
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", + "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" ], "markers": "python_full_version >= '3.6.8'", - "version": "==3.0.8" + "version": "==3.0.9" }, "pypsa": { "hashes": [ @@ -1056,14 +1058,6 @@ "markers": "python_version >= '3.7'", "version": "==2.0.1" }, - "typing-extensions": { - "hashes": [ - "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708", - "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376" - ], - "markers": "python_version >= '3.7'", - "version": "==4.2.0" - }, "urllib3": { "hashes": [ "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", From b897d25d94e64639df36fe085e3c8edfc4aca3dd Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Wed, 1 Jun 2022 22:08:26 -0700 Subject: [PATCH 34/59] refactor: move function and read all CSV files in the CSVReader object --- powersimdata/input/abstract_grid.py | 14 ++++++------- powersimdata/input/helpers.py | 17 ++------------- powersimdata/network/csv_reader.py | 32 ++++++++++++++++++++++------- 3 files changed, 33 insertions(+), 30 deletions(-) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 9ba3876fd..3a9d4dd59 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -6,7 +6,6 @@ from powersimdata.input.helpers import ( add_coord_to_grid_data_frames, add_zone_to_grid_data_frames, - csv_to_data_frame, ) from powersimdata.network.constants.model import model2region from powersimdata.network.csv_reader import CSVReader @@ -60,18 +59,17 @@ def _build(self, interconnect, grid_model): self.branch = reader.branch self.dcline = reader.dcline self.gencost["after"] = self.gencost["before"] = reader.gencost + self.sub = reader.sub + self.bus2sub = reader.bus2sub + self.id2zone = reader.zone["zone_name"].to_dict() + self.zone2id = {v: k for k, v in self.id2zone.items()} - self._add_information_to_model() + self._add_information() if model2region[grid_model] not in interconnect: self._drop_interconnect(interconnect) - def _add_information_to_model(self): - self.sub = csv_to_data_frame(self.data_loc, "sub.csv") - self.bus2sub = csv_to_data_frame(self.data_loc, "bus2sub.csv") - self.id2zone = csv_to_data_frame(self.data_loc, "zone.csv").zone_name.to_dict() - self.zone2id = {v: k for k, v in self.id2zone.items()} - + def _add_information(self): add_zone_to_grid_data_frames(self) add_coord_to_grid_data_frames(self) diff --git a/powersimdata/input/helpers.py b/powersimdata/input/helpers.py index ff19a77f9..a6d4e9687 100644 --- a/powersimdata/input/helpers.py +++ b/powersimdata/input/helpers.py @@ -1,4 +1,3 @@ -import os from collections import defaultdict import pandas as pd @@ -12,18 +11,6 @@ ) -def csv_to_data_frame(data_loc, filename): - """Reads CSV. - - :return: (*pandas.DataFrame*) -- created data frame. - """ - print("Reading %s" % filename) - data_frame = pd.read_csv( - os.path.join(data_loc, filename), index_col=0, float_precision="high" - ) - return data_frame - - def add_column_to_data_frame(data_frame, column_dict): """Adds column(s) to data frame. Done inplace. @@ -37,7 +24,7 @@ def add_column_to_data_frame(data_frame, column_dict): def add_coord_to_grid_data_frames(grid): """Adds longitude and latitude information to bus, plant and branch data - frames of grid instance. + frames of grid instance. :param powersimdata.input.grid.Grid grid: grid instance. 
""" @@ -102,7 +89,7 @@ def get_zone_name(idx): def add_interconnect_to_grid_data_frames(grid): """Adds interconnect name to bus, branch, plant and dcline data frames of - grid instance. + grid instance. :param powersimdata.input.grid.Grid grid: grid instance. """ diff --git a/powersimdata/network/csv_reader.py b/powersimdata/network/csv_reader.py index bc4bd81ed..8a0a0184a 100644 --- a/powersimdata/network/csv_reader.py +++ b/powersimdata/network/csv_reader.py @@ -1,16 +1,34 @@ -from powersimdata.input.helpers import csv_to_data_frame +import os + +import pandas as pd class CSVReader: - """MPC files reader. + """Read CSV files enclosing a grid model. :param str data_loc: path to data. """ def __init__(self, data_loc): """Constructor""" - self.bus = csv_to_data_frame(data_loc, "bus.csv") - self.plant = csv_to_data_frame(data_loc, "plant.csv") - self.gencost = csv_to_data_frame(data_loc, "gencost.csv") - self.branch = csv_to_data_frame(data_loc, "branch.csv") - self.dcline = csv_to_data_frame(data_loc, "dcline.csv") + self.bus = read(data_loc, "bus.csv") + self.plant = read(data_loc, "plant.csv") + self.gencost = read(data_loc, "gencost.csv") + self.branch = read(data_loc, "branch.csv") + self.dcline = read(data_loc, "dcline.csv") + self.sub = read(data_loc, "sub.csv") + self.bus2sub = read(data_loc, "bus2sub.csv") + self.zone = read(data_loc, "zone.csv") + + +def read(data_loc, filename): + """Reads CSV. + + :return: (*pandas.DataFrame*) -- created data frame. + """ + path = os.path.join(data_loc, filename) + if os.path.isfile(path): + print("Reading %s" % filename) + return pd.read_csv(path, index_col=0, float_precision="high") + else: + raise FileNotFoundError(f"{path} cannot be found") From 6b33cb89391e7ee9274ab0d501cafd6a77fa55e3 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 2 Jun 2022 17:18:41 -0700 Subject: [PATCH 35/59] docs: fix docstring in scenario_grid module --- powersimdata/input/scenario_grid.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/powersimdata/input/scenario_grid.py b/powersimdata/input/scenario_grid.py index cd64437bf..e3379eaa0 100644 --- a/powersimdata/input/scenario_grid.py +++ b/powersimdata/input/scenario_grid.py @@ -164,7 +164,7 @@ def frame(name, table, index, n_storage=0): :param numpy.array table: table to be used to build data frame. :param numpy.array index: array to be used as data frame indices. :param int n_storage: number of storage devices. - :return: (tuple) -- first element is a data frame. Second element is None + :return: (*tuple*) -- first element is a data frame. Second element is None or a data frame when energy storage system are included. """ storage = None @@ -265,7 +265,7 @@ def format_gencost(data): """Modify generation cost data frame. :param pandas.DataFrame data: generation cost data frame. - :return: (pandas.DataFrame) -- formatted gencost data frame. + :return: (*pandas.DataFrame*) -- formatted gencost data frame. """ def parse_gencost_row(row): @@ -318,6 +318,11 @@ def add_information_to_model(grid): def reindex_model(grid): + """Reindex bus id columns. 
+ + :param powersimdata.input.scenario_grid.ScenarioGrid grid: grid + """ + def reset_id(): return lambda x: grid.bus.index[x - 1] From ad072df819370c75a9d2ae609fbdaf29fc26c747 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 7 Jun 2022 16:05:21 -0700 Subject: [PATCH 36/59] fix: add missing rename in grid to pypsa exporter (#649) --- powersimdata/export/export_to_pypsa.py | 1 + 1 file changed, 1 insertion(+) diff --git a/powersimdata/export/export_to_pypsa.py b/powersimdata/export/export_to_pypsa.py index a1da98c17..456175268 100644 --- a/powersimdata/export/export_to_pypsa.py +++ b/powersimdata/export/export_to_pypsa.py @@ -70,6 +70,7 @@ "rename": { "startup": "startup_cost", "shutdown": "shutdown_cost", + "c1": "marginal_cost", } }, "branch": { From b8452df9a36858082b3ef60637f891c17c2ba460 Mon Sep 17 00:00:00 2001 From: jenhagg <66005238+jenhagg@users.noreply.github.com> Date: Wed, 8 Jun 2022 17:29:15 -0700 Subject: [PATCH 37/59] fix: typo in column name (#651) --- powersimdata/scenario/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/powersimdata/scenario/create.py b/powersimdata/scenario/create.py index 2f86fa2c0..e70b54435 100644 --- a/powersimdata/scenario/create.py +++ b/powersimdata/scenario/create.py @@ -199,7 +199,7 @@ def print_existing_study(self): if self.existing.empty: print("Nothing yet") else: - print(" | ".join(self.existing.plant.unique())) + print(" | ".join(self.existing.plan.unique())) print("<-- End: Existing Study") def set_base_grid(self): From f97a97545fb222dfb25970a6afb2265e78107a4d Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 7 Jun 2022 16:36:21 -0700 Subject: [PATCH 38/59] refactor: move/rename logic used to build grid from CSV files --- powersimdata/input/abstract_grid.py | 70 ------------- powersimdata/input/converter/__init__.py | 0 powersimdata/input/converter/csv_to_grid.py | 71 +++++++++++++ powersimdata/input/converter/helpers.py | 108 ++++++++++++++++++++ powersimdata/input/helpers.py | 107 ------------------- powersimdata/input/scenario_grid.py | 2 +- powersimdata/input/tests/test_grid.py | 2 +- powersimdata/network/hifld/model.py | 4 +- powersimdata/network/usa_tamu/model.py | 4 +- 9 files changed, 185 insertions(+), 183 deletions(-) create mode 100644 powersimdata/input/converter/__init__.py create mode 100644 powersimdata/input/converter/csv_to_grid.py create mode 100644 powersimdata/input/converter/helpers.py diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index 3a9d4dd59..d3256a260 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -1,14 +1,6 @@ -import os - import pandas as pd from powersimdata.input import const -from powersimdata.input.helpers import ( - add_coord_to_grid_data_frames, - add_zone_to_grid_data_frames, -) -from powersimdata.network.constants.model import model2region -from powersimdata.network.csv_reader import CSVReader class AbstractGrid: @@ -32,68 +24,6 @@ def __init__(self): self.model_immutables = None -class AbstractGridCSV(AbstractGrid): - """Grid Builder.""" - - def _set_data_loc(self, top_dirname): - """Sets data location. - - :param str top_dirname: name of directory enclosing data. - :raises IOError: if directory does not exist. - """ - data_loc = os.path.join(top_dirname, "data") - if os.path.isdir(data_loc) is False: - raise IOError("%s directory not found" % data_loc) - else: - self.data_loc = data_loc - - def _build(self, interconnect, grid_model): - """Build network. 
- - :param list interconnect: interconnect name(s). - :param str model: the grid model. - """ - reader = CSVReader(self.data_loc) - self.bus = reader.bus - self.plant = reader.plant - self.branch = reader.branch - self.dcline = reader.dcline - self.gencost["after"] = self.gencost["before"] = reader.gencost - self.sub = reader.sub - self.bus2sub = reader.bus2sub - self.id2zone = reader.zone["zone_name"].to_dict() - self.zone2id = {v: k for k, v in self.id2zone.items()} - - self._add_information() - - if model2region[grid_model] not in interconnect: - self._drop_interconnect(interconnect) - - def _add_information(self): - add_zone_to_grid_data_frames(self) - add_coord_to_grid_data_frames(self) - - def _drop_interconnect(self, interconnect): - """Trim data frames to only keep information pertaining to the user - defined interconnect(s). - - :param list interconnect: interconnect name(s). - """ - for key, value in self.__dict__.items(): - if key in ["sub", "bus2sub", "bus", "plant", "branch"]: - value.query("interconnect == @interconnect", inplace=True) - elif key == "gencost": - value["before"].query("interconnect == @interconnect", inplace=True) - elif key == "dcline": - value.query( - "from_interconnect == @interconnect &" - "to_interconnect == @interconnect", - inplace=True, - ) - self.id2zone = {k: self.id2zone[k] for k in self.bus.zone_id.unique()} - self.zone2id = {value: key for key, value in self.id2zone.items()} - - def storage_template(): """Get storage diff --git a/powersimdata/input/converter/__init__.py b/powersimdata/input/converter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/input/converter/csv_to_grid.py b/powersimdata/input/converter/csv_to_grid.py new file mode 100644 index 000000000..f14769dc2 --- /dev/null +++ b/powersimdata/input/converter/csv_to_grid.py @@ -0,0 +1,71 @@ +import os + +from powersimdata.input.abstract_grid import AbstractGrid +from powersimdata.input.converter.helpers import ( + add_coord_to_grid_data_frames, + add_zone_to_grid_data_frames, +) +from powersimdata.network.constants.model import model2region +from powersimdata.network.csv_reader import CSVReader + + +class FromCSV(AbstractGrid): + """Grid Builder for grid model enclosed in CSV files.""" + + def _set_data_loc(self, top_dirname): + """Sets data location. + + :param str top_dirname: name of directory enclosing data. + :raises IOError: if directory does not exist. + """ + data_loc = os.path.join(top_dirname, "data") + if os.path.isdir(data_loc) is False: + raise IOError("%s directory not found" % data_loc) + else: + self.data_loc = data_loc + + def _build(self, interconnect, grid_model): + """Build network. + + :param list interconnect: interconnect name(s). + :param str model: the grid model. + """ + reader = CSVReader(self.data_loc) + self.bus = reader.bus + self.plant = reader.plant + self.branch = reader.branch + self.dcline = reader.dcline + self.gencost["after"] = self.gencost["before"] = reader.gencost + self.sub = reader.sub + self.bus2sub = reader.bus2sub + self.id2zone = reader.zone["zone_name"].to_dict() + self.zone2id = {v: k for k, v in self.id2zone.items()} + + self._add_information() + + if model2region[grid_model] not in interconnect: + self._drop_interconnect(interconnect) + + def _add_information(self): + add_zone_to_grid_data_frames(self) + add_coord_to_grid_data_frames(self) + + def _drop_interconnect(self, interconnect): + """Trim data frames to only keep information pertaining to the user + defined interconnect(s). 
+ + :param list interconnect: interconnect name(s). + """ + for key, value in self.__dict__.items(): + if key in ["sub", "bus2sub", "bus", "plant", "branch"]: + value.query("interconnect == @interconnect", inplace=True) + elif key == "gencost": + value["before"].query("interconnect == @interconnect", inplace=True) + elif key == "dcline": + value.query( + "from_interconnect == @interconnect &" + "to_interconnect == @interconnect", + inplace=True, + ) + self.id2zone = {k: self.id2zone[k] for k in self.bus.zone_id.unique()} + self.zone2id = {value: key for key, value in self.id2zone.items()} diff --git a/powersimdata/input/converter/helpers.py b/powersimdata/input/converter/helpers.py new file mode 100644 index 000000000..346b3bb4e --- /dev/null +++ b/powersimdata/input/converter/helpers.py @@ -0,0 +1,108 @@ +import pandas as pd + + +def add_column_to_data_frame(data_frame, column_dict): + """Adds column(s) to data frame. Done inplace. + + :param pandas.DataFrame data_frame: input data frame + :param dict column_dict: column to be added. Keys are column name and + values a list of of values. + """ + for key, value in column_dict.items(): + data_frame[key] = value + + +def add_coord_to_grid_data_frames(grid): + """Adds longitude and latitude information to bus, plant and branch data + frames of grid instance. + + :param powersimdata.input.grid.Grid grid: grid instance. + """ + bus2coord = ( + pd.merge(grid.bus2sub[["sub_id"]], grid.sub[["lat", "lon"]], on="sub_id") + .set_index(grid.bus2sub.index) + .drop(columns="sub_id") + .to_dict() + ) + + def get_lat(idx): + return [bus2coord["lat"][i] for i in idx] + + def get_lon(idx): + return [bus2coord["lon"][i] for i in idx] + + extra_col_bus = {"lat": get_lat(grid.bus.index), "lon": get_lon(grid.bus.index)} + add_column_to_data_frame(grid.bus, extra_col_bus) + + extra_col_plant = { + "lat": get_lat(grid.plant.bus_id), + "lon": get_lon(grid.plant.bus_id), + } + add_column_to_data_frame(grid.plant, extra_col_plant) + + extra_col_branch = { + "from_lat": get_lat(grid.branch.from_bus_id), + "from_lon": get_lon(grid.branch.from_bus_id), + "to_lat": get_lat(grid.branch.to_bus_id), + "to_lon": get_lon(grid.branch.to_bus_id), + } + add_column_to_data_frame(grid.branch, extra_col_branch) + + +def add_zone_to_grid_data_frames(grid): + """Adds zone name/id to plant and branch data frames of grid instance. + + :param powersimdata.input.grid.Grid grid: grid instance. + """ + bus2zone = grid.bus.zone_id.to_dict() + + def get_zone_id(idx): + return [bus2zone[i] for i in idx] + + def get_zone_name(idx): + return [grid.id2zone[bus2zone[i]] for i in idx] + + extra_col_plant = { + "zone_id": get_zone_id(grid.plant.bus_id), + "zone_name": get_zone_name(grid.plant.bus_id), + } + add_column_to_data_frame(grid.plant, extra_col_plant) + + extra_col_branch = { + "from_zone_id": get_zone_id(grid.branch.from_bus_id), + "to_zone_id": get_zone_id(grid.branch.to_bus_id), + "from_zone_name": get_zone_name(grid.branch.from_bus_id), + "to_zone_name": get_zone_name(grid.branch.to_bus_id), + } + add_column_to_data_frame(grid.branch, extra_col_branch) + + +def add_interconnect_to_grid_data_frames(grid): + """Adds interconnect name to bus, branch, plant and dcline data frames of + grid instance. + + :param powersimdata.input.grid.Grid grid: grid instance. 
+ """ + bus2interconnect = grid.bus2sub.interconnect.to_dict() + + def get_interconnect(idx): + return [bus2interconnect[i] for i in idx] + + extra_col_bus = {"interconnect": get_interconnect(grid.bus.index)} + add_column_to_data_frame(grid.bus, extra_col_bus) + + extra_col_branch = {"interconnect": get_interconnect(grid.branch.from_bus_id)} + add_column_to_data_frame(grid.branch, extra_col_branch) + + extra_col_plant = {"interconnect": get_interconnect(grid.plant.bus_id)} + add_column_to_data_frame(grid.plant, extra_col_plant) + + extra_col_gencost = {"interconnect": get_interconnect(grid.plant.bus_id)} + add_column_to_data_frame(grid.gencost["before"], extra_col_gencost) + add_column_to_data_frame(grid.gencost["after"], extra_col_gencost) + + extra_col_dcline = { + "from_interconnect": get_interconnect(grid.dcline.from_bus_id), + "to_interconnect": get_interconnect(grid.dcline.to_bus_id), + } + add_column_to_data_frame(grid.dcline, extra_col_dcline) diff --git a/powersimdata/input/helpers.py b/powersimdata/input/helpers.py index a6d4e9687..cef6c872e 100644 --- a/powersimdata/input/helpers.py +++ b/powersimdata/input/helpers.py @@ -11,113 +11,6 @@ ) -def add_column_to_data_frame(data_frame, column_dict): - """Adds column(s) to data frame. Done inplace. - - :param pandas.DataFrame data_frame: input data frame - :param dict column_dict: column to be added. Keys are column name and - values a list of of values. - """ - for key, value in column_dict.items(): - data_frame[key] = value - - -def add_coord_to_grid_data_frames(grid): - """Adds longitude and latitude information to bus, plant and branch data - frames of grid instance. - - :param powersimdata.input.grid.Grid grid: grid instance. - """ - bus2coord = ( - pd.merge(grid.bus2sub[["sub_id"]], grid.sub[["lat", "lon"]], on="sub_id") - .set_index(grid.bus2sub.index) - .drop(columns="sub_id") - .to_dict() - ) - - def get_lat(idx): - return [bus2coord["lat"][i] for i in idx] - - def get_lon(idx): - return [bus2coord["lon"][i] for i in idx] - - extra_col_bus = {"lat": get_lat(grid.bus.index), "lon": get_lon(grid.bus.index)} - add_column_to_data_frame(grid.bus, extra_col_bus) - - extra_col_plant = { - "lat": get_lat(grid.plant.bus_id), - "lon": get_lon(grid.plant.bus_id), - } - add_column_to_data_frame(grid.plant, extra_col_plant) - - extra_col_branch = { - "from_lat": get_lat(grid.branch.from_bus_id), - "from_lon": get_lon(grid.branch.from_bus_id), - "to_lat": get_lat(grid.branch.to_bus_id), - "to_lon": get_lon(grid.branch.to_bus_id), - } - add_column_to_data_frame(grid.branch, extra_col_branch) - - -def add_zone_to_grid_data_frames(grid): - """Adds zone name/id to plant and branch data frames of grid instance. - - :param powersimdata.input.grid.Grid grid: grid instance. 
- """ - bus2zone = grid.bus.zone_id.to_dict() - - def get_zone_id(idx): - return [bus2zone[i] for i in idx] - - def get_zone_name(idx): - return [grid.id2zone[bus2zone[i]] for i in idx] - - extra_col_plant = { - "zone_id": get_zone_id(grid.plant.bus_id), - "zone_name": get_zone_name(grid.plant.bus_id), - } - add_column_to_data_frame(grid.plant, extra_col_plant) - - extra_col_branch = { - "from_zone_id": get_zone_id(grid.branch.from_bus_id), - "to_zone_id": get_zone_id(grid.branch.to_bus_id), - "from_zone_name": get_zone_name(grid.branch.from_bus_id), - "to_zone_name": get_zone_name(grid.branch.to_bus_id), - } - add_column_to_data_frame(grid.branch, extra_col_branch) - - -def add_interconnect_to_grid_data_frames(grid): - """Adds interconnect name to bus, branch, plant and dcline data frames of - grid instance. - - :param powersimdata.input.grid.Grid grid: grid instance. - """ - bus2interconnect = grid.bus2sub.interconnect.to_dict() - - def get_interconnect(idx): - return [bus2interconnect[i] for i in idx] - - extra_col_bus = {"interconnect": get_interconnect(grid.bus.index)} - add_column_to_data_frame(grid.bus, extra_col_bus) - - extra_col_branch = {"interconnect": get_interconnect(grid.branch.from_bus_id)} - add_column_to_data_frame(grid.branch, extra_col_branch) - - extra_col_plant = {"interconnect": get_interconnect(grid.plant.bus_id)} - add_column_to_data_frame(grid.plant, extra_col_plant) - - extra_col_gencost = {"interconnect": get_interconnect(grid.plant.bus_id)} - add_column_to_data_frame(grid.gencost["before"], extra_col_gencost) - add_column_to_data_frame(grid.gencost["after"], extra_col_gencost) - - extra_col_dcline = { - "from_interconnect": get_interconnect(grid.dcline.from_bus_id), - "to_interconnect": get_interconnect(grid.dcline.to_bus_id), - } - add_column_to_data_frame(grid.dcline, extra_col_dcline) - - def get_resources_in_grid(grid): """Get resources in grid. 
diff --git a/powersimdata/input/scenario_grid.py b/powersimdata/input/scenario_grid.py index e3379eaa0..ef6d697d6 100644 --- a/powersimdata/input/scenario_grid.py +++ b/powersimdata/input/scenario_grid.py @@ -8,7 +8,7 @@ from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.input import const from powersimdata.input.abstract_grid import AbstractGrid -from powersimdata.input.helpers import ( +from powersimdata.input.converter.helpers import ( add_coord_to_grid_data_frames, add_interconnect_to_grid_data_frames, add_zone_to_grid_data_frames, diff --git a/powersimdata/input/tests/test_grid.py b/powersimdata/input/tests/test_grid.py index e64af4842..24819dc23 100644 --- a/powersimdata/input/tests/test_grid.py +++ b/powersimdata/input/tests/test_grid.py @@ -4,8 +4,8 @@ import pandas as pd import pytest +from powersimdata.input.converter.helpers import add_column_to_data_frame from powersimdata.input.grid import Grid -from powersimdata.input.helpers import add_column_to_data_frame from powersimdata.input.scenario_grid import format_gencost, link INCORRECT_SOURCE = "invalid_source" diff --git a/powersimdata/network/hifld/model.py b/powersimdata/network/hifld/model.py index 237c34286..dfefae571 100644 --- a/powersimdata/network/hifld/model.py +++ b/powersimdata/network/hifld/model.py @@ -1,12 +1,12 @@ import os -from powersimdata.input.abstract_grid import AbstractGridCSV +from powersimdata.input.converter.csv_to_grid import FromCSV from powersimdata.network.constants.storage import get_storage from powersimdata.network.helpers import check_and_format_interconnect from powersimdata.network.model import ModelImmutables -class HIFLD(AbstractGridCSV): +class HIFLD(FromCSV): """HIFLD network. :param str/iterable interconnect: interconnect name(s). diff --git a/powersimdata/network/usa_tamu/model.py b/powersimdata/network/usa_tamu/model.py index 00ea3fcc4..966ae9a3c 100644 --- a/powersimdata/network/usa_tamu/model.py +++ b/powersimdata/network/usa_tamu/model.py @@ -1,12 +1,12 @@ import os -from powersimdata.input.abstract_grid import AbstractGridCSV +from powersimdata.input.converter.csv_to_grid import FromCSV from powersimdata.network.constants.storage import get_storage from powersimdata.network.helpers import check_and_format_interconnect from powersimdata.network.model import ModelImmutables -class TAMU(AbstractGridCSV): +class TAMU(FromCSV): """TAMU network. :param str/iterable interconnect: interconnect name(s). 
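Illustrative sketch of the module layout after this refactor, based only on the import paths shown in the diff above: the CSV-backed grid builder now lives in the converter package and the network classes derive from it.

    from powersimdata.input.converter.csv_to_grid import FromCSV
    from powersimdata.network.usa_tamu.model import TAMU

    network = TAMU("Western")     # TAMU (like HIFLD) is now a FromCSV subclass
    assert isinstance(network, FromCSV)
    network.build()               # unchanged entry point; _build() still reads the CSV files via CSVReader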
From 667370edccc05fd055a7a7b9102061fd2aa7e271 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 7 Jun 2022 17:00:55 -0700 Subject: [PATCH 39/59] refactor: move/rename logic used to build grid from REISE output --- .../reise_to_grid.py} | 14 ++++++++------ powersimdata/input/grid.py | 2 +- powersimdata/input/tests/test_grid.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) rename powersimdata/input/{scenario_grid.py => converter/reise_to_grid.py} (96%) diff --git a/powersimdata/input/scenario_grid.py b/powersimdata/input/converter/reise_to_grid.py similarity index 96% rename from powersimdata/input/scenario_grid.py rename to powersimdata/input/converter/reise_to_grid.py index ef6d697d6..153fccea1 100644 --- a/powersimdata/input/scenario_grid.py +++ b/powersimdata/input/converter/reise_to_grid.py @@ -16,8 +16,8 @@ from powersimdata.network.model import ModelImmutables -class ScenarioGrid(AbstractGrid): - """File reader for MAT files for scenarios which were run on the server.""" +class REISEConverter(AbstractGrid): + """Grid builder for MAT files generated by REISE and REISE.jl""" def __init__(self, filename): """Constructor. @@ -140,7 +140,7 @@ def _get_grid_model_from_scenario_list(source): return slm.get_scenario(scenario_number)["grid_model"] -class FromREISE(ScenarioGrid): +class FromREISE(REISEConverter): """MATLAB file reader, for MAT files created by REISE/MATPOWER""" def build(self): @@ -149,7 +149,7 @@ def build(self): add_information_to_model(self) -class FromREISEjl(ScenarioGrid): +class FromREISEjl(REISEConverter): """MATLAB file reader, for MAT files created (& converted) by REISE.jl""" def build(self): @@ -307,7 +307,8 @@ def parse_gencost_row(row): def add_information_to_model(grid): """Makes a standard grid. - :param powersimdata.input.ScenarioGrid grid: grid with missing information. + :param powersimdata.input.converter.reise_to_grid.REISEConverter grid: grid + produced by REISE/REISE.jl engines with missing information. """ add_interconnect_to_grid_data_frames(grid) @@ -320,7 +321,8 @@ def add_information_to_model(grid): def reindex_model(grid): """Reindex bus id columns. - :param powersimdata.input.scenario_grid.ScenarioGrid grid: grid + :param powersimdata.input.converter.reise_to_grid.REISEConverter grid: grid + produced by REISE engine to reindex. 
""" def reset_id(): diff --git a/powersimdata/input/grid.py b/powersimdata/input/grid.py index 9ada86359..cdb4ced8a 100644 --- a/powersimdata/input/grid.py +++ b/powersimdata/input/grid.py @@ -1,6 +1,6 @@ import os -from powersimdata.input.scenario_grid import FromREISE, FromREISEjl +from powersimdata.input.converter.reise_to_grid import FromREISE, FromREISEjl from powersimdata.network.constants.storage import storage from powersimdata.network.hifld.model import HIFLD from powersimdata.network.usa_tamu.model import TAMU diff --git a/powersimdata/input/tests/test_grid.py b/powersimdata/input/tests/test_grid.py index 24819dc23..de349fc46 100644 --- a/powersimdata/input/tests/test_grid.py +++ b/powersimdata/input/tests/test_grid.py @@ -5,8 +5,8 @@ import pytest from powersimdata.input.converter.helpers import add_column_to_data_frame +from powersimdata.input.converter.reise_to_grid import format_gencost, link from powersimdata.input.grid import Grid -from powersimdata.input.scenario_grid import format_gencost, link INCORRECT_SOURCE = "invalid_source" INCORRECT_ENGINE = "invalid_engine" From 73a88f01d299b1a513e1f383cc1464532bf002ba Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 7 Jun 2022 22:17:44 -0700 Subject: [PATCH 40/59] refactor: move tests --- .../input/converter/tests/__init__.py | 0 .../input/converter/tests/test_helpers.py | 299 ++++++++++++++++++ powersimdata/input/tests/test_grid.py | 298 ----------------- 3 files changed, 299 insertions(+), 298 deletions(-) create mode 100644 powersimdata/input/converter/tests/__init__.py create mode 100644 powersimdata/input/converter/tests/test_helpers.py diff --git a/powersimdata/input/converter/tests/__init__.py b/powersimdata/input/converter/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/powersimdata/input/converter/tests/test_helpers.py b/powersimdata/input/converter/tests/test_helpers.py new file mode 100644 index 000000000..f8fecdeb1 --- /dev/null +++ b/powersimdata/input/converter/tests/test_helpers.py @@ -0,0 +1,299 @@ +import numpy as np +import pandas as pd + +from powersimdata.input.converter.helpers import add_column_to_data_frame +from powersimdata.input.converter.reise_to_grid import format_gencost, link + + +def test_add_column_to_data_frame(): + df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) + column_to_add = {"c": [True, True, False], "d": ["one", 2, "three"]} + add_column_to_data_frame(df, column_to_add) + assert len(df.columns) == 4 + assert np.array_equal(df.c.values, [True, True, False]) + + +def test_format_gencost_polynomial_only_same_n(): + df_input = pd.DataFrame( + { + 0: [2, 2, 2], + 1: [0.0, 0.0, 0.0], + 2: [0.0, 0.0, 0.0], + 3: [4, 4, 4], + 4: [1.1, 1.2, 1.3], + 5: [2.7] * 3, + 6: [0.1, 0.2, 0.3], + 7: [1.0, 1.0, 2.0], + }, + index=[1, 2, 3], + ) + df_output = format_gencost(df_input) + assert np.array_equal( + df_output.columns, ["type", "startup", "shutdown", "n", "c3", "c2", "c1", "c0"] + ) + assert np.array_equal( + df_output.loc[1, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.1, 2.7, 1.1] + ) + + +def test_format_gencost_polynomial_only_different_n(): + df_input = pd.DataFrame( + { + 0: [2, 2, 2, 2], + 1: [0.0, 0.0, 0.0, 0.0], + 2: [0.0, 0.0, 0.0, 0.0], + 3: [4, 2, 3, 2], + 4: [1.1, 0.2, 1.1, 0.4], + 5: [2.7, 1.0, 0.3, 1.0], + 6: [0.1, 0.0, 2.0, 0.0], + 7: [1.0, 0.0, 0.0, 0.0], + }, + index=[1, 2, 3, 4], + ) + df_output = format_gencost(df_input) + assert np.array_equal( + df_output.columns, ["type", "startup", "shutdown", "n", "c3", "c2", "c1", "c0"] + ) + assert 
np.array_equal( + df_output.loc[1, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.1, 2.7, 1.1] + ) + assert np.array_equal( + df_output.loc[2, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.2, 0.0, 0.0] + ) + assert np.array_equal( + df_output.loc[3, ["c0", "c1", "c2", "c3"]].values, [2.0, 0.3, 1.1, 0.0] + ) + assert np.array_equal( + df_output.loc[4, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.4, 0.0, 0.0] + ) + + +def test_format_gencost_piece_wise_linear_only_same_n(): + df_input = pd.DataFrame( + { + 0: [1, 1, 1], + 1: [0.0, 0.0, 0.0], + 2: [0.0, 0.0, 0.0], + 3: [3, 3, 3], + 4: [1.0, 2.0, 3.0], + 5: [2.7, 2.1, 2.5], + 6: [2.0, 3.0, 4.0], + 7: [4.8, 5.4, 7.3], + 8: [3.0, 4.0, 5.0], + 9: [10.6, 9.4, 17.7], + }, + index=[1, 2, 3], + ) + df_output = format_gencost(df_input) + assert np.array_equal( + df_output.columns, + ["type", "startup", "shutdown", "n", "p1", "f1", "p2", "f2", "p3", "f3"], + ) + + +def test_format_gencost_piece_wise_linear_only_different_n(): + df_input = pd.DataFrame( + { + 0: [1, 1, 1], + 1: [0.0, 0.0, 0.0], + 2: [0.0, 0.0, 0.0], + 3: [4, 3, 2], + 4: [1.0, 2.0, 3.0], + 5: [2.7, 2.1, 2.5], + 6: [2.0, 3.0, 4.0], + 7: [4.8, 5.4, 7.3], + 8: [3.0, 4.0, 0.0], + 9: [10.6, 9.4, 0.0], + 10: [4.0, 0.0, 0.0], + 11: [15.1, 0.0, 0.0], + }, + index=[1, 2, 3], + ) + df_output = format_gencost(df_input) + assert np.array_equal( + df_output.columns, + [ + "type", + "startup", + "shutdown", + "n", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ) + assert np.array_equal( + df_output.loc[1, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, + [1.0, 2.7, 2.0, 4.8, 3.0, 10.6, 4.0, 15.1], + ) + assert np.array_equal( + df_output.loc[2, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, + [2.0, 2.1, 3.0, 5.4, 4.0, 9.4, 0.0, 0.0], + ) + assert np.array_equal( + df_output.loc[3, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, + [3.0, 2.5, 4.0, 7.3, 0.0, 0.0, 0.0, 0.0], + ) + + +def test_format_gencost_both_model_same_n(): + df_input = pd.DataFrame( + { + 0: [1, 2, 1, 2, 2], + 1: [0.0, 0.0, 0.0, 0.0, 0.0], + 2: [0.0, 0.0, 0.0, 0.0, 0.0], + 3: [4, 3, 2, 5, 2], + 4: [1.0, 1.3, 2.0, 2.8, 1.1], + 5: [2.7, 2.1, 2.5, 4.5, 6.4], + 6: [2.0, 3.8, 3.0, 7.3, 0.0], + 7: [4.8, 0.0, 7.3, 10.0, 0.0], + 8: [3.0, 0.0, 0.0, 14.3, 0.0], + 9: [10.6, 0.0, 0.0, 0.0, 0.0], + 10: [4.0, 0.0, 0.0, 0.0, 0.0], + 11: [15.1, 0.0, 0.0, 0.0, 0.0], + }, + index=[1, 2, 3, 4, 5], + ) + df_output = format_gencost(df_input) + assert np.array_equal( + df_output.columns, + [ + "type", + "startup", + "shutdown", + "n", + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ) + assert np.array_equal( + df_output.loc[ + 1, + [ + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ].values, + [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.7, 2.0, 4.8, 3.0, 10.6, 4.0, 15.1], + ) + assert np.array_equal( + df_output.loc[ + 2, + [ + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ].values, + [0.0, 0.0, 1.3, 2.1, 3.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + assert np.array_equal( + df_output.loc[ + 3, + [ + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ].values, + [0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.5, 3.0, 7.3, 0.0, 0.0, 0.0, 0.0], + ) + assert np.array_equal( + df_output.loc[ + 4, + [ + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", 
+ "f4", + ], + ].values, + [2.8, 4.5, 7.3, 10.0, 14.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + assert np.array_equal( + df_output.loc[ + 5, + [ + "c4", + "c3", + "c2", + "c1", + "c0", + "p1", + "f1", + "p2", + "f2", + "p3", + "f3", + "p4", + "f4", + ], + ].values, + [0.0, 0.0, 0.0, 1.1, 6.4, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + + +def test_link(): + keys = ["a", "b", "c", "d", "e"] + values = [1, 2, 3, 4, 5] + output = link(keys, values) + assert np.array_equal(list(output.keys()), keys) + assert np.array_equal(list(output.values()), values) + assert np.array_equal(output["a"], values[0]) + assert np.array_equal(output["c"], values[2]) diff --git a/powersimdata/input/tests/test_grid.py b/powersimdata/input/tests/test_grid.py index de349fc46..3df300d9d 100644 --- a/powersimdata/input/tests/test_grid.py +++ b/powersimdata/input/tests/test_grid.py @@ -1,11 +1,7 @@ import copy -import numpy as np -import pandas as pd import pytest -from powersimdata.input.converter.helpers import add_column_to_data_frame -from powersimdata.input.converter.reise_to_grid import format_gencost, link from powersimdata.input.grid import Grid INCORRECT_SOURCE = "invalid_source" @@ -22,305 +18,11 @@ def test_grid_incorrect_engine(): Grid(["USA"], engine=INCORRECT_ENGINE) -def test_add_column_to_data_frame(): - df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) - column_to_add = {"c": [True, True, False], "d": ["one", 2, "three"]} - add_column_to_data_frame(df, column_to_add) - assert len(df.columns) == 4 - assert np.array_equal(df.c.values, [True, True, False]) - - def test_grid_type(): g = Grid(["USA"]) assert isinstance(g, Grid) -def test_format_gencost_polynomial_only_same_n(): - df_input = pd.DataFrame( - { - 0: [2, 2, 2], - 1: [0.0, 0.0, 0.0], - 2: [0.0, 0.0, 0.0], - 3: [4, 4, 4], - 4: [1.1, 1.2, 1.3], - 5: [2.7] * 3, - 6: [0.1, 0.2, 0.3], - 7: [1.0, 1.0, 2.0], - }, - index=[1, 2, 3], - ) - df_output = format_gencost(df_input) - assert np.array_equal( - df_output.columns, ["type", "startup", "shutdown", "n", "c3", "c2", "c1", "c0"] - ) - assert np.array_equal( - df_output.loc[1, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.1, 2.7, 1.1] - ) - - -def test_format_gencost_polynomial_only_different_n(): - df_input = pd.DataFrame( - { - 0: [2, 2, 2, 2], - 1: [0.0, 0.0, 0.0, 0.0], - 2: [0.0, 0.0, 0.0, 0.0], - 3: [4, 2, 3, 2], - 4: [1.1, 0.2, 1.1, 0.4], - 5: [2.7, 1.0, 0.3, 1.0], - 6: [0.1, 0.0, 2.0, 0.0], - 7: [1.0, 0.0, 0.0, 0.0], - }, - index=[1, 2, 3, 4], - ) - df_output = format_gencost(df_input) - assert np.array_equal( - df_output.columns, ["type", "startup", "shutdown", "n", "c3", "c2", "c1", "c0"] - ) - assert np.array_equal( - df_output.loc[1, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.1, 2.7, 1.1] - ) - assert np.array_equal( - df_output.loc[2, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.2, 0.0, 0.0] - ) - assert np.array_equal( - df_output.loc[3, ["c0", "c1", "c2", "c3"]].values, [2.0, 0.3, 1.1, 0.0] - ) - assert np.array_equal( - df_output.loc[4, ["c0", "c1", "c2", "c3"]].values, [1.0, 0.4, 0.0, 0.0] - ) - - -def test_format_gencost_piece_wise_linear_only_same_n(): - df_input = pd.DataFrame( - { - 0: [1, 1, 1], - 1: [0.0, 0.0, 0.0], - 2: [0.0, 0.0, 0.0], - 3: [3, 3, 3], - 4: [1.0, 2.0, 3.0], - 5: [2.7, 2.1, 2.5], - 6: [2.0, 3.0, 4.0], - 7: [4.8, 5.4, 7.3], - 8: [3.0, 4.0, 5.0], - 9: [10.6, 9.4, 17.7], - }, - index=[1, 2, 3], - ) - df_output = format_gencost(df_input) - assert np.array_equal( - df_output.columns, - ["type", "startup", "shutdown", "n", "p1", "f1", "p2", "f2", "p3", "f3"], - ) - - 
-def test_format_gencost_piece_wise_linear_only_different_n(): - df_input = pd.DataFrame( - { - 0: [1, 1, 1], - 1: [0.0, 0.0, 0.0], - 2: [0.0, 0.0, 0.0], - 3: [4, 3, 2], - 4: [1.0, 2.0, 3.0], - 5: [2.7, 2.1, 2.5], - 6: [2.0, 3.0, 4.0], - 7: [4.8, 5.4, 7.3], - 8: [3.0, 4.0, 0.0], - 9: [10.6, 9.4, 0.0], - 10: [4.0, 0.0, 0.0], - 11: [15.1, 0.0, 0.0], - }, - index=[1, 2, 3], - ) - df_output = format_gencost(df_input) - assert np.array_equal( - df_output.columns, - [ - "type", - "startup", - "shutdown", - "n", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ) - assert np.array_equal( - df_output.loc[1, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, - [1.0, 2.7, 2.0, 4.8, 3.0, 10.6, 4.0, 15.1], - ) - assert np.array_equal( - df_output.loc[2, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, - [2.0, 2.1, 3.0, 5.4, 4.0, 9.4, 0.0, 0.0], - ) - assert np.array_equal( - df_output.loc[3, ["p1", "f1", "p2", "f2", "p3", "f3", "p4", "f4"]].values, - [3.0, 2.5, 4.0, 7.3, 0.0, 0.0, 0.0, 0.0], - ) - - -def test_format_gencost_both_model_same_n(): - df_input = pd.DataFrame( - { - 0: [1, 2, 1, 2, 2], - 1: [0.0, 0.0, 0.0, 0.0, 0.0], - 2: [0.0, 0.0, 0.0, 0.0, 0.0], - 3: [4, 3, 2, 5, 2], - 4: [1.0, 1.3, 2.0, 2.8, 1.1], - 5: [2.7, 2.1, 2.5, 4.5, 6.4], - 6: [2.0, 3.8, 3.0, 7.3, 0.0], - 7: [4.8, 0.0, 7.3, 10.0, 0.0], - 8: [3.0, 0.0, 0.0, 14.3, 0.0], - 9: [10.6, 0.0, 0.0, 0.0, 0.0], - 10: [4.0, 0.0, 0.0, 0.0, 0.0], - 11: [15.1, 0.0, 0.0, 0.0, 0.0], - }, - index=[1, 2, 3, 4, 5], - ) - df_output = format_gencost(df_input) - assert np.array_equal( - df_output.columns, - [ - "type", - "startup", - "shutdown", - "n", - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ) - assert np.array_equal( - df_output.loc[ - 1, - [ - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ].values, - [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.7, 2.0, 4.8, 3.0, 10.6, 4.0, 15.1], - ) - assert np.array_equal( - df_output.loc[ - 2, - [ - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ].values, - [0.0, 0.0, 1.3, 2.1, 3.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - ) - assert np.array_equal( - df_output.loc[ - 3, - [ - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ].values, - [0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.5, 3.0, 7.3, 0.0, 0.0, 0.0, 0.0], - ) - assert np.array_equal( - df_output.loc[ - 4, - [ - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ].values, - [2.8, 4.5, 7.3, 10.0, 14.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - ) - assert np.array_equal( - df_output.loc[ - 5, - [ - "c4", - "c3", - "c2", - "c1", - "c0", - "p1", - "f1", - "p2", - "f2", - "p3", - "f3", - "p4", - "f4", - ], - ].values, - [0.0, 0.0, 0.0, 1.1, 6.4, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - ) - - -def test_link(): - keys = ["a", "b", "c", "d", "e"] - values = [1, 2, 3, 4, 5] - output = link(keys, values) - assert np.array_equal(list(output.keys()), keys) - assert np.array_equal(list(output.values()), values) - assert np.array_equal(output["a"], values[0]) - assert np.array_equal(output["c"], values[2]) - - @pytest.fixture(scope="session") def base_texas(): return Grid(["Texas"]) From 8ee9905a66589fa705bcf2c09491cad33a20d7f5 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 7 Jun 2022 23:03:44 -0700 Subject: [PATCH 41/59] refactor: 
move/rename logic used to export to engines --- powersimdata/{export => input/exporter}/__init__.py | 0 powersimdata/{export => input/exporter}/export_to_pypsa.py | 0 .../exporter/export_to_reise.py} | 0 .../{export => input/exporter}/tests/test_export_to_pypsa.py | 2 +- powersimdata/scenario/execute.py | 2 +- 5 files changed, 2 insertions(+), 2 deletions(-) rename powersimdata/{export => input/exporter}/__init__.py (100%) rename powersimdata/{export => input/exporter}/export_to_pypsa.py (100%) rename powersimdata/{export/export_scenario_inputs.py => input/exporter/export_to_reise.py} (100%) rename powersimdata/{export => input/exporter}/tests/test_export_to_pypsa.py (94%) diff --git a/powersimdata/export/__init__.py b/powersimdata/input/exporter/__init__.py similarity index 100% rename from powersimdata/export/__init__.py rename to powersimdata/input/exporter/__init__.py diff --git a/powersimdata/export/export_to_pypsa.py b/powersimdata/input/exporter/export_to_pypsa.py similarity index 100% rename from powersimdata/export/export_to_pypsa.py rename to powersimdata/input/exporter/export_to_pypsa.py diff --git a/powersimdata/export/export_scenario_inputs.py b/powersimdata/input/exporter/export_to_reise.py similarity index 100% rename from powersimdata/export/export_scenario_inputs.py rename to powersimdata/input/exporter/export_to_reise.py diff --git a/powersimdata/export/tests/test_export_to_pypsa.py b/powersimdata/input/exporter/tests/test_export_to_pypsa.py similarity index 94% rename from powersimdata/export/tests/test_export_to_pypsa.py rename to powersimdata/input/exporter/tests/test_export_to_pypsa.py index c3d70e3a9..e6be0d221 100644 --- a/powersimdata/export/tests/test_export_to_pypsa.py +++ b/powersimdata/input/exporter/tests/test_export_to_pypsa.py @@ -2,7 +2,7 @@ import pytest -from powersimdata.export.export_to_pypsa import export_to_pypsa +from powersimdata.input.exporter.export_to_pypsa import export_to_pypsa from powersimdata.input.grid import Grid diff --git a/powersimdata/scenario/execute.py b/powersimdata/scenario/execute.py index fd15b6f1a..46b7c4b80 100644 --- a/powersimdata/scenario/execute.py +++ b/powersimdata/scenario/execute.py @@ -2,7 +2,7 @@ from scipy.io import savemat from powersimdata.data_access.context import Context -from powersimdata.export.export_scenario_inputs import export_case_mat +from powersimdata.input.exporter.export_to_reise import export_case_mat from powersimdata.input.grid import Grid from powersimdata.input.input_data import InputData from powersimdata.input.transform_grid import TransformGrid From e2c5ddc487999a6b6f5acec674353342b7688b60 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 9 Jun 2022 11:51:04 -0700 Subject: [PATCH 42/59] refactor: remove function used to add columns to data frames --- powersimdata/input/converter/helpers.py | 33 +++++++------------ .../input/converter/tests/test_helpers.py | 9 ----- 2 files changed, 11 insertions(+), 31 deletions(-) diff --git a/powersimdata/input/converter/helpers.py b/powersimdata/input/converter/helpers.py index 346b3bb4e..078d8e919 100644 --- a/powersimdata/input/converter/helpers.py +++ b/powersimdata/input/converter/helpers.py @@ -1,17 +1,6 @@ import pandas as pd -def add_column_to_data_frame(data_frame, column_dict): - """Adds column(s) to data frame. Done inplace. - - :param pandas.DataFrame data_frame: input data frame - :param dict column_dict: column to be added. Keys are column name and - values a list of of values. 
- """ - for key, value in column_dict.items(): - data_frame[key] = value - - def add_coord_to_grid_data_frames(grid): """Adds longitude and latitude information to bus, plant and branch data frames of grid instance. @@ -32,13 +21,13 @@ def get_lon(idx): return [bus2coord["lon"][i] for i in idx] extra_col_bus = {"lat": get_lat(grid.bus.index), "lon": get_lon(grid.bus.index)} - add_column_to_data_frame(grid.bus, extra_col_bus) + grid.bus = grid.bus.assign(**extra_col_bus) extra_col_plant = { "lat": get_lat(grid.plant.bus_id), "lon": get_lon(grid.plant.bus_id), } - add_column_to_data_frame(grid.plant, extra_col_plant) + grid.plant = grid.plant.assign(**extra_col_plant) extra_col_branch = { "from_lat": get_lat(grid.branch.from_bus_id), @@ -46,7 +35,7 @@ def get_lon(idx): "to_lat": get_lat(grid.branch.to_bus_id), "to_lon": get_lon(grid.branch.to_bus_id), } - add_column_to_data_frame(grid.branch, extra_col_branch) + grid.branch = grid.branch.assign(**extra_col_branch) def add_zone_to_grid_data_frames(grid): @@ -66,7 +55,7 @@ def get_zone_name(idx): "zone_id": get_zone_id(grid.plant.bus_id), "zone_name": get_zone_name(grid.plant.bus_id), } - add_column_to_data_frame(grid.plant, extra_col_plant) + grid.plant = grid.plant.assign(**extra_col_plant) extra_col_branch = { "from_zone_id": get_zone_id(grid.branch.from_bus_id), @@ -74,7 +63,7 @@ def get_zone_name(idx): "from_zone_name": get_zone_name(grid.branch.from_bus_id), "to_zone_name": get_zone_name(grid.branch.to_bus_id), } - add_column_to_data_frame(grid.branch, extra_col_branch) + grid.branch = grid.branch.assign(**extra_col_branch) def add_interconnect_to_grid_data_frames(grid): @@ -89,20 +78,20 @@ def get_interconnect(idx): return [bus2interconnect[i] for i in idx] extra_col_bus = {"interconnect": get_interconnect(grid.bus.index)} - add_column_to_data_frame(grid.bus, extra_col_bus) + grid.bus = grid.bus.assign(**extra_col_bus) extra_col_branch = {"interconnect": get_interconnect(grid.branch.from_bus_id)} - add_column_to_data_frame(grid.branch, extra_col_branch) + grid.branch = grid.branch.assign(**extra_col_branch) extra_col_plant = {"interconnect": get_interconnect(grid.plant.bus_id)} - add_column_to_data_frame(grid.plant, extra_col_plant) + grid.plant = grid.plant.assign(**extra_col_plant) extra_col_gencost = {"interconnect": get_interconnect(grid.plant.bus_id)} - add_column_to_data_frame(grid.gencost["before"], extra_col_gencost) - add_column_to_data_frame(grid.gencost["after"], extra_col_gencost) + grid.gencost["before"] = grid.gencost["before"].assign(**extra_col_gencost) + grid.gencost["after"] = grid.gencost["after"].assign(**extra_col_gencost) extra_col_dcline = { "from_interconnect": get_interconnect(grid.dcline.from_bus_id), "to_interconnect": get_interconnect(grid.dcline.to_bus_id), } - add_column_to_data_frame(grid.dcline, extra_col_dcline) + grid.dcline = grid.dcline.assign(**extra_col_dcline) diff --git a/powersimdata/input/converter/tests/test_helpers.py b/powersimdata/input/converter/tests/test_helpers.py index f8fecdeb1..06f7143e6 100644 --- a/powersimdata/input/converter/tests/test_helpers.py +++ b/powersimdata/input/converter/tests/test_helpers.py @@ -1,18 +1,9 @@ import numpy as np import pandas as pd -from powersimdata.input.converter.helpers import add_column_to_data_frame from powersimdata.input.converter.reise_to_grid import format_gencost, link -def test_add_column_to_data_frame(): - df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) - column_to_add = {"c": [True, True, False], "d": ["one", 2, "three"]} - 
add_column_to_data_frame(df, column_to_add) - assert len(df.columns) == 4 - assert np.array_equal(df.c.values, [True, True, False]) - - def test_format_gencost_polynomial_only_same_n(): df_input = pd.DataFrame( { From 9aed52fbd61a6152910f221d3d561ce42825e137 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Wed, 15 Jun 2022 16:58:40 +0200 Subject: [PATCH 43/59] fix: add conversion factor for co2 emissions when exporting to pypsa (#653) --- powersimdata/input/exporter/export_to_pypsa.py | 6 +++++- powersimdata/network/constants/plants.py | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/powersimdata/input/exporter/export_to_pypsa.py b/powersimdata/input/exporter/export_to_pypsa.py index 456175268..e512cdc9b 100644 --- a/powersimdata/input/exporter/export_to_pypsa.py +++ b/powersimdata/input/exporter/export_to_pypsa.py @@ -254,7 +254,11 @@ def export_to_pypsa( constants = grid.model_immutables.plants carriers["color"] = pd.Series(constants["type2color"]).reindex(cars) carriers["nice_name"] = pd.Series(constants["type2label"]).reindex(cars) - carriers["co2_emissions"] = pd.Series(constants["carbon_per_mwh"]).reindex(cars) + carriers["co2_emissions"] = ( + pd.Series(constants["carbon_per_mwh"]).div(1e3) + * pd.Series(constants["efficiency"]) + ).reindex(cars, fill_value=0) + generators["efficiency"] = generators.carrier.map(constants["efficiency"]).fillna(0) # now time-dependent if scenario: diff --git a/powersimdata/network/constants/plants.py b/powersimdata/network/constants/plants.py index 71cf647ad..22fb9910a 100644 --- a/powersimdata/network/constants/plants.py +++ b/powersimdata/network/constants/plants.py @@ -60,6 +60,16 @@ "ng": 469, } +# MWh_electric to MWh_thermal +# Source: Danish Energy Agency, "Technology Data - Generation of Energy and District Heating", +# https://ens.dk/sites/ens.dk/files/Analyser/technology_data_catalogue_for_el_and_dh.pdf +efficiency = { + "coal": 0.33, + "dfo": 0.35, + "ng": 0.41, # referring to OCGT values from DEA +} + + # MMBTu of fuel per hour to kilograms of CO2 per hour # Source: https://www.epa.gov/energy/greenhouse-gases-equivalencies-calculator-calculations-and-references # = (Heat rate MMBTu/h) * (kg C/mmbtu) * (mass ratio CO2/C) @@ -101,6 +111,7 @@ def get_plants(model): "carbon_resources", "renewable_resources", "clean_resources", + "efficiency", "carbon_per_mwh", "carbon_per_mmbtu", "nox_per_mwh", From cbc13ded1561096de3c35ebe544ec45ae6de9d83 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Tue, 28 Jun 2022 14:46:49 -0700 Subject: [PATCH 44/59] chore: use extra components PyPS-Eur files from zenodo (#655) --- powersimdata/network/europe_tub/model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/powersimdata/network/europe_tub/model.py b/powersimdata/network/europe_tub/model.py index 7a3a5cd1e..cc8cbe48d 100644 --- a/powersimdata/network/europe_tub/model.py +++ b/powersimdata/network/europe_tub/model.py @@ -56,11 +56,11 @@ def retrieve_data(self): def build(self): """Build network""" - path = os.path.join(self.data_loc, "networks") + path = os.path.join(self.data_loc, "networks", "elec_s") if self.reduction is None: - network = pypsa.Network(os.path.join(path, "elec_s.nc")) - elif os.path.exists(os.path.join(path, f"elec_s_{self.reduction}.nc")): - network = pypsa.Network(os.path.join(path, f"elec_s_{self.reduction}.nc")) + network = pypsa.Network(path + ".nc") + elif os.path.exists(path + f"_{self.reduction}_ec.nc"): + network = pypsa.Network(path + f"_{self.reduction}_ec.nc") else: raise 
ValueError( "Invalid Resolution. Choose among: None | 1024 | 512 | 256 | 128 | 37" From fc4da59642f6c175a4faf061b29b3e821ae48ab9 Mon Sep 17 00:00:00 2001 From: jenhagg <66005238+jenhagg@users.noreply.github.com> Date: Mon, 25 Jul 2022 15:11:32 -0700 Subject: [PATCH 45/59] chore: update fs-azureblob to support new storage account (#660) --- Pipfile | 2 +- Pipfile.lock | 1095 ++++++++++++++++++++++++---------------------- requirements.txt | 2 +- setup.cfg | 2 +- 4 files changed, 575 insertions(+), 526 deletions(-) diff --git a/Pipfile b/Pipfile index 53a9ca3c9..d6fa96f44 100644 --- a/Pipfile +++ b/Pipfile @@ -21,4 +21,4 @@ tqdm = "==4.29.1" requests = "~=2.25" fs = "==2.4.14" "fs.sshfs" = "*" -fs-azureblob = "*" +fs-azureblob = ">=0.2.1" diff --git a/Pipfile.lock b/Pipfile.lock index 0f70dfdee..8c2e0a8ab 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "29c8b39573ea573de449402e5c9e687ecacbf39fe479188b19df88c86ea009a7" + "sha256": "bceba796a42dc729b6102499f9d52bf4c4ecc501db76051d720de3b0f42a8639" }, "pipfile-spec": 6, "requires": {}, @@ -23,18 +23,27 @@ }, "azure-core": { "hashes": [ - "sha256:39c5d59d04209bb70a1a7ee879cef05d07bc76472cd3fb5eaa2e607a90d312bb", - "sha256:f48a640affa59fa45ac770565b3bead4c4f834242d16983c1ae2bb173a4b8a6d" + "sha256:0f3a20d245659bf81fb3670070a5410c8d4a43298d5a981e62dce393000a9084", + "sha256:a76856fa83efe1925a4fd917dc179c7daa15917dd71da2774833fa82a96f3dfa" ], - "markers": "python_version >= '3.6'", - "version": "==1.24.1" + "markers": "python_full_version >= '3.6.0'", + "version": "==1.24.2" }, "azure-storage-blob": { "hashes": [ - "sha256:e74c2c49fd04b80225f5b9734f1dbd417d89f280abfedccced3ac21509e1659d", - "sha256:eb37b50ddfb6e558b29f6c8c03b0666514e55d6170bf4624e7261a3af93c6401" + "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", + "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==12.13.0" + }, + "azure-storage-file-datalake": { + "hashes": [ + "sha256:12e6306e5efb5ca28e0ccd9fa79a2c61acd589866d6109fe5601b18509da92f4", + "sha256:b6cf5733fe794bf3c866efbe3ce1941409e35b6b125028ac558b436bf90f2de7" ], - "version": "==12.8.1" + "markers": "python_full_version >= '3.6.0'", + "version": "==12.8.0" }, "bcrypt": { "hashes": [ @@ -50,107 +59,121 @@ "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40", "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==3.2.2" }, "certifi": { "hashes": [ - "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", - "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" + "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", + "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" ], - "markers": "python_version >= '3.6'", - "version": "==2022.5.18.1" + "markers": "python_full_version >= '3.6.0'", + "version": "==2022.6.15" }, "cffi": { "hashes": [ - "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3", - "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2", - "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636", - "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20", - "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728", - 
"sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27", - "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66", - "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443", - "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0", - "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7", - "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39", - "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605", - "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a", - "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37", - "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029", - "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139", - "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc", - "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df", - "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14", - "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880", - "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2", - "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a", - "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e", - "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474", - "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024", - "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8", - "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0", - "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e", - "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a", - "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e", - "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032", - "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6", - "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e", - "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b", - "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e", - "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954", - "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962", - "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c", - "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4", - "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55", - "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962", - "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023", - "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c", - "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6", - "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8", - "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382", - "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7", - "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc", - "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997", - "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796" - ], - "version": "==1.15.0" + 
"sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5", + "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef", + "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104", + "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426", + "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405", + "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375", + "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a", + "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e", + "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc", + "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf", + "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185", + "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497", + "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3", + "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35", + "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c", + "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83", + "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21", + "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca", + "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984", + "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac", + "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd", + "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee", + "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a", + "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2", + "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192", + "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7", + "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585", + "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f", + "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e", + "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27", + "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b", + "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e", + "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e", + "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d", + "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c", + "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415", + "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82", + "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02", + "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314", + "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325", + "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c", + "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3", + "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914", + "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045", + "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d", + "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9", + 
"sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5", + "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2", + "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c", + "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3", + "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2", + "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8", + "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d", + "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d", + "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9", + "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162", + "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76", + "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4", + "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e", + "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9", + "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6", + "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b", + "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01", + "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0" + ], + "version": "==1.15.1" }, "charset-normalizer": { "hashes": [ - "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", - "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" + "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", + "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" ], - "markers": "python_version >= '3'", - "version": "==2.0.12" + "markers": "python_full_version >= '3.6.0'", + "version": "==2.1.0" }, "cryptography": { "hashes": [ - "sha256:093cb351031656d3ee2f4fa1be579a8c69c754cf874206be1d4cf3b542042804", - "sha256:0cc20f655157d4cfc7bada909dc5cc228211b075ba8407c46467f63597c78178", - "sha256:1b9362d34363f2c71b7853f6251219298124aa4cc2075ae2932e64c91a3e2717", - "sha256:1f3bfbd611db5cb58ca82f3deb35e83af34bb8cf06043fa61500157d50a70982", - "sha256:2bd1096476aaac820426239ab534b636c77d71af66c547b9ddcd76eb9c79e004", - "sha256:31fe38d14d2e5f787e0aecef831457da6cec68e0bb09a35835b0b44ae8b988fe", - "sha256:3b8398b3d0efc420e777c40c16764d6870bcef2eb383df9c6dbb9ffe12c64452", - "sha256:3c81599befb4d4f3d7648ed3217e00d21a9341a9a688ecdd615ff72ffbed7336", - "sha256:419c57d7b63f5ec38b1199a9521d77d7d1754eb97827bbb773162073ccd8c8d4", - "sha256:46f4c544f6557a2fefa7ac8ac7d1b17bf9b647bd20b16decc8fbcab7117fbc15", - "sha256:471e0d70201c069f74c837983189949aa0d24bb2d751b57e26e3761f2f782b8d", - "sha256:59b281eab51e1b6b6afa525af2bd93c16d49358404f814fe2c2410058623928c", - "sha256:731c8abd27693323b348518ed0e0705713a36d79fdbd969ad968fbef0979a7e0", - "sha256:95e590dd70642eb2079d280420a888190aa040ad20f19ec8c6e097e38aa29e06", - "sha256:a68254dd88021f24a68b613d8c51d5c5e74d735878b9e32cc0adf19d1f10aaf9", - "sha256:a7d5137e556cc0ea418dca6186deabe9129cee318618eb1ffecbd35bee55ddc1", - "sha256:aeaba7b5e756ea52c8861c133c596afe93dd716cbcacae23b80bc238202dc023", - "sha256:dc26bb134452081859aa21d4990474ddb7e863aa39e60d1592800a8865a702de", - "sha256:e53258e69874a306fcecb88b7534d61820db8a98655662a3dd2ec7f1afd9132f", - "sha256:ef15c2df7656763b4ff20a9bc4381d8352e6640cfeb95c2972c38ef508e75181", - "sha256:f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e", - 
"sha256:f8ec91983e638a9bcd75b39f1396e5c0dc2330cbd9ce4accefe68717e6779e0a" - ], - "markers": "python_version >= '3.6'", - "version": "==37.0.2" + "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59", + "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596", + "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3", + "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5", + "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab", + "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884", + "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82", + "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b", + "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441", + "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa", + "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d", + "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b", + "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a", + "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6", + "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157", + "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280", + "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282", + "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67", + "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8", + "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046", + "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327", + "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==37.0.4" }, "fs": { "hashes": [ @@ -162,11 +185,11 @@ }, "fs-azureblob": { "hashes": [ - "sha256:485f9669ac9a212a64391012c2f1689930f408f13e3d00a83a8c4fa8ce97b5c5", - "sha256:812049faff88d465130b6e51ea2cb0d4aa7fa5c4baa274447cdc0bf58a2c76bb" + "sha256:12536fc7ea5ac2a5b831d87b997475296e893006746ee2a653c56ee8ed22b0be", + "sha256:55245b1ff9b5fe448aec13dfa600552d5773f9a6504dcaba80d2618299659c1e" ], "index": "pypi", - "version": "==0.1.0" + "version": "==0.2.1" }, "fs.sshfs": { "hashes": [ @@ -181,7 +204,7 @@ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '3'", + "markers": "python_version >= '3.5'", "version": "==3.3" }, "isodate": { @@ -193,79 +216,82 @@ }, "msrest": { "hashes": [ - "sha256:72661bc7bedc2dc2040e8f170b6e9ef226ee6d3892e01affd4d26b06474d68d8", - "sha256:c840511c845330e96886011a236440fafc2c9aff7b2df9c0a92041ee2dee3782" + "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", + "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9" ], - "version": "==0.6.21" + "markers": "python_full_version >= '3.6.0'", + "version": "==0.7.1" }, "networkx": { "hashes": [ - "sha256:1a1e8fe052cc1b4e0339b998f6795099562a264a13a5af7a32cad45ab9d4e126", - "sha256:4a52cf66aed221955420e11b3e2e05ca44196b4829aab9576d4d439212b0a14f" + "sha256:15a7b81a360791c458c55a417418ea136c13378cfdc06a2dcdc12bd2f9cf09c1", + "sha256:a762f4b385692d9c3a6f2912d058d76d29a827deaedf9e63ed14d397b8030687" ], "index": "pypi", - "version": "==2.8" + "version": "==2.8.5" }, "numpy": { "hashes": [ - 
"sha256:07a8c89a04997625236c5ecb7afe35a02af3896c8aa01890a849913a2309c676", - "sha256:08d9b008d0156c70dc392bb3ab3abb6e7a711383c3247b410b39962263576cd4", - "sha256:201b4d0552831f7250a08d3b38de0d989d6f6e4658b709a02a73c524ccc6ffce", - "sha256:2c10a93606e0b4b95c9b04b77dc349b398fdfbda382d2a39ba5a822f669a0123", - "sha256:3ca688e1b9b95d80250bca34b11a05e389b1420d00e87a0d12dc45f131f704a1", - "sha256:48a3aecd3b997bf452a2dedb11f4e79bc5bfd21a1d4cc760e703c31d57c84b3e", - "sha256:568dfd16224abddafb1cbcce2ff14f522abe037268514dd7e42c6776a1c3f8e5", - "sha256:5bfb1bb598e8229c2d5d48db1860bcf4311337864ea3efdbe1171fb0c5da515d", - "sha256:639b54cdf6aa4f82fe37ebf70401bbb74b8508fddcf4797f9fe59615b8c5813a", - "sha256:8251ed96f38b47b4295b1ae51631de7ffa8260b5b087808ef09a39a9d66c97ab", - "sha256:92bfa69cfbdf7dfc3040978ad09a48091143cffb778ec3b03fa170c494118d75", - "sha256:97098b95aa4e418529099c26558eeb8486e66bd1e53a6b606d684d0c3616b168", - "sha256:a3bae1a2ed00e90b3ba5f7bd0a7c7999b55d609e0c54ceb2b076a25e345fa9f4", - "sha256:c34ea7e9d13a70bf2ab64a2532fe149a9aced424cd05a2c4ba662fd989e3e45f", - "sha256:dbc7601a3b7472d559dc7b933b18b4b66f9aa7452c120e87dfb33d02008c8a18", - "sha256:e7927a589df200c5e23c57970bafbd0cd322459aa7b1ff73b7c2e84d6e3eae62", - "sha256:f8c1f39caad2c896bc0018f699882b345b2a63708008be29b1f355ebf6f933fe", - "sha256:f950f8845b480cffe522913d35567e29dd381b0dc7e4ce6a4a9f9156417d2430", - "sha256:fade0d4f4d292b6f39951b6836d7a3c7ef5b2347f3c420cd9820a1d90d794802", - "sha256:fdf3c08bce27132395d3c3ba1503cac12e17282358cb4bddc25cc46b0aca07aa" + "sha256:1408c3527a74a0209c781ac82bde2182b0f0bf54dea6e6a363fe0cc4488a7ce7", + "sha256:173f28921b15d341afadf6c3898a34f20a0569e4ad5435297ba262ee8941e77b", + "sha256:1865fdf51446839ca3fffaab172461f2b781163f6f395f1aed256b1ddc253622", + "sha256:3119daed207e9410eaf57dcf9591fdc68045f60483d94956bee0bfdcba790953", + "sha256:35590b9c33c0f1c9732b3231bb6a72d1e4f77872390c47d50a615686ae7ed3fd", + "sha256:37e5ebebb0eb54c5b4a9b04e6f3018e16b8ef257d26c8945925ba8105008e645", + "sha256:37ece2bd095e9781a7156852e43d18044fd0d742934833335599c583618181b9", + "sha256:3ab67966c8d45d55a2bdf40701536af6443763907086c0a6d1232688e27e5447", + "sha256:47f10ab202fe4d8495ff484b5561c65dd59177949ca07975663f4494f7269e3e", + "sha256:55df0f7483b822855af67e38fb3a526e787adf189383b4934305565d71c4b148", + "sha256:5d732d17b8a9061540a10fda5bfeabca5785700ab5469a5e9b93aca5e2d3a5fb", + "sha256:68b69f52e6545af010b76516f5daaef6173e73353e3295c5cb9f96c35d755641", + "sha256:7e8229f3687cdadba2c4faef39204feb51ef7c1a9b669247d49a24f3e2e1617c", + "sha256:8002574a6b46ac3b5739a003b5233376aeac5163e5dcd43dd7ad062f3e186129", + "sha256:876f60de09734fbcb4e27a97c9a286b51284df1326b1ac5f1bf0ad3678236b22", + "sha256:9ce242162015b7e88092dccd0e854548c0926b75c7924a3495e02c6067aba1f5", + "sha256:a35c4e64dfca659fe4d0f1421fc0f05b8ed1ca8c46fb73d9e5a7f175f85696bb", + "sha256:aeba539285dcf0a1ba755945865ec61240ede5432df41d6e29fab305f4384db2", + "sha256:b15c3f1ed08df4980e02cc79ee058b788a3d0bef2fb3c9ca90bb8cbd5b8a3a04", + "sha256:c2f91f88230042a130ceb1b496932aa717dcbd665350beb821534c5c7e15881c", + "sha256:d748ef349bfef2e1194b59da37ed5a29c19ea8d7e6342019921ba2ba4fd8b624", + "sha256:e0d7447679ae9a7124385ccf0ea990bb85bb869cef217e2ea6c844b6a6855073" ], "index": "pypi", - "version": "==1.22.3" + "version": "==1.23.1" }, "oauthlib": { "hashes": [ "sha256:23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2", "sha256:6db33440354787f9b7f3a6dbd4febf5d0f93758354060e802f6c06cb493022fe" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version 
>= '3.6.0'", "version": "==3.2.0" }, "pandas": { "hashes": [ - "sha256:0010771bd9223f7afe5f051eb47c4a49534345dfa144f2f5470b27189a4dd3b5", - "sha256:061609334a8182ab500a90fe66d46f6f387de62d3a9cb9aa7e62e3146c712167", - "sha256:09d8be7dd9e1c4c98224c4dfe8abd60d145d934e9fc1f5f411266308ae683e6a", - "sha256:295872bf1a09758aba199992c3ecde455f01caf32266d50abc1a073e828a7b9d", - "sha256:3228198333dd13c90b6434ddf61aa6d57deaca98cf7b654f4ad68a2db84f8cfe", - "sha256:385c52e85aaa8ea6a4c600a9b2821181a51f8be0aee3af6f2dcb41dafc4fc1d0", - "sha256:51649ef604a945f781105a6d2ecf88db7da0f4868ac5d45c51cb66081c4d9c73", - "sha256:5586cc95692564b441f4747c47c8a9746792e87b40a4680a2feb7794defb1ce3", - "sha256:5a206afa84ed20e07603f50d22b5f0db3fb556486d8c2462d8bc364831a4b417", - "sha256:5b79af3a69e5175c6fa7b4e046b21a646c8b74e92c6581a9d825687d92071b51", - "sha256:5c54ea4ef3823108cd4ec7fb27ccba4c3a775e0f83e39c5e17f5094cb17748bc", - "sha256:8c5bf555b6b0075294b73965adaafb39cf71c312e38c5935c93d78f41c19828a", - "sha256:92bc1fc585f1463ca827b45535957815b7deb218c549b7c18402c322c7549a12", - "sha256:95c1e422ced0199cf4a34385ff124b69412c4bc912011ce895582bee620dfcaa", - "sha256:b8134651258bce418cb79c71adeff0a44090c98d955f6953168ba16cc285d9f7", - "sha256:be67c782c4f1b1f24c2f16a157e12c2693fd510f8df18e3287c77f33d124ed07", - "sha256:c072c7f06b9242c855ed8021ff970c0e8f8b10b35e2640c657d2a541c5950f59", - "sha256:d0d4f13e4be7ce89d7057a786023c461dd9370040bdb5efa0a7fe76b556867a0", - "sha256:df82739e00bb6daf4bba4479a40f38c718b598a84654cbd8bb498fd6b0aa8c16", - "sha256:f549097993744ff8c41b5e8f2f0d3cbfaabe89b4ae32c8c08ead6cc535b80139", - "sha256:ff08a14ef21d94cdf18eef7c569d66f2e24e0bc89350bcd7d243dd804e3b5eb2" + "sha256:07238a58d7cbc8a004855ade7b75bbd22c0db4b0ffccc721556bab8a095515f6", + "sha256:0daf876dba6c622154b2e6741f29e87161f844e64f84801554f879d27ba63c0d", + "sha256:16ad23db55efcc93fa878f7837267973b61ea85d244fc5ff0ccbcfa5638706c5", + "sha256:1d9382f72a4f0e93909feece6fef5500e838ce1c355a581b3d8f259839f2ea76", + "sha256:24ea75f47bbd5574675dae21d51779a4948715416413b30614c1e8b480909f81", + "sha256:2893e923472a5e090c2d5e8db83e8f907364ec048572084c7d10ef93546be6d1", + "sha256:2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c", + "sha256:41fc406e374590a3d492325b889a2686b31e7a7780bec83db2512988550dadbf", + "sha256:48350592665ea3cbcd07efc8c12ff12d89be09cd47231c7925e3b8afada9d50d", + "sha256:605d572126eb4ab2eadf5c59d5d69f0608df2bf7bcad5c5880a47a20a0699e3e", + "sha256:6dfbf16b1ea4f4d0ee11084d9c026340514d1d30270eaa82a9f1297b6c8ecbf0", + "sha256:6f803320c9da732cc79210d7e8cc5c8019aad512589c910c66529eb1b1818230", + "sha256:721a3dd2f06ef942f83a819c0f3f6a648b2830b191a72bbe9451bcd49c3bd42e", + "sha256:755679c49460bd0d2f837ab99f0a26948e68fa0718b7e42afbabd074d945bf84", + "sha256:78b00429161ccb0da252229bcda8010b445c4bf924e721265bec5a6e96a92e92", + "sha256:958a0588149190c22cdebbc0797e01972950c927a11a900fe6c2296f207b1d6f", + "sha256:a3924692160e3d847e18702bb048dc38e0e13411d2b503fecb1adf0fcf950ba4", + "sha256:d51674ed8e2551ef7773820ef5dab9322be0828629f2cbf8d1fc31a0c4fed640", + "sha256:d5ebc990bd34f4ac3c73a2724c2dcc9ee7bf1ce6cf08e87bb25c6ad33507e318", + "sha256:d6c0106415ff1a10c326c49bc5dd9ea8b9897a6ca0c8688eb9c30ddec49535ef", + "sha256:e48fbb64165cda451c06a0f9e4c7a16b534fcabd32546d531b3c240ce2844112" ], "index": "pypi", - "version": "==1.4.2" + "version": "==1.4.3" }, "paramiko": { "hashes": [ @@ -304,7 +330,7 @@ "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", 
"sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==1.5.0" }, "python-dateutil": { @@ -324,11 +350,11 @@ }, "requests": { "hashes": [ - "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", - "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" ], "index": "pypi", - "version": "==2.27.1" + "version": "==2.28.1" }, "requests-oauthlib": { "hashes": [ @@ -340,40 +366,40 @@ }, "scipy": { "hashes": [ - "sha256:011d4386b53b933142f58a652aa0f149c9b9242abd4f900b9f4ea5fbafc86b89", - "sha256:16e09ef68b352d73befa8bcaf3ebe25d3941fe1a58c82909d5589856e6bc8174", - "sha256:31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd", - "sha256:38aa39b6724cb65271e469013aeb6f2ce66fd44f093e241c28a9c6bc64fd79ed", - "sha256:3d573228c10a3a8c32b9037be982e6440e411b443a6267b067cac72f690b8d56", - "sha256:3d9dd6c8b93a22bf9a3a52d1327aca7e092b1299fb3afc4f89e8eba381be7b59", - "sha256:559a8a4c03a5ba9fe3232f39ed24f86457e4f3f6c0abbeae1fb945029f092720", - "sha256:5e73343c5e0d413c1f937302b2e04fb07872f5843041bcfd50699aef6e95e399", - "sha256:723b9f878095ed994756fa4ee3060c450e2db0139c5ba248ee3f9628bd64e735", - "sha256:87b01c7d5761e8a266a0fbdb9d88dcba0910d63c1c671bdb4d99d29f469e9e03", - "sha256:8f4d059a97b29c91afad46b1737274cb282357a305a80bdd9e8adf3b0ca6a3f0", - "sha256:92b2c2af4183ed09afb595709a8ef5783b2baf7f41e26ece24e1329c109691a7", - "sha256:937d28722f13302febde29847bbe554b89073fbb924a30475e5ed7b028898b5f", - "sha256:a279e27c7f4566ef18bab1b1e2c37d168e365080974758d107e7d237d3f0f484", - "sha256:ad5be4039147c808e64f99c0e8a9641eb5d2fa079ff5894dcd8240e94e347af4", - "sha256:ae3e327da323d82e918e593460e23babdce40d7ab21490ddf9fc06dec6b91a18", - "sha256:bb7088e89cd751acf66195d2f00cf009a1ea113f3019664032d9075b1e727b6c", - "sha256:c17a1878d00a5dd2797ccd73623ceca9d02375328f6218ee6d921e1325e61aff", - "sha256:c2bae431d127bf0b1da81fc24e4bba0a84d058e3a96b9dd6475dfcb3c5e8761e", - "sha256:de2e80ee1d925984c2504812a310841c241791c5279352be4707cdcd7c255039", - "sha256:e6f0cd9c0bd374ef834ee1e0f0999678d49dcc400ea6209113d81528958f97c7", - "sha256:f3720d0124aced49f6f2198a6900304411dbbeed12f56951d7c66ebef05e3df6", - "sha256:f4a6d3b9f9797eb2d43938ac2c5d96d02aed17ef170c8b38f11798717523ddba" + "sha256:2454ed30f47dda09c07057e4da368215836e0b5f9c4ce1b81fd96e95b1128fa7", + "sha256:2a3273724612819da49f975ee3cf14b9910aa0eaab2e23eea1d502702c939a48", + "sha256:320fa84c502b9aa77c4cfde2d973fa982b71ff06460d983904695f2b9d722580", + "sha256:32c954e00e89c942611e327cbfb9e868228b55068983a18b6d87a956b76db891", + "sha256:3eef55fb77e64437cc600a75c69f01902e385cfceae0bf3761ab1f99fb011a9a", + "sha256:3f165c2074558e76f8e7e70e3fb2cabef2408936645327b8b0e906bf247cc728", + "sha256:489d095a6f443aeeaea0e69bb4dfacceb8850870d33df9209fad930ea79639a5", + "sha256:6997d0d2cb1bcc57d9ea3d607c5f1227f4abe7dd15807911071825de06aafb15", + "sha256:73cc5b3392982c2bcdf6ee5c129b45eda257a18752655281942febc4aa088d76", + "sha256:7c28481580f5778e7ae788d4219026b2cd62544ff379612423f4dbcb7ad9c46f", + "sha256:93114c9292841406e27c0fe2b5c00d2de1cbd1ec3a4343c8ecf961cce17872c5", + "sha256:9704b7362b2e8cab80c273c07d3a6dacc0270c8e71f88bef052fa7b4ed280505", + "sha256:a13eb973be297e64a8a6aa1537820357cd7810aa5bae1d9076531f3ba63fec34", + 
"sha256:b3b14c1d6c79c1c8edbae120c4b87c863907805e187e17a38de5f72f1735769f", + "sha256:b67129931ca84fc8f0b9f1224ad88b19f2a4e4cbb98d54af254ed1ee0f423a07", + "sha256:b99872fce4a87ce4a01c21c0f8c9d85bf21a21ac4e5c74a50b94143850a56e0d", + "sha256:c24930b712cd7dc3a272776ea4150b0c4a2acc5a2868ece6e70acce5b8993820", + "sha256:d1b9120bf5e68d64de219a90f337a56de7f2e55c3c23fe85af785a6ff5932d4e", + "sha256:ea6659b8360184883e5193acc5de2c2277bc62bccabb953d48d9fc57ad228c5a", + "sha256:f51b051ce3c66cd529ae07c0af87f2804f54ecf14a5fbded0021a7b4579e94ee", + "sha256:fc90b11627641bf89713496d0e6f5b391ba70c3ecf2634fa8c697c97b3ba038e", + "sha256:fc9ba93ddb04fac40fef8f53a1d752da75492ec17cbf5c451b1102e257fd39b8", + "sha256:ffefbadde5afdc8c3003b77e1cc7845a996c8129a2012eff1b3b78074cb25ef9" ], "index": "pypi", - "version": "==1.8.0" + "version": "==1.9.0rc3" }, "setuptools": { "hashes": [ - "sha256:68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", - "sha256:a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7" + "sha256:0d33c374d41c7863419fc8f6c10bfe25b7b498aa34164d135c622e52580c6b16", + "sha256:c04b44a57a6265fe34a4a444e965884716d34bae963119a76353434d6f18e450" ], "markers": "python_version >= '3.7'", - "version": "==62.3.2" + "version": "==63.2.0" }, "six": { "hashes": [ @@ -393,19 +419,19 @@ }, "typing-extensions": { "hashes": [ - "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708", - "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376" + "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02", + "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6" ], "markers": "python_version >= '3.7'", - "version": "==4.2.0" + "version": "==4.3.0" }, "urllib3": { "hashes": [ - "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", - "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e" + "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc", + "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.9" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", + "version": "==1.26.11" } }, "develop": { @@ -419,84 +445,81 @@ }, "black": { "hashes": [ - "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b", - "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176", - "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09", - "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a", - "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015", - "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79", - "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb", - "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20", - "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464", - "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968", - "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82", - "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21", - "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0", - "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265", - 
"sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b", - "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a", - "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72", - "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce", - "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0", - "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a", - "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163", - "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad", - "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d" + "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90", + "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c", + "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78", + "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4", + "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee", + "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e", + "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e", + "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6", + "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9", + "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c", + "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256", + "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f", + "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2", + "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c", + "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b", + "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807", + "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf", + "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def", + "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad", + "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d", + "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849", + "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69", + "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666" ], "index": "pypi", - "version": "==22.3.0" + "version": "==22.6.0" }, "certifi": { "hashes": [ - "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", - "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" + "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", + "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" ], - "markers": "python_version >= '3.6'", - "version": "==2022.5.18.1" + "markers": "python_full_version >= '3.6.0'", + "version": "==2022.6.15" }, "cftime": { "hashes": [ - "sha256:003abc800933a39d6f4ecec113f401aef9961642a4769aa1099ed34e0c1c3d46", - "sha256:09970124c0a8c84a66e34e8655c42f0254e1c29d73a766e71c9a871ce57f125b", - "sha256:1187fa1107974fe374f90936e39d2e09ac7cb4ffcd3b31314898684a2cebba87", - "sha256:13103e6650beea6552316bd5825d6aa3b7e98f5b8115026df4826798dff9f741", - "sha256:13e92118fa3606b2a1d95bbe1a18a0a260ba85704d9ff061e9599b0e745bb4a9", - "sha256:1888f0c0ab37d6ee23d07e74e51dcaae136da48784ceaa5d078115aa2e2b8b92", - 
"sha256:1c56556900b9ae3a5b4c9705699d78f865994e5c3c92cd230d54c89d669b88f0", - "sha256:1f43f1c839f5579bf045f13b0d2deb15048d314d209039664003ae1bf2544898", - "sha256:2da21d8626a7f549eaaae7d314110133914fa24726e4e7caab2775627cb7361a", - "sha256:4c608937082980e0905657ec2475b3ff2a2fc89255e185f44076da482102f617", - "sha256:4d39b666284c78726eb7ea8b4ad29aacd387e912177721b3eb9bdc508e2b86fa", - "sha256:517c50e29b3c141919d83bf3782892e4a4144034b63722e7a266db25488b3bb5", - "sha256:519d1c391ed7be3bb4d84990a51a76cbb1cc894bd776637c0227dfe1f6274f39", - "sha256:64bcb6ca9ef922b2332e9ed91cd6f6f2fe28de392fb10e752f2ac83ffdc50854", - "sha256:7900daf0efa495f263c3c5b0c96502cc909fb07ef56844791e3c8cdbb08a741c", - "sha256:80390ddb68a7e27b52bf83efc040c8e3ec2e3b0bb0e66084a75498fb39f63a63", - "sha256:9b31455449d2e347d033a6440f94305ce7eb48d32d863dfe0f53671c78e2561d", - "sha256:a49b023ca5cab9c43d33b3a43edc5a112f5d452f75fc1a85f8ca136f4fa9d440", - "sha256:a6c1e59299250299f23ab526d1ad640dcc3b9eb311468a8f9112f9d53d6106c6", - "sha256:a8a56edad64912b11d8f80245b5ce7e61057eee22b6af0718e9d086b381a17d9", - "sha256:b420e3811da55db538008229b3ca34055734d6ade12316889d1979d770d7dfaf", - "sha256:bb0d680149f128d181ef1c6561eaebbf99b572329dee090c83b6735d7284a703", - "sha256:bfb03c766250d86d6abf546d4c1fbe0db7401eaedeab5bbbd0e599fde7c7a7d3", - "sha256:c0b3aff3c955d18342c2097ad1102c78d0999dba5de178d0ce68a317caf6f87e", - "sha256:cab3fee686c58e2ce2f61e1bf138711eefde6c0235b0cfc736c8ea96381d8346", - "sha256:e020db7391b2112e4c1ec9db75ff82031f979666264903697ab5ad0bfb20d490", - "sha256:e195ed19638f324c40bb0094d13385d58e798607670b91cc8f42717a0a832263", - "sha256:eca80007816b9dd1cc154a393b8da2ada22baad1ac7b2b2a9cc01451796e3e93", - "sha256:ee740b8dd77cb1773ae1560b890cee3aa5b82a4a7ca056223f08f4b37ebb6466", - "sha256:f63c14e98022c3a67ff7c5c12edf17912a7353ddd95bbfde6db1e4bc05ea5bdc", - "sha256:f7007e7cd837cb37716149d7c735b90b947b3370b7a5652d58675ea940541542" + "sha256:00aef598f14faa77e48518728bd0cead585e45da89be5f7eae7be8f41c10b401", + "sha256:01d40598c8e551202911f5126cdff52b816fdc8316f8a88f798b81fc1a8b55bf", + "sha256:041c7f9f620ac5465fbaf4eb6570e4f286535e55e56ac4a34cfcfb3e853d4093", + "sha256:0ee1d1f9ff4e9f54700dd02d8d707a383bd9dad5c5de9c566372caba071982d5", + "sha256:39bc35a0c05db5f24aeef384af2119a9f0820791617e198d06bd9dba50218fa9", + "sha256:3c8389df99216bf184141f11b84ba00b414177c92618bc4ee1297376163a20d5", + "sha256:3d6af9e713b911baed904fbc4ebeec7ca44f5970971f1c5e86a34cba82fe98e9", + "sha256:511215f45ed7cc79ead84020e88e1fc476b8aba71b47d2fcdef8e65242406927", + "sha256:54c2533f6069cda87bd641fb6df28542238679896a5231006d0de00afbdf3edf", + "sha256:646991eb76a9906a023ec982714a04d8b4e06346ea5836ea8c30350dccca079c", + "sha256:67a2a9a6a7bb799b6d171239ea809e70b572c37eeda6053b04401088e6e83bed", + "sha256:6842d4d37377e1aca392cee4feb0e9f05d36264db08ca2cccd2c3d168487d981", + "sha256:688b2c54e5cb17e5cb55f799df093c5d3d4974feee8e1e7224c9ac40de2580e2", + "sha256:7c0724249912efa3708c265181e880ae80d90c68849aede5ce26ae7649c9e87d", + "sha256:8b1d2becda38027398894836c979467d6649e090a050db8c353461d9537273bb", + "sha256:8dfe2cf7b75a6541a54e0f25665bed5240f4c0cf5d233f81d53b4b5c55dd1c6d", + "sha256:93e32bee6cf67488661a7a2a73ede4c089902ef40f6d20312474351c52aa10c7", + "sha256:b0c192eca13faf932bbc30ad6b19878c816fe228fb84803e30873f2a5e7bd500", + "sha256:b46f9139aa23d6ebff01dff6970dc5476293287bc6ba473ce64542ff20607bfe", + "sha256:c130f78e5d7eeb8c625192848075a8a8bb92595d07ad5b4cb95b007011d78e5a", + "sha256:cdf27919b88a089c72266992ce31de80ed52b334a892e1ce0853ddf82d2a636b", + 
"sha256:db836dd53a56fc177d4e1e792fb94306592b8e21dc0c2d671c7a10058081e341", + "sha256:e57381585d421b2fdbb9369d7cc53f8442152d3c7b415bda210cb575366d7480", + "sha256:f3eb51592e03c9b5d4328eae7a40c7c063725b071e037ddf813a5d68f6a5cb6e", + "sha256:f5eb773cd21e2a8a492834cf0afd586a41b757807f53b845257a0a7419c17538", + "sha256:fb8c2e40b74708e20e3ab13e6273f33143037b7d16349c604e1091d2e8175275", + "sha256:fdb23b1a75e7efb86858c8ff6ab6744558009d8127e5543df9df543036504d21" ], - "version": "==1.6.0" + "markers": "python_version >= '3.7'", + "version": "==1.6.1" }, "charset-normalizer": { "hashes": [ - "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", - "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" + "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", + "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" ], - "markers": "python_version >= '3'", - "version": "==2.0.12" + "markers": "python_full_version >= '3.6.0'", + "version": "==2.1.0" }, "click": { "hashes": [ @@ -508,50 +531,50 @@ }, "coverage": { "hashes": [ - "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9", - "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d", - "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf", - "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7", - "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6", - "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4", - "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059", - "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39", - "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536", - "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac", - "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c", - "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903", - "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d", - "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05", - "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684", - "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1", - "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f", - "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7", - "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca", - "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad", - "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca", - "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d", - "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92", - "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4", - "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf", - "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6", - "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1", - "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4", - "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359", - "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3", - "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620", - 
"sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512", - "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69", - "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2", - "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518", - "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0", - "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa", - "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4", - "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e", - "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1", - "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2" + "sha256:0895ea6e6f7f9939166cc835df8fa4599e2d9b759b02d1521b574e13b859ac32", + "sha256:0f211df2cba951ffcae210ee00e54921ab42e2b64e0bf2c0befc977377fb09b7", + "sha256:147605e1702d996279bb3cc3b164f408698850011210d133a2cb96a73a2f7996", + "sha256:24b04d305ea172ccb21bee5bacd559383cba2c6fcdef85b7701cf2de4188aa55", + "sha256:25b7ec944f114f70803d6529394b64f8749e93cbfac0fe6c5ea1b7e6c14e8a46", + "sha256:2b20286c2b726f94e766e86a3fddb7b7e37af5d0c635bdfa7e4399bc523563de", + "sha256:2dff52b3e7f76ada36f82124703f4953186d9029d00d6287f17c68a75e2e6039", + "sha256:2f8553878a24b00d5ab04b7a92a2af50409247ca5c4b7a2bf4eabe94ed20d3ee", + "sha256:3def6791adf580d66f025223078dc84c64696a26f174131059ce8e91452584e1", + "sha256:422fa44070b42fef9fb8dabd5af03861708cdd6deb69463adc2130b7bf81332f", + "sha256:4f89d8e03c8a3757aae65570d14033e8edf192ee9298303db15955cadcff0c63", + "sha256:5336e0352c0b12c7e72727d50ff02557005f79a0b8dcad9219c7c4940a930083", + "sha256:54d8d0e073a7f238f0666d3c7c0d37469b2aa43311e4024c925ee14f5d5a1cbe", + "sha256:5ef42e1db047ca42827a85e34abe973971c635f83aed49611b7f3ab49d0130f0", + "sha256:5f65e5d3ff2d895dab76b1faca4586b970a99b5d4b24e9aafffc0ce94a6022d6", + "sha256:6c3ccfe89c36f3e5b9837b9ee507472310164f352c9fe332120b764c9d60adbe", + "sha256:6d0b48aff8e9720bdec315d67723f0babd936a7211dc5df453ddf76f89c59933", + "sha256:6fe75dcfcb889b6800f072f2af5a331342d63d0c1b3d2bf0f7b4f6c353e8c9c0", + "sha256:79419370d6a637cb18553ecb25228893966bd7935a9120fa454e7076f13b627c", + "sha256:7bb00521ab4f99fdce2d5c05a91bddc0280f0afaee0e0a00425e28e209d4af07", + "sha256:80db4a47a199c4563d4a25919ff29c97c87569130375beca3483b41ad5f698e8", + "sha256:866ebf42b4c5dbafd64455b0a1cd5aa7b4837a894809413b930026c91e18090b", + "sha256:8af6c26ba8df6338e57bedbf916d76bdae6308e57fc8f14397f03b5da8622b4e", + "sha256:a13772c19619118903d65a91f1d5fea84be494d12fd406d06c849b00d31bf120", + "sha256:a697977157adc052284a7160569b36a8bbec09db3c3220642e6323b47cec090f", + "sha256:a9032f9b7d38bdf882ac9f66ebde3afb8145f0d4c24b2e600bc4c6304aafb87e", + "sha256:b5e28db9199dd3833cc8a07fa6cf429a01227b5d429facb56eccd765050c26cd", + "sha256:c77943ef768276b61c96a3eb854eba55633c7a3fddf0a79f82805f232326d33f", + "sha256:d230d333b0be8042ac34808ad722eabba30036232e7a6fb3e317c49f61c93386", + "sha256:d4548be38a1c810d79e097a38107b6bf2ff42151900e47d49635be69943763d8", + "sha256:d4e7ced84a11c10160c0697a6cc0b214a5d7ab21dfec1cd46e89fbf77cc66fae", + "sha256:d56f105592188ce7a797b2bd94b4a8cb2e36d5d9b0d8a1d2060ff2a71e6b9bbc", + "sha256:d714af0bdba67739598849c9f18efdcc5a0412f4993914a0ec5ce0f1e864d783", + "sha256:d774d9e97007b018a651eadc1b3970ed20237395527e22cbeb743d8e73e0563d", + "sha256:e0524adb49c716ca763dbc1d27bedce36b14f33e6b8af6dba56886476b42957c", + "sha256:e2618cb2cf5a7cc8d698306e42ebcacd02fb7ef8cfc18485c59394152c70be97", + 
"sha256:e36750fbbc422c1c46c9d13b937ab437138b998fe74a635ec88989afb57a3978", + "sha256:edfdabe7aa4f97ed2b9dd5dde52d2bb29cb466993bb9d612ddd10d0085a683cf", + "sha256:f22325010d8824594820d6ce84fa830838f581a7fd86a9235f0d2ed6deb61e29", + "sha256:f23876b018dfa5d3e98e96f5644b109090f16a4acb22064e0f06933663005d39", + "sha256:f7bd0ffbcd03dc39490a1f40b2669cc414fae0c4e16b77bb26806a4d0b7d1452" ], "index": "pypi", - "version": "==6.3.2" + "version": "==6.4.2" }, "cycler": { "hashes": [ @@ -570,18 +593,18 @@ }, "fonttools": { "hashes": [ - "sha256:c0fdcfa8ceebd7c1b2021240bd46ef77aa8e7408cf10434be55df52384865f8e", - "sha256:f829c579a8678fa939a1d9e9894d01941db869de44390adb49ce67055a06cc2a" + "sha256:9a1c52488045cd6c6491fd07711a380f932466e317cb8e016fc4e99dc7eac2f0", + "sha256:d73f25b283cd8033367451122aa868a23de0734757a01984e4b30b18b9050c72" ], "markers": "python_version >= '3.7'", - "version": "==4.33.3" + "version": "==4.34.4" }, "idna": { "hashes": [ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '3'", + "markers": "python_version >= '3.5'", "version": "==3.3" }, "iniconfig": { @@ -593,52 +616,52 @@ }, "kiwisolver": { "hashes": [ - "sha256:0b7f50a1a25361da3440f07c58cd1d79957c2244209e4f166990e770256b6b0b", - "sha256:0c380bb5ae20d829c1a5473cfcae64267b73aaa4060adc091f6df1743784aae0", - "sha256:0d98dca86f77b851350c250f0149aa5852b36572514d20feeadd3c6b1efe38d0", - "sha256:0e45e780a74416ef2f173189ef4387e44b5494f45e290bcb1f03735faa6779bf", - "sha256:0e8afdf533b613122e4bbaf3c1e42c2a5e9e2d1dd3a0a017749a7658757cb377", - "sha256:1008346a7741620ab9cc6c96e8ad9b46f7a74ce839dbb8805ddf6b119d5fc6c2", - "sha256:1d1078ba770d6165abed3d9a1be1f9e79b61515de1dd00d942fa53bba79f01ae", - "sha256:1dcade8f6fe12a2bb4efe2cbe22116556e3b6899728d3b2a0d3b367db323eacc", - "sha256:240009fdf4fa87844f805e23f48995537a8cb8f8c361e35fda6b5ac97fcb906f", - "sha256:240c2d51d098395c012ddbcb9bd7b3ba5de412a1d11840698859f51d0e643c4f", - "sha256:262c248c60f22c2b547683ad521e8a3db5909c71f679b93876921549107a0c24", - "sha256:2e6cda72db409eefad6b021e8a4f964965a629f577812afc7860c69df7bdb84a", - "sha256:3c032c41ae4c3a321b43a3650e6ecc7406b99ff3e5279f24c9b310f41bc98479", - "sha256:42f6ef9b640deb6f7d438e0a371aedd8bef6ddfde30683491b2e6f568b4e884e", - "sha256:484f2a5f0307bc944bc79db235f41048bae4106ffa764168a068d88b644b305d", - "sha256:69b2d6c12f2ad5f55104a36a356192cfb680c049fe5e7c1f6620fc37f119cdc2", - "sha256:6e395ece147f0692ca7cdb05a028d31b83b72c369f7b4a2c1798f4b96af1e3d8", - "sha256:6ece2e12e4b57bc5646b354f436416cd2a6f090c1dadcd92b0ca4542190d7190", - "sha256:71469b5845b9876b8d3d252e201bef6f47bf7456804d2fbe9a1d6e19e78a1e65", - "sha256:7f606d91b8a8816be476513a77fd30abe66227039bd6f8b406c348cb0247dcc9", - "sha256:7f88c4b8e449908eeddb3bbd4242bd4dc2c7a15a7aa44bb33df893203f02dc2d", - "sha256:81237957b15469ea9151ec8ca08ce05656090ffabc476a752ef5ad7e2644c526", - "sha256:89b57c2984f4464840e4b768affeff6b6809c6150d1166938ade3e22fbe22db8", - "sha256:8a830a03970c462d1a2311c90e05679da56d3bd8e78a4ba9985cb78ef7836c9f", - "sha256:8ae5a071185f1a93777c79a9a1e67ac46544d4607f18d07131eece08d415083a", - "sha256:8b6086aa6936865962b2cee0e7aaecf01ab6778ce099288354a7229b4d9f1408", - "sha256:8ec2e55bf31b43aabe32089125dca3b46fdfe9f50afbf0756ae11e14c97b80ca", - "sha256:8ff3033e43e7ca1389ee59fb7ecb8303abb8713c008a1da49b00869e92e3dd7c", - "sha256:91eb4916271655dfe3a952249cb37a5c00b6ba68b4417ee15af9ba549b5ba61d", - 
"sha256:9d2bb56309fb75a811d81ed55fbe2208aa77a3a09ff5f546ca95e7bb5fac6eff", - "sha256:a4e8f072db1d6fb7a7cc05a6dbef8442c93001f4bb604f1081d8c2db3ca97159", - "sha256:b1605c7c38cc6a85212dfd6a641f3905a33412e49f7c003f35f9ac6d71f67720", - "sha256:b3e251e5c38ac623c5d786adb21477f018712f8c6fa54781bd38aa1c60b60fc2", - "sha256:b978afdb913ca953cf128d57181da2e8798e8b6153be866ae2a9c446c6162f40", - "sha256:be9a650890fb60393e60aacb65878c4a38bb334720aa5ecb1c13d0dac54dd73b", - "sha256:c222f91a45da9e01a9bc4f760727ae49050f8e8345c4ff6525495f7a164c8973", - "sha256:c839bf28e45d7ddad4ae8f986928dbf5a6d42ff79760d54ec8ada8fb263e097c", - "sha256:cbb5eb4a2ea1ffec26268d49766cafa8f957fe5c1b41ad00733763fae77f9436", - "sha256:e348f1904a4fab4153407f7ccc27e43b2a139752e8acf12e6640ba683093dd96", - "sha256:e677cc3626287f343de751e11b1e8a5b915a6ac897e8aecdbc996cd34de753a0", - "sha256:f74f2a13af201559e3d32b9ddfc303c94ae63d63d7f4326d06ce6fe67e7a8255", - "sha256:fa4d97d7d2b2c082e67907c0b8d9f31b85aa5d3ba0d33096b7116f03f8061261", - "sha256:ffbdb9a96c536f0405895b5e21ee39ec579cb0ed97bdbd169ae2b55f41d73219" + "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b", + "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166", + "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c", + "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c", + "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0", + "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c", + "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6", + "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004", + "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf", + "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac", + "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766", + "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6", + "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d", + "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191", + "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d", + "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f", + "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454", + "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8", + "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de", + "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a", + "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9", + "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3", + "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32", + "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938", + "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1", + "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d", + "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824", + "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b", + "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd", + "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3", + "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae", + "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597", + 
"sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955", + "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a", + "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea", + "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede", + "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408", + "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871", + "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29", + "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897", + "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0", + "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09", + "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c" ], "markers": "python_version >= '3.7'", - "version": "==1.4.2" + "version": "==1.4.4" }, "matplotlib": { "hashes": [ @@ -690,106 +713,104 @@ }, "netcdf4": { "hashes": [ - "sha256:06f7364086fd3ae097e757d2493dc1fe006e9ae9636a109a1e4c914db05d7e18", - "sha256:0774309a8b684654c0bbd4d55bcccaa399ffa6870b5dc2e35dbe919ec637a51f", - "sha256:0a33a953b60ee30dcb78db174231f7ab61923331af9645f84adff684e9add4e2", - "sha256:0f570b5b4cc0434ef8a2a648fdebfa017de695ea7c836b24ae7b216ede4e3345", - "sha256:1442048c93ac668d61d1675a0b0f3aea3a73efa2969636af0cb95a52d119acec", - "sha256:18e257843fc29846909557a9502a29e37b381ee7760923f9280d3b26d844db8c", - "sha256:225d17f7a487ebdab99640294203b61e39e01c951b4e6a4f578d8251623f5f5a", - "sha256:29426faabdc017e89572ff1d7835dab426aae4c22ad1a12d1877b932e969b6ac", - "sha256:318ef7dd29b365f3921f9b359ba54f62cfe3aa46ab4464f45c43624d15c4bf55", - "sha256:339e534057be0b1998f80faeb8f9851f28b42cee5a6047e1adf12c0e38235437", - "sha256:49a44c7382e5e1da39d8bab5d8e406ad30d46fda9386e85a3e69491e6caaca93", - "sha256:5c883a02c55fd1e5b61ad4f83dd7f11f90b894e14d120ba678d9c33d9e4b3a77", - "sha256:7326afa46fd0c1b50d30db9764a1eefbcff576fcffa8e48bef403094590563b8", - "sha256:7519c1e8ad8d3d72636a0aedb5d79bb90556037a59d83811e2057f09ca272b52", - "sha256:75ecf1ef2c841aace539f3326d101efda7c99f6c3283c48a444370aba48525af", - "sha256:804d312a10e6fb196df16bf0d811624d885a51b7ebb3bc648278faa62ffece26", - "sha256:8b4fac95819b0c17ca5fc1a4e8bb31116b6b808cceca0aa8b475bb50abab1063", - "sha256:916434a13ea317934cf248fb70dd5476c498f1def71041fc7e3fd23882ef2cda", - "sha256:a9ba575047ba1be7cf5f1b712c987ce329b4d092582d92c2e268296a06bfa639", - "sha256:b21af57acca0d70c5401f8f779409ab4e818c505fb81706eea8d9475e1f0bb9b", - "sha256:b2b0f370cda5cc5f46239e0a219bf8d4cf442c7d49dbd1c89abb39e71e266c60", - "sha256:bd35c37342d9051d2b8fb12a9208856cc59201a94c78a742a198c81813cb00a8", - "sha256:bdba6ea34680a4c1b7018a4a7155f6112acd063289923c0c61918707e9f26910", - "sha256:bdd344d8de65849fa200f69941f1b15a2611b11b307161ce2fd8ff42148507e8", - "sha256:ca3d468f4812c0999df86e3f428851fb0c17ac34ce0827115c246b0b690e4e84", - "sha256:cdb54afe51c1b06e900c0df5e8567d713ff7a26bf087116a88a99858345dadc6", - "sha256:d784d6cf5baa90909f385bf60ead91138f13ff7f870467e458fb3650ef71b48d", - "sha256:db02d42f7b9c7d68cec351ea63ef3fc2a1ad5e7e74fc7b570b34ceb8c7645bf2", - "sha256:e118bfccda464a381187b1f9c771bf9581c83c566faab309a8ec3f781668da4e", - "sha256:f5e9e1ca99aca1f6d40054ef42b97cf6d00c59d8a438f580cf9dfc309a8eb870", - "sha256:f86399073b582bccd278006ee0213548e7037395e1119f1af9f4faad38279b1e", - "sha256:f92b92f758dfc903af2a8a287fd68a531f73ffd3e5be72b5ad1eb3f083e7aaa2", - "sha256:fd501ccb28ebae6770112968c750a14feb39cb495d788aa67c28360f7c1f2324" 
- ], - "version": "==1.5.8" + "sha256:018bda7badd7dd1c228d3edf1fab46c228333678d3b60e79d8a83caeffd90577", + "sha256:01af66cf480c6b1d2ac04a0c91a87bb20d01ef13377fa4b91e9d54b13255d5c1", + "sha256:134661e230c12676431b78f3acf5d20df73e16648f9c27edb32ef98a7df49f06", + "sha256:35cad527a95c3afd202df07e2909186283619c39d6102e048f511e454ce2a5e7", + "sha256:396266a4e91dc26006cb19e6d132bb22de320e52da2ba62ed81ed6791538834b", + "sha256:46ea2de93f66cdb06989dba9c2d4bfb66890d1095497bcd33b496f4f15d3dcfe", + "sha256:47ec9525a5f2b9e8c89994363c52d66feb70dda0ba470a1d89d56c553361d1b4", + "sha256:5b834b2f035cf6323f605b7d40dca501bc666825d58514313bb9f92c64d09ea3", + "sha256:680d4e7079721789d6d2e88b245c6ee37838774c4241ee0406dbba6959fbd3c8", + "sha256:7aea5661b2288d3ff13796ad94e35de9282004b38b79b4ea145210d0f0fb694f", + "sha256:7f6cb8de2907c041689e9b08b8a73d1d242c2e77f9f30c9ffe096bd589740b71", + "sha256:821c2962d9861236a88f150a1d1532a4a368ac4def302f6f8f4af89f818fe410", + "sha256:885c6a5e953afe4e6f929f3de2ab92caa1933c394bac70dd38c948a994065f6d", + "sha256:8cb157bf7a0ffaab2ad2a7f7ccb824d91c30ded5196eae39fba90e806d9800e3", + "sha256:8d4dbfe759c27a1186bd21537e57761959995c783b5140944b20ebad259ac62f", + "sha256:95efa373d9a3e1cd0df7193e76e6680d9eca28e60097ca8139afea8a4346ba63", + "sha256:98d716a63a147961948a35cd383f2d723855d63cb15cf5b827b3f8f2bf269820", + "sha256:9afbd3e22e6ffeb6e2d51c8de17debbfa4e0ce04aa31850c13accf41ad4a6637", + "sha256:ab83d3ce100a5cbdb631ae572c01f0e19d1f3cfae9cc2ead0490cfea941875e5", + "sha256:b4b8cfa01b77feab961109edeb4474c9b65f88d5f375cac293b6ebaa2d5e8b7f", + "sha256:c30715f5a41ac974ff848b282f9d55a7c5127cfeade00019b099312236bec529", + "sha256:c36f744d68c985ca1613d1e55d56fc8bd24df4e1d3cfb3e9f7ee3c3263ea60a2", + "sha256:d0eaa08997eb33c70c255909052b6f45388ef863602e064ae04e7d347ca4cb98", + "sha256:d4428c9e02a1f3d53fb9acc81e38117d9d7626ecb79fae7060095497ce7b11fe", + "sha256:d8df3d61d3f4e850fe94d14fa86e4a67ebf3dbee73bc8ae35bc5c6881d430a91", + "sha256:e055e1e7dcd62b3369786d6be80de5603f4f76a20408e6939695ed8340c5a5b3", + "sha256:e3d02c8a171091a8b86064dd95db583e0edec7447eba639e7a0eddb608d9fdc6", + "sha256:f21b657de01be678ecf321678c9b8bc73da1e070181a40a5916547e02cd1037d", + "sha256:f4f0646c04c2836e28fe915d950375a784aa8d89696529a7e53a7fe85999e0b8", + "sha256:f6ccd91a523d923a7d76f137db9cf86b1d180ef8ff948b468768671941063ca0" + ], + "version": "==1.6.0" }, "networkx": { "hashes": [ - "sha256:1a1e8fe052cc1b4e0339b998f6795099562a264a13a5af7a32cad45ab9d4e126", - "sha256:4a52cf66aed221955420e11b3e2e05ca44196b4829aab9576d4d439212b0a14f" + "sha256:15a7b81a360791c458c55a417418ea136c13378cfdc06a2dcdc12bd2f9cf09c1", + "sha256:a762f4b385692d9c3a6f2912d058d76d29a827deaedf9e63ed14d397b8030687" ], "index": "pypi", - "version": "==2.8" + "version": "==2.8.5" }, "numexpr": { "hashes": [ - "sha256:1639561d056d2d790a56ddab7e7df40b6181ad50338b50fba94aa42874a00958", - "sha256:2247d92da60b85de619e797e59a80e9c7302fba82dcd0525de8f7dd729a0d60f", - "sha256:24fb5b2c17273a76e7de9cea7817c54262198657998a093fceb4030f273524c7", - "sha256:3bab5add6628fa8bb66fba7b1f0eed5d8d0ce05fdd2dcc326dde8a297a961c46", - "sha256:48258db3ba89ad028744e07b09dde963f82da7f081849d3a003bb0b96b112d4f", - "sha256:4eb79d9026f013cf8d16de8be74911c74c0c09362627bf4b39e2b7f1f3188c28", - "sha256:517f299c4bc8491b5117aa276e8f3cf7ee2e89223922e92e2ea78a32985d5087", - "sha256:5d0c98c4d8bcc25962e5859176e5728f69209cffb9b7f64bf6d1c801fe350946", - "sha256:6bce8a183afe157c25385d27be314be22f06ba644c89b611d20e2570a06bd5dd", - 
"sha256:79ec94295aa57f5a9d212116bb7359744cd2f9e05d477df0dee383b7f44b9588", - "sha256:80db25e2934fd1a1b787440d5fa7946adb79a1289d7dc64e2c8bcd6ceae660ad", - "sha256:920c6a3088790573765e103e20592864977aa4b4d1b819c298fa9d88771cde1b", - "sha256:a591f99ecbb413749725e8da4e52e663f0939dd5fbf1ae5a7c6c50ba734f57de", - "sha256:a97a087a5f5e56cd81c69215918fdaca60eb478a099daa757455e4ff887f7600", - "sha256:a9f046cb5752f08a9291dc1fd37a9cfd15770262188bb984e4418490fef9c9ec", - "sha256:ab6b2cb64bc9391f77f08203fda5af3647ed2abcefb928cc6282727854f97735", - "sha256:b57d3ab7054409d9b4d2148241ae70d738c0b0daeb1a0efd5ea89b9279752e22", - "sha256:bd402e43b8baf6436b7c2c14541f69eb4f97f023469585a7ad258c49622ff619", - "sha256:cd779aa44dd986c4ef10163519239602b027be06a527946656207acf1f58113b", - "sha256:cfd89f63028f8df3c9b11bf2c98085184f967a09f543a77c3335f4a0ec54f124", - "sha256:d148e99483e15de22d0acd5100136d39a336e91c8f8d37bf2e84e9f0ab4c0610", - "sha256:d2b4b6379763ec5d794d4aaa1834ae00f1bba82a36d0b99c6e2d559302a21e85", - "sha256:d7b64b125161e722c9dc8a27df282e755bd9a5adf826b2e3e1f038e3dfdc3307", - "sha256:da180aaec7f6c387540b251f6ec2b8d280220c0e45731778853c8b0d86c4ae22", - "sha256:ebf31aeed426840aefe1e94c89bb0b7530a72be36444ed4c73e4411865b79be5", - "sha256:fd6905bc80a11908e363c9821cbf8aeeca4dca5b6a2eea90a97b055bc73443e6" - ], - "version": "==2.8.1" + "sha256:052ec3a55cc1ccc447580ee5b828b2bd0bc14fea0756ddb81d9617b5472c77b5", + "sha256:08d8f8e31647815d979185eb455cb5b4d845e20ff808bd6f7f4edf5e0a35e2f6", + "sha256:1575a35190d650bf64d2efd8590a8ef3ca564ef20b9f8727428b57759712becb", + "sha256:19cd7563421862de85404bd5de06bee8a3ebff4fc9f718de09cc704bc3348f08", + "sha256:33be3bbbad71d97d14a39d84957c2bcc368fec775369664d0c24be030c50c359", + "sha256:3a1ce79b7d32c55cce334566e3c6716f7b646f6eceb2ace38adaa795848f3583", + "sha256:4ddc46c1e5d726b57d008169b75074ab66869e1827098614ebafa45d152f81b7", + "sha256:4f291f0df7b25d9530991f880cc232a644a7a722d130c61b43e593b98fb6523f", + "sha256:5532bd7164eb8a05410771faf94a661fc69e5ca72deb8612f1bedc26311ed3c8", + "sha256:5b014d1c426c444102fb9eea6438052ee86c82684d27dd20b531caf2c60bc4c9", + "sha256:5c660dea90935b963db9529d963493c40fabb2343684b52083fb86b2547d60c8", + "sha256:828926f5d4dc9ace2bebd2eec56bee852518afa31e6df175d1706e6631dfd1a2", + "sha256:854541cf4214d747ab2f87229e9dde052fddc52c379f59047d64f9b7e2f4d578", + "sha256:99b9a91811de8cd24bd7d7fbc1883653dad6485e8c683d85b1007a13868713f6", + "sha256:a6954d65d7140864d9bb2302b7580c60c88c4d12e00c59a0a53f1660573e922b", + "sha256:b127b0d0e1665b94adcc658c5f9d688ac4903ef81da5d8f4e956c995cf69d5c7", + "sha256:ba9aa03f4213f4e0c0d964afd6a920c9000e73d22b88c72c46b151d292ee5581", + "sha256:be9b8309a4a2f785197b1a29f7767a5ff217ea505e5a751b237336f3b50b7e48", + "sha256:c35669268f602ac6412c8c6244b256ebb4f31ffc926b936ca0d9cffda251db8a", + "sha256:cb647c9d9c785dae0759bf6c875cde2bec472b5c3f7a6015734b161ae766d141", + "sha256:cbd75ac287923bd0c5b95143915648c62d97f994b06dacd770bd205da014f6bd", + "sha256:eba7fad925e3063a0434844a667fbdea30b53fe1344efef73475b32d33aa0fec", + "sha256:f29b882a21b5381c0e472bc66e8d1c519b8920edc2522b8b4ede79e314d31d20", + "sha256:fe6b49631c3bf54e92b0fb334c8e59694685924492d80c325e1b44ecbbc0f22d" + ], + "markers": "python_version >= '3.7'", + "version": "==2.8.3" }, "numpy": { "hashes": [ - "sha256:07a8c89a04997625236c5ecb7afe35a02af3896c8aa01890a849913a2309c676", - "sha256:08d9b008d0156c70dc392bb3ab3abb6e7a711383c3247b410b39962263576cd4", - "sha256:201b4d0552831f7250a08d3b38de0d989d6f6e4658b709a02a73c524ccc6ffce", - 
"sha256:2c10a93606e0b4b95c9b04b77dc349b398fdfbda382d2a39ba5a822f669a0123", - "sha256:3ca688e1b9b95d80250bca34b11a05e389b1420d00e87a0d12dc45f131f704a1", - "sha256:48a3aecd3b997bf452a2dedb11f4e79bc5bfd21a1d4cc760e703c31d57c84b3e", - "sha256:568dfd16224abddafb1cbcce2ff14f522abe037268514dd7e42c6776a1c3f8e5", - "sha256:5bfb1bb598e8229c2d5d48db1860bcf4311337864ea3efdbe1171fb0c5da515d", - "sha256:639b54cdf6aa4f82fe37ebf70401bbb74b8508fddcf4797f9fe59615b8c5813a", - "sha256:8251ed96f38b47b4295b1ae51631de7ffa8260b5b087808ef09a39a9d66c97ab", - "sha256:92bfa69cfbdf7dfc3040978ad09a48091143cffb778ec3b03fa170c494118d75", - "sha256:97098b95aa4e418529099c26558eeb8486e66bd1e53a6b606d684d0c3616b168", - "sha256:a3bae1a2ed00e90b3ba5f7bd0a7c7999b55d609e0c54ceb2b076a25e345fa9f4", - "sha256:c34ea7e9d13a70bf2ab64a2532fe149a9aced424cd05a2c4ba662fd989e3e45f", - "sha256:dbc7601a3b7472d559dc7b933b18b4b66f9aa7452c120e87dfb33d02008c8a18", - "sha256:e7927a589df200c5e23c57970bafbd0cd322459aa7b1ff73b7c2e84d6e3eae62", - "sha256:f8c1f39caad2c896bc0018f699882b345b2a63708008be29b1f355ebf6f933fe", - "sha256:f950f8845b480cffe522913d35567e29dd381b0dc7e4ce6a4a9f9156417d2430", - "sha256:fade0d4f4d292b6f39951b6836d7a3c7ef5b2347f3c420cd9820a1d90d794802", - "sha256:fdf3c08bce27132395d3c3ba1503cac12e17282358cb4bddc25cc46b0aca07aa" + "sha256:1408c3527a74a0209c781ac82bde2182b0f0bf54dea6e6a363fe0cc4488a7ce7", + "sha256:173f28921b15d341afadf6c3898a34f20a0569e4ad5435297ba262ee8941e77b", + "sha256:1865fdf51446839ca3fffaab172461f2b781163f6f395f1aed256b1ddc253622", + "sha256:3119daed207e9410eaf57dcf9591fdc68045f60483d94956bee0bfdcba790953", + "sha256:35590b9c33c0f1c9732b3231bb6a72d1e4f77872390c47d50a615686ae7ed3fd", + "sha256:37e5ebebb0eb54c5b4a9b04e6f3018e16b8ef257d26c8945925ba8105008e645", + "sha256:37ece2bd095e9781a7156852e43d18044fd0d742934833335599c583618181b9", + "sha256:3ab67966c8d45d55a2bdf40701536af6443763907086c0a6d1232688e27e5447", + "sha256:47f10ab202fe4d8495ff484b5561c65dd59177949ca07975663f4494f7269e3e", + "sha256:55df0f7483b822855af67e38fb3a526e787adf189383b4934305565d71c4b148", + "sha256:5d732d17b8a9061540a10fda5bfeabca5785700ab5469a5e9b93aca5e2d3a5fb", + "sha256:68b69f52e6545af010b76516f5daaef6173e73353e3295c5cb9f96c35d755641", + "sha256:7e8229f3687cdadba2c4faef39204feb51ef7c1a9b669247d49a24f3e2e1617c", + "sha256:8002574a6b46ac3b5739a003b5233376aeac5163e5dcd43dd7ad062f3e186129", + "sha256:876f60de09734fbcb4e27a97c9a286b51284df1326b1ac5f1bf0ad3678236b22", + "sha256:9ce242162015b7e88092dccd0e854548c0926b75c7924a3495e02c6067aba1f5", + "sha256:a35c4e64dfca659fe4d0f1421fc0f05b8ed1ca8c46fb73d9e5a7f175f85696bb", + "sha256:aeba539285dcf0a1ba755945865ec61240ede5432df41d6e29fab305f4384db2", + "sha256:b15c3f1ed08df4980e02cc79ee058b788a3d0bef2fb3c9ca90bb8cbd5b8a3a04", + "sha256:c2f91f88230042a130ceb1b496932aa717dcbd665350beb821534c5c7e15881c", + "sha256:d748ef349bfef2e1194b59da37ed5a29c19ea8d7e6342019921ba2ba4fd8b624", + "sha256:e0d7447679ae9a7124385ccf0ea990bb85bb869cef217e2ea6c844b6a6855073" ], "index": "pypi", - "version": "==1.22.3" + "version": "==1.23.1" }, "packaging": { "hashes": [ @@ -801,30 +822,30 @@ }, "pandas": { "hashes": [ - "sha256:0010771bd9223f7afe5f051eb47c4a49534345dfa144f2f5470b27189a4dd3b5", - "sha256:061609334a8182ab500a90fe66d46f6f387de62d3a9cb9aa7e62e3146c712167", - "sha256:09d8be7dd9e1c4c98224c4dfe8abd60d145d934e9fc1f5f411266308ae683e6a", - "sha256:295872bf1a09758aba199992c3ecde455f01caf32266d50abc1a073e828a7b9d", - "sha256:3228198333dd13c90b6434ddf61aa6d57deaca98cf7b654f4ad68a2db84f8cfe", - 
"sha256:385c52e85aaa8ea6a4c600a9b2821181a51f8be0aee3af6f2dcb41dafc4fc1d0", - "sha256:51649ef604a945f781105a6d2ecf88db7da0f4868ac5d45c51cb66081c4d9c73", - "sha256:5586cc95692564b441f4747c47c8a9746792e87b40a4680a2feb7794defb1ce3", - "sha256:5a206afa84ed20e07603f50d22b5f0db3fb556486d8c2462d8bc364831a4b417", - "sha256:5b79af3a69e5175c6fa7b4e046b21a646c8b74e92c6581a9d825687d92071b51", - "sha256:5c54ea4ef3823108cd4ec7fb27ccba4c3a775e0f83e39c5e17f5094cb17748bc", - "sha256:8c5bf555b6b0075294b73965adaafb39cf71c312e38c5935c93d78f41c19828a", - "sha256:92bc1fc585f1463ca827b45535957815b7deb218c549b7c18402c322c7549a12", - "sha256:95c1e422ced0199cf4a34385ff124b69412c4bc912011ce895582bee620dfcaa", - "sha256:b8134651258bce418cb79c71adeff0a44090c98d955f6953168ba16cc285d9f7", - "sha256:be67c782c4f1b1f24c2f16a157e12c2693fd510f8df18e3287c77f33d124ed07", - "sha256:c072c7f06b9242c855ed8021ff970c0e8f8b10b35e2640c657d2a541c5950f59", - "sha256:d0d4f13e4be7ce89d7057a786023c461dd9370040bdb5efa0a7fe76b556867a0", - "sha256:df82739e00bb6daf4bba4479a40f38c718b598a84654cbd8bb498fd6b0aa8c16", - "sha256:f549097993744ff8c41b5e8f2f0d3cbfaabe89b4ae32c8c08ead6cc535b80139", - "sha256:ff08a14ef21d94cdf18eef7c569d66f2e24e0bc89350bcd7d243dd804e3b5eb2" + "sha256:07238a58d7cbc8a004855ade7b75bbd22c0db4b0ffccc721556bab8a095515f6", + "sha256:0daf876dba6c622154b2e6741f29e87161f844e64f84801554f879d27ba63c0d", + "sha256:16ad23db55efcc93fa878f7837267973b61ea85d244fc5ff0ccbcfa5638706c5", + "sha256:1d9382f72a4f0e93909feece6fef5500e838ce1c355a581b3d8f259839f2ea76", + "sha256:24ea75f47bbd5574675dae21d51779a4948715416413b30614c1e8b480909f81", + "sha256:2893e923472a5e090c2d5e8db83e8f907364ec048572084c7d10ef93546be6d1", + "sha256:2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c", + "sha256:41fc406e374590a3d492325b889a2686b31e7a7780bec83db2512988550dadbf", + "sha256:48350592665ea3cbcd07efc8c12ff12d89be09cd47231c7925e3b8afada9d50d", + "sha256:605d572126eb4ab2eadf5c59d5d69f0608df2bf7bcad5c5880a47a20a0699e3e", + "sha256:6dfbf16b1ea4f4d0ee11084d9c026340514d1d30270eaa82a9f1297b6c8ecbf0", + "sha256:6f803320c9da732cc79210d7e8cc5c8019aad512589c910c66529eb1b1818230", + "sha256:721a3dd2f06ef942f83a819c0f3f6a648b2830b191a72bbe9451bcd49c3bd42e", + "sha256:755679c49460bd0d2f837ab99f0a26948e68fa0718b7e42afbabd074d945bf84", + "sha256:78b00429161ccb0da252229bcda8010b445c4bf924e721265bec5a6e96a92e92", + "sha256:958a0588149190c22cdebbc0797e01972950c927a11a900fe6c2296f207b1d6f", + "sha256:a3924692160e3d847e18702bb048dc38e0e13411d2b503fecb1adf0fcf950ba4", + "sha256:d51674ed8e2551ef7773820ef5dab9322be0828629f2cbf8d1fc31a0c4fed640", + "sha256:d5ebc990bd34f4ac3c73a2724c2dcc9ee7bf1ce6cf08e87bb25c6ad33507e318", + "sha256:d6c0106415ff1a10c326c49bc5dd9ea8b9897a6ca0c8688eb9c30ddec49535ef", + "sha256:e48fbb64165cda451c06a0f9e4c7a16b534fcabd32546d531b3c240ce2844112" ], "index": "pypi", - "version": "==1.4.2" + "version": "==1.4.3" }, "pathspec": { "hashes": [ @@ -835,47 +856,67 @@ }, "pillow": { "hashes": [ - "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f", - "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d", - "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b", - "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c", - "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9", - "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546", - "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578", - 
"sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1", - "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe", - "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098", - "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2", - "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a", - "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45", - "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530", - "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108", - "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1", - "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd", - "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0", - "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6", - "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c", - "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf", - "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4", - "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d", - "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765", - "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602", - "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340", - "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c", - "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b", - "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84", - "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8", - "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92", - "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54", - "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601", - "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a", - "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf", - "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251", - "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a", - "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e" + "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927", + "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14", + "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc", + "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58", + "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60", + "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76", + "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c", + "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac", + "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490", + "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1", + "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f", + "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d", + "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f", + "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069", + "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402", + 
"sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885", + "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e", + "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be", + "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8", + "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff", + "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da", + "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004", + "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f", + "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20", + "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d", + "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c", + "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544", + "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9", + "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3", + "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04", + "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c", + "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5", + "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4", + "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb", + "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4", + "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c", + "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467", + "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e", + "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421", + "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b", + "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8", + "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb", + "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3", + "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf", + "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1", + "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a", + "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28", + "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0", + "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1", + "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8", + "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd", + "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4", + "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8", + "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f", + "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013", + "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59", + "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc", + "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4" ], - "index": "pypi", - "version": "==9.1.1" + "markers": "python_version >= '3.7'", + "version": "==9.2.0" }, "platformdirs": { "hashes": [ @@ -975,40 +1016,40 @@ }, "requests": { "hashes": [ - 
"sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", - "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" ], "index": "pypi", - "version": "==2.27.1" + "version": "==2.28.1" }, "scipy": { "hashes": [ - "sha256:011d4386b53b933142f58a652aa0f149c9b9242abd4f900b9f4ea5fbafc86b89", - "sha256:16e09ef68b352d73befa8bcaf3ebe25d3941fe1a58c82909d5589856e6bc8174", - "sha256:31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd", - "sha256:38aa39b6724cb65271e469013aeb6f2ce66fd44f093e241c28a9c6bc64fd79ed", - "sha256:3d573228c10a3a8c32b9037be982e6440e411b443a6267b067cac72f690b8d56", - "sha256:3d9dd6c8b93a22bf9a3a52d1327aca7e092b1299fb3afc4f89e8eba381be7b59", - "sha256:559a8a4c03a5ba9fe3232f39ed24f86457e4f3f6c0abbeae1fb945029f092720", - "sha256:5e73343c5e0d413c1f937302b2e04fb07872f5843041bcfd50699aef6e95e399", - "sha256:723b9f878095ed994756fa4ee3060c450e2db0139c5ba248ee3f9628bd64e735", - "sha256:87b01c7d5761e8a266a0fbdb9d88dcba0910d63c1c671bdb4d99d29f469e9e03", - "sha256:8f4d059a97b29c91afad46b1737274cb282357a305a80bdd9e8adf3b0ca6a3f0", - "sha256:92b2c2af4183ed09afb595709a8ef5783b2baf7f41e26ece24e1329c109691a7", - "sha256:937d28722f13302febde29847bbe554b89073fbb924a30475e5ed7b028898b5f", - "sha256:a279e27c7f4566ef18bab1b1e2c37d168e365080974758d107e7d237d3f0f484", - "sha256:ad5be4039147c808e64f99c0e8a9641eb5d2fa079ff5894dcd8240e94e347af4", - "sha256:ae3e327da323d82e918e593460e23babdce40d7ab21490ddf9fc06dec6b91a18", - "sha256:bb7088e89cd751acf66195d2f00cf009a1ea113f3019664032d9075b1e727b6c", - "sha256:c17a1878d00a5dd2797ccd73623ceca9d02375328f6218ee6d921e1325e61aff", - "sha256:c2bae431d127bf0b1da81fc24e4bba0a84d058e3a96b9dd6475dfcb3c5e8761e", - "sha256:de2e80ee1d925984c2504812a310841c241791c5279352be4707cdcd7c255039", - "sha256:e6f0cd9c0bd374ef834ee1e0f0999678d49dcc400ea6209113d81528958f97c7", - "sha256:f3720d0124aced49f6f2198a6900304411dbbeed12f56951d7c66ebef05e3df6", - "sha256:f4a6d3b9f9797eb2d43938ac2c5d96d02aed17ef170c8b38f11798717523ddba" + "sha256:2454ed30f47dda09c07057e4da368215836e0b5f9c4ce1b81fd96e95b1128fa7", + "sha256:2a3273724612819da49f975ee3cf14b9910aa0eaab2e23eea1d502702c939a48", + "sha256:320fa84c502b9aa77c4cfde2d973fa982b71ff06460d983904695f2b9d722580", + "sha256:32c954e00e89c942611e327cbfb9e868228b55068983a18b6d87a956b76db891", + "sha256:3eef55fb77e64437cc600a75c69f01902e385cfceae0bf3761ab1f99fb011a9a", + "sha256:3f165c2074558e76f8e7e70e3fb2cabef2408936645327b8b0e906bf247cc728", + "sha256:489d095a6f443aeeaea0e69bb4dfacceb8850870d33df9209fad930ea79639a5", + "sha256:6997d0d2cb1bcc57d9ea3d607c5f1227f4abe7dd15807911071825de06aafb15", + "sha256:73cc5b3392982c2bcdf6ee5c129b45eda257a18752655281942febc4aa088d76", + "sha256:7c28481580f5778e7ae788d4219026b2cd62544ff379612423f4dbcb7ad9c46f", + "sha256:93114c9292841406e27c0fe2b5c00d2de1cbd1ec3a4343c8ecf961cce17872c5", + "sha256:9704b7362b2e8cab80c273c07d3a6dacc0270c8e71f88bef052fa7b4ed280505", + "sha256:a13eb973be297e64a8a6aa1537820357cd7810aa5bae1d9076531f3ba63fec34", + "sha256:b3b14c1d6c79c1c8edbae120c4b87c863907805e187e17a38de5f72f1735769f", + "sha256:b67129931ca84fc8f0b9f1224ad88b19f2a4e4cbb98d54af254ed1ee0f423a07", + "sha256:b99872fce4a87ce4a01c21c0f8c9d85bf21a21ac4e5c74a50b94143850a56e0d", + "sha256:c24930b712cd7dc3a272776ea4150b0c4a2acc5a2868ece6e70acce5b8993820", + "sha256:d1b9120bf5e68d64de219a90f337a56de7f2e55c3c23fe85af785a6ff5932d4e", 
+ "sha256:ea6659b8360184883e5193acc5de2c2277bc62bccabb953d48d9fc57ad228c5a", + "sha256:f51b051ce3c66cd529ae07c0af87f2804f54ecf14a5fbded0021a7b4579e94ee", + "sha256:fc90b11627641bf89713496d0e6f5b391ba70c3ecf2634fa8c697c97b3ba038e", + "sha256:fc9ba93ddb04fac40fef8f53a1d752da75492ec17cbf5c451b1102e257fd39b8", + "sha256:ffefbadde5afdc8c3003b77e1cc7845a996c8129a2012eff1b3b78074cb25ef9" ], "index": "pypi", - "version": "==1.8.0" + "version": "==1.9.0rc3" }, "six": { "hashes": [ @@ -1058,13 +1099,21 @@ "markers": "python_version >= '3.7'", "version": "==2.0.1" }, + "typing-extensions": { + "hashes": [ + "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02", + "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6" + ], + "markers": "python_version >= '3.7'", + "version": "==4.3.0" + }, "urllib3": { "hashes": [ - "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", - "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e" + "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc", + "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.9" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", + "version": "==1.26.11" }, "wget": { "hashes": [ @@ -1074,11 +1123,11 @@ }, "xarray": { "hashes": [ - "sha256:398344bf7d170477aaceff70210e11ebd69af6b156fe13978054d25c48729440", - "sha256:560f36eaabe7a989d5583d37ec753dd737357aa6a6453e55c80bb4f92291a69e" + "sha256:1028d198493f66bb15bd35dcfdd11defd831cbee3af6589fff16f41bddd67e84", + "sha256:c052208afc3f261984049f28b962d64eb6741a7967898315642f5da85448b6b0" ], "markers": "python_version >= '3.8'", - "version": "==2022.3.0" + "version": "==2022.6.0" }, "zenodo-get": { "hashes": [ diff --git a/requirements.txt b/requirements.txt index de118f789..046a893f8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,4 +11,4 @@ coverage pytest-cov fs==2.4.14 fs.sshfs -fs-azureblob +fs-azureblob>=0.2.1 diff --git a/setup.cfg b/setup.cfg index c93da6764..f476b64b4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ install_requires = requests fs==2.4.14 fs.sshfs - fs-azureblob + fs-azureblob>=0.2.1 [options.package_data] powersimdata = From 1ef96caedfeb858db463c59e2b1b095e2f2b24da Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Wed, 27 Jul 2022 16:26:46 -0700 Subject: [PATCH 46/59] fix: unindent methods in Analyze class --- powersimdata/scenario/analyze.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/powersimdata/scenario/analyze.py b/powersimdata/scenario/analyze.py index dbd900709..95b3e1b06 100644 --- a/powersimdata/scenario/analyze.py +++ b/powersimdata/scenario/analyze.py @@ -202,22 +202,22 @@ def get_load_shed(self): return load_shed - def get_load_shift_up(self): - """Returns LOAD_SHIFT_UP data frame. This is the amount that flexible demand - deviates above (e.g., recovers) the base demand. - - :return: (*pandas.DataFrame*) -- data frame of load shifted up (hour x bus). - """ - return self._get_data("LOAD_SHIFT_UP") - - def get_load_shift_dn(self): - """Returns LOAD_SHIFT_DN data frame. This is the amount that flexible demand - deviates below (e.g., curtails) the base demand. - - :return: (*pandas.DataFrame*) -- data frame of load shifted down (hour x - bus). 
- """ - return self._get_data("LOAD_SHIFT_DN") + def get_load_shift_up(self): + """Returns LOAD_SHIFT_UP data frame. This is the amount that flexible demand + deviates above (e.g., recovers) the base demand. + + :return: (*pandas.DataFrame*) -- data frame of load shifted up (hour x bus). + """ + return self._get_data("LOAD_SHIFT_UP") + + def get_load_shift_dn(self): + """Returns LOAD_SHIFT_DN data frame. This is the amount that flexible demand + deviates below (e.g., curtails) the base demand. + + :return: (*pandas.DataFrame*) -- data frame of load shifted down (hour x + bus). + """ + return self._get_data("LOAD_SHIFT_DN") def get_demand(self, original=True): """Returns demand profiles. From 5208addd6b486c2ff285a31bc0d2f59032d1c36f Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Wed, 27 Jul 2022 16:40:17 -0700 Subject: [PATCH 47/59] docs: update list of data that can be loaded --- powersimdata/output/output_data.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/powersimdata/output/output_data.py b/powersimdata/output/output_data.py index fedcd7b51..7ec848236 100644 --- a/powersimdata/output/output_data.py +++ b/powersimdata/output/output_data.py @@ -19,9 +19,9 @@ def get_data(self, scenario_id, field_name): """Returns data either from server or from local directory. :param str scenario_id: scenario id. - :param str field_name: *'PG'*, *'PF'*, *'LMP'*, *'CONGU'*, *'CONGL'*, - *'AVERAGED_CONG'*, *'STORAGE_PG'*, *'STORAGE_E'*, *'LOAD_SHIFT_UP'*, - or *'LOAD_SHIFT_DN'*. + :param str field_name: *'PG'*, *'PF'*, *'PF_DCLINE'*, *'LMP'*, *'CONGU'*, + *'CONGL'*, *'AVERAGED_CONG'*, *'STORAGE_PG'*, *'STORAGE_E'*, *'LOAD_SHED'*, + *'LOAD_SHIFT_UP'*, or *'LOAD_SHIFT_DN'*. :return: (*pandas.DataFrame*) -- specified field as a data frame. :raises FileNotFoundError: if file not found on local machine. :raises ValueError: if second argument is not an allowable field. 
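
Taken together, the two patches above make the flexible-demand outputs reachable from a scenario in the analyze state and document the field names that OutputData.get_data accepts. A minimal usage sketch, assuming a scenario whose outputs have already been extracted (the id "824" is taken from the demo notebook further down; whether that scenario is still available on the server is an assumption, not something these patches guarantee):

    from powersimdata import Scenario

    # hypothetical: any scenario id whose engine outputs have been extracted
    scenario = Scenario("824")

    # Methods made reachable by the unindent fix; both return hour x bus data frames
    # and are only meaningful for scenarios that model flexible demand.
    shift_up = scenario.get_load_shift_up()  # demand recovered above the base profile
    shift_dn = scenario.get_load_shift_dn()  # demand curtailed below the base profile
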
From 7d22a66a49f1db484c587c5db5749cd21cae3aef Mon Sep 17 00:00:00 2001 From: BainanXia <52716585+BainanXia@users.noreply.github.com> Date: Mon, 1 Aug 2022 11:50:02 -0500 Subject: [PATCH 48/59] fix: replace "energy_price" by "energy_value" in storage template (#663) * fix: replace "energy_price" by "energy_value" in storage_template * chore: rerun scenario_and_grid_cheatsheet notebook to demonstrate the latest UI --- powersimdata/input/abstract_grid.py | 2 +- .../demo/scenario_and_grid_cheatsheet.ipynb | 320 +++++++++++++----- 2 files changed, 236 insertions(+), 86 deletions(-) diff --git a/powersimdata/input/abstract_grid.py b/powersimdata/input/abstract_grid.py index d3256a260..dd5de7a54 100644 --- a/powersimdata/input/abstract_grid.py +++ b/powersimdata/input/abstract_grid.py @@ -40,7 +40,7 @@ def storage_template(): "InEff": None, "OutEff": None, "LossFactor": None, # stored energy fraction / hour - "energy_price": None, # $/MWh + "energy_value": None, # $/MWh "terminal_min": None, "terminal_max": None, } diff --git a/powersimdata/scenario/demo/scenario_and_grid_cheatsheet.ipynb b/powersimdata/scenario/demo/scenario_and_grid_cheatsheet.ipynb index 4986084eb..3c6221064 100644 --- a/powersimdata/scenario/demo/scenario_and_grid_cheatsheet.ipynb +++ b/powersimdata/scenario/demo/scenario_and_grid_cheatsheet.ipynb @@ -16,7 +16,7 @@ "import pandas as pd\n", "\n", "from pprint import pprint\n", - "from powersimdata.scenario.scenario import Scenario" + "from powersimdata import Scenario" ] }, { @@ -52,11 +52,72 @@ "name": "stdout", "output_type": "stream", "text": [ + "Initialized remote filesystem with ssh_fs,profile_fs,scenario_fs\n", + "Transferring ScenarioList.csv from ssh_fs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|##########| 669k/669k [00:01<00:00, 153kb/s] \n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transferring ScenarioList.csv.2 from scenario_fs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\r", + "0.00b [00:00, ?b/s]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transferring ExecuteList.csv from ssh_fs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|##########| 50.5k/50.5k [00:00<00:00, 76.7kb/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transferring ExecuteList.csv.2 from scenario_fs\n", "SCENARIO: Julia | USABase_2020_Anchor_profile_fix_1\n", "\n", "--> State\n", "analyze\n", "--> Loading grid\n", + "Initialized remote filesystem with ssh_fs,profile_fs,scenario_fs\n", + "Transferring ScenarioList.csv from ssh_fs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|##########| 669k/669k [00:01<00:00, 153kb/s] \n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transferring ScenarioList.csv.2 from scenario_fs\n", "Loading bus\n", "Loading plant\n", "Loading heat_rate_curve\n", @@ -83,20 +144,64 @@ "name": "stdout", "output_type": "stream", "text": [ - "{'_execute_list_manager': ,\n", - " '_scenario_list_manager': ,\n", + "{'_default_info': [('plan', ''),\n", + " ('name', ''),\n", + " ('state', 'create'),\n", + " ('grid_model', ''),\n", + " ('interconnect', ''),\n", + " ('base_demand', ''),\n", + " ('base_hydro', ''),\n", + " ('base_solar', ''),\n", + " ('base_wind', ''),\n", + " ('change_table', ''),\n", + " ('start_date', ''),\n", + " ('end_date', ''),\n", + " ('interval', ''),\n", + " ('engine', '')],\n", + " 
'_execute_list_manager': ,\n", + " '_scenario_list_manager': ,\n", " '_set_info': 'function',\n", " '_set_status': 'function',\n", + " '_setattr_allowlist': {'_execute_list_manager',\n", + " '_scenario_list_manager',\n", + " 'data_access',\n", + " 'info',\n", + " 'state',\n", + " 'status'},\n", " 'change': 'function',\n", + " 'data_access': ,\n", + " 'get_averaged_cong': 'function',\n", + " 'get_base_grid': 'function',\n", + " 'get_bus_demand': 'function',\n", + " 'get_congl': 'function',\n", + " 'get_congu': 'function',\n", + " 'get_ct': 'function',\n", + " 'get_dcline_pf': 'function',\n", + " 'get_demand': 'function',\n", + " 'get_grid': 'function',\n", + " 'get_hydro': 'function',\n", + " 'get_lmp': 'function',\n", + " 'get_load_shed': 'function',\n", + " 'get_load_shift_dn': 'function',\n", + " 'get_load_shift_up': 'function',\n", + " 'get_pf': 'function',\n", + " 'get_pg': 'function',\n", + " 'get_scenario_table': 'function',\n", + " 'get_solar': 'function',\n", + " 'get_storage_e': 'function',\n", + " 'get_storage_pg': 'function',\n", + " 'get_wind': 'function',\n", + " 'get_wind_offshore': 'function',\n", " 'info': OrderedDict([('id', '824'),\n", " ('plan', 'Julia'),\n", " ('name', 'USABase_2020_Anchor_profile_fix_1'),\n", " ('state', 'analyze'),\n", + " ('grid_model', 'usa_tamu'),\n", " ('interconnect', 'USA'),\n", - " ('base_demand', 'v6'),\n", - " ('base_hydro', 'v4'),\n", - " ('base_solar', 'v4.3.1'),\n", - " ('base_wind', 'v5.4'),\n", + " ('base_demand', 'vJan2021'),\n", + " ('base_hydro', 'vJan2021'),\n", + " ('base_solar', 'vJan2021'),\n", + " ('base_wind', 'vJan2021'),\n", " ('change_table', 'Yes'),\n", " ('start_date', '2016-01-01 00:00:00'),\n", " ('end_date', '2016-12-31 23:00:00'),\n", @@ -104,9 +209,9 @@ " ('engine', 'REISE.jl'),\n", " ('runtime', '22:10'),\n", " ('infeasibilities', '')]),\n", + " 'print_infeasibilities': 'function',\n", " 'print_scenario_info': 'function',\n", - " 'ssh': ,\n", - " 'state': ,\n", + " 'state': ,\n", " 'status': 'extracted'}\n" ] } @@ -132,7 +237,7 @@ "metadata": {}, "outputs": [], "source": [ - "grid = s.state.get_grid()" + "grid = s.get_grid()" ] }, { @@ -144,14 +249,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "['branch',\n", + "['SUPPORTED_ENGINES',\n", + " 'SUPPORTED_MODELS',\n", + " 'branch',\n", " 'bus',\n", " 'bus2sub',\n", " 'data_loc',\n", " 'dcline',\n", " 'gencost',\n", + " 'grid_model',\n", " 'id2zone',\n", " 'interconnect',\n", + " 'model_immutables',\n", " 'plant',\n", " 'storage',\n", " 'sub',\n", @@ -445,23 +554,26 @@ "output_type": "stream", "text": [ "{'InEff': None,\n", + " 'LossFactor': None,\n", " 'OutEff': None,\n", " 'StorageData': Empty DataFrame\n", - "Columns: [UnitIdx, InitialStorage, InitialStorageLowerBound, InitialStorageUpperBound, InitialStorageCost, TerminalStoragePrice, MinStorageLevel, MaxStorageLevel, OutEff, InEff, LossFactor, rho]\n", + "Columns: [UnitIdx, InitialStorage, InitialStorageLowerBound, InitialStorageUpperBound, InitialStorageCost, TerminalStoragePrice, MinStorageLevel, MaxStorageLevel, OutEff, InEff, LossFactor, rho, ExpectedTerminalStorageMax, ExpectedTerminalStorageMin]\n", "Index: [],\n", " 'duration': None,\n", - " 'energy_price': None,\n", + " 'energy_value': None,\n", " 'gen': Empty DataFrame\n", - "Columns: [bus_id, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf]\n", + "Columns: [bus_id, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, 
ramp_agc, ramp_10, ramp_30, ramp_q, apf, mu_Pmax, mu_Pmin, mu_Qmax, mu_Qmin]\n", "Index: []\n", "\n", - "[0 rows x 21 columns],\n", + "[0 rows x 25 columns],\n", " 'gencost': Empty DataFrame\n", "Columns: [type, startup, shutdown, n, c2, c1, c0]\n", "Index: [],\n", " 'genfuel': [],\n", " 'max_stor': None,\n", - " 'min_stor': None}\n" + " 'min_stor': None,\n", + " 'terminal_max': None,\n", + " 'terminal_min': None}\n" ] } ], @@ -539,19 +651,23 @@ "name": "stdout", "output_type": "stream", "text": [ - "{'_enter': 'function',\n", - " '_execute_list_manager': ,\n", + "{'_data_access': ,\n", + " '_enter': 'function',\n", + " '_execute_list_manager': ,\n", + " '_get_data': 'function',\n", " '_leave': 'function',\n", " '_parse_infeasibilities': 'function',\n", + " '_scenario': ,\n", " '_scenario_info': OrderedDict([('id', '824'),\n", " ('plan', 'Julia'),\n", " ('name', 'USABase_2020_Anchor_profile_fix_1'),\n", " ('state', 'analyze'),\n", + " ('grid_model', 'usa_tamu'),\n", " ('interconnect', 'USA'),\n", - " ('base_demand', 'v6'),\n", - " ('base_hydro', 'v4'),\n", - " ('base_solar', 'v4.3.1'),\n", - " ('base_wind', 'v5.4'),\n", + " ('base_demand', 'vJan2021'),\n", + " ('base_hydro', 'vJan2021'),\n", + " ('base_solar', 'vJan2021'),\n", + " ('base_wind', 'vJan2021'),\n", " ('change_table', 'Yes'),\n", " ('start_date', '2016-01-01 00:00:00'),\n", " ('end_date', '2016-12-31 23:00:00'),\n", @@ -559,11 +675,11 @@ " ('engine', 'REISE.jl'),\n", " ('runtime', '22:10'),\n", " ('infeasibilities', '')]),\n", - " '_scenario_list_manager': ,\n", + " '_scenario_list_manager': ,\n", " '_scenario_status': 'extracted',\n", " '_set_allowed_state': 'function',\n", " '_set_ct_and_grid': 'function',\n", - " '_ssh': ,\n", + " '_update_scenario_info': 'function',\n", " 'allowed': ['delete', 'move'],\n", " 'ct': {'branch': {'branch_id': {6648: 1.1977118341079729,\n", " 16204: 2.0797364138406134,\n", @@ -1116,7 +1232,13 @@ " 11705: 0,\n", " 11764: 0,\n", " 11780: 0,\n", - " 11781: 0,\n", + " 11781: 0,\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ " 11782: 0,\n", " 11783: 0,\n", " 11801: 0,\n", @@ -1358,7 +1480,13 @@ " 48: 10.0,\n", " 49: 3.079365079365079,\n", " 50: 1.0,\n", - " 51: 1.0,\n", + " 51: 1.0,\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ " 52: 1.0,\n", " 201: 0.04051051349101375,\n", " 202: 0.8319519856828816,\n", @@ -1421,13 +1549,7 @@ " 39: 1.0580403343030635,\n", " 40: 1.0,\n", " 41: 1.0,\n", - " 42: 1.0,\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ + " 42: 1.0,\n", " 43: 1.0,\n", " 44: 1.0248192158483063,\n", " 45: 1.0248192158483063,\n", @@ -1528,8 +1650,31 @@ " 13932: 0,\n", " 13933: 0,\n", " 13934: 0}}},\n", - " 'data_loc': None,\n", + " 'exported_methods': {'get_averaged_cong',\n", + " 'get_base_grid',\n", + " 'get_bus_demand',\n", + " 'get_congl',\n", + " 'get_congu',\n", + " 'get_ct',\n", + " 'get_dcline_pf',\n", + " 'get_demand',\n", + " 'get_grid',\n", + " 'get_hydro',\n", + " 'get_lmp',\n", + " 'get_load_shed',\n", + " 'get_load_shift_dn',\n", + " 'get_load_shift_up',\n", + " 'get_pf',\n", + " 'get_pg',\n", + " 'get_solar',\n", + " 'get_storage_e',\n", + " 'get_storage_pg',\n", + " 'get_wind',\n", + " 'get_wind_offshore',\n", + " 'print_infeasibilities',\n", + " 'print_scenario_info'},\n", " 'get_averaged_cong': 'function',\n", + " 'get_base_grid': 'function',\n", " 'get_bus_demand': 'function',\n", " 'get_congl': 'function',\n", " 'get_congu': 'function',\n", @@ -1540,16 +1685,22 @@ " 'get_hydro': 
'function',\n", " 'get_lmp': 'function',\n", " 'get_load_shed': 'function',\n", + " 'get_load_shift_dn': 'function',\n", + " 'get_load_shift_up': 'function',\n", " 'get_pf': 'function',\n", " 'get_pg': 'function',\n", + " 'get_profile': 'function',\n", " 'get_solar': 'function',\n", " 'get_storage_e': 'function',\n", " 'get_storage_pg': 'function',\n", " 'get_wind': 'function',\n", - " 'grid': ,\n", + " 'get_wind_offshore': 'function',\n", + " 'get_wind_onshore': 'function',\n", + " 'grid': ,\n", " 'name': 'analyze',\n", " 'print_infeasibilities': 'function',\n", " 'print_scenario_info': 'function',\n", + " 'refresh': 'function',\n", " 'switch': 'function'}\n" ] } @@ -1581,14 +1732,6 @@ "text": [ "--> Loading CONGL\n", "--> Loading CONGU\n", - "Reading bus.csv\n", - "Reading plant.csv\n", - "Reading gencost.csv\n", - "Reading branch.csv\n", - "Reading dcline.csv\n", - "Reading sub.csv\n", - "Reading bus2sub.csv\n", - "Reading zone.csv\n", "--> Loading demand\n", "Multiply demand in Maine (#1) by 0.98\n", "Multiply demand in New Hampshire (#2) by 0.98\n", @@ -1673,13 +1816,13 @@ } ], "source": [ - "congl = s.state.get_congl() # congestion lower limit, time x branch in $/MWh\n", - "congu = s.state.get_congu() # congestion upper limit, time x branch in $/MWh\n", + "congl = s.get_congl() # congestion lower limit, time x branch in $/MWh\n", + "congu = s.get_congu() # congestion upper limit, time x branch in $/MWh\n", "\n", - "demand = s.state.get_demand() # demand, time x loadzone in MWh\n", - "lmp = s.state.get_lmp() # locational marginal price, time x busId in $/MWh\n", - "pf = s.state.get_pf() # power flow, time x branch in MWh\n", - "pg = s.state.get_pg() # power generated, time x plant in MWh" + "demand = s.get_demand() # demand, time x loadzone in MWh\n", + "lmp = s.get_lmp() # locational marginal price, time x busId in $/MWh\n", + "pf = s.get_pf() # power flow, time x branch in MWh\n", + "pg = s.get_pg() # power generated, time x plant in MWh" ] }, { @@ -1710,9 +1853,9 @@ } ], "source": [ - "hydro = s.state.get_hydro() # hydro profile, time x plant in MWh\n", - "solar = s.state.get_solar() # solar profile, time x plant in MWh\n", - "wind = s.state.get_wind() # wind profile, time x plant in MWh" + "hydro = s.get_hydro() # hydro profile, time x plant in MWh\n", + "solar = s.get_solar() # solar profile, time x plant in MWh\n", + "wind = s.get_wind() # wind profile, time x plant in MWh" ] }, { @@ -1728,59 +1871,59 @@ "text": [ " 5 6 7 8 9 \\\n", "UTC \n", - "2016-01-01 00:00:00 0.478793 0.478793 0.478793 0.478793 0.478793 \n", - "2016-01-01 01:00:00 0.476795 0.476795 0.476795 0.476795 0.476795 \n", + "2016-01-01 00:00:00 0.478794 0.478794 0.478794 0.478794 0.478794 \n", + "2016-01-01 01:00:00 0.476794 0.476794 0.476794 0.476794 0.476794 \n", "2016-01-01 02:00:00 0.500844 0.500844 0.500844 0.500844 0.500844 \n", - "2016-01-01 03:00:00 0.513546 0.513546 0.513546 0.513546 0.513546 \n", - "2016-01-01 04:00:00 0.535974 0.535974 0.535974 0.535974 0.535974 \n", + "2016-01-01 03:00:00 0.513545 0.513545 0.513545 0.513545 0.513545 \n", + "2016-01-01 04:00:00 0.535973 0.535973 0.535973 0.535973 0.535973 \n", "... ... ... ... ... ... 
\n", "2016-12-31 19:00:00 0.398887 0.398887 0.398887 0.398887 0.398887 \n", - "2016-12-31 20:00:00 0.371861 0.371861 0.371861 0.371861 0.371861 \n", - "2016-12-31 21:00:00 0.340429 0.340429 0.340429 0.340429 0.340429 \n", + "2016-12-31 20:00:00 0.371862 0.371862 0.371862 0.371862 0.371862 \n", + "2016-12-31 21:00:00 0.340430 0.340430 0.340430 0.340430 0.340430 \n", "2016-12-31 22:00:00 0.340107 0.340107 0.340107 0.340107 0.340107 \n", "2016-12-31 23:00:00 0.338237 0.338237 0.338237 0.338237 0.338237 \n", "\n", " 10 12 13 14 15 ... \\\n", "UTC ... \n", - "2016-01-01 00:00:00 0.478793 2.593464 2.593464 2.553565 2.553565 ... \n", - "2016-01-01 01:00:00 0.476795 2.582638 2.582638 2.542905 2.542905 ... \n", + "2016-01-01 00:00:00 0.478794 2.593461 2.593461 2.553565 2.553565 ... \n", + "2016-01-01 01:00:00 0.476794 2.582639 2.582639 2.542903 2.542903 ... \n", "2016-01-01 02:00:00 0.500844 2.712905 2.712905 2.671168 2.671168 ... \n", - "2016-01-01 03:00:00 0.513546 2.781705 2.781705 2.738910 2.738910 ... \n", - "2016-01-01 04:00:00 0.535974 2.903191 2.903191 2.858527 2.858527 ... \n", + "2016-01-01 03:00:00 0.513545 2.781704 2.781704 2.738912 2.738912 ... \n", + "2016-01-01 04:00:00 0.535973 2.903192 2.903192 2.858528 2.858528 ... \n", "... ... ... ... ... ... ... \n", - "2016-12-31 19:00:00 0.398887 2.160638 2.160638 2.127398 2.127398 ... \n", - "2016-12-31 20:00:00 0.371861 2.014248 2.014248 1.983260 1.983260 ... \n", - "2016-12-31 21:00:00 0.340429 1.843993 1.843993 1.815624 1.815624 ... \n", - "2016-12-31 22:00:00 0.340107 1.842247 1.842247 1.813904 1.813904 ... \n", - "2016-12-31 23:00:00 0.338237 1.832119 1.832119 1.803932 1.803932 ... \n", + "2016-12-31 19:00:00 0.398887 2.160640 2.160640 2.127396 2.127396 ... \n", + "2016-12-31 20:00:00 0.371862 2.014245 2.014245 1.983257 1.983257 ... \n", + "2016-12-31 21:00:00 0.340430 1.843993 1.843993 1.815623 1.815623 ... \n", + "2016-12-31 22:00:00 0.340107 1.842248 1.842248 1.813905 1.813905 ... \n", + "2016-12-31 23:00:00 0.338237 1.832120 1.832120 1.803933 1.803933 ... \n", "\n", " 13201 13202 13203 13204 13205 13206 13461 \\\n", "UTC \n", - "2016-01-01 00:00:00 0.0 0.0 0.0 0.0 0.0 0.0 16.647811 \n", - "2016-01-01 01:00:00 0.0 0.0 0.0 0.0 0.0 0.0 14.497928 \n", - "2016-01-01 02:00:00 0.0 0.0 0.0 0.0 0.0 0.0 7.221402 \n", + "2016-01-01 00:00:00 0.0 0.0 0.0 0.0 0.0 0.0 16.647828 \n", + "2016-01-01 01:00:00 0.0 0.0 0.0 0.0 0.0 0.0 14.497974 \n", + "2016-01-01 02:00:00 0.0 0.0 0.0 0.0 0.0 0.0 7.221406 \n", "2016-01-01 03:00:00 0.0 0.0 0.0 0.0 0.0 0.0 3.234012 \n", - "2016-01-01 04:00:00 0.0 0.0 0.0 0.0 0.0 0.0 2.627635 \n", + "2016-01-01 04:00:00 0.0 0.0 0.0 0.0 0.0 0.0 2.627632 \n", "... ... ... ... ... ... ... ... 
\n", - "2016-12-31 19:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.602635 \n", - "2016-12-31 20:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.643360 \n", - "2016-12-31 21:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.793778 \n", - "2016-12-31 22:00:00 0.0 0.0 0.0 0.0 0.0 0.0 17.905988 \n", - "2016-12-31 23:00:00 0.0 0.0 0.0 0.0 0.0 0.0 28.408159 \n", + "2016-12-31 19:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.602602 \n", + "2016-12-31 20:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.643402 \n", + "2016-12-31 21:00:00 0.0 0.0 0.0 0.0 0.0 0.0 11.793750 \n", + "2016-12-31 22:00:00 0.0 0.0 0.0 0.0 0.0 0.0 17.905998 \n", + "2016-12-31 23:00:00 0.0 0.0 0.0 0.0 0.0 0.0 28.408122 \n", "\n", " 13462 13463 13464 \n", "UTC \n", - "2016-01-01 00:00:00 10.785048 3.892148 6.984034 \n", - "2016-01-01 01:00:00 9.392277 3.389520 6.082122 \n", - "2016-01-01 02:00:00 4.678282 1.688316 3.029498 \n", + "2016-01-01 00:00:00 10.785059 3.892152 6.984041 \n", + "2016-01-01 01:00:00 9.392306 3.389531 6.082141 \n", + "2016-01-01 02:00:00 4.678285 1.688317 3.029500 \n", "2016-01-01 03:00:00 2.095109 0.756091 1.356722 \n", - "2016-01-01 04:00:00 1.702276 0.614324 1.102336 \n", + "2016-01-01 04:00:00 1.702274 0.614323 1.102335 \n", "... ... ... ... \n", - "2016-12-31 19:00:00 7.516602 2.712619 4.867499 \n", - "2016-12-31 20:00:00 7.542986 2.722141 4.884583 \n", - "2016-12-31 21:00:00 7.640431 2.757307 4.947686 \n", - "2016-12-31 22:00:00 11.600140 4.186302 7.511860 \n", - "2016-12-31 23:00:00 18.403822 6.641640 11.917695 \n", + "2016-12-31 19:00:00 7.516581 2.712612 4.867485 \n", + "2016-12-31 20:00:00 7.543013 2.722150 4.884601 \n", + "2016-12-31 21:00:00 7.640413 2.757301 4.947674 \n", + "2016-12-31 22:00:00 11.600146 4.186304 7.511864 \n", + "2016-12-31 23:00:00 18.403798 6.641631 11.917680 \n", "\n", "[8784 rows x 3043 columns]\n" ] @@ -1789,6 +1932,13 @@ "source": [ "print(hydro)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { From 41026349aa8510592d5e294331931571f728dcc5 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 4 Aug 2022 19:40:45 -0700 Subject: [PATCH 49/59] feat: augment list of exported methods (#664) --- powersimdata/scenario/ready.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/powersimdata/scenario/ready.py b/powersimdata/scenario/ready.py index 4d8178fd5..3762da69f 100644 --- a/powersimdata/scenario/ready.py +++ b/powersimdata/scenario/ready.py @@ -13,10 +13,12 @@ class Ready(State): "get_base_grid", "get_bus_demand", "get_demand", + "get_profile", "get_hydro", "get_grid", "get_solar", "get_wind", + "get_wind_onshore", "get_wind_offshore", } | State.exported_methods From 2dbdbfe0fe696eb5cf4b285101aa0d25831dfc08 Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 4 Aug 2022 21:30:14 -0700 Subject: [PATCH 50/59] feat: add get_profile to MockAnalyze --- powersimdata/tests/mock_analyze.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/powersimdata/tests/mock_analyze.py b/powersimdata/tests/mock_analyze.py index 8447e9486..662f619d4 100644 --- a/powersimdata/tests/mock_analyze.py +++ b/powersimdata/tests/mock_analyze.py @@ -55,6 +55,7 @@ class MockAnalyze: "get_solar", "get_wind", "get_hydro", + "get_profile", ] def __init__( @@ -183,6 +184,24 @@ def get_hydro(self): """ return self.hydro + def get_profile(self, kind): + """Return profile + + :param str kind: either *'demand'*, *'hydro'*, *'solar'*, *'wind'*. + :return: (*pandas.DataFrame*) -- dummy profile. + :raises ValueError: if kind is invalid. 
+ """ + if kind == "demand": + return self.demand + elif kind == "hydro": + return self.hydro + elif kind == "solar": + return self.solar + elif kind == "wind": + return self.wind + else: + raise ValueError("kind must be one of demand | hydro | solar | wind") + @property def __class__(self): """If anyone asks, I'm an Analyze object!""" From 7393bdb91c392b7026defe98b9454ac3fabcd31e Mon Sep 17 00:00:00 2001 From: Ben RdO Date: Thu, 4 Aug 2022 21:31:56 -0700 Subject: [PATCH 51/59] test: ensure get_profile is correctly implemented --- powersimdata/tests/test_mocks.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/powersimdata/tests/test_mocks.py b/powersimdata/tests/test_mocks.py index 8b02e9865..7ed3f821a 100644 --- a/powersimdata/tests/test_mocks.py +++ b/powersimdata/tests/test_mocks.py @@ -81,28 +81,44 @@ def test_mock_grid_failures(self): class TestMockScenario: def test_mock_pg_stored_properly(self, mock_pg): scenario = MockScenario(grid_attrs={"plant": mock_plant}, pg=mock_pg) - pg = scenario.state.get_pg() + pg = scenario.get_pg() err_msg = "pg should have dimension (periodNum * len(plant))" assert pg.shape == mock_pg.shape, err_msg def test_mock_solar_stored_properly(self, mock_solar): scenario = MockScenario(grid_attrs={"plant": mock_plant}, solar=mock_solar) - solar = scenario.state.get_solar() + solar = scenario.get_solar() err_msg = "solar should have dimension (periodNum * len(solar_plant))" assert solar.shape == mock_solar.shape, err_msg def test_mock_wind_stored_properly(self, mock_wind): scenario = MockScenario(grid_attrs={"plant": mock_plant}, wind=mock_wind) - wind = scenario.state.get_wind() + wind = scenario.get_wind() err_msg = "wind should have dimension (periodNum * len(wind_plant))" assert wind.shape == mock_wind.shape, err_msg def test_mock_hydro_stored_properly(self, mock_hydro): scenario = MockScenario(grid_attrs={"plant": mock_plant}, hydro=mock_hydro) - hydro = scenario.state.get_hydro() + hydro = scenario.get_hydro() err_msg = "hydro should have dimension (periodNum * len(hydro_plant))" assert hydro.shape == mock_hydro.shape, err_msg + def test_mock_profile(self, mock_hydro, mock_solar, mock_wind): + scenario = MockScenario( + grid_attrs={"plant": mock_plant}, + hydro=mock_hydro, + solar=mock_solar, + wind=mock_wind, + ) + pd.testing.assert_frame_equal(scenario.get_profile("hydro"), mock_hydro) + pd.testing.assert_frame_equal(scenario.get_profile("solar"), mock_solar) + pd.testing.assert_frame_equal(scenario.get_profile("wind"), mock_wind) + + def test_mock_profile_value(self): + scenario = MockScenario(grid_attrs={"plant": mock_plant}) + with pytest.raises(ValueError): + scenario.get_profile("coal") + class TestMockScenarioInfo: def test_create_mock_scenario_info(self): @@ -121,7 +137,7 @@ def test_info_set_correctly(self): def test_grid_set_correctly(self): mock_scenario = MockScenario() mock_s_info = MockScenarioInfo(mock_scenario) - assert mock_scenario.state.get_grid() == mock_s_info.grid + assert mock_scenario.get_grid() == mock_s_info.grid class TestMockInputData: From c470d024af9a973fd575bf930f5a44bd1451bbb4 Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Wed, 27 Jul 2022 18:10:08 -0700 Subject: [PATCH 52/59] refactor: simplify constructor parameters --- powersimdata/data_access/context.py | 10 ++++---- powersimdata/data_access/data_access.py | 31 ++++++++----------------- 2 files changed, 15 insertions(+), 26 deletions(-) diff --git a/powersimdata/data_access/context.py b/powersimdata/data_access/context.py 
index e2fafed4d..c99b8f060 100644 --- a/powersimdata/data_access/context.py +++ b/powersimdata/data_access/context.py @@ -8,18 +8,18 @@ class Context: """Factory for data access instances""" @staticmethod - def get_data_access(): + def get_data_access(_fs=None): """Return a data access instance appropriate for the current environment. + :param fs.base.FS _fs: a filesystem instance, or None to use a class specific + default :return: (:class:`powersimdata.data_access.data_access.DataAccess`) -- a data access instance """ - root = server_setup.DATA_ROOT_DIR - if server_setup.DEPLOYMENT_MODE == DeploymentMode.Server: - return SSHDataAccess(root) - return LocalDataAccess(root) + return SSHDataAccess(_fs) + return LocalDataAccess(_fs) @staticmethod def get_launcher(scenario): diff --git a/powersimdata/data_access/data_access.py b/powersimdata/data_access/data_access.py index b49e25022..91f45eef4 100644 --- a/powersimdata/data_access/data_access.py +++ b/powersimdata/data_access/data_access.py @@ -16,9 +16,8 @@ class DataAccess: """Interface to a local or remote data store.""" - def __init__(self, root): + def __init__(self): """Constructor""" - self.root = root self.join = fs.path.join self.local_fs = None @@ -163,10 +162,10 @@ def push(self, file_name, checksum): class LocalDataAccess(DataAccess): """Interface to shared data volume""" - def __init__(self, root=server_setup.LOCAL_DIR): - super().__init__(root) - self.local_fs = fs.open_fs(root) - self.fs = self._get_fs() + def __init__(self, _fs=None): + super().__init__() + self.local_fs = fs.open_fs(server_setup.LOCAL_DIR) + self.fs = _fs if _fs is not None else self._get_fs() def _get_fs(self): mfs = MultiFS() @@ -193,23 +192,13 @@ def push(self, file_name, checksum): class SSHDataAccess(DataAccess): """Interface to a remote data store, accessed via SSH.""" - def __init__(self, root=server_setup.DATA_ROOT_DIR): + def __init__(self, _fs=None): """Constructor""" - super().__init__(root) - self._fs = None + super().__init__() + self.root = server_setup.DATA_ROOT_DIR + self.fs = _fs if _fs is not None else get_multi_fs(self.root) self.local_fs = fs.open_fs(server_setup.LOCAL_DIR) - @property - def fs(self): - """Get or create the filesystem object - - :raises IOError: if connection failed or still within retry window - :return: (*fs.multifs.MultiFS*) -- filesystem instance - """ - if self._fs is None: - self._fs = get_multi_fs(self.root) - return self._fs - def exec_command(self, command): ssh_fs = self.fs.get_fs("ssh_fs") return ssh_fs.exec_command(command) @@ -285,7 +274,7 @@ class _DataAccessTemplate(SSHDataAccess): def __init__(self, fs_url): self.local_fs = fs.open_fs(fs_url) - self._fs = self._get_fs(fs_url) + self.fs = self._get_fs(fs_url) self.root = "foo" self.join = fs.path.join From c8ec216ee5ed751a1e8d21af1ec222094030346a Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Wed, 27 Jul 2022 18:39:01 -0700 Subject: [PATCH 53/59] perf: pass minimal multifs based on the context --- powersimdata/data_access/context.py | 12 +++++++----- powersimdata/data_access/fs_helper.py | 19 +++++++++++++++++++ powersimdata/input/input_base.py | 3 +-- powersimdata/input/input_data.py | 3 +++ powersimdata/input/profile_input.py | 3 +++ powersimdata/output/output_data.py | 3 ++- 6 files changed, 35 insertions(+), 8 deletions(-) diff --git a/powersimdata/data_access/context.py b/powersimdata/data_access/context.py index c99b8f060..5b2aa2b19 100644 --- a/powersimdata/data_access/context.py +++ b/powersimdata/data_access/context.py @@ -8,18 +8,20 @@ class Context: 
"""Factory for data access instances""" @staticmethod - def get_data_access(_fs=None): + def get_data_access(make_fs=None): """Return a data access instance appropriate for the current environment. - :param fs.base.FS _fs: a filesystem instance, or None to use a class specific - default + :param callable make_fs: a function that returns a filesystem instance, or + None to use a default :return: (:class:`powersimdata.data_access.data_access.DataAccess`) -- a data access instance """ if server_setup.DEPLOYMENT_MODE == DeploymentMode.Server: - return SSHDataAccess(_fs) - return LocalDataAccess(_fs) + if make_fs is None: + make_fs = lambda: None # noqa: E731 + return SSHDataAccess(make_fs()) + return LocalDataAccess() @staticmethod def get_launcher(scenario): diff --git a/powersimdata/data_access/fs_helper.py b/powersimdata/data_access/fs_helper.py index 78ef551c4..2919962fa 100644 --- a/powersimdata/data_access/fs_helper.py +++ b/powersimdata/data_access/fs_helper.py @@ -49,3 +49,22 @@ def get_multi_fs(root): remotes = ",".join([f[0] for f in mfs.iterate_fs()]) print(f"Initialized remote filesystem with {remotes}") return mfs + + +def get_scenario_fs(): + """Create filesystem combining the server (if connected) with blob storage, + prioritizing the server if connected. + + :return: (*fs.base.FS*) -- filesystem instance + """ + scenario_data = get_blob_fs("scenariodata") + mfs = MultiFS() + try: + ssh_fs = get_ssh_fs(server_setup.DATA_ROOT_DIR) + mfs.add_fs("ssh_fs", ssh_fs, write=True, priority=2) + except: # noqa + print("Could not connect to ssh server") + mfs.add_fs("scenario_fs", scenario_data, priority=1) + remotes = ",".join([f[0] for f in mfs.iterate_fs()]) + print(f"Initialized remote filesystem with {remotes}") + return mfs diff --git a/powersimdata/input/input_base.py b/powersimdata/input/input_base.py index deb957d96..ba5879171 100644 --- a/powersimdata/input/input_base.py +++ b/powersimdata/input/input_base.py @@ -1,4 +1,3 @@ -from powersimdata.data_access.context import Context from powersimdata.utility.helpers import MemoryCache, cache_key _cache = MemoryCache() @@ -11,7 +10,7 @@ class InputBase: def __init__(self): """Constructor.""" - self.data_access = Context.get_data_access() + self.data_access = None self._file_extension = {} def _check_field(self, field_name): diff --git a/powersimdata/input/input_data.py b/powersimdata/input/input_data.py index 9fb302e28..134f4706e 100644 --- a/powersimdata/input/input_data.py +++ b/powersimdata/input/input_data.py @@ -3,6 +3,8 @@ import pandas as pd +from powersimdata.data_access.context import Context +from powersimdata.data_access.fs_helper import get_scenario_fs from powersimdata.input.input_base import InputBase from powersimdata.utility import server_setup @@ -13,6 +15,7 @@ class InputData(InputBase): def __init__(self): super().__init__() self._file_extension = {"ct": "pkl", "grid": "mat"} + self.data_access = Context.get_data_access(get_scenario_fs) def _get_file_path(self, scenario_info, field_name): """Get the path to either grid or ct for the scenario diff --git a/powersimdata/input/profile_input.py b/powersimdata/input/profile_input.py index 7d6167bd0..1c2183d4e 100644 --- a/powersimdata/input/profile_input.py +++ b/powersimdata/input/profile_input.py @@ -1,5 +1,7 @@ import pandas as pd +from powersimdata.data_access.context import Context +from powersimdata.data_access.fs_helper import get_blob_fs from powersimdata.input.input_base import InputBase profile_kind = { @@ -39,6 +41,7 @@ class ProfileInput(InputBase): def 
__init__(self): super().__init__() self._file_extension = {k: "csv" for k in profile_kind} + self.data_access = Context.get_data_access(lambda: get_blob_fs("profiles")) def _get_file_path(self, scenario_info, field_name): """Get the path to the specified profile diff --git a/powersimdata/output/output_data.py b/powersimdata/output/output_data.py index 7ec848236..7ae3843a7 100644 --- a/powersimdata/output/output_data.py +++ b/powersimdata/output/output_data.py @@ -3,6 +3,7 @@ from scipy.sparse import coo_matrix from powersimdata.data_access.context import Context +from powersimdata.data_access.fs_helper import get_scenario_fs from powersimdata.input.input_data import distribute_demand_from_zones_to_buses from powersimdata.input.transform_profile import TransformProfile from powersimdata.utility import server_setup @@ -13,7 +14,7 @@ class OutputData: def __init__(self): """Constructor""" - self._data_access = Context.get_data_access() + self.data_access = Context.get_data_access(get_scenario_fs) def get_data(self, scenario_id, field_name): """Returns data either from server or from local directory. From 768d5c4ca6a8d3f6e1fd6e7a35c29bbd77ee32d1 Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Wed, 27 Jul 2022 18:58:39 -0700 Subject: [PATCH 54/59] fix: keep delayed evaluation of ssh fs --- powersimdata/data_access/data_access.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/powersimdata/data_access/data_access.py b/powersimdata/data_access/data_access.py index 91f45eef4..b417be5d3 100644 --- a/powersimdata/data_access/data_access.py +++ b/powersimdata/data_access/data_access.py @@ -196,9 +196,15 @@ def __init__(self, _fs=None): """Constructor""" super().__init__() self.root = server_setup.DATA_ROOT_DIR - self.fs = _fs if _fs is not None else get_multi_fs(self.root) + self._fs = _fs self.local_fs = fs.open_fs(server_setup.LOCAL_DIR) + @property + def fs(self): + if self._fs is None: + self._fs = get_multi_fs(self.root) + return self._fs + def exec_command(self, command): ssh_fs = self.fs.get_fs("ssh_fs") return ssh_fs.exec_command(command) @@ -274,7 +280,7 @@ class _DataAccessTemplate(SSHDataAccess): def __init__(self, fs_url): self.local_fs = fs.open_fs(fs_url) - self.fs = self._get_fs(fs_url) + self._fs = self._get_fs(fs_url) self.root = "foo" self.join = fs.path.join From b127e0a7a8c9e6bb41ede76fcad7fec23f9d7d64 Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Fri, 5 Aug 2022 13:39:19 -0400 Subject: [PATCH 55/59] feat: use new storage account --- powersimdata/data_access/fs_helper.py | 5 +++-- powersimdata/utility/server_setup.py | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/powersimdata/data_access/fs_helper.py b/powersimdata/data_access/fs_helper.py index 2919962fa..1cfbc0440 100644 --- a/powersimdata/data_access/fs_helper.py +++ b/powersimdata/data_access/fs_helper.py @@ -11,8 +11,9 @@ def get_blob_fs(container): :param str container: the container name :return: (*fs.base.FS*) -- filesystem instance """ - account = "besciences" - return fs.open_fs(f"azblob://{account}@{container}") + account = "esmi" + sas_token = server_setup.BLOB_TOKEN_RO + return fs.open_fs(f"azblobv2://{account}:{sas_token}@{container}") def get_ssh_fs(root=""): diff --git a/powersimdata/utility/server_setup.py b/powersimdata/utility/server_setup.py index 1587fefd3..5988ddd47 100644 --- a/powersimdata/utility/server_setup.py +++ b/powersimdata/utility/server_setup.py @@ -14,6 +14,7 @@ MODEL_DIR = config.MODEL_DIR ENGINE_DIR = config.ENGINE_DIR DEPLOYMENT_MODE = 
get_deployment_mode() +BLOB_TOKEN_RO = "?sv=2021-06-08&ss=b&srt=co&sp=rl&se=2050-08-06T01:31:08Z&st=2022-08-05T17:31:08Z&spr=https&sig=ORHiRQQCocyaHXV2phhSN92GFhRnaHuGOecskxsmG3U%3D" os.makedirs(LOCAL_DIR, exist_ok=True) From 748c6dfaa8087f999099f693d02a0449b2c2282b Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Fri, 5 Aug 2022 15:54:02 -0400 Subject: [PATCH 56/59] chore: update remote fs for scenario object --- powersimdata/scenario/scenario.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/powersimdata/scenario/scenario.py b/powersimdata/scenario/scenario.py index 29e4683de..e9130eff8 100644 --- a/powersimdata/scenario/scenario.py +++ b/powersimdata/scenario/scenario.py @@ -4,6 +4,7 @@ from powersimdata.data_access.context import Context from powersimdata.data_access.execute_list import ExecuteListManager +from powersimdata.data_access.fs_helper import get_scenario_fs from powersimdata.data_access.scenario_list import ScenarioListManager from powersimdata.scenario.analyze import Analyze from powersimdata.scenario.create import Create, _Builder @@ -52,7 +53,7 @@ def __init__(self, descriptor=None): if descriptor is not None and not isinstance(descriptor, str): raise TypeError("Descriptor must be a string or int (for a Scenario ID)") - self.data_access = Context.get_data_access() + self.data_access = Context.get_data_access(get_scenario_fs) self._scenario_list_manager = ScenarioListManager(self.data_access) self._execute_list_manager = ExecuteListManager(self.data_access) From d183e1d3c19019b6f967c01402cfeac59eddff03 Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Fri, 5 Aug 2022 16:43:32 -0400 Subject: [PATCH 57/59] fix: typo while renaming --- powersimdata/output/output_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/powersimdata/output/output_data.py b/powersimdata/output/output_data.py index 7ae3843a7..bc5e6c7cd 100644 --- a/powersimdata/output/output_data.py +++ b/powersimdata/output/output_data.py @@ -14,7 +14,7 @@ class OutputData: def __init__(self): """Constructor""" - self.data_access = Context.get_data_access(get_scenario_fs) + self._data_access = Context.get_data_access(get_scenario_fs) def get_data(self, scenario_id, field_name): """Returns data either from server or from local directory. From c51d095e799686a079fc6d86c37e9c93f092e19e Mon Sep 17 00:00:00 2001 From: Jen Hagg Date: Wed, 17 Aug 2022 10:46:32 -0700 Subject: [PATCH 58/59] docs: fix docstrings --- powersimdata/data_access/data_access.py | 5 +++++ powersimdata/data_access/fs_helper.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/powersimdata/data_access/data_access.py b/powersimdata/data_access/data_access.py index b417be5d3..8e6eaff7d 100644 --- a/powersimdata/data_access/data_access.py +++ b/powersimdata/data_access/data_access.py @@ -201,6 +201,11 @@ def __init__(self, _fs=None): @property def fs(self): + """Get or create a filesystem object, defaulting to a MultiFS that combines the + server and blob containers. + + :return: (*fs.base.FS*) -- filesystem instance + """ if self._fs is None: self._fs = get_multi_fs(self.root) return self._fs diff --git a/powersimdata/data_access/fs_helper.py b/powersimdata/data_access/fs_helper.py index 1cfbc0440..45624dd37 100644 --- a/powersimdata/data_access/fs_helper.py +++ b/powersimdata/data_access/fs_helper.py @@ -54,7 +54,7 @@ def get_multi_fs(root): def get_scenario_fs(): """Create filesystem combining the server (if connected) with blob storage, - prioritizing the server if connected. 
+ prioritizing the server if connected. :return: (*fs.base.FS*) -- filesystem instance """ From 8972008d50d44eede951d6b920322eab09ee179d Mon Sep 17 00:00:00 2001 From: jenhagg <66005238+jenhagg@users.noreply.github.com> Date: Wed, 17 Aug 2022 13:28:16 -0700 Subject: [PATCH 59/59] chore: bump version to 0.5.4 (#669) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index f476b64b4..3d556f10c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = powersimdata -version = 0.5.3 +version = 0.5.4 author = Breakthrough Energy author_email = sciences@breakthroughenergy.org description = Power Simulation Data
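
Usage sketch for the accessor changes above (patches 48-51): the rerun cheatsheet
notebook calls the exported methods directly on the Scenario object rather than
going through scenario.state, and get_profile(kind) is added to the exported
methods (patch 49) and to MockAnalyze (patch 50). The snippet below is a minimal
illustration of those calls, not a definitive recipe; the scenario id "824" is
simply the one shown in the notebook output, and the accepted kinds and the
ValueError behavior follow the MockAnalyze implementation in patch 50.

    from powersimdata import Scenario

    scenario = Scenario("824")   # any extracted scenario id works the same way
    grid = scenario.get_grid()   # formerly scenario.state.get_grid()
    pg = scenario.get_pg()       # power generated, time x plant in MWh

    # One getter keyed on the profile kind, next to the per-resource getters.
    # Per MockAnalyze (patch 50), kind is "demand", "hydro", "solar" or "wind";
    # anything else raises ValueError.
    demand = scenario.get_profile("demand")
    hydro = scenario.get_profile("hydro")
    solar = scenario.get_profile("solar")
    wind = scenario.get_profile("wind")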
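
Wiring sketch for the data-access refactor (patches 52-54 and 56):
Context.get_data_access now takes a callable that builds the remote filesystem,
InputData, OutputData and Scenario pass get_scenario_fs, ProfileInput passes a
blob-only filesystem, and SSHDataAccess still builds the full MultiFS lazily
when nothing is supplied (patch 54). The snippet below only restates that wiring
with the helpers defined above; the variable names are illustrative, and the
callable is invoked only in the server deployment mode (otherwise
LocalDataAccess is returned, as shown in patch 53).

    from powersimdata.data_access.context import Context
    from powersimdata.data_access.fs_helper import get_blob_fs, get_scenario_fs

    # Scenario tables, inputs and outputs: SSH server first when reachable, then
    # the "scenariodata" blob container, as assembled by get_scenario_fs.
    scenario_access = Context.get_data_access(get_scenario_fs)

    # Profiles live only in the "profiles" blob container, so ProfileInput skips
    # the SSH connection entirely.
    profile_access = Context.get_data_access(lambda: get_blob_fs("profiles"))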