Skip to content

Commit

Permalink
Merge pull request #583 from Breakthrough-Energy/develop
Browse files Browse the repository at this point in the history
chore: merge develop into master for v0.5.0 release
  • Loading branch information
danielolsen authored Feb 4, 2022
2 parents 8dab8e4 + 5282848 commit f08ba77
Show file tree
Hide file tree
Showing 21 changed files with 508 additions and 396 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/lint.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
name: Lint

on: push
on:
push:
pull_request:
branches:
- develop

jobs:
formatting:
Expand Down
8 changes: 6 additions & 2 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
name: Pytest

on: push
on:
push:
pull_request:
branches:
- develop

jobs:
test:
Expand All @@ -9,7 +13,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: [3.7, 3.8, 3.9]
python-version: [3.8, 3.9, '3.10']

name: Python ${{ matrix.python-version }}
steps:
Expand Down
1 change: 1 addition & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,4 @@ tqdm = "==4.29.1"
requests = "~=2.25"
fs = "*"
"fs.sshfs" = "*"
fs-azureblob = "*"
608 changes: 315 additions & 293 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 0 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
version: '3.7'

services:
powersimdata:
image: powersimdata:latest
Expand Down
9 changes: 6 additions & 3 deletions powersimdata/data_access/data_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@
import fs as fs2
from fs.tempfs import TempFS

from powersimdata.data_access.profile_helper import ProfileHelper
from powersimdata.data_access.profile_helper import (
get_profile_version_cloud,
get_profile_version_local,
)
from powersimdata.data_access.ssh_fs import WrapSSHFS
from powersimdata.utility import server_setup

Expand Down Expand Up @@ -127,8 +130,8 @@ def get_profile_version(self, grid_model, kind):
:param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
:return: (*list*) -- available profile version.
"""
blob_version = ProfileHelper.get_profile_version_cloud(grid_model, kind)
local_version = ProfileHelper.get_profile_version_local(grid_model, kind)
blob_version = get_profile_version_cloud(grid_model, kind)
local_version = get_profile_version_local(grid_model, kind)
return list(set(blob_version + local_version))


Expand Down
72 changes: 33 additions & 39 deletions powersimdata/data_access/profile_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,39 @@
from powersimdata.utility import server_setup


def _get_profile_version(_fs, kind):
"""Returns available raw profiles from the give filesystem
:param fs.base.FS _fs: filesystem instance
:param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
:return: (*list*) -- available profile version.
"""
matching = [f for f in _fs.listdir(".") if kind in f]
return [f.lstrip(f"{kind}_").rstrip(".csv") for f in matching]


def get_profile_version_cloud(grid_model, kind):
    """Returns available raw profile from blob storage.

    :param str grid_model: grid model.
    :param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
    :return: (*list*) -- available profile version.
    """
    blob_fs = fs.open_fs("azblob://besciences@profiles")
    grid_model_fs = blob_fs.opendir(f"raw/{grid_model}")
    return _get_profile_version(grid_model_fs, kind)


def get_profile_version_local(grid_model, kind):
    """Returns available raw profile from local file.

    :param str grid_model: grid model.
    :param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
    :return: (*list*) -- available profile version.
    """
    local_path = fs.path.join(server_setup.LOCAL_DIR, "raw", grid_model)
    return _get_profile_version(fs.open_fs(local_path), kind)


class ProfileHelper:
BASE_URL = "https://besciences.blob.core.windows.net/profiles"

Expand Down Expand Up @@ -52,42 +85,3 @@ def download_file(file_name, from_dir):
pbar.update(len(chunk))

return dest

@staticmethod
def parse_version(grid_model, kind, version):
"""Parse available versions from the given spec.
:param str grid_model: grid model.
:param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
:param dict version: version information per grid model.
:return: (*list*) -- available profile version.
"""
if grid_model in version and kind in version[grid_model]:
return version[grid_model][kind]
print("No %s profiles available." % kind)
return []

@staticmethod
def get_profile_version_cloud(grid_model, kind):
"""Returns available raw profile from blob storage.
:param str grid_model: grid model.
:param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
:return: (*list*) -- available profile version.
"""

resp = requests.get(f"{ProfileHelper.BASE_URL}/version.json")
return ProfileHelper.parse_version(grid_model, kind, resp.json())

@staticmethod
def get_profile_version_local(grid_model, kind):
"""Returns available raw profile from local file.
:param str grid_model: grid model.
:param str kind: *'demand'*, *'hydro'*, *'solar'* or *'wind'*.
:return: (*list*) -- available profile version.
"""
profile_dir = fs.path.join(server_setup.LOCAL_DIR, "raw", grid_model)
lfs = fs.open_fs(profile_dir)
matching = [f for f in lfs.listdir(".") if kind in f]
return [f.lstrip(f"{kind}_").rstrip(".csv") for f in matching]
33 changes: 16 additions & 17 deletions powersimdata/data_access/tests/test_profile_helper.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,19 @@
from powersimdata.data_access.profile_helper import ProfileHelper


def test_parse_version_default():
assert [] == ProfileHelper.parse_version("usa_tamu", "solar", {})


def test_parse_version_missing_key():
version = {"solar": ["v123"]}
assert [] == ProfileHelper.parse_version("usa_tamu", "solar", version)


def test_parse_version():
expected = ["v123", "v456"]
version = {"usa_tamu": {"solar": expected}}
assert expected == ProfileHelper.parse_version("usa_tamu", "solar", version)
assert [] == ProfileHelper.parse_version("usa_tamu", "hydro", version)
from fs.tempfs import TempFS

from powersimdata.data_access.profile_helper import ProfileHelper, _get_profile_version


def test_get_profile_version():
    # Build a throwaway filesystem mimicking the raw-profile layout, then
    # check that versions are extracted per kind and unknown kinds are empty.
    with TempFS() as tmp_fs:
        profile_fs = tmp_fs.makedirs("raw/usa_tamu", recreate=True)
        for file_name in ("solar_vOct2022.csv", "foo_v1.0.1.csv"):
            profile_fs.touch(file_name)
        solar_versions = _get_profile_version(profile_fs, "solar")
        foo_versions = _get_profile_version(profile_fs, "foo")
        assert solar_versions[0] == "vOct2022"
        assert foo_versions[0] == "v1.0.1"
        assert _get_profile_version(profile_fs, "missing") == []


def test_get_file_components():
Expand Down
4 changes: 2 additions & 2 deletions powersimdata/design/generation/cost_curves.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def linearize_gencost(input_grid, num_segments=1):
price_label = "f" + str(i + 1)
capacity_data = plant.Pmin + power_step * i
price_data = (
quad_term * capacity_data ** 2 + lin_term * capacity_data + const_term
quad_term * capacity_data**2 + lin_term * capacity_data + const_term
)
gencost_after.loc[dispatchable_gens, capacity_label] = capacity_data[
dispatchable_gens
Expand All @@ -76,7 +76,7 @@ def linearize_gencost(input_grid, num_segments=1):
nondispatchable_gens, "n"
]
power = plant.Pmax
price_data = quad_term * power ** 2 + lin_term * power + const_term
price_data = quad_term * power**2 + lin_term * power + const_term
gencost_after.loc[nondispatchable_gens, ["c2", "c1"]] = 0
gencost_after.loc[nondispatchable_gens, "c0"] = price_data[nondispatchable_gens]

Expand Down
13 changes: 13 additions & 0 deletions powersimdata/input/check.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ def check_grid(grid):
_check_line_voltages,
_check_plant_against_gencost,
_check_connected_components,
_check_for_loop_branches,
]:
try:
check(grid, error_messages)
Expand Down Expand Up @@ -225,6 +226,18 @@ def _check_connected_components(grid, error_messages):
)


def _check_for_loop_branches(grid, error_messages):
"""Check whether any branches in a grid have the same start and end bus.
:param powersimdata.input.grid.Grid grid: grid or grid-like object to check.
:param list error_messages: list, to be appended to with a str if:
there are any branches with the same start and end bus.
"""
if not all(grid.branch.from_bus_id != grid.branch.to_bus_id):
loop_lines = grid.branch.query("from_bus_id == to_bus_id").index # noqa: F841
error_messages.append(f"This grid contains loop lines: {list(loop_lines)}")


def _check_grid_models_match(grid1, grid2):
"""Check whether an object is an internally-consistent Grid object.
Expand Down
5 changes: 3 additions & 2 deletions powersimdata/input/export_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def export_case_mat(grid, filepath, storage_filepath=None):
savemat(filepath, mpc, appendmat=False)


def export_transformed_profile(kind, scenario_info, grid, ct, filepath):
def export_transformed_profile(kind, scenario_info, grid, ct, filepath, slice=True):
"""Apply transformation to the given kind of profile and save the result locally.
:param str kind: which profile to export. This parameter is passed to
Expand All @@ -135,8 +135,9 @@ def export_transformed_profile(kind, scenario_info, grid, ct, filepath):
transformed.
:param dict ct: change table.
:param str filepath: path to save the result, including the filename
:param bool slice: whether to slice the profiles by the Scenario's time range.
"""
tp = TransformProfile(scenario_info, grid, ct)
tp = TransformProfile(scenario_info, grid, ct, slice)
profile = tp.get_profile(kind)
print(f"Writing scaled {kind} profile to {filepath} on local machine")
profile.to_csv(filepath)
36 changes: 22 additions & 14 deletions powersimdata/input/input_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,20 +140,28 @@ def _read_data(filepath):
return data


def get_bus_demand(scenario_info, grid):
"""Returns demand profiles by bus.
:param dict scenario_info: scenario information.
:param powersimdata.input.grid.Grid grid: grid to construct bus demand for.
:return: (*pandas.DataFrame*) -- data frame of demand.
def distribute_demand_from_zones_to_buses(zone_demand, bus):
    """Decomposes zone demand to bus demand based on bus 'Pd' column.

    :param pandas.DataFrame zone_demand: demand by zone. Index is timestamp, columns are
        zone IDs, values are zone demand (MW).
    :param pandas.DataFrame bus: table of bus data, containing at least 'zone_id' and
        'Pd' columns.
    :return: (*pandas.DataFrame*) -- data frame of demand. Index is timestamp, columns
        are bus IDs, values are bus demand (MW).
    :raises ValueError: if the columns of ``zone_demand`` don't match the set of zone
        IDs within the 'zone_id' column of ``bus``.
    """
    if set(zone_demand.columns) != set(bus["zone_id"].unique()):
        raise ValueError("zones don't match between zone_demand and bus dataframes")
    # Each bus takes a share of its zone's demand, proportional to its 'Pd'.
    zone_total_pd = bus.groupby("zone_id")["Pd"].transform("sum")
    share_table = pd.concat(
        [(bus["Pd"] / zone_total_pd).rename("zone_share"), bus["zone_id"]], axis=1
    )
    # Wide table: one row per bus, one column per zone, zero where a bus does
    # not belong to that zone.
    shares_by_zone = share_table.pivot_table(
        index="bus_id", columns="zone_id", values="zone_share", fill_value=0
    )
    return zone_demand.dot(shares_by_zone.T)
23 changes: 19 additions & 4 deletions powersimdata/input/transform_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,20 @@
class TransformProfile:
"""Transform profile according to operations listed in change table."""

def __init__(self, scenario_info, grid, ct):
_default_dates = {"start_date": "2016-01-01 00:00", "end_date": "2016-12-31 23:00"}

def __init__(self, scenario_info, grid, ct, slice=True):
"""Constructor.
:param dict scenario_info: scenario information.
:param powersimdata.input.grid.Grid grid: a Grid object previously
transformed.
:param dict ct: change table.
:param bool slice: whether to slice the profiles by the Scenario's time range.
"""
self.slice = slice
self._input_data = InputData()
self.scenario_info = scenario_info
self.scenario_info = {**self._default_dates, **scenario_info}

self.ct = copy.deepcopy(ct)
self.grid = copy.deepcopy(grid)
Expand Down Expand Up @@ -113,6 +117,17 @@ def _get_demand_profile(self):
demand.loc[:, key] *= value
return demand

def _slice_df(self, df):
"""Return dataframe, sliced by the times specified in scenario_info if and only
if ``self.slice`` = True.
:param pandas.DataFrame df: data frame to be sliced.
:return: (*pandas.DataFrame*) -- sliced data frame.
"""
if not self.slice:
return df
return df.loc[self.scenario_info["start_date"] : self.scenario_info["end_date"]]

def get_profile(self, name):
"""Return profile.
Expand All @@ -125,6 +140,6 @@ def get_profile(self, name):
if name not in possible:
raise ValueError("Choose from %s" % " | ".join(possible))
elif name == "demand":
return self._get_demand_profile()
return self._slice_df(self._get_demand_profile())
else:
return self._get_renewable_profile(name)
return self._slice_df(self._get_renewable_profile(name))
9 changes: 6 additions & 3 deletions powersimdata/output/output_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
from scipy.sparse import coo_matrix

from powersimdata.data_access.context import Context
from powersimdata.input.input_data import get_bus_demand
from powersimdata.input.input_data import distribute_demand_from_zones_to_buses
from powersimdata.input.transform_profile import TransformProfile
from powersimdata.utility import server_setup


Expand Down Expand Up @@ -76,11 +77,12 @@ def _check_field(field_name):
raise ValueError("Only %s data can be loaded" % " | ".join(possible))


def construct_load_shed(scenario_info, grid, infeasibilities=None):
def construct_load_shed(scenario_info, grid, ct, infeasibilities=None):
"""Constructs load_shed dataframe from relevant scenario/grid data.
:param dict scenario_info: info attribute of Scenario object.
:param powersimdata.input.grid.Grid grid: grid to construct load_shed for.
:param dict ct: ChangeTable dictionary.
:param dict/None infeasibilities: dictionary of
{interval (int): load shed percentage (int)}, or None.
:return: (*pandas.DataFrame*) -- data frame of load_shed.
Expand All @@ -95,7 +97,8 @@ def construct_load_shed(scenario_info, grid, infeasibilities=None):
load_shed = pd.DataFrame.sparse.from_spmatrix(load_shed_data)
else:
print("Infeasibilities, constructing DataFrame")
bus_demand = get_bus_demand(scenario_info, grid)
zone_demand = TransformProfile(scenario_info, grid, ct)
bus_demand = distribute_demand_from_zones_to_buses(zone_demand, grid.bus)
load_shed = np.zeros((len(hours), len(buses)))
# Convert '24H' to 24
interval = int(scenario_info["interval"][:-1])
Expand Down
Loading

0 comments on commit f08ba77

Please sign in to comment.