fix: enable grid equality for back converted pypsa networks (#689)
FabianHofmann authored Sep 29, 2022
1 parent 173512c commit ce898ff
Showing 4 changed files with 42 additions and 26 deletions.
1 change: 1 addition & 0 deletions powersimdata/input/const/grid_const.py
@@ -296,6 +296,7 @@
"rho",
"ExpectedTerminalStorageMax",
"ExpectedTerminalStorageMin",
"duration",
]
col_type_storage_storagedata = [
"int",
3 changes: 2 additions & 1 deletion powersimdata/input/const/pypsa_const.py
@@ -124,6 +124,7 @@
"storage_gen": {
"rename": {
"bus_id": "bus",
"Pmax": "p_nom",
},
"default_drop_cols": [
"GenFuelCost",
@@ -133,7 +134,6 @@
"Pc1",
"Pc2",
"Pg",
"Pmax",
"Pmin",
"Qc1max",
"Qc1min",
@@ -172,6 +172,7 @@
"LossFactor": "standing_loss",
"duration": "max_hours",
"genfuel": "carrier",
"InitialStorage": "state_of_charge_initial",
},
"default_drop_cols": [
"ExpectedTerminalStorageMax",
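For orientation, the dictionaries in pypsa_const.py are plain column-rename maps from PSD column names to PyPSA attribute names. Below is a minimal sketch of how the storage entries above (including the InitialStorage mapping added in this commit) would translate a PSD frame, assuming they are applied with pandas.DataFrame.rename the same way the exporter renames the bus table; the data values are invented for illustration.

```python
import pandas as pd

# Excerpt of the storage_storagedata mapping above (PSD column -> PyPSA attribute).
storage_rename = {
    "duration": "max_hours",
    "genfuel": "carrier",
    "InitialStorage": "state_of_charge_initial",
}

# Toy PSD storage table; values are made up for illustration.
storage = pd.DataFrame(
    {"duration": [4.0], "genfuel": ["solar"], "InitialStorage": [0.0]},
    index=["storage0"],
)

# Applied the same way the exporter handles buses (grid.bus.rename(columns=bus_rename)).
print(storage.rename(columns=storage_rename))
```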
36 changes: 17 additions & 19 deletions powersimdata/input/converter/pypsa_to_grid.py
@@ -40,17 +40,17 @@ def _get_storage_storagedata(n, storage_type):
:return: (*pandas.DataFrame*) -- data frame with storage data.
"""
if storage_type == "storage_units":

storage_storagedata = _translate_df(n.storage_units, "storage_storagedata")

p_nom = n.storage_units["p_nom"]
e_nom = p_nom * n.storage_units["max_hours"]
cyclic_state_of_charge = n.storage_units["cyclic_state_of_charge"]
e_nom = n.storage_units.eval("p_nom * max_hours")
state_of_charge_initial = n.storage_units["state_of_charge_initial"]

elif storage_type == "stores":

storage_storagedata = _translate_df(n.stores, "storage_storagedata")

e_nom = n.stores["e_nom"]
cyclic_state_of_charge = n.stores["e_cyclic"]
state_of_charge_initial = n.stores["e_initial"]

# Efficiencies of Store are captured in link/dcline
@@ -61,27 +61,25 @@ def _get_storage_storagedata(n, storage_type):
"Inapplicable storage_type passed to function _get_storage_storagedata."
)

# Initial storage: If cyclic, then fill half. If not cyclic, then apply PyPSA's state_of_charge_initial.
storage_storagedata["InitialStorage"] = state_of_charge_initial.where(
~cyclic_state_of_charge, e_nom / 2
)
# Initial storage bounds: PSD's default is same as initial storage
storage_storagedata["InitialStorageLowerBound"] = storage_storagedata[
"InitialStorage"
]
storage_storagedata["InitialStorageUpperBound"] = storage_storagedata[
"InitialStorage"
]
# Terminal storage bounds: If cyclic, then both same as initial storage. If not cyclic, then full capacity and zero.
storage_storagedata["ExpectedTerminalStorageMax"] = e_nom * 1
storage_storagedata["ExpectedTerminalStorageMin"] = e_nom * 0
storage_storagedata["InitialStorageLowerBound"] = state_of_charge_initial
storage_storagedata["InitialStorageUpperBound"] = state_of_charge_initial
# Apply PSD's default relationships/assumptions for remaining columns
storage_storagedata["InitialStorageCost"] = storage_const["energy_value"]
storage_storagedata["TerminalStoragePrice"] = storage_const["energy_value"]
storage_storagedata["MinStorageLevel"] = e_nom * storage_const["min_stor"]
storage_storagedata["MaxStorageLevel"] = e_nom * storage_const["max_stor"]
storage_storagedata["rho"] = 1

# fill with heuristic defaults if non-existent
defaults = {
"MinStorageLevel": e_nom * storage_const["min_stor"],
"MaxStorageLevel": e_nom * storage_const["max_stor"],
"ExpectedTerminalStorageMax": 1,
"ExpectedTerminalStorageMin": 0,
}
for k, v in defaults.items():
if k not in storage_storagedata:
storage_storagedata[k] = v

return storage_storagedata


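To see why the new defaults loop above helps with round-trip equality: it only fills columns that are absent, so values that survive a PSD to PyPSA and back conversion are kept verbatim instead of being overwritten. A small self-contained sketch of that behaviour, with column names taken from the diff, invented numbers, and plain factors standing in for storage_const["min_stor"] and storage_const["max_stor"]:

```python
import pandas as pd

# Toy storage frame as it might look after a back-conversion:
# ExpectedTerminalStorageMax already exists, the other columns do not.
storage_storagedata = pd.DataFrame(
    {"ExpectedTerminalStorageMax": [10.0, 20.0]}, index=["s1", "s2"]
)
e_nom = pd.Series([10.0, 20.0], index=["s1", "s2"])

defaults = {
    "MinStorageLevel": e_nom * 0.05,  # placeholder for storage_const["min_stor"]
    "MaxStorageLevel": e_nom * 0.95,  # placeholder for storage_const["max_stor"]
    "ExpectedTerminalStorageMax": 1,
    "ExpectedTerminalStorageMin": 0,
}
for k, v in defaults.items():
    if k not in storage_storagedata:
        storage_storagedata[k] = v  # only missing columns are filled

# ExpectedTerminalStorageMax keeps its original values; the rest get defaults.
print(storage_storagedata)
```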
28 changes: 22 additions & 6 deletions powersimdata/input/exporter/export_to_pypsa.py
@@ -7,6 +7,16 @@
from powersimdata.utility.helpers import _check_import


def restore_original_columns(df, overwrite=[]):
prefix = "pypsa_"
for col in df.columns[df.columns.str.startswith(prefix)]:
target = col[len(prefix) :]
fallback = df.pop(col)
if target not in df or target in overwrite:
df[target] = fallback
return df


def export_to_pypsa(
scenario_or_grid,
add_all_columns=False,
@@ -61,7 +71,7 @@ def export_to_pypsa(
drop_cols += list(bus_rename_t)

buses = grid.bus.rename(columns=bus_rename)
buses.control.replace([1, 2, 3, 4], ["PQ", "PV", "slack", ""], inplace=True)
buses.control.replace([1, 2, 3, 4], ["PQ", "PV", "Slack", ""], inplace=True)
buses["zone_name"] = buses.zone_id.map({v: k for k, v in grid.zone2id.items()})
buses["substation"] = "sub" + grid.bus2sub["sub_id"].astype(str)

@@ -72,7 +82,8 @@

loads = {"proportionality_factor": buses["Pd"]}

shunts = {k: buses.pop(k) for k in ["b_pu", "g_pu"]}
shunts = pd.DataFrame({k: buses.pop(k) for k in ["b_pu", "g_pu"]})
shunts = shunts.dropna(how="all")

substations = grid.sub.copy().rename(columns={"lat": "y", "lon": "x"})
substations.index = "sub" + substations.index.astype(str)
@@ -82,6 +93,7 @@
substations["v_nom"] = v_nom

buses = buses.drop(columns=drop_cols, errors="ignore").sort_index(axis=1)
buses = restore_original_columns(buses)

# now time-dependent
if scenario:
@@ -119,6 +131,9 @@
gencost = gencost.rename(columns=pypsa_const["gencost"]["rename"])
gencost = gencost[pypsa_const["gencost"]["rename"].values()]

generators = generators.assign(**gencost)
generators = restore_original_columns(generators)

carriers = pd.DataFrame(index=generators.carrier.unique(), dtype=object)

cars = carriers.index
@@ -167,6 +182,7 @@

lines = branches.query("branch_device_type == 'Line'")
lines = lines.drop(columns="branch_device_type")
lines = restore_original_columns(lines)

transformers = branches.query(
"branch_device_type in ['TransformerWinding', 'Transformer']"
@@ -194,6 +210,7 @@

links = grid.dcline.rename(columns=link_rename).drop(columns=drop_cols)
links.p_min_pu /= links.p_nom.where(links.p_nom != 0, 1)
links = restore_original_columns(links, overwrite=["p_min_pu", "p_max_pu"])

# SUBSTATION CONNECTORS
sublinks = dict(
@@ -225,17 +242,16 @@
for k, v in defaults.items():
storage[k] = storage[k].fillna(v) if k in storage else v

storage["p_nom"] = storage.get("Pmax")
storage["state_of_charge_initial"] = storage.pop("InitialStorage")
storage = restore_original_columns(storage)

# Import everything to a new pypsa network
n = pypsa.Network()
if scenario:
n.snapshots = loads_t["p_set"].index
n.madd("Bus", buses.index, **buses, **buses_t)
n.madd("Load", buses.index, bus=buses.index, **loads, **loads_t)
n.madd("ShuntImpedance", buses.index, bus=buses.index, **shunts)
n.madd("Generator", generators.index, **generators, **gencost, **generators_t)
n.madd("ShuntImpedance", shunts.index, bus=shunts.index, **shunts)
n.madd("Generator", generators.index, **generators, **generators_t)
n.madd("Carrier", carriers.index, **carriers)
n.madd("Line", lines.index, **lines, **lines_t)
n.madd("Transformer", transformers.index, **transformers, **transformers_t)
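As a usage illustration of the restore_original_columns helper added above: columns that the converter stashed under a "pypsa_" prefix are moved back to their original names, and the overwrite argument controls whether they may replace a column the exporter has just recomputed. A minimal sketch with invented data (the dcline frame and its values are hypothetical):

```python
import pandas as pd


def restore_original_columns(df, overwrite=[]):
    # Copy of the helper from the diff above, so this example is self-contained.
    prefix = "pypsa_"
    for col in df.columns[df.columns.str.startswith(prefix)]:
        target = col[len(prefix):]
        fallback = df.pop(col)
        if target not in df or target in overwrite:
            df[target] = fallback
    return df


# A back-converted grid can carry the original PyPSA value under a "pypsa_" prefix.
links = pd.DataFrame(
    {"p_nom": [100.0], "p_min_pu": [-0.9], "pypsa_p_min_pu": [-1.0]},
    index=["dcline0"],
)

# Without overwrite, the recomputed p_min_pu (-0.9) would win; listing it in
# overwrite restores the stored PyPSA value instead.
out = restore_original_columns(links, overwrite=["p_min_pu"])
print(out["p_min_pu"])  # -1.0, taken from pypsa_p_min_pu
```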
