
Commit

Merge branch 'develop' into 'master'
Release 2.3.1

See merge request iek-3/shared-code/fine!302
kknos committed Nov 16, 2023
2 parents 2a46d83 + 08a7c58 commit b962680
Showing 25 changed files with 2,759 additions and 1,609 deletions.
205 changes: 129 additions & 76 deletions .gitlab-ci.yml
@@ -2,67 +2,105 @@ image: mambaorg/micromamba

stages:
- test
- prebuild
- build

variables:
DOCKER_HOST: tcp://docker:2375
DOCKER_TLS_CERTDIR: ""

# Templates

.test_template:
stage: test
before_script:
- micromamba install -n base -y --file=requirements_dev.yml
rules:
# Run only for pushes to master or develop and for merge requests to master
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH == "develop"'
- if: '$CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "master"'
# Switch from branch pipeline to merge pipeline once a merge request has
# been created on the branch.
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never

.test_formatting_template:
.test_docker_template_noupdate:
stage: test
image: jugit-registry.fz-juelich.de/iek-3/shared-code/fine/fine-dev:latest
before_script:
- micromamba install -y -c conda-forge "black" "python=3.10"
- python -m pip install -e .
rules:
# Do not run for pushes to master or develop and for merge requests to master
- if: $CI_COMMIT_BRANCH == "master"
when: never
- if: $CI_COMMIT_BRANCH == "develop"
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "master"
when: never
# Switch from branch pipeline to merge pipeline once a merge request has
# been created on the branch.
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- changes:
- pyproject.toml
- requirements.yml
- requirements_dev.yml
when: never
- when: on_success


.test_docker_template:
stage: test
image: jugit-registry.fz-juelich.de/iek-3/shared-code/fine/fine-dev:latest
before_script:
- micromamba install -n base -y --file=requirements_dev.yml
rules:
# Do not run for pushes to master or develop and for merge requests to master
- if: $CI_COMMIT_BRANCH == "master"
when: never
- if: $CI_COMMIT_BRANCH == "develop"
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "master"
when: never
# Switch from branch pipeline to merge pipeline once a merge request has
# been created on the branch.
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- changes:
- pyproject.toml
- requirements.yml
- requirements_dev.yml
when: on_success

.build_template:
stage: build
image: docker@sha256:c8bb6fa5388b56304dd770c4bc0478de81ce18540173b1a589178c0d31bfce90
services:
- docker:dind@sha256:c8bb6fa5388b56304dd770c4bc0478de81ce18540173b1a589178c0d31bfce90
when: on_success
dependencies:
- prebuild

# Tests

test-code:
# Switch from branch pipeline to merge pipeline once a merge request has been
# created on the branch.
rules:
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_COMMIT_BRANCH
extends: .test_template
script:
- python -m pytest --cov=FINE test/
artifacts:
paths:
- test/data/*
expire_in: 1 week

test-formatting:
# Switch from branch pipeline to merge pipeline once a merge request has been
# created on the branch.
rules:
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_COMMIT_BRANCH
extends: .test_formatting_template
test-code-docker:
extends: .test_docker_template
script:
# Dry-run black auto-formatter. If the code needs reformatting this test
# will fail.
- black --version
- black --check FINE
- black --check test
- python -m pytest --cov=FINE test/

test-code-docker-noupdate:
extends: .test_docker_template_noupdate
script:
- python -m pytest --cov=FINE test/

test-notebooks:
extends: .test_template
@@ -72,55 +110,70 @@ test-notebooks:
# execution errors. Cells tagged 'nbval-check-output' are checked for
# consistent output. Cells tagged 'nbval-skip' are skipped.
- python -m pytest --nbval-lax --current-env --durations=20 examples/
# Only test examples notebooks for branches master and develop
only:
refs:
- master
- develop
- merge_requests

prebuild:
stage: prebuild

test-notebooks-docker:
extends: .test_docker_template
script:
# Extract the current image version from setup.py
# Check image version
- export IMAGE_VERSION=$(awk -F\= '{gsub(/"/,"",$2);print $1"="$2}' pyproject.toml | grep -E -o "(version = )(*.*)" | cut -d" " -f 3)
- echo "IMAGE_VERSION=$IMAGE_VERSION"
# Export a master and dev tag for each versions using the current image version
- export MASTER_TAG=fzjiek3/fine:${IMAGE_VERSION}
- export DEV_TAG=fzjiek3/fine-dev:${IMAGE_VERSION}
# Pass tags to build.env to be usable in the next stage
- echo "MASTER_TAG=$MASTER_TAG" >> build.env
- echo "DEV_TAG=$DEV_TAG" >> build.env
artifacts:
reports:
dotenv: build.env
only:
refs:
- master
when: on_success
- python -m pytest --nbval-lax --current-env --durations=20 examples/

master-build:
test-notebooks-docker-noupdate:
extends: .test_docker_template_noupdate
script:
- python -m pytest --nbval-lax --current-env --durations=20 examples/

test-formatting:
stage: test
image: pyfound/black:latest_release
script:
# Dry-run black auto-formatter. If the code needs reformatting this test
# will fail.
- black --version
- black --check FINE
- black --check test
rules:
# Switch from branch pipeline to merge pipeline once a merge request has
# been created on the branch.
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never

# Deployment

build-master-latest:
extends: .build_template
script:
# Login to the DockerHub repo using a specialized access token.
# Then, build the docker image with the tested code and tag it
# with the current version, as well as latest.
# Afterwards, push to DockerHub.
- docker login -u fzjiek3 -p $DOCKER_AT
- docker build -t $MASTER_TAG -t "fzjiek3/fine:latest" .
- docker push $MASTER_TAG
- docker build -t "fzjiek3/fine:latest" .
- docker push fzjiek3/fine:latest
only:
refs:
- master
# Uncomment to enable dev builds
#dev-build:
# extends: .build_template
# script:
# - docker login -u fzjiek3 -p $DOCKER_AT
# - docker build -t $DEV_TAG
# - docker push $DEV_TAG
# only:
# refs:
# - develop
rules:
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_PIPELINE_SOURCE == "schedule"'
when: never

build-tag:
extends: .build_template
script:
- docker login -u fzjiek3 -p $DOCKER_AT
- docker build -t fzjiek3/fine:${CI_COMMIT_TAG} .
- docker push fzjiek3/fine:${CI_COMMIT_TAG}
rules:
- if: '$CI_COMMIT_TAG'
- if: '$CI_PIPELINE_SOURCE == "schedule"'
when: never

# Push develop build to local jugit registry
build-dev:
extends: .build_template
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- export DEV_TAG=jugit-registry.fz-juelich.de/iek-3/shared-code/fine/fine-dev:${CI_COMMIT_SHA}
- docker build -t $DEV_TAG -t "jugit-registry.fz-juelich.de/iek-3/shared-code/fine/fine-dev:latest" .
- docker push $DEV_TAG
- docker push jugit-registry.fz-juelich.de/iek-3/shared-code/fine/fine-dev:latest
rules:
- if: '$CI_COMMIT_BRANCH == "develop"'
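
For orientation, the IMAGE_VERSION export in the pipeline above pulls the release version out of pyproject.toml with an awk/grep/cut chain. Below is a minimal Python sketch of the same lookup — illustration only, assuming the version is declared in a standard [project] table (if it lives under a tool-specific table instead, the key path differs):

import tomllib  # stdlib in Python 3.11+; older interpreters can use the tomli backport

with open("pyproject.toml", "rb") as f:  # tomllib.load expects a binary file object
    metadata = tomllib.load(f)

# Same result as the awk/grep/cut pipeline: the bare version string, e.g. "2.3.1"
image_version = metadata["project"]["version"]  # assumed key path
print(f"IMAGE_VERSION={image_version}")
print(f"MASTER_TAG=fzjiek3/fine:{image_version}")
print(f"DEV_TAG=fzjiek3/fine-dev:{image_version}")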
31 changes: 2 additions & 29 deletions FINE/IOManagement/xarrayIO.py
@@ -70,7 +70,7 @@ def convertOptimizationInputToDatasets(esM, useProcessedValues=False):
return xr_dss


def convertOptimizationOutputToDatasets(esM, optSumOutputLevel=0, optValOutputLevel=1):
def convertOptimizationOutputToDatasets(esM, optSumOutputLevel=0):
"""
Takes esM instance output and converts it into an xarray dataset.
@@ -84,17 +84,6 @@ def convertOptimizationOutputToDatasets(esM, optSumOutputLevel=0, optValOutputLe
|br| * the default value is 2
:type optSumOutputLevel: int (0,1,2) or dict
:param optValOutputLevel: Output level of the optimal values. Either an
integer (0,1) which holds for all model classes or a dictionary with
model class names as keys and an integer (0,1) for each key (e.g.
{'StorageModel':1,'SourceSinkModel':1,...}
- 0: all values are kept.
- 1: Lines containing only zeroes are dropped.
|br| * the default value is 1
:type optValOutputLevel: int (0,1) or dict
:return: xr_ds - EnergySystemModel instance output data in xarray dataset format
:rtype: xarray.dataset
"""
@@ -178,8 +167,6 @@ def convertOptimizationOutputToDatasets(esM, optSumOutputLevel=0, optValOutputLe

# Write output from esM.esM.componentModelingDict[name].getOptimalValues() to datasets
data = esM.componentModelingDict[name].getOptimalValues(ip=ip)
oL = optValOutputLevel
oL_ = oL[name] if type(oL) == dict else oL
dataTD1dim, indexTD1dim, dataTD2dim, indexTD2dim = [], [], [], []
dataTI, indexTI = [], []
for key, d in data.items():
@@ -903,7 +890,6 @@ def writeEnergySystemModelToNetCDF(
outputFilePath="my_esm.nc",
overwriteExisting=False,
optSumOutputLevel=0,
optValOutputLevel=1,
groupPrefix=None,
):
"""
@@ -927,17 +913,6 @@ def writeEnergySystemModelToNetCDF(
|br| * the default value is 2
:type optSumOutputLevel: int (0,1,2) or dict
:param optValOutputLevel: Output level of the optimal values. Either an
integer (0,1) which holds for all model classes or a dictionary with
model class names as keys and an integer (0,1) for each key (e.g.
{'StorageModel':1,'SourceSinkModel':1,...}
* 0: all values are kept.
* 1: Lines containing only zeroes are dropped.
|br| * the default value is 1
:type optValOutputLevel: int (0,1) or dict
:param groupPrefix: if specified, multiple xarray datasets (with esM
instance data) are saved to the same netcdf file. The dictionary
structure is then {group_prefix}/{group}/{...} instead of {group}/{...}
@@ -959,9 +934,7 @@ def writeEnergySystemModelToNetCDF(
xr_dss_input = convertOptimizationInputToDatasets(esM)
writeDatasetsToNetCDF(xr_dss_input, outputFilePath, groupPrefix=groupPrefix)
if esM.objectiveValue != None: # model was optimized
xr_dss_output = convertOptimizationOutputToDatasets(
esM, optSumOutputLevel, optValOutputLevel
)
xr_dss_output = convertOptimizationOutputToDatasets(esM, optSumOutputLevel)
writeDatasetsToNetCDF(xr_dss_output, outputFilePath, groupPrefix=groupPrefix)

utils.output("Done. (%.4f" % (time.time() - _t) + " sec)", esM.verbose, 0)
