Updated install to pyproject.toml (#324)
* updated to pyproject.toml

* updated main.yaml to use micromamba from docker image

* pr sched

* ci

* remove concurrency

* pre-commit update

* Update .github/workflows/main.yaml

Co-authored-by: Max Jones <[email protected]>

* Update .github/workflows/main.yaml

Co-authored-by: Max Jones <[email protected]>

* Update cmip6_downscaling/_version.py

Co-authored-by: Max Jones <[email protected]>

* Update pyproject.toml

Co-authored-by: Max Jones <[email protected]>

* Update pyproject.toml

Co-authored-by: Max Jones <[email protected]>

* Update pyproject.toml

Co-authored-by: Max Jones <[email protected]>

* Update .github/workflows/main.yaml

Co-authored-by: Max Jones <[email protected]>

* pre-commit modified files

* added xesmf to pyproj

* esmf

* updated pytest dir

* removed esmf

---------

Co-authored-by: Max Jones <[email protected]>
norlandrhagen and maxrjones authored Jan 31, 2024
1 parent 9693dd4 commit 0d8508c
Showing 18 changed files with 197 additions and 147 deletions.
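The substance of the change: the package's build and install metadata now lives in pyproject.toml (dev-requirements.txt is deleted below, and CI installs the package with pip in editable mode). As a quick orientation, the following is a minimal sketch that reads that metadata back with the standard library. It assumes a PEP 621 [project] table, the usual layout for this kind of migration, and Python 3.11+ for tomllib; on the 3.9/3.10 versions in the CI matrix you would install and import the tomli backport instead.

# Hedged sketch: inspect the metadata that now lives in pyproject.toml.
# Assumes a PEP 621 [project] table and Python 3.11+ (tomllib is stdlib there).
import tomllib

with open('pyproject.toml', 'rb') as f:
    meta = tomllib.load(f)

print(meta['project']['name'])
# The dev extra referenced by the workflow's `pip install -e .[dev]`, if defined.
print(meta['project'].get('optional-dependencies', {}).get('dev', []))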
60 changes: 43 additions & 17 deletions .github/workflows/main.yaml
@@ -1,30 +1,56 @@
name: CI

on:
push:
branches:
- main
branches: ['main']
pull_request:
branches: ['main']
workflow_dispatch:
schedule:
- cron: '0 0 * * 0' # Weekly “At 00:00”

# concurrency:
# group: ${{ github.workflow }}-${{ github.ref }}
# cancel-in-progress: true

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# Note: This CI used to run off of the image:
# carbonplan/cmip6-downscaling-single-user:2022.06.19

jobs:
test:
name: build-and-test
name: ${{ matrix.python-version }}-build
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.9', '3.10'] #TODO: add 3.11 once sparse/numba support it
timeout-minutes: 20
container:
image: carbonplan/cmip6-downscaling-single-user:2022.06.19
options: --user root
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v3
- name: Install
- name: Checkout
uses: actions/checkout@v4

- name: Setup Micromamba
uses: mamba-org/[email protected]
with:
environment-name: cmip6-downscaling
condarc: |
channels:
- conda-forge
- nodefaults
cache-downloads: false
cache-environment: true
create-args: >-
python=${{ matrix.python-version }}
- name: Install package
run: |
python -m pip install -e .[dev]
- name: Conda list information
run: |
git config --global --add safe.directory /__w/cmip6-downscaling/cmip6-downscaling
python -m pip install -e .
python -m pip list
- name: Running Tests
conda env list
conda list
- name: Run tests
run: |
python -m pytest --verbose
python -m pytest tests
21 changes: 5 additions & 16 deletions .pre-commit-config.yaml
@@ -32,24 +32,13 @@ repos:
hooks:
- id: blackdoc

- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.0.276'
hooks:
- id: flake8

- repo: https://github.com/asottile/seed-isort-config
rev: v2.2.0
hooks:
- id: seed-isort-config
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
- id: ruff
args: ['--fix']

- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
rev: v3.0.0-alpha.9-for-vscode
hooks:
- id: prettier
additional_dependencies:
- prettier
- '@carbonplan/prettier'
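The lint stack is consolidated here: the flake8, seed-isort-config, and isort hooks are replaced by a single ruff hook run with --fix, alongside a prettier rev change. The one-import-per-line rewrites that appear throughout the rest of this diff look like that tooling's import-sorting style (force-single-line sorting is an assumption about the project's configuration, not something visible in this hunk); the sketch below shows the kind of rewrite involved.

# Before: a combined "from" import on one line.
from importlib.metadata import PackageNotFoundError, version

# After: one import per line, the style applied across this commit's
# "pre-commit modified files" (force-single-line sorting is assumed).
from importlib.metadata import PackageNotFoundError
from importlib.metadata import version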
9 changes: 5 additions & 4 deletions cmip6_downscaling/_version.py
@@ -1,7 +1,8 @@
from importlib.metadata import PackageNotFoundError, version
from importlib.metadata import PackageNotFoundError as _PackageNotFoundError
from importlib.metadata import version as _version

try:
__version__ = version("cmip6-downscaling")
except PackageNotFoundError:
__version__ = _version('cmip6_downscaling')
except _PackageNotFoundError:
# package is not installed
__version__ = 'unknown'
__version__ = "unknown"
3 changes: 2 additions & 1 deletion cmip6_downscaling/data/cmip.py
@@ -6,7 +6,8 @@
import xarray as xr

from .. import config
from .utils import lon_to_180, to_standard_calendar as convert_to_standard_calendar
from .utils import lon_to_180
from .utils import to_standard_calendar as convert_to_standard_calendar

xr.set_options(keep_attrs=True)

2 changes: 1 addition & 1 deletion cmip6_downscaling/disagg/wrapper.py
@@ -1,7 +1,7 @@
from __future__ import annotations

import os
from typing import Iterable
from collections.abc import Iterable

import dask
import numba
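typing.Iterable has been deprecated in favour of collections.abc.Iterable since Python 3.9, and the module already enables postponed evaluation of annotations, so the swap is safe. A minimal sketch of the same pattern (the function is illustrative, not taken from wrapper.py):

# Illustrative only: the import pattern wrapper.py now uses.
from __future__ import annotations

from collections.abc import Iterable


def total(values: Iterable[float]) -> float:
    # Accepts any iterable of numbers: list, tuple, generator, etc.
    return sum(values)


print(total(x * 0.5 for x in range(4)))  # 3.0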
3 changes: 2 additions & 1 deletion cmip6_downscaling/methods/bcsd/tasks.py
@@ -10,7 +10,8 @@
from skdownscale.pointwise_models.bcsd import BcsdPrecipitation, BcsdTemperature
from upath import UPath

from ... import __version__ as version, config
from ... import __version__ as version
from ... import config
from ...constants import ABSOLUTE_VARS, RELATIVE_VARS
from ...utils import str_to_hash
from ..common.containers import RunParameters
3 changes: 2 additions & 1 deletion cmip6_downscaling/methods/common/tasks.py
@@ -23,7 +23,8 @@
from xarray_schema import DataArraySchema, DatasetSchema
from xarray_schema.base import SchemaError

from ... import __version__ as version, config
from ... import __version__ as version
from ... import config
from ...data.cmip import get_gcm
from ...data.observations import open_era5
from ...utils import str_to_hash
12 changes: 6 additions & 6 deletions cmip6_downscaling/methods/common/utils.py
@@ -40,14 +40,14 @@ def validate_zarr_store(target: str, raise_on_error=True) -> bool:

try:
store = zarr.open_consolidated(target)
except:
except Exception:
errors.append('error opening zarr store')

if not errors:
groups = list(store.groups())
# if groups is empty (not a datatree)
if not groups:
groups = [("root", store["/"])]
groups = [('root', store['/'])]

for key, group in groups:
data_group = group
@@ -92,7 +92,7 @@ def blocking_to_zarr(
if write_empty_chunks:
if packaging.version.Version(
packaging.version.Version(xr.__version__).base_version
) < packaging.version.Version("2022.03"):
) < packaging.version.Version('2022.03'):
raise NotImplementedError(
f'`write_empty_chunks` not supported in xarray < 2022.06. Your xarray version is: {xr.__version__}'
)
@@ -188,7 +188,7 @@ def apply_land_mask(ds: xr.Dataset) -> xr.Dataset:

def calc_auspicious_chunks_dict(
da: xr.DataArray,
chunk_dims: tuple = ("lat", "lon"),
chunk_dims: tuple = ('lat', 'lon'),
) -> dict:
"""Figure out a chunk size that, given the size of the dataset, the dimension(s) you want to chunk on
and the data type, will fit under the target_size. Currently only works for 100mb which
@@ -210,7 +210,7 @@ def calc_auspicious_chunks_dict(
"""
if not isinstance(chunk_dims, tuple):
raise TypeError(
"Your chunk_dims likely includes one string but needs a comma after it! to be a tuple!"
'Your chunk_dims likely includes one string but needs a comma after it! to be a tuple!'
)
# setting target_size_bytes to the 100mb chunksize recommended by dask. could modify in future.
target_size_bytes = 100e6
@@ -228,7 +228,7 @@
# so we'll always just give it the full length of the dimension
chunks_dict[dim] = dim_sizes[dim]
# calculate the bytesize given the dtype bitsize and divide by 8
data_bytesize = int(re.findall(r"\d+", str(da.dtype))[0]) / 8
data_bytesize = int(re.findall(r'\d+', str(da.dtype))[0]) / 8
# calculate the size of the smallest minimum chunk based upon dtype and the
# length of the unchunked dim(s). chunks_dict currently only has unchunked dims right now
smallest_size_one_chunk = data_bytesize * np.prod([dim_sizes[dim] for dim in chunks_dict])
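Two small robustness fixes land in validate_zarr_store: the bare except becomes except Exception, and string quoting is normalised throughout the module. The distinction matters because a bare except also swallows exceptions that should normally propagate, such as KeyboardInterrupt and SystemExit. A hedged illustration of the narrower pattern (not the project's function, just the idea):

# Illustrative only: why `except Exception` is preferred over a bare `except`.
import zarr


def can_open(target: str) -> bool:
    try:
        zarr.open_consolidated(target)
    except Exception:
        # Catches failures such as a missing or unconsolidated store, while still
        # letting KeyboardInterrupt and SystemExit propagate (a bare except would not).
        return False
    return True


print(can_open('not-a-real-store.zarr'))  # False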
3 changes: 2 additions & 1 deletion cmip6_downscaling/methods/deepsd/tasks.py
@@ -10,7 +10,8 @@
from prefect import task
from upath import UPath

from ... import __version__ as version, config
from ... import __version__ as version
from ... import config
from ...data.observations import open_era5
from ...data.utils import lon_to_180
from ..common.bias_correction import bias_correct_gcm_by_method
3 changes: 2 additions & 1 deletion cmip6_downscaling/methods/gard/tasks.py
@@ -12,7 +12,8 @@
from skdownscale.pointwise_models.utils import default_none_kwargs
from upath import UPath

from ... import __version__ as version, config
from ... import __version__ as version
from ... import config
from ..common.bias_correction import bias_correct_gcm_by_method
from ..common.containers import RunParameters, str_to_hash
from ..common.utils import apply_land_mask, blocking_to_zarr, set_zarr_encoding, zmetadata_exists
3 changes: 2 additions & 1 deletion cmip6_downscaling/methods/maca/tasks.py
@@ -10,7 +10,8 @@
from prefect import task
from upath import UPath

from cmip6_downscaling import __version__ as version, config
from cmip6_downscaling import __version__ as version
from cmip6_downscaling import config
from cmip6_downscaling.methods.common.containers import RunParameters
from cmip6_downscaling.methods.common.utils import blocking_to_zarr, is_cached
from cmip6_downscaling.methods.maca import core as maca_core
4 changes: 0 additions & 4 deletions dev-requirements.txt

This file was deleted.

5 changes: 2 additions & 3 deletions flows/ERA5/ERA5_transfer.py
@@ -1,7 +1,6 @@
# Imports -----------------------------------------------------------
import json
import os
from typing import Dict, List

import fsspec # type: ignore
import pandas as pd # type: ignore
@@ -106,7 +105,7 @@ def map_and_open_zarr_link(file_loc_str: str) -> xr.Dataset:
return ds


def create_formatted_links() -> List:
def create_formatted_links() -> list:
"""Create list of tuples representing all year/month/variable combinations
Returns
@@ -141,7 +140,7 @@ def create_formatted_links() -> List:
return file_pattern_list


def open_json_catalog() -> Dict:
def open_json_catalog() -> dict:
"""Loads local CMIP6 JSON intake catalog
Returns
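The same modernisation applied to container annotations: the typing.Dict and typing.List aliases are dropped in favour of the builtin dict and list, removing an import in the process. Bare builtins work as annotations on any Python 3; parameterised forms such as list[str] need Python 3.9+ (which matches the CI matrix) unless postponed annotations are enabled. A small hedged sketch with a hypothetical helper:

# Illustrative only: builtin containers used directly in annotations (PEP 585).
def format_links_demo(years: list[int]) -> list[str]:
    # Hypothetical stand-in for the flow's helpers; it just formats link-like strings.
    return [f'era5/{year}/data.zarr' for year in years]


print(format_links_demo([2019, 2020]))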
3 changes: 2 additions & 1 deletion flows/cmip6_raw_pyramids.py
@@ -10,7 +10,8 @@
from prefect.client import Client
from upath import UPath

from cmip6_downscaling import __version__ as version, config, runtimes
from cmip6_downscaling import __version__ as version
from cmip6_downscaling import config, runtimes
from cmip6_downscaling.data.cmip import postprocess
from cmip6_downscaling.methods.common.tasks import _pyramid_postprocess
from cmip6_downscaling.utils import write