pre-commit update
norlandrhagen committed Jan 31, 2024
1 parent 9ee551c commit ef89e72
Showing 2 changed files with 15 additions and 38 deletions.
41 changes: 9 additions & 32 deletions .pre-commit-config.yaml
@@ -4,52 +4,29 @@ ci:

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
       - id: check-docstring-first
       - id: check-json
       - id: check-yaml
+      - id: double-quote-string-fixer
       - id: debug-statements
       - id: mixed-line-ending

-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.14.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: "v0.1.9"
     hooks:
-      - id: pyupgrade
-        args:
-          - '--py38-plus'
-
-  - repo: https://github.com/psf/black
-    rev: 23.9.1
-    hooks:
-      - id: black
-      - id: black-jupyter
+      - id: ruff
+        args: ["--fix"]

   - repo: https://github.com/keewis/blackdoc
-    rev: v0.3.8
+    rev: v0.3.9
     hooks:
       - id: blackdoc

-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.1.0
-    hooks:
-      - id: flake8
-
-  - repo: https://github.com/asottile/seed-isort-config
-    rev: v2.2.0
-    hooks:
-      - id: seed-isort-config
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v3.0.3
+    rev: v4.0.0-alpha.8
     hooks:
-      - id: prettier
-        additional_dependencies:
-          - prettier
-          - '@carbonplan/prettier'
+      - id: prettier
12 changes: 6 additions & 6 deletions cmip6_downscaling/methods/common/utils.py
@@ -40,14 +40,14 @@ def validate_zarr_store(target: str, raise_on_error=True) -> bool:

     try:
         store = zarr.open_consolidated(target)
-    except:
+    except Exception:
         errors.append('error opening zarr store')

     if not errors:
         groups = list(store.groups())
         # if groups is empty (not a datatree)
         if not groups:
-            groups = [("root", store["/"])]
+            groups = [('root', store['/'])]

         for key, group in groups:
             data_group = group
@@ -92,7 +92,7 @@ def blocking_to_zarr(
     if write_empty_chunks:
         if packaging.version.Version(
             packaging.version.Version(xr.__version__).base_version
-        ) < packaging.version.Version("2022.03"):
+        ) < packaging.version.Version('2022.03'):
             raise NotImplementedError(
                 f'`write_empty_chunks` not supported in xarray < 2022.06. Your xarray version is: {xr.__version__}'
             )
@@ -188,7 +188,7 @@ def apply_land_mask(ds: xr.Dataset) -> xr.Dataset:

 def calc_auspicious_chunks_dict(
     da: xr.DataArray,
-    chunk_dims: tuple = ("lat", "lon"),
+    chunk_dims: tuple = ('lat', 'lon'),
 ) -> dict:
     """Figure out a chunk size that, given the size of the dataset, the dimension(s) you want to chunk on
     and the data type, will fit under the target_size. Currently only works for 100mb which
@@ -210,7 +210,7 @@
     """
     if not isinstance(chunk_dims, tuple):
         raise TypeError(
-            "Your chunk_dims likely includes one string but needs a comma after it! to be a tuple!"
+            'Your chunk_dims likely includes one string but needs a comma after it! to be a tuple!'
         )
     # setting target_size_bytes to the 100mb chunksize recommended by dask. could modify in future.
     target_size_bytes = 100e6
@@ -228,7 +228,7 @@
             # so we'll always just give it the full length of the dimension
             chunks_dict[dim] = dim_sizes[dim]
     # calculate the bytesize given the dtype bitsize and divide by 8
-    data_bytesize = int(re.findall(r"\d+", str(da.dtype))[0]) / 8
+    data_bytesize = int(re.findall(r'\d+', str(da.dtype))[0]) / 8
     # calculate the size of the smallest minimum chunk based upon dtype and the
     # length of the unchunked dim(s). chunks_dict currently only has unchunked dims right now
     smallest_size_one_chunk = data_bytesize * np.prod([dim_sizes[dim] for dim in chunks_dict])
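The first hunk in validate_zarr_store narrows a bare except: to except Exception:. A small standalone sketch of the difference, with a hypothetical open_fn standing in for zarr.open_consolidated: the bare form also traps BaseException subclasses such as KeyboardInterrupt and SystemExit, so an interrupt during a slow store open would be swallowed and logged as an ordinary error.

def open_with_bare_except(open_fn, errors):
    # flagged by linters (pycodestyle/ruff rule E722): also catches KeyboardInterrupt and SystemExit
    try:
        return open_fn()
    except:  # noqa: E722
        errors.append('error opening zarr store')
        return None


def open_with_narrow_except(open_fn, errors):
    # catches ordinary failures only; interrupts and exits still propagate
    try:
        return open_fn()
    except Exception:
        errors.append('error opening zarr store')
        return None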

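For context on the chunk-sizing code touched in the last hunk, here is a minimal standalone sketch (not repository code) of the arithmetic the calc_auspicious_chunks_dict docstring describes: derive bytes per element from the dtype string, keep unchunked dimensions at full length, and fit the chunked dimensions under the ~100 MB target dask recommends. The dimension sizes and the even-split heuristic are illustrative assumptions, not the function's exact algorithm.

import re

import numpy as np

# illustrative inputs -- these sizes are assumptions, not values from the repository
target_size_bytes = 100e6  # ~100 MB chunk target
dim_sizes = {'time': 10950, 'lat': 720, 'lon': 1440}
chunk_dims = ('lat', 'lon')  # dims we are willing to split
dtype = np.dtype('float32')

# bytes per element, derived from the dtype's bit width as in the function above
data_bytesize = int(re.findall(r'\d+', str(dtype))[0]) / 8

# unchunked dims keep their full length, so they set the floor for a single chunk
chunks_dict = {dim: size for dim, size in dim_sizes.items() if dim not in chunk_dims}
smallest_size_one_chunk = data_bytesize * np.prod(list(chunks_dict.values()))

# split the remaining budget evenly across the chunked dims (simplified heuristic)
per_dim = int((target_size_bytes / smallest_size_one_chunk) ** (1 / len(chunk_dims)))
chunks_dict.update({dim: min(per_dim, dim_sizes[dim]) for dim in chunk_dims})

print(chunks_dict)  # e.g. {'time': 10950, 'lat': 47, 'lon': 47} -> roughly 97 MB per chunk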