Bump ruff to 0.2.0
tqa236 committed Feb 1, 2024
1 parent 29a3682 commit 0e6b01b
Showing 8 changed files with 16 additions and 14 deletions.
6 changes: 2 additions & 4 deletions .pre-commit-config.yaml
@@ -19,7 +19,7 @@ ci:
   skip: [pylint, pyright, mypy]
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.1.6
+  rev: v0.2.0
   hooks:
   - id: ruff
     args: [--exit-non-zero-on-fix]
@@ -31,10 +31,8 @@ repos:
     exclude: ^pandas/tests
     args: [--select, "ANN001,ANN2", --fix-only, --exit-non-zero-on-fix]
   - id: ruff-format
-    # TODO: "." not needed in ruff 0.1.8
-    args: ["."]
 - repo: https://github.com/jendrikseipp/vulture
-  rev: 'v2.10'
+  rev: v2.11
   hooks:
   - id: vulture
     entry: python scripts/run_vulture.py
2 changes: 1 addition & 1 deletion pandas/core/apply.py
@@ -1798,7 +1798,7 @@ def normalize_keyword_aggregation(


 def _make_unique_kwarg_list(
-    seq: Sequence[tuple[Any, Any]]
+    seq: Sequence[tuple[Any, Any]],
 ) -> Sequence[tuple[Any, Any]]:
     """
     Uniquify aggfunc name of the pairs in the order list
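This hunk, and several below (c_parser_wrapper.py, core.py, validate_min_versions_in_sync.py), only add a trailing comma to a single-parameter signature that is already split across lines. A plausible reading, not stated in the commit, is that this matches ruff-format's magic trailing comma handling, sketched here with the name borrowed from the hunk above:

# Illustrative sketch only; the comment describes the formatter's documented
# magic-trailing-comma behaviour, which is assumed to be why the comma was added.
from collections.abc import Sequence
from typing import Any


def _make_unique_kwarg_list(
    seq: Sequence[tuple[Any, Any]],  # trailing comma keeps the signature exploded
) -> Sequence[tuple[Any, Any]]:
    """Without the comma, a formatter may collapse the parameters onto one line."""
    return seq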
2 changes: 1 addition & 1 deletion pandas/io/parsers/c_parser_wrapper.py
@@ -390,7 +390,7 @@ def _concatenate_chunks(chunks: list[dict[int, ArrayLike]]) -> dict:


 def ensure_dtype_objs(
-    dtype: DtypeArg | dict[Hashable, DtypeArg] | None
+    dtype: DtypeArg | dict[Hashable, DtypeArg] | None,
 ) -> DtypeObj | dict[Hashable, DtypeObj] | None:
     """
     Ensure we have either None, a dtype object, or a dictionary mapping to
2 changes: 1 addition & 1 deletion pandas/plotting/_matplotlib/core.py
@@ -465,7 +465,7 @@ def _validate_color_args(self, color, colormap):
     @final
     @staticmethod
     def _iter_data(
-        data: DataFrame | dict[Hashable, Series | DataFrame]
+        data: DataFrame | dict[Hashable, Series | DataFrame],
     ) -> Iterator[tuple[Hashable, np.ndarray]]:
         for col, values in data.items():
             # This was originally written to use values.values before EAs
6 changes: 4 additions & 2 deletions pandas/plotting/_matplotlib/tools.py
@@ -98,13 +98,15 @@ def _get_layout(
         nrows, ncols = layout

         if nrows == -1 and ncols > 0:
-            layout = nrows, ncols = (ceil(nplots / ncols), ncols)
+            layout = (ceil(nplots / ncols), ncols)
         elif ncols == -1 and nrows > 0:
-            layout = nrows, ncols = (nrows, ceil(nplots / nrows))
+            layout = (nrows, ceil(nplots / nrows))
         elif ncols <= 0 and nrows <= 0:
             msg = "At least one dimension of layout must be positive"
             raise ValueError(msg)

+        nrows, ncols = layout
+
         if nrows * ncols < nplots:
             raise ValueError(
                 f"Layout of {nrows}x{ncols} must be larger than required size {nplots}"
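The _get_layout hunk above replaces the chained assignment `layout = nrows, ncols = (...)` in each branch with a plain assignment to `layout`, followed by a single `nrows, ncols = layout` unpacking after the if/elif chain. A self-contained sketch (hypothetical resolve_layout helper, not the pandas function) showing the two forms compute the same result:

# Standalone sketch: assign only to ``layout`` in each branch, then unpack once.
from math import ceil


def resolve_layout(nplots: int, layout: tuple[int, int]) -> tuple[int, int]:
    nrows, ncols = layout

    if nrows == -1 and ncols > 0:
        layout = (ceil(nplots / ncols), ncols)
    elif ncols == -1 and nrows > 0:
        layout = (nrows, ceil(nplots / nrows))
    elif ncols <= 0 and nrows <= 0:
        raise ValueError("At least one dimension of layout must be positive")

    # Single unpacking step replaces the chained assignment in each branch.
    nrows, ncols = layout
    return nrows, ncols


assert resolve_layout(7, (-1, 3)) == (3, 3)
assert resolve_layout(7, (2, -1)) == (2, 4)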
2 changes: 1 addition & 1 deletion pandas/tests/indexes/multi/test_join.py
@@ -260,7 +260,7 @@ def test_join_dtypes_all_nan(any_numeric_ea_dtype):


 def test_join_index_levels():
     # GH#53093
-    midx = midx = MultiIndex.from_tuples([("a", "2019-02-01"), ("a", "2019-02-01")])
+    midx = MultiIndex.from_tuples([("a", "2019-02-01"), ("a", "2019-02-01")])
     midx2 = MultiIndex.from_tuples([("a", "2019-01-31")])
     result = midx.join(midx2, how="outer")
     expected = MultiIndex.from_tuples(
8 changes: 5 additions & 3 deletions pyproject.toml
@@ -191,6 +191,8 @@ environment = {CFLAGS="-g0"}
 line-length = 88
 target-version = "py310"
 fix = true
+
+[tool.ruff.lint]
 unfixable = []
 typing-modules = ["pandas._typing"]

@@ -294,7 +296,7 @@ ignore = [
   # Use `typing.NamedTuple` instead of `collections.namedtuple`
   "PYI024",
   # No builtin `eval()` allowed
-  "PGH001",
+  # "S307", # flake8-bandit is not enabled yet
   # compare-to-empty-string
   "PLC1901",
   # while int | float can be shortened to float, the former is more explicit
@@ -337,7 +339,7 @@ ignore = [
   # pairwise-over-zipped (>=PY310 only)
   "RUF007",
   # mutable-class-default
-  "RUF012"
+  "RUF012",
 ]

 exclude = [
@@ -352,7 +354,7 @@ exclude = [
   "env",
 ]

-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 # relative imports allowed for asv_bench
 "asv_bench/*" = ["TID", "NPY002"]
 # to be enabled gradually
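Ruff 0.2.0 expects linter settings under the [tool.ruff.lint] table rather than top-level [tool.ruff], which is why the hunks above introduce [tool.ruff.lint] and rename [tool.ruff.per-file-ignores] to [tool.ruff.lint.per-file-ignores]. A quick, illustrative way to confirm where the keys end up after the move (standalone snippet, not part of the commit):

# Standalone check; assumes Python 3.11+ for tomllib (use the tomli backport otherwise).
try:
    import tomllib
except ModuleNotFoundError:
    import tomli as tomllib  # type: ignore[no-redef]

with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

lint = config["tool"]["ruff"]["lint"]
print("per-file-ignores" in lint)   # True once the table has been renamed
print("RUF012" in lint["ignore"])   # the ignore list now lives under [tool.ruff.lint]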
2 changes: 1 addition & 1 deletion scripts/validate_min_versions_in_sync.py
@@ -105,7 +105,7 @@ def get_operator_from(dependency: str) -> str | None:


 def get_yaml_map_from(
-    yaml_dic: list[str | dict[str, list[str]]]
+    yaml_dic: list[str | dict[str, list[str]]],
 ) -> dict[str, list[str] | None]:
     yaml_map: dict[str, list[str] | None] = {}
     for dependency in yaml_dic:
