Migrate to new Ruff "2025 style guide" formatter (#2749)
* run new ruff style formatter

* upgrade the ruff version
hanouticelina authored Jan 14, 2025
1 parent 2702ec2 commit 70f3a7a
Showing 11 changed files with 19 additions and 20 deletions.
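
The diffs below are mechanical reformatting only: behavior is unchanged, the formatter simply applies the new 2025 style rules. As a rough illustration of the rewrites involved, here is a small self-contained sketch (the names err, t0, t1, old_msg and new_msg are hypothetical, not taken from the repository):

# Illustrative sketch of the Ruff 2025-style rewrites applied in this commit.
err = {"oid": "abc123"}
t0, t1 = 0.0, 1.5

# Quote normalization inside f-strings: double quotes outside,
# single quotes inside the replacement field.
old_msg = f'Encountered error for file with OID {err.get("oid")}'
new_msg = f"Encountered error for file with OID {err.get('oid')}"
assert old_msg == new_msg  # formatting only, same resulting string

# Expressions inside replacement fields are now formatted too,
# e.g. spaces are added around operators: {t1-t0} becomes {t1 - t0}.
print(f"Done in {round(t1 - t0, 1)}s")

# Implicitly concatenated string literals that fit on one line are merged:
msg = "Please install `torch` to load torch tensors. You can install it with `pip install torch`."

The common theme is that the 2025 style now formats the code inside f-string replacement fields (quotes and spacing) and merges implicit string concatenations that fit within the line limit.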
2 changes: 1 addition & 1 deletion setup.py
@@ -92,7 +92,7 @@ def get_version() -> str:
]

extras["quality"] = [
"ruff>=0.5.0",
"ruff>=0.9.0",
"mypy==1.5.1",
"libcst==1.4.0",
]
2 changes: 1 addition & 1 deletion src/huggingface_hub/_commit_api.py
@@ -400,7 +400,7 @@ def _upload_lfs_files(
if batch_errors_chunk:
message = "\n".join(
[
- f'Encountered error for file with OID {err.get("oid")}: `{err.get("error", {}).get("message")}'
+ f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
for err in batch_errors_chunk
]
)
2 changes: 1 addition & 1 deletion src/huggingface_hub/commands/scan_cache.py
@@ -67,7 +67,7 @@ def run(self):
self._print_hf_cache_info_as_table(hf_cache_info)

print(
f"\nDone in {round(t1-t0,1)}s. Scanned {len(hf_cache_info.repos)} repo(s)"
f"\nDone in {round(t1 - t0, 1)}s. Scanned {len(hf_cache_info.repos)} repo(s)"
f" for a total of {ANSI.red(hf_cache_info.size_on_disk_str)}."
)
if len(hf_cache_info.warnings) > 0:
3 changes: 1 addition & 2 deletions src/huggingface_hub/file_download.py
@@ -1581,8 +1581,7 @@ def _chmod_and_move(src: Path, dst: Path) -> None:
os.chmod(str(src), stat.S_IMODE(cache_dir_mode))
except OSError as e:
logger.warning(
f"Could not set the permissions on the file '{src}'. "
f"Error: {e}.\nContinuing without setting permissions."
f"Could not set the permissions on the file '{src}'. Error: {e}.\nContinuing without setting permissions."
)
finally:
try:
2 changes: 1 addition & 1 deletion src/huggingface_hub/hf_api.py
@@ -5612,7 +5612,7 @@ def parse_safetensors_file_metadata(
if metadata_size <= 100000:
metadata_as_bytes = response.content[8 : 8 + metadata_size]
else: # 3.b. Request full metadata
- response = get_session().get(url, headers={**_headers, "range": f"bytes=8-{metadata_size+7}"})
+ response = get_session().get(url, headers={**_headers, "range": f"bytes=8-{metadata_size + 7}"})
hf_raise_for_status(response)
metadata_as_bytes = response.content

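For context on the arithmetic in this hunk: a safetensors file starts with an 8-byte little-endian unsigned integer giving the length of the JSON metadata header, which immediately follows it, and HTTP range requests are inclusive at both ends, so the full metadata spans bytes 8 through metadata_size + 7. A minimal local-file sketch of the same parsing (the path model.safetensors is hypothetical):

import json
import struct

with open("model.safetensors", "rb") as f:
    # First 8 bytes: little-endian unsigned 64-bit length of the JSON header.
    (metadata_size,) = struct.unpack("<Q", f.read(8))
    # The JSON metadata occupies bytes 8 .. 8 + metadata_size - 1,
    # which is why the range header above asks for bytes=8-{metadata_size + 7}.
    metadata = json.loads(f.read(metadata_size))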
2 changes: 1 addition & 1 deletion src/huggingface_hub/serialization/_base.py
@@ -164,7 +164,7 @@ def split_state_dict_into_shards_factory(
tensor_name_to_filename = {}
filename_to_tensors = {}
for idx, shard in enumerate(shard_list):
- filename = filename_pattern.format(suffix=f"-{idx+1:05d}-of-{nb_shards:05d}")
+ filename = filename_pattern.format(suffix=f"-{idx + 1:05d}-of-{nb_shards:05d}")
for key in shard:
tensor_name_to_filename[key] = filename
filename_to_tensors[filename] = list(shard.keys())
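The reformatted line only adds spaces inside the replacement field; the generated shard names are identical. A quick sketch of what the suffix expands to, assuming a pattern such as "model{suffix}.safetensors" (the actual default pattern is defined elsewhere in the serialization helpers, not in this diff):

filename_pattern = "model{suffix}.safetensors"  # assumed pattern, for illustration only
nb_shards = 3
for idx in range(nb_shards):
    suffix = f"-{idx + 1:05d}-of-{nb_shards:05d}"
    print(filename_pattern.format(suffix=suffix))
# model-00001-of-00003.safetensors
# model-00002-of-00003.safetensors
# model-00003-of-00003.safetensors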
2 changes: 1 addition & 1 deletion src/huggingface_hub/serialization/_torch.py
@@ -649,7 +649,7 @@ def load_state_dict_from_file(
from torch import load
except ImportError as e:
raise ImportError(
"Please install `torch` to load torch tensors. " "You can install it with `pip install torch`."
"Please install `torch` to load torch tensors. You can install it with `pip install torch`."
) from e
# Add additional kwargs, mmap is only supported in torch >= 2.1.0
additional_kwargs = {}
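The change here only merges two implicitly concatenated literals of the error message into one. As a side note on the trailing comment about mmap: a version gate of that kind typically looks like the sketch below; this is a generic approximation, not the library's exact implementation:

import importlib.metadata

from packaging import version

additional_kwargs = {}
# mmap-based loading only exists from torch 2.1.0 onward, so only pass
# the kwarg when the installed version supports it (sketch, not library code).
if version.parse(importlib.metadata.version("torch")) >= version.parse("2.1.0"):
    additional_kwargs["mmap"] = True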
2 changes: 1 addition & 1 deletion src/huggingface_hub/utils/logging.py
@@ -59,7 +59,7 @@ def _get_default_logging_level():
return log_levels[env_level_str]
else:
logging.getLogger().warning(
f"Unknown option HF_HUB_VERBOSITY={env_level_str}, has to be one of: { ', '.join(log_levels.keys()) }"
f"Unknown option HF_HUB_VERBOSITY={env_level_str}, has to be one of: {', '.join(log_levels.keys())}"
)
return _default_log_level

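The rewrite above only drops the stray spaces inside the f-string braces. For reference, the accepted values are the keys of the module's log_levels mapping; a small usage sketch, assuming the standard level names (debug, info, warning, error, critical):

import os

# Assumed accepted value; set before importing huggingface_hub so the
# default verbosity is derived from the environment variable.
os.environ["HF_HUB_VERBOSITY"] = "debug"

from huggingface_hub.utils import logging as hf_logging

logger = hf_logging.get_logger(__name__)
logger.debug("verbose logging enabled via HF_HUB_VERBOSITY")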
12 changes: 6 additions & 6 deletions tests/test_cache_layout.py
@@ -42,7 +42,7 @@ def test_file_downloaded_in_cache(self):
revision=revision,
)

- expected_directory_name = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
+ expected_directory_name = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
@@ -87,7 +87,7 @@ def test_no_exist_file_is_cached(self):
# The file does not exist, so we get an exception.
hf_hub_download(MODEL_IDENTIFIER, filename, cache_dir=cache, revision=revision)

- expected_directory_name = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
+ expected_directory_name = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
@@ -151,7 +151,7 @@ def test_multiple_refs_for_same_file(self):
hf_hub_download(MODEL_IDENTIFIER, "file_0.txt", cache_dir=cache)
hf_hub_download(MODEL_IDENTIFIER, "file_0.txt", cache_dir=cache, revision="file-2")

- expected_directory_name = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
+ expected_directory_name = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
@@ -184,7 +184,7 @@ def test_file_downloaded_in_cache(self):
with SoftTemporaryDirectory() as cache:
snapshot_download(MODEL_IDENTIFIER, cache_dir=cache)

- expected_directory_name = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
+ expected_directory_name = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
@@ -216,7 +216,7 @@ def test_file_downloaded_in_cache_several_revisions(self):
snapshot_download(MODEL_IDENTIFIER, cache_dir=cache, revision="file-3")
snapshot_download(MODEL_IDENTIFIER, cache_dir=cache, revision="file-2")

- expected_directory_name = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
+ expected_directory_name = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
@@ -289,7 +289,7 @@ def test_update_reference(self):
with SoftTemporaryDirectory() as cache:
hf_hub_download(repo_id, "file.txt", cache_dir=cache)

- expected_directory_name = f'models--{repo_id.replace("/", "--")}'
+ expected_directory_name = f"models--{repo_id.replace('/', '--')}"
expected_path = os.path.join(cache, expected_directory_name)

refs = os.listdir(os.path.join(expected_path, "refs"))
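All six rewrites in this file are the same f-string quote normalization; the computed cache directory name is unchanged. A quick check with a hypothetical repo id:

MODEL_IDENTIFIER = "user/some-model"  # hypothetical repo id
old_style = f'models--{MODEL_IDENTIFIER.replace("/", "--")}'
new_style = f"models--{MODEL_IDENTIFIER.replace('/', '--')}"
assert old_style == new_style == "models--user--some-model"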
2 changes: 1 addition & 1 deletion tests/test_utils_cache.py
@@ -492,7 +492,7 @@ def test_ref_to_missing_revision(self) -> None:
self.assertEqual(
str(report.warnings[0]),
"Reference(s) refer to missing commit hashes: {'revision_hash_that_does_not_exist': {'not_main'}} "
+ f"({self.repo_path }).",
+ f"({self.repo_path}).",
)

@xfail_on_windows("Last modified/last accessed work a bit differently on Windows.")
8 changes: 4 additions & 4 deletions tests/testing_utils.py
@@ -86,7 +86,7 @@ def parse_flag_from_env(key: str, default: bool = False) -> bool:
return False
else:
# More values are supported, but let's keep the message simple.
- raise ValueError(f"If set, '{key}' must be one of {YES+NO}. Got '{value}'.")
+ raise ValueError(f"If set, '{key}' must be one of {YES + NO}. Got '{value}'.")


def parse_int_from_env(key, default=None):
@@ -396,9 +396,9 @@ def _inner(*args, **kwargs):
if name == "self":
continue
assert parameter.annotation is Mock
- assert (
- name in mocks
- ), f"Mock `{name}` not found for test `{fn.__name__}`. Available: {', '.join(sorted(mocks.keys()))}"
+ assert name in mocks, (
+ f"Mock `{name}` not found for test `{fn.__name__}`. Available: {', '.join(sorted(mocks.keys()))}"
+ )
new_kwargs[name] = mocks[name]

# Run test only with a subset of mocks

0 comments on commit 70f3a7a
