From a76417e9a241128b1e6c0395129bd687a1c8ef22 Mon Sep 17 00:00:00 2001
From: "John T. Wodder II"
Date: Thu, 6 May 2021 11:30:32 -0400
Subject: [PATCH 1/4] Add "sync" upload option

---
 dandi/cli/cmd_upload.py    |  5 +++++
 dandi/tests/fixtures.py    |  4 ++--
 dandi/tests/test_upload.py | 35 +++++++++++++++++++++++++++++++++++
 dandi/upload.py            | 25 +++++++++++++++++++++++++
 4 files changed, 67 insertions(+), 2 deletions(-)

diff --git a/dandi/cli/cmd_upload.py b/dandi/cli/cmd_upload.py
index 1632922c1..27e468b73 100644
--- a/dandi/cli/cmd_upload.py
+++ b/dandi/cli/cmd_upload.py
@@ -50,6 +50,9 @@ def get_metavar(self, param):
     type=IntColonInt(),
     help="Number of files to upload in parallel and, optionally, number of upload threads per file",
 )
+@click.option(
+    "--sync", is_flag=True, help="Delete assets on the server that do not exist locally"
+)
 @click.option(
     "--validation",
     help="Data must pass validation before the upload. Use of this option is highly discouraged.",
@@ -80,6 +83,7 @@ def get_metavar(self, param):
 def upload(
     paths,
     jobs,
+    sync,
     existing="refresh",
     validation="require",
     dandiset_path=None,
@@ -121,4 +125,5 @@ def upload(
         devel_debug=devel_debug,
         jobs=jobs,
         jobs_per_file=jobs_per_file,
+        sync=sync,
     )
diff --git a/dandi/tests/fixtures.py b/dandi/tests/fixtures.py
index 7eab60369..6069db822 100644
--- a/dandi/tests/fixtures.py
+++ b/dandi/tests/fixtures.py
@@ -284,11 +284,11 @@ def text_dandiset(local_dandi_api, monkeypatch, tmp_path_factory):
     (dspath / "subdir2" / "banana.txt").write_text("Banana\n")
     (dspath / "subdir2" / "coconut.txt").write_text("Coconut\n")
 
-    def upload_dandiset(**kwargs):
+    def upload_dandiset(paths=None, **kwargs):
         with monkeypatch.context() as m:
             m.setenv("DANDI_API_KEY", local_dandi_api["api_key"])
             upload(
-                paths=[],
+                paths=paths or [],
                 dandiset_path=dspath,
                 dandi_instance=local_dandi_api["instance_id"],
                 devel_debug=True,
diff --git a/dandi/tests/test_upload.py b/dandi/tests/test_upload.py
index 1de9fe3b5..d53d9ab84 100644
--- a/dandi/tests/test_upload.py
+++ b/dandi/tests/test_upload.py
@@ -151,3 +151,38 @@ def test_upload_download_small_file(contents, local_dandi_api, monkeypatch, tmp_
         download_dir / dandiset_id / "file.txt",
     ]
     assert files[1].read_bytes() == contents
+
+
+@pytest.mark.parametrize("confirm", [True, False])
+def test_upload_sync(confirm, mocker, text_dandiset):
+    (text_dandiset["dspath"] / "file.txt").unlink()
+    confirm_mock = mocker.patch("click.confirm", return_value=confirm)
+    text_dandiset["reupload"](sync=True)
+    confirm_mock.assert_called_with("Delete 1 assets on server?")
+    asset = text_dandiset["client"].get_asset_bypath(
+        text_dandiset["dandiset_id"], "draft", "file.txt"
+    )
+    if confirm:
+        assert asset is None
+    else:
+        assert asset is not None
+
+
+def test_upload_sync_folder(mocker, text_dandiset):
+    (text_dandiset["dspath"] / "file.txt").unlink()
+    (text_dandiset["dspath"] / "subdir2" / "banana.txt").unlink()
+    confirm_mock = mocker.patch("click.confirm", return_value=True)
+    text_dandiset["reupload"](paths=[text_dandiset["dspath"] / "subdir2"], sync=True)
+    confirm_mock.assert_called_with("Delete 1 assets on server?")
+    assert (
+        text_dandiset["client"].get_asset_bypath(
+            text_dandiset["dandiset_id"], "draft", "file.txt"
+        )
+        is not None
+    )
+    assert (
+        text_dandiset["client"].get_asset_bypath(
+            text_dandiset["dandiset_id"], "draft", "subdir2/banana.txt"
+        )
+        is None
+    )
diff --git a/dandi/upload.py b/dandi/upload.py
index 5adba8da2..8695f5a46 100644
--- a/dandi/upload.py
+++ b/dandi/upload.py
@@ -1,7 +1,11 @@
+from functools import reduce
+import os.path
 from pathlib import Path, PurePosixPath
 import re
 import time
 
+import click
+
 from .consts import dandiset_identifier_regex, dandiset_metadata_file
 from . import lgr
 from .utils import ensure_datetime, get_instance
@@ -18,6 +22,7 @@ def upload(
     devel_debug=False,
     jobs=None,
     jobs_per_file=None,
+    sync=False,
 ):
     from .dandiapi import DandiAPIClient
     from .dandiset import APIDandiset, Dandiset
@@ -61,6 +66,7 @@ def upload(
     #
     if not paths:
         paths = [dandiset.path]
+    original_paths = paths
 
     # Expand and validate all paths -- they should reside within dandiset
     paths = find_files(".*", paths) if allow_any_path else find_dandi_files(paths)
@@ -335,3 +341,22 @@ def upload_agg(*ignored):
             else:
                 rec.update(skip_file(exc))
             out(rec)
+
+    if sync:
+        relpaths = []
+        for p in original_paths:
+            rp = os.path.relpath(p, dandiset.path)
+            relpaths.append("" if rp == "." else rp)
+        path_prefix = reduce(os.path.commonprefix, relpaths)
+        to_delete = []
+        for asset in client.get_dandiset_assets(
+            ds_identifier, "draft", path=path_prefix
+        ):
+            if (
+                any(p == "" or path_is_subpath(asset["path"], p) for p in relpaths)
+                and not Path(dandiset.path, asset["path"]).exists()
+            ):
+                to_delete.append(asset["asset_id"])
+        if to_delete and click.confirm(f"Delete {len(to_delete)} assets on server?"):
+            for asset_id in to_delete:
+                client.delete_asset(ds_identifier, "draft", asset_id)
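
For reference, a minimal sketch of how the option added by this patch is meant to
be used from Python; the dandiset path and instance name below are placeholders,
not values taken from the patch:

    from dandi.upload import upload

    # Upload the local dandiset, then offer to delete any draft assets on the
    # server that no longer have a matching local file (asks for confirmation
    # before deleting anything).
    upload(
        paths=[],                      # empty list: upload everything under dandiset_path
        dandiset_path="/data/000123",  # placeholder local dandiset path
        dandi_instance="dandi",        # placeholder instance name
        sync=True,
    )

    # Roughly equivalent CLI call: dandi upload --sync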

From fcea4034699d73836e18acb77976f795273a7b16 Mon Sep 17 00:00:00 2001
From: "John T. Wodder II"
Date: Fri, 7 May 2021 11:31:33 -0400
Subject: [PATCH 2/4] Add "sync" download option

---
 dandi/cli/cmd_download.py        |  6 ++-
 dandi/cli/tests/test_download.py |  7 +++
 dandi/download.py                | 87 ++++++++++++++++++++------------
 dandi/tests/test_download.py     | 40 +++++++++++++++
 4 files changed, 108 insertions(+), 32 deletions(-)

diff --git a/dandi/cli/cmd_download.py b/dandi/cli/cmd_download.py
index 67e8882c5..165994b52 100644
--- a/dandi/cli/cmd_download.py
+++ b/dandi/cli/cmd_download.py
@@ -73,6 +73,9 @@ def get_metavar(self, param):
     default="all",
     show_default=True,
 )
+@click.option(
+    "--sync", is_flag=True, help="Delete local assets that do not exist on the server"
+)
 @instance_option()
 # Might be a cool feature, not unlike verifying a checksum, we verify that
 # downloaded file passes the validator, and if not -- alert
@@ -91,7 +94,7 @@ def get_metavar(self, param):
 @click.argument("url", nargs=-1)
 @map_to_click_exceptions
 def download(
-    url, output_dir, existing, jobs, format, download_types, dandi_instance=None
+    url, output_dir, existing, jobs, format, download_types, sync, dandi_instance=None
 ):
     """Download a file or entire folder from DANDI"""
     # We need to import the download module rather than the download function
@@ -133,5 +136,6 @@ def download(
         jobs=jobs,
         get_metadata="dandiset.yaml" in download_types,
         get_assets="assets" in download_types,
+        sync=sync,
         # develop_debug=develop_debug
     )
diff --git a/dandi/cli/tests/test_download.py b/dandi/cli/tests/test_download.py
index 31a59cb8c..21ded7aa0 100644
--- a/dandi/cli/tests/test_download.py
+++ b/dandi/cli/tests/test_download.py
@@ -21,6 +21,7 @@ def test_download_defaults(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=True,
+        sync=False,
     )
 
 
@@ -36,6 +37,7 @@ def test_download_all_types(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=True,
+        sync=False,
     )
 
 
@@ -51,6 +53,7 @@ def test_download_metadata_only(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=False,
+        sync=False,
     )
 
 
@@ -66,6 +69,7 @@ def test_download_assets_only(mocker):
         jobs=6,
         get_metadata=False,
         get_assets=True,
+        sync=False,
     )
 
 
@@ -96,6 +100,7 @@ def test_download_gui_instance_in_dandiset(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=True,
+        sync=False,
     )
 
 
@@ -117,6 +122,7 @@ def test_download_api_instance_in_dandiset(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=True,
+        sync=False,
     )
 
 
@@ -142,6 +148,7 @@ def test_download_url_instance_match(mocker):
         jobs=6,
         get_metadata=True,
         get_assets=True,
+        sync=False,
     )
 
 
diff --git a/dandi/download.py b/dandi/download.py
index 764b506ed..16d612953 100644
--- a/dandi/download.py
+++ b/dandi/download.py
@@ -8,6 +8,7 @@
 import sys
 import time
 
+import click
 import humanize
 import requests
 
@@ -16,7 +17,7 @@
 from .dandiset import Dandiset
 from . import get_logger
 from .support.pyout import naturalsize
-from .utils import ensure_datetime, flattened, is_same_time
+from .utils import ensure_datetime, find_files, flattened, is_same_time, on_windows
 
 lgr = get_logger()
 
@@ -30,12 +31,31 @@ def download(
     jobs=1,
     get_metadata=True,
     get_assets=True,
+    sync=False,
 ):
     # TODO: unduplicate with upload. For now stole from that one
     # We will again use pyout to provide a neat table summarizing our progress
     # with upload etc
     from .support import pyout as pyouts
 
+    urls = flattened([urls])
+    if len(urls) > 1:
+        raise NotImplementedError("multiple URLs not supported")
+    if not urls:
+        # if no paths provided etc, we will download dandiset path
+        # we are at, BUT since we are not git -- we do not even know
+        # on which instance it exists! Thus ATM we would do nothing but crash
+        raise NotImplementedError("No URLs were provided. Cannot download anything")
+
+    parsed_url = parse_dandi_url(urls[0])
+
+    # TODO: if we are ALREADY in a dandiset - we can validate that it is the
+    # same dandiset and use that dandiset path as the one to download under
+    if isinstance(parsed_url, DandisetURL):
+        output_path = op.join(output_dir, parsed_url.dandiset_id)
+    else:
+        output_path = output_dir
+
     # dandi.cli.formatters are used in cmd_ls to provide switchable
     pyout_style = pyouts.get_style(hide_if_missing=False)
 
@@ -56,8 +76,8 @@ def download(
     kw["yield_generator_for_fields"] = rec_fields[1:]  # all but path
 
     gen_ = download_generator(
-        urls,
-        output_dir,
+        parsed_url,
+        output_path,
         existing=existing,
         get_metadata=get_metadata,
         get_assets=get_assets,
@@ -81,10 +101,38 @@ def download(
     else:
         raise ValueError(format)
 
+    if sync and not isinstance(parsed_url, SingleAssetURL):
+        client = parsed_url.get_client()
+        with client.session():
+            asset_paths = {asset["path"] for asset in parsed_url.get_assets(client)}
+            if isinstance(parsed_url, DandisetURL):
+                prefix = os.curdir
+                download_dir = output_path
+            elif isinstance(parsed_url, MultiAssetURL):
+                folder_path = op.normpath(parsed_url.path)
+                prefix = folder_path
+                download_dir = op.join(output_path, op.basename(folder_path))
+            else:
+                raise NotImplementedError(
+                    f"Unexpected URL type {type(parsed_url).__name__}"
+                )
+            to_delete = []
+            for p in find_files(".*", download_dir, exclude_datalad=True):
+                if p == op.join(output_path, dandiset_metadata_file):
+                    continue
+                a_path = op.normpath(op.join(prefix, op.relpath(p, download_dir)))
+                if on_windows:
+                    a_path = a_path.replace("\\", "/")
+                if a_path not in asset_paths:
+                    to_delete.append(p)
+            if to_delete and click.confirm(f"Delete {len(to_delete)} local assets?"):
+                for p in to_delete:
+                    os.unlink(p)
+
 
 def download_generator(
-    urls,
-    output_dir,
+    parsed_url,
+    output_path,
     *,
     assets_it=None,
     yield_generator_for_fields=None,
@@ -107,38 +155,15 @@ def download_generator(
     summary statistics while already downloading. TODO: reimplement properly!
     """
-    urls = flattened([urls])
-    if len(urls) > 1:
-        raise NotImplementedError("multiple URLs not supported")
-    if not urls:
-        # if no paths provided etc, we will download dandiset path
-        # we are at, BUT since we are not git -- we do not even know
-        # on which instance it exists! Thus ATM we would do nothing but crash
-        raise NotImplementedError("No URLs were provided. Cannot download anything")
-
-    parsed_url = parse_dandi_url(urls[0])
     with parsed_url.navigate() as (client, dandiset, assets):
         if assets_it:
             assets_it.gen = assets
             assets = assets_it
-        # TODO: if we are ALREADY in a dandiset - we can validate that it is the
-        # same dandiset and use that dandiset path as the one to download under
-        if dandiset:
-            identifier = Dandiset._get_identifier(dandiset)
-            if not identifier:
-                raise ValueError(f"Cannot deduce dandiset identifier from {dandiset}")
-            if isinstance(parsed_url, DandisetURL):
-                output_path = op.join(output_dir, identifier)
-                if get_metadata:
-                    for resp in _populate_dandiset_yaml(
-                        output_path, dandiset, existing
-                    ):
-                        yield dict(path=dandiset_metadata_file, **resp)
-            else:
-                output_path = output_dir
-        else:
-            output_path = output_dir
+        if isinstance(parsed_url, DandisetURL) and get_metadata:
+            for resp in _populate_dandiset_yaml(output_path, dandiset, existing):
+                yield dict(path=dandiset_metadata_file, **resp)
 
         # TODO: do analysis of assets for early detection of needed renames etc
         # to avoid any need for late treatment of existing and also for
diff --git a/dandi/tests/test_download.py b/dandi/tests/test_download.py
index a31d8ca29..ff3ab558d 100644
--- a/dandi/tests/test_download.py
+++ b/dandi/tests/test_download.py
@@ -180,3 +180,43 @@ def test_download_asset_id(local_dandi_api, text_dandiset, tmp_path):
         tmp_path / "coconut.txt"
     ]
     assert (tmp_path / "coconut.txt").read_text() == "Coconut\n"
+
+
+@pytest.mark.parametrize("confirm", [True, False])
+def test_download_sync(confirm, local_dandi_api, mocker, text_dandiset, tmp_path):
+    text_dandiset["client"].delete_asset_bypath(
+        text_dandiset["dandiset_id"], "draft", "file.txt"
+    )
+    dspath = tmp_path / text_dandiset["dandiset_id"]
+    os.rename(text_dandiset["dspath"], dspath)
+    confirm_mock = mocker.patch("click.confirm", return_value=confirm)
+    download(
+        f"dandi://{local_dandi_api['instance_id']}/{text_dandiset['dandiset_id']}",
+        tmp_path,
+        existing="overwrite",
+        sync=True,
+    )
+    confirm_mock.assert_called_with("Delete 1 local assets?")
+    if confirm:
+        assert not (dspath / "file.txt").exists()
+    else:
+        assert (dspath / "file.txt").exists()
+
+
+def test_download_sync_folder(local_dandi_api, mocker, text_dandiset):
+    text_dandiset["client"].delete_asset_bypath(
+        text_dandiset["dandiset_id"], "draft", "file.txt"
+    )
+    text_dandiset["client"].delete_asset_bypath(
+        text_dandiset["dandiset_id"], "draft", "subdir2/banana.txt"
+    )
+    confirm_mock = mocker.patch("click.confirm", return_value=True)
+    download(
+        f"dandi://{local_dandi_api['instance_id']}/{text_dandiset['dandiset_id']}/subdir2/",
+        text_dandiset["dspath"],
+        existing="overwrite",
+        sync=True,
+    )
+    confirm_mock.assert_called_with("Delete 1 local assets?")
+    assert (text_dandiset["dspath"] / "file.txt").exists()
+    assert not (text_dandiset["dspath"] / "subdir2" / "banana.txt").exists()
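
Similarly, a rough sketch of the download-side behavior added above; the URL and
output directory are placeholders, not values taken from the patch:

    from dandi.download import download

    # Download a dandiset and then offer to delete files under the local copy
    # that have no corresponding asset on the server (asks for confirmation
    # before deleting anything).
    download(
        "dandi://dandi/000123",   # placeholder dandiset URL
        "/tmp/dandisets",         # placeholder output directory
        existing="overwrite",
        sync=True,
    )

    # Roughly equivalent CLI call: dandi download --sync dandi://dandi/000123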
Wodder II" Date: Fri, 7 May 2021 16:08:45 -0400 Subject: [PATCH 3/4] Proper pluralization --- dandi/download.py | 13 +++++++++++-- dandi/tests/test_download.py | 4 ++-- dandi/tests/test_upload.py | 4 ++-- dandi/upload.py | 6 ++++-- dandi/utils.py | 9 +++++++++ 5 files changed, 28 insertions(+), 8 deletions(-) diff --git a/dandi/download.py b/dandi/download.py index 16d612953..bcc680f68 100644 --- a/dandi/download.py +++ b/dandi/download.py @@ -17,7 +17,14 @@ from .dandiset import Dandiset from . import get_logger from .support.pyout import naturalsize -from .utils import ensure_datetime, find_files, flattened, is_same_time, on_windows +from .utils import ( + ensure_datetime, + find_files, + flattened, + is_same_time, + on_windows, + pluralize, +) lgr = get_logger() @@ -125,7 +132,9 @@ def download( a_path = a_path.replace("\\", "/") if a_path not in asset_paths: to_delete.append(p) - if to_delete and click.confirm(f"Delete {len(to_delete)} local assets?"): + if to_delete and click.confirm( + f"Delete {pluralize(len(to_delete), 'local asset')}?" + ): for p in to_delete: os.unlink(p) diff --git a/dandi/tests/test_download.py b/dandi/tests/test_download.py index ff3ab558d..fda8cb95c 100644 --- a/dandi/tests/test_download.py +++ b/dandi/tests/test_download.py @@ -196,7 +196,7 @@ def test_download_sync(confirm, local_dandi_api, mocker, text_dandiset, tmp_path existing="overwrite", sync=True, ) - confirm_mock.assert_called_with("Delete 1 local assets?") + confirm_mock.assert_called_with("Delete 1 local asset?") if confirm: assert not (dspath / "file.txt").exists() else: @@ -217,6 +217,6 @@ def test_download_sync_folder(local_dandi_api, mocker, text_dandiset): existing="overwrite", sync=True, ) - confirm_mock.assert_called_with("Delete 1 local assets?") + confirm_mock.assert_called_with("Delete 1 local asset?") assert (text_dandiset["dspath"] / "file.txt").exists() assert not (text_dandiset["dspath"] / "subdir2" / "banana.txt").exists() diff --git a/dandi/tests/test_upload.py b/dandi/tests/test_upload.py index d53d9ab84..47a55469e 100644 --- a/dandi/tests/test_upload.py +++ b/dandi/tests/test_upload.py @@ -158,7 +158,7 @@ def test_upload_sync(confirm, mocker, text_dandiset): (text_dandiset["dspath"] / "file.txt").unlink() confirm_mock = mocker.patch("click.confirm", return_value=confirm) text_dandiset["reupload"](sync=True) - confirm_mock.assert_called_with("Delete 1 assets on server?") + confirm_mock.assert_called_with("Delete 1 asset on server?") asset = text_dandiset["client"].get_asset_bypath( text_dandiset["dandiset_id"], "draft", "file.txt" ) @@ -173,7 +173,7 @@ def test_upload_sync_folder(mocker, text_dandiset): (text_dandiset["dspath"] / "subdir2" / "banana.txt").unlink() confirm_mock = mocker.patch("click.confirm", return_value=True) text_dandiset["reupload"](paths=[text_dandiset["dspath"] / "subdir2"], sync=True) - confirm_mock.assert_called_with("Delete 1 assets on server?") + confirm_mock.assert_called_with("Delete 1 asset on server?") assert ( text_dandiset["client"].get_asset_bypath( text_dandiset["dandiset_id"], "draft", "file.txt" diff --git a/dandi/upload.py b/dandi/upload.py index 8695f5a46..7ace87bf1 100644 --- a/dandi/upload.py +++ b/dandi/upload.py @@ -8,7 +8,7 @@ from .consts import dandiset_identifier_regex, dandiset_metadata_file from . 
 from . import lgr
-from .utils import ensure_datetime, get_instance
+from .utils import ensure_datetime, get_instance, pluralize
 
 
 def upload(
@@ -357,6 +357,8 @@ def upload_agg(*ignored):
                 and not Path(dandiset.path, asset["path"]).exists()
             ):
                 to_delete.append(asset["asset_id"])
-        if to_delete and click.confirm(f"Delete {len(to_delete)} assets on server?"):
+        if to_delete and click.confirm(
+            f"Delete {pluralize(len(to_delete), 'asset')} on server?"
+        ):
             for asset_id in to_delete:
                 client.delete_asset(ds_identifier, "draft", asset_id)
diff --git a/dandi/utils.py b/dandi/utils.py
index c457b32c9..31d74f6e2 100644
--- a/dandi/utils.py
+++ b/dandi/utils.py
@@ -708,3 +708,12 @@ def get_module_version(module: Union[str, types.ModuleType]) -> Optional[str]:
     except Exception as exc:
         lgr.debug("Failed to determine version of the %s: %s", mod_name, exc)
     return version
+
+
+def pluralize(n: int, word: str, plural: Optional[str] = None) -> str:
+    if n == 1:
+        return f"{n} {word}"
+    else:
+        if plural is None:
+            plural = word + "s"
+        return f"{n} {plural}"

From f698a67f8014cb2aa90ebbe9afa5f6e6f8174ea2 Mon Sep 17 00:00:00 2001
From: "John T. Wodder II"
Date: Mon, 10 May 2021 09:53:31 -0400
Subject: [PATCH 4/4] Add option for user to list to-be-deleted assets when doing a sync download

---
 dandi/download.py            | 24 ++++++++++++++++++------
 dandi/tests/test_download.py | 31 +++++++++++++++++++++++++++----
 dandi/utils.py               | 29 +++++++++++++++++++++++++
 setup.cfg                    |  2 ++
 4 files changed, 76 insertions(+), 10 deletions(-)

diff --git a/dandi/download.py b/dandi/download.py
index bcc680f68..d0f8abf32 100644
--- a/dandi/download.py
+++ b/dandi/download.py
@@ -8,7 +8,6 @@
 import sys
 import time
 
-import click
 import humanize
 import requests
 
@@ -18,6 +17,7 @@
 from . import get_logger
 from .support.pyout import naturalsize
 from .utils import (
+    abbrev_prompt,
     ensure_datetime,
     find_files,
     flattened,
@@ -132,11 +132,23 @@ def download(
                 a_path = a_path.replace("\\", "/")
                 if a_path not in asset_paths:
                     to_delete.append(p)
-            if to_delete and click.confirm(
-                f"Delete {pluralize(len(to_delete), 'local asset')}?"
-            ):
-                for p in to_delete:
-                    os.unlink(p)
+            if to_delete:
+                while True:
+                    opt = abbrev_prompt(
+                        f"Delete {pluralize(len(to_delete), 'local asset')}?",
+                        "yes",
+                        "no",
+                        "list",
+                    )
+                    if opt == "list":
+                        for p in to_delete:
+                            print(p)
+                    elif opt == "yes":
+                        for p in to_delete:
+                            os.unlink(p)
+                        break
+                    else:
+                        break
 
 
 def download_generator(
diff --git a/dandi/tests/test_download.py b/dandi/tests/test_download.py
index fda8cb95c..787dbef3f 100644
--- a/dandi/tests/test_download.py
+++ b/dandi/tests/test_download.py
@@ -189,14 +189,16 @@ def test_download_sync(confirm, local_dandi_api, mocker, text_dandiset, tmp_path
     )
     dspath = tmp_path / text_dandiset["dandiset_id"]
     os.rename(text_dandiset["dspath"], dspath)
-    confirm_mock = mocker.patch("click.confirm", return_value=confirm)
+    confirm_mock = mocker.patch(
+        "dandi.download.abbrev_prompt", return_value="yes" if confirm else "no"
+    )
     download(
         f"dandi://{local_dandi_api['instance_id']}/{text_dandiset['dandiset_id']}",
         tmp_path,
         existing="overwrite",
         sync=True,
     )
-    confirm_mock.assert_called_with("Delete 1 local asset?")
+    confirm_mock.assert_called_with("Delete 1 local asset?", "yes", "no", "list")
     if confirm:
         assert not (dspath / "file.txt").exists()
     else:
         assert (dspath / "file.txt").exists()
@@ -210,13 +212,34 @@ def test_download_sync_folder(local_dandi_api, mocker, text_dandiset):
     text_dandiset["client"].delete_asset_bypath(
         text_dandiset["dandiset_id"], "draft", "subdir2/banana.txt"
     )
-    confirm_mock = mocker.patch("click.confirm", return_value=True)
+    confirm_mock = mocker.patch("dandi.download.abbrev_prompt", return_value="yes")
     download(
         f"dandi://{local_dandi_api['instance_id']}/{text_dandiset['dandiset_id']}/subdir2/",
         text_dandiset["dspath"],
         existing="overwrite",
         sync=True,
     )
-    confirm_mock.assert_called_with("Delete 1 local asset?")
+    confirm_mock.assert_called_with("Delete 1 local asset?", "yes", "no", "list")
     assert (text_dandiset["dspath"] / "file.txt").exists()
     assert not (text_dandiset["dspath"] / "subdir2" / "banana.txt").exists()
+
+
+def test_download_sync_list(capsys, local_dandi_api, mocker, text_dandiset, tmp_path):
+    text_dandiset["client"].delete_asset_bypath(
+        text_dandiset["dandiset_id"], "draft", "file.txt"
+    )
+    dspath = tmp_path / text_dandiset["dandiset_id"]
+    os.rename(text_dandiset["dspath"], dspath)
+    input_mock = mocker.patch("dandi.utils.input", side_effect=["list", "yes"])
+    download(
+        f"dandi://{local_dandi_api['instance_id']}/{text_dandiset['dandiset_id']}",
+        tmp_path,
+        existing="overwrite",
+        sync=True,
+    )
+    assert not (dspath / "file.txt").exists()
+    assert input_mock.call_args_list == [
+        mocker.call("Delete 1 local asset? ([y]es/[n]o/[l]ist): "),
+        mocker.call("Delete 1 local asset? ([y]es/[n]o/[l]ist): "),
+    ]
+    assert capsys.readouterr().out.splitlines()[-1] == str(dspath / "file.txt")
diff --git a/dandi/utils.py b/dandi/utils.py
index 31d74f6e2..aa20c4a17 100644
--- a/dandi/utils.py
+++ b/dandi/utils.py
@@ -717,3 +717,32 @@ def pluralize(n: int, word: str, plural: Optional[str] = None) -> str:
         if plural is None:
             plural = word + "s"
         return f"{n} {plural}"
+
+
+def abbrev_prompt(msg: str, *options: str) -> str:
+    """
+    Prompt the user to input one of several options, which can be entered as
+    either a whole word or the first letter of a word. All input is handled
+    case-insensitively. Returns the complete word corresponding to the input,
+    lowercased.
+
+    For example, ``abbrev_prompt("Delete assets?", "yes", "no", "list")``
+    prompts the user with the message ``Delete assets? ([y]es/[n]o/[l]ist): ``
+    and accepts as input ``y``, ``yes``, ``n``, ``no``, ``l``, and ``list``.
+    """
+    options_map = {}
+    optstrs = []
+    for opt in options:
+        opt = opt.lower()
+        if opt in options_map:
+            raise ValueError(f"Repeated option: {opt}")
+        elif opt[0] in options_map:
+            raise ValueError(f"Repeated abbreviated option: {opt[0]}")
+        options_map[opt] = opt
+        options_map[opt[0]] = opt
+        optstrs.append(f"[{opt[0]}]{opt[1:]}")
+    msg += " (" + "/".join(optstrs) + "): "
+    while True:
+        answer = input(msg).lower()
+        if answer in options_map:
+            return options_map[answer]
diff --git a/setup.cfg b/setup.cfg
index 6239822ee..22b10b6eb 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -117,3 +117,5 @@ parentdir_prefix =
 
 [codespell]
 skip = dandi/_version.py,dandi/due.py,versioneer.py
+# Don't warn about "[l]ist" in the abbrev_prompt() docstring:
+ignore-regex = \[\w\]\w+
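
Finally, a small illustration of what the two helpers added to dandi/utils.py do at
the console; the session below is illustrative, not captured output:

    from dandi.utils import abbrev_prompt, pluralize

    print(pluralize(1, "local asset"))   # -> "1 local asset"
    print(pluralize(3, "local asset"))   # -> "3 local assets"

    # Prints "Delete 3 local assets? ([y]es/[n]o/[l]ist): " and keeps re-prompting
    # until it gets "y"/"yes", "n"/"no", or "l"/"list" (case-insensitive); the full
    # lowercased option word is returned.
    choice = abbrev_prompt(f"Delete {pluralize(3, 'local asset')}?", "yes", "no", "list")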