From 319d27e6edf9328a2401dea6a852ff3f306c1b9f Mon Sep 17 00:00:00 2001 From: "John T. Wodder II" Date: Mon, 25 Oct 2021 17:38:32 -0400 Subject: [PATCH] Give the resource classes `api_url` properties --- dandi/dandiapi.py | 48 ++++++++++++++++++++++++++------ dandi/dandiarchive.py | 2 +- dandi/tests/test_dandiapi.py | 36 ++++++------------------ dandi/tests/test_dandiarchive.py | 42 ++++++++++++++++++++++++++++ dandi/tests/test_delete.py | 15 ++-------- dandi/tests/test_download.py | 34 +++++++++------------- dandi/tests/test_upload.py | 22 ++++----------- 7 files changed, 113 insertions(+), 86 deletions(-) diff --git a/dandi/dandiapi.py b/dandi/dandiapi.py index d35b8f71a..222a8070d 100644 --- a/dandi/dandiapi.py +++ b/dandi/dandiapi.py @@ -702,20 +702,36 @@ def draft_version(self) -> Version: @property def api_path(self) -> str: """ - The API path (relative to the base endpoint for a Dandi Archive API) at - which requests for interacting with the Dandiset itself are made + The path (relative to the base endpoint for a Dandi Archive API) at + which API requests for interacting with the Dandiset itself are made """ return f"/dandisets/{self.identifier}/" + @property + def api_url(self) -> str: + """ + The URL at which API requests for interacting with the Dandiset itself + are made + """ + return self.client.get_url(self.api_path) + @property def version_api_path(self) -> str: """ - The API path (relative to the base endpoint for a Dandi Archive API) at - which requests for interacting with the version in question of the + The path (relative to the base endpoint for a Dandi Archive API) at + which API requests for interacting with the version in question of the Dandiset are made """ return f"/dandisets/{self.identifier}/versions/{self.version_id}/" + @property + def version_api_url(self) -> str: + """ + The URL at which API requests for interacting with the version in + question of the Dandiset are made + """ + return self.client.get_url(self.version_api_path) + @classmethod def from_data( cls, client: "DandiAPIClient", data: Dict[str, Any] @@ -1237,11 +1253,19 @@ def from_metadata( @property def api_path(self) -> str: """ - The API path (relative to the base endpoint for a Dandi Archive API) at - which requests for interacting with the asset itself are made + The path (relative to the base endpoint for a Dandi Archive API) at + which API requests for interacting with the asset itself are made """ return f"/assets/{self.identifier}/" + @property + def api_url(self) -> str: + """ + The URL at which API requests for interacting with the asset itself are + made + """ + return self.client.get_url(self.api_path) + @property def base_download_url(self) -> str: """ @@ -1417,11 +1441,19 @@ def from_data( @property def api_path(self) -> str: """ - The API path (relative to the base endpoint for a Dandi Archive API) at - which requests for interacting with the asset itself are made + The path (relative to the base endpoint for a Dandi Archive API) at + which API requests for interacting with the asset itself are made """ return f"/dandisets/{self.dandiset_id}/versions/{self.version_id}/assets/{self.identifier}/" + @property + def api_url(self) -> str: + """ + The URL at which API requests for interacting with the asset itself are + made + """ + return self.client.get_url(self.api_path) + @property def download_url(self) -> str: """ diff --git a/dandi/dandiarchive.py b/dandi/dandiarchive.py index af990034a..a78c41a48 100644 --- a/dandi/dandiarchive.py +++ b/dandi/dandiarchive.py @@ -360,7 +360,7 @@ 
class _dandi_url_parser: ( re.compile( rf"{server_grp}(?Pdandiset)s/{dandiset_id_grp}" - rf"(/(versions(/(?P{VERSION_REGEX}))?)?)?" + rf"(/(versions(/(?P{VERSION_REGEX}))?/?)?)?" ), {}, "https://[/api]/dandisets/[/versions[/]]", diff --git a/dandi/tests/test_dandiapi.py b/dandi/tests/test_dandiapi.py index 0c2d07dc8..ec8f71079 100644 --- a/dandi/tests/test_dandiapi.py +++ b/dandi/tests/test_dandiapi.py @@ -81,10 +81,8 @@ def test_publish_and_manipulate(local_dandi_api, monkeypatch, tmp_path): v = d.publish().version version_id = v.identifier assert str(v) == version_id - assert ( - str(d.for_version(v)) - == f"DANDI-API-LOCAL-DOCKER-TESTS:{dandiset_id}/{version_id}" - ) + dv = d.for_version(v) + assert str(dv) == f"DANDI-API-LOCAL-DOCKER-TESTS:{dandiset_id}/{version_id}" download_dir = tmp_path / "download" download_dir.mkdir() @@ -95,10 +93,7 @@ def downloaded_files(): dandiset_yaml = download_dir / dandiset_id / dandiset_metadata_file file_in_version = download_dir / dandiset_id / "subdir" / "file.txt" - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}", - download_dir, - ) + download(dv.version_api_url, download_dir) assert downloaded_files() == [dandiset_yaml, file_in_version] assert file_in_version.read_text() == "This is test text.\n" @@ -111,10 +106,7 @@ def downloaded_files(): validation="skip", ) rmtree(download_dir / dandiset_id) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}", - download_dir, - ) + download(dv.version_api_url, download_dir) assert downloaded_files() == [dandiset_yaml, file_in_version] assert file_in_version.read_text() == "This is test text.\n" @@ -128,10 +120,7 @@ def downloaded_files(): ) rmtree(download_dir / dandiset_id) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - download_dir, - ) + download(d.version_api_url, download_dir) assert sorted(downloaded_files()) == [ dandiset_yaml, file_in_version, @@ -141,28 +130,19 @@ def downloaded_files(): assert file_in_version.with_name("file2.txt").read_text() == "This is more text.\n" rmtree(download_dir / dandiset_id) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}", - download_dir, - ) + download(dv.version_api_url, download_dir) assert downloaded_files() == [dandiset_yaml, file_in_version] assert file_in_version.read_text() == "This is test text.\n" d.get_asset_by_path("subdir/file.txt").delete() rmtree(download_dir / dandiset_id) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - download_dir, - ) + download(d.version_api_url, download_dir) assert downloaded_files() == [dandiset_yaml, file_in_version.with_name("file2.txt")] assert file_in_version.with_name("file2.txt").read_text() == "This is more text.\n" rmtree(download_dir / dandiset_id) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}", - download_dir, - ) + download(dv.version_api_url, download_dir) assert downloaded_files() == [dandiset_yaml, file_in_version] assert file_in_version.read_text() == "This is test text.\n" diff --git a/dandi/tests/test_dandiarchive.py b/dandi/tests/test_dandiarchive.py index 37ccf131f..ae8d8912a 100644 --- a/dandi/tests/test_dandiarchive.py +++ b/dandi/tests/test_dandiarchive.py @@ -109,6 +109,22 @@ ), marks=mark.skipif_no_network, ), + ( + "http://localhost:8000/api/dandisets/000002/", + DandisetURL( + api_url="http://localhost:8000/api", + 
dandiset_id="000002", + version_id=None, + ), + ), + ( + "http://localhost:8000/api/dandisets/000002", + DandisetURL( + api_url="http://localhost:8000/api", + dandiset_id="000002", + version_id=None, + ), + ), ( "http://localhost:8000/api/dandisets/000002/versions/draft", DandisetURL( @@ -117,6 +133,14 @@ version_id="draft", ), ), + ( + "http://localhost:8000/api/dandisets/000002/versions/draft/", + DandisetURL( + api_url="http://localhost:8000/api", + dandiset_id="000002", + version_id="draft", + ), + ), ( "https://gui.dandiarchive.org/#/dandiset/000001/files" "?location=%2Fsub-anm369962", @@ -193,6 +217,16 @@ asset_id="0a748f90-d497-4a9c-822e-9c63811db412", ), ), + ( + "https://api.dandiarchive.org/api/dandisets/000003/versions/draft" + "/assets/0a748f90-d497-4a9c-822e-9c63811db412/download", + AssetIDURL( + api_url="https://api.dandiarchive.org/api", + dandiset_id="000003", + version_id="draft", + asset_id="0a748f90-d497-4a9c-822e-9c63811db412", + ), + ), ( "https://api.dandiarchive.org/api" "/assets/0a748f90-d497-4a9c-822e-9c63811db412/download/", @@ -201,6 +235,14 @@ asset_id="0a748f90-d497-4a9c-822e-9c63811db412", ), ), + ( + "https://api.dandiarchive.org/api" + "/assets/0a748f90-d497-4a9c-822e-9c63811db412/download", + BaseAssetIDURL( + api_url="https://api.dandiarchive.org/api", + asset_id="0a748f90-d497-4a9c-822e-9c63811db412", + ), + ), ( "https://api.dandiarchive.org/api/dandisets/000003/versions/draft" "/assets/?path=sub-YutaMouse20", diff --git a/dandi/tests/test_delete.py b/dandi/tests/test_delete.py index 7074d0766..a71cde396 100644 --- a/dandi/tests/test_delete.py +++ b/dandi/tests/test_delete.py @@ -69,10 +69,7 @@ def test_delete_paths( force=True, ) delete_spy.assert_called() - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - tmp_path, - ) + download(text_dandiset["dandiset"].version_api_url, tmp_path) files = sorted(map(Path, find_files(r".*", paths=[tmp_path]))) assert files == [tmp_path / dandiset_id / f for f in ["dandiset.yaml"] + remainder] @@ -278,10 +275,7 @@ def test_delete_nonexistent_asset_skip_missing( skip_missing=True, ) delete_spy.assert_called() - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - tmp_path, - ) + download(text_dandiset["dandiset"].version_api_url, tmp_path) files = sorted(map(Path, find_files(r".*", paths=[tmp_path]))) assert files == [ tmp_path / dandiset_id / "dandiset.yaml", @@ -333,10 +327,7 @@ def test_delete_nonexistent_asset_folder_skip_missing( skip_missing=True, ) delete_spy.assert_called() - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - tmp_path, - ) + download(text_dandiset["dandiset"].version_api_url, tmp_path) files = sorted(map(Path, find_files(r".*", paths=[tmp_path]))) assert files == [ tmp_path / dandiset_id / "dandiset.yaml", diff --git a/dandi/tests/test_download.py b/dandi/tests/test_download.py index bdb10c30e..41c9fafeb 100644 --- a/dandi/tests/test_download.py +++ b/dandi/tests/test_download.py @@ -117,16 +117,17 @@ def test_download_000027_resume(tmp_path, resizer, version): assert digester(str(nwb)) == digests -def test_download_newest_version(local_dandi_api, text_dandiset, tmp_path): +def test_download_newest_version(text_dandiset, tmp_path): + dandiset = text_dandiset["dandiset"] dandiset_id = text_dandiset["dandiset_id"] - download(f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}", tmp_path) + download(dandiset.api_url, tmp_path) assert (tmp_path / dandiset_id / 
"file.txt").read_text() == "This is test text.\n" - text_dandiset["dandiset"].wait_until_valid() - text_dandiset["dandiset"].publish() + dandiset.wait_until_valid() + dandiset.publish() (text_dandiset["dspath"] / "file.txt").write_text("This is different text.\n") text_dandiset["reupload"]() rmtree(tmp_path / dandiset_id) - download(f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}", tmp_path) + download(dandiset.api_url, tmp_path) assert (tmp_path / dandiset_id / "file.txt").read_text() == "This is test text.\n" @@ -156,26 +157,18 @@ def test_download_item(local_dandi_api, text_dandiset, tmp_path): assert (tmp_path / "coconut.txt").read_text() == "Coconut\n" -def test_download_asset_id(local_dandi_api, text_dandiset, tmp_path): - dandiset_id = text_dandiset["dandiset_id"] +def test_download_asset_id(text_dandiset, tmp_path): asset = text_dandiset["dandiset"].get_asset_by_path("subdir2/coconut.txt") - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions" - f"/draft/assets/{asset.identifier}/download/", - tmp_path, - ) + download(asset.download_url, tmp_path) assert list(map(Path, find_files(r".*", paths=[tmp_path], dirs=True))) == [ tmp_path / "coconut.txt" ] assert (tmp_path / "coconut.txt").read_text() == "Coconut\n" -def test_download_asset_id_only(local_dandi_api, text_dandiset, tmp_path): +def test_download_asset_id_only(text_dandiset, tmp_path): asset = text_dandiset["dandiset"].get_asset_by_path("subdir2/coconut.txt") - download( - f"{local_dandi_api['instance'].api}/assets/{asset.identifier}/download/", - tmp_path, - ) + download(asset.base_download_url, tmp_path) assert list(map(Path, find_files(r".*", paths=[tmp_path], dirs=True))) == [ tmp_path / "coconut.txt" ] @@ -238,13 +231,12 @@ def test_download_sync_list(capsys, local_dandi_api, mocker, text_dandiset, tmp_ @responses.activate -def test_download_no_blobDateModified(local_dandi_api, text_dandiset, tmp_path): +def test_download_no_blobDateModified(text_dandiset, tmp_path): # Regression test for #806 responses.add_passthru(re.compile("^http")) - client = text_dandiset["client"] dandiset = text_dandiset["dandiset"] asset = dandiset.get_asset_by_path("file.txt") metadata = asset.get_raw_metadata() del metadata["blobDateModified"] - responses.add(responses.GET, client.get_url(asset.api_path), json=metadata) - download(client.get_url(dandiset.api_path), tmp_path) + responses.add(responses.GET, asset.api_url, json=metadata) + download(dandiset.api_url, tmp_path) diff --git a/dandi/tests/test_upload.py b/dandi/tests/test_upload.py index 21df6699b..e4b1cc16f 100644 --- a/dandi/tests/test_upload.py +++ b/dandi/tests/test_upload.py @@ -23,10 +23,7 @@ def test_new_upload_download(local_dandi_api, monkeypatch, organized_nwb_dir, tm monkeypatch.chdir(organized_nwb_dir) monkeypatch.setenv("DANDI_API_KEY", local_dandi_api["api_key"]) upload(paths=[], dandi_instance=local_dandi_api["instance_id"], devel_debug=True) - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - tmp_path, - ) + download(d.version_api_url, tmp_path) (nwb_file2,) = tmp_path.glob(f"{dandiset_id}{os.sep}*{os.sep}*.nwb") assert nwb_file.name == nwb_file2.name assert nwb_file.parent.name == nwb_file2.parent.name @@ -77,18 +74,13 @@ def test_new_upload_extant_eq_overwrite(existing, mocker, text_dandiset): @pytest.mark.parametrize("existing", ["overwrite", "refresh"]) -def test_new_upload_extant_neq_overwrite( - existing, local_dandi_api, mocker, text_dandiset, tmp_path -): +def 
test_new_upload_extant_neq_overwrite(existing, mocker, text_dandiset, tmp_path): dandiset_id = text_dandiset["dandiset_id"] (text_dandiset["dspath"] / "file.txt").write_text("This is different text.\n") iter_upload_spy = mocker.spy(RemoteDandiset, "iter_upload_raw_asset") text_dandiset["reupload"](existing=existing) iter_upload_spy.assert_called() - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - tmp_path, - ) + download(text_dandiset["dandiset"].version_api_url, tmp_path) assert ( tmp_path / dandiset_id / "file.txt" ).read_text() == "This is different text.\n" @@ -128,7 +120,8 @@ def test_new_upload_extant_bad_existing(mocker, text_dandiset): ) def test_upload_download_small_file(contents, local_dandi_api, monkeypatch, tmp_path): client = local_dandi_api["client"] - dandiset_id = client.create_dandiset("Small Dandiset", {}).identifier + d = client.create_dandiset("Small Dandiset", {}) + dandiset_id = d.identifier dspath = tmp_path / "upload" dspath.mkdir() (dspath / dandiset_metadata_file).write_text(f"identifier: '{dandiset_id}'\n") @@ -144,10 +137,7 @@ def test_upload_download_small_file(contents, local_dandi_api, monkeypatch, tmp_ ) download_dir = tmp_path / "download" download_dir.mkdir() - download( - f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft", - download_dir, - ) + download(d.version_api_url, download_dir) files = sorted(map(Path, find_files(r".*", paths=[download_dir]))) assert files == [ download_dir / dandiset_id / dandiset_metadata_file,
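
Note (not part of the patch): a minimal usage sketch of the new properties introduced above, assuming the dandi-cli client API as it exists in this repository; the Dandiset ID and asset path below are placeholders, and the output directory is just the current directory.

    from dandi.dandiapi import DandiAPIClient
    from dandi.download import download

    with DandiAPIClient("https://api.dandiarchive.org/api") as client:
        d = client.get_dandiset("000027", "draft")
        # URL of the Dandiset resource itself, built from api_path via
        # client.get_url(), e.g. .../api/dandisets/000027/
        print(d.api_url)
        # URL of this particular version of the Dandiset,
        # e.g. .../api/dandisets/000027/versions/draft/
        print(d.version_api_url)
        # Placeholder asset path; any asset in the Dandiset would do
        asset = d.get_asset_by_path("sub-RAT123/sub-RAT123.nwb")
        # URL of the asset within the Dandiset version
        print(asset.api_url)
        # As in the updated tests, these URLs can be passed directly to
        # download() in place of manually assembled f-strings
        download(d.version_api_url, ".")

Because each new property simply delegates to client.get_url(self.api_path), the returned URLs always agree with the client's configured base endpoint instead of being rebuilt from string fragments at every call site, which is what lets the tests above drop their hand-built f-string URLs.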