Merge pull request #816 from dandi/api-url
Give the resource classes `api_url` properties
yarikoptic authored Oct 25, 2021
2 parents 814fa6f + 319d27e commit 1620fcd
Showing 7 changed files with 113 additions and 86 deletions.
48 changes: 40 additions & 8 deletions dandi/dandiapi.py
@@ -702,20 +702,36 @@ def draft_version(self) -> Version:
@property
def api_path(self) -> str:
"""
The API path (relative to the base endpoint for a Dandi Archive API) at
which requests for interacting with the Dandiset itself are made
The path (relative to the base endpoint for a Dandi Archive API) at
which API requests for interacting with the Dandiset itself are made
"""
return f"/dandisets/{self.identifier}/"

@property
def api_url(self) -> str:
"""
The URL at which API requests for interacting with the Dandiset itself
are made
"""
return self.client.get_url(self.api_path)

@property
def version_api_path(self) -> str:
"""
The API path (relative to the base endpoint for a Dandi Archive API) at
which requests for interacting with the version in question of the
The path (relative to the base endpoint for a Dandi Archive API) at
which API requests for interacting with the version in question of the
Dandiset are made
"""
return f"/dandisets/{self.identifier}/versions/{self.version_id}/"

@property
def version_api_url(self) -> str:
"""
The URL at which API requests for interacting with the version in
question of the Dandiset are made
"""
return self.client.get_url(self.version_api_path)

@classmethod
def from_data(
cls, client: "DandiAPIClient", data: Dict[str, Any]
@@ -1237,11 +1253,19 @@ def from_metadata(
@property
def api_path(self) -> str:
"""
The API path (relative to the base endpoint for a Dandi Archive API) at
which requests for interacting with the asset itself are made
The path (relative to the base endpoint for a Dandi Archive API) at
which API requests for interacting with the asset itself are made
"""
return f"/assets/{self.identifier}/"

@property
def api_url(self) -> str:
"""
The URL at which API requests for interacting with the asset itself are
made
"""
return self.client.get_url(self.api_path)

@property
def base_download_url(self) -> str:
"""
@@ -1417,11 +1441,19 @@ def from_data(
@property
def api_path(self) -> str:
"""
The API path (relative to the base endpoint for a Dandi Archive API) at
which requests for interacting with the asset itself are made
The path (relative to the base endpoint for a Dandi Archive API) at
which API requests for interacting with the asset itself are made
"""
return f"/dandisets/{self.dandiset_id}/versions/{self.version_id}/assets/{self.identifier}/"

@property
def api_url(self) -> str:
"""
The URL at which API requests for interacting with the asset itself are
made
"""
return self.client.get_url(self.api_path)

@property
def download_url(self) -> str:
"""
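For orientation, here is a minimal usage sketch of the new properties (not part of the diff). It assumes a reachable Dandi Archive instance with placeholder identifiers and uses the client's get_dandiset() helper; each new api_url property is simply the corresponding api_path passed through client.get_url().

from dandi.dandiapi import DandiAPIClient

# Hypothetical instance URL and identifiers, for illustration only.
client = DandiAPIClient("https://api.dandiarchive.org/api")
dandiset = client.get_dandiset("000002", "draft")

print(dandiset.api_path)         # /dandisets/000002/
print(dandiset.api_url)          # https://api.dandiarchive.org/api/dandisets/000002/
print(dandiset.version_api_url)  # https://api.dandiarchive.org/api/dandisets/000002/versions/draft/

# Path borrowed from the tests below; any existing asset path would do.
asset = dandiset.get_asset_by_path("subdir/file.txt")
print(asset.api_url)             # .../dandisets/000002/versions/draft/assets/<asset id>/

As the test changes below show, these URLs can be handed directly to download() instead of being rebuilt from f-strings.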
2 changes: 1 addition & 1 deletion dandi/dandiarchive.py
@@ -360,7 +360,7 @@ class _dandi_url_parser:
(
re.compile(
rf"{server_grp}(?P<asset_type>dandiset)s/{dandiset_id_grp}"
rf"(/(versions(/(?P<version>{VERSION_REGEX}))?)?)?"
rf"(/(versions(/(?P<version>{VERSION_REGEX}))?/?)?)?"
),
{},
"https://<server>[/api]/dandisets/<dandiset id>[/versions[/<version>]]",
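The only functional change above is the added /?, which lets a version-scoped Dandiset URL end in a trailing slash. A rough standalone sketch of the effect, using a simplified stand-in for the real pattern (the server group, dandiset-ID group, VERSION_REGEX, and the anchoring are approximations, not the actual parser):

import re

# Simplified approximation of the parser's pattern after this change;
# "draft" and dotted numeric versions stand in for VERSION_REGEX.
pattern = re.compile(
    r"https://api\.example\.org/api/dandisets/(?P<dandiset_id>\d{6})"
    r"(/(versions(/(?P<version>[.0-9]+|draft))?/?)?)?$"
)

for url in [
    "https://api.example.org/api/dandisets/000002",
    "https://api.example.org/api/dandisets/000002/versions/draft",
    "https://api.example.org/api/dandisets/000002/versions/draft/",  # trailing slash tolerated by the added /?
]:
    m = pattern.match(url)
    print(url, "->", m.group("version") if m else "no match")

The new test cases in dandi/tests/test_dandiarchive.py below exercise exactly these trailing-slash forms.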
36 changes: 8 additions & 28 deletions dandi/tests/test_dandiapi.py
@@ -81,10 +81,8 @@ def test_publish_and_manipulate(local_dandi_api, monkeypatch, tmp_path):
v = d.publish().version
version_id = v.identifier
assert str(v) == version_id
assert (
str(d.for_version(v))
== f"DANDI-API-LOCAL-DOCKER-TESTS:{dandiset_id}/{version_id}"
)
dv = d.for_version(v)
assert str(dv) == f"DANDI-API-LOCAL-DOCKER-TESTS:{dandiset_id}/{version_id}"

download_dir = tmp_path / "download"
download_dir.mkdir()
@@ -95,10 +93,7 @@ def downloaded_files():
dandiset_yaml = download_dir / dandiset_id / dandiset_metadata_file
file_in_version = download_dir / dandiset_id / "subdir" / "file.txt"

download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}",
download_dir,
)
download(dv.version_api_url, download_dir)
assert downloaded_files() == [dandiset_yaml, file_in_version]
assert file_in_version.read_text() == "This is test text.\n"

@@ -111,10 +106,7 @@ def downloaded_files():
validation="skip",
)
rmtree(download_dir / dandiset_id)
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}",
download_dir,
)
download(dv.version_api_url, download_dir)
assert downloaded_files() == [dandiset_yaml, file_in_version]
assert file_in_version.read_text() == "This is test text.\n"

@@ -128,10 +120,7 @@
)

rmtree(download_dir / dandiset_id)
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft",
download_dir,
)
download(d.version_api_url, download_dir)
assert sorted(downloaded_files()) == [
dandiset_yaml,
file_in_version,
@@ -141,28 +130,19 @@
assert file_in_version.with_name("file2.txt").read_text() == "This is more text.\n"

rmtree(download_dir / dandiset_id)
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}",
download_dir,
)
download(dv.version_api_url, download_dir)
assert downloaded_files() == [dandiset_yaml, file_in_version]
assert file_in_version.read_text() == "This is test text.\n"

d.get_asset_by_path("subdir/file.txt").delete()

rmtree(download_dir / dandiset_id)
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft",
download_dir,
)
download(d.version_api_url, download_dir)
assert downloaded_files() == [dandiset_yaml, file_in_version.with_name("file2.txt")]
assert file_in_version.with_name("file2.txt").read_text() == "This is more text.\n"

rmtree(download_dir / dandiset_id)
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/{version_id}",
download_dir,
)
download(dv.version_api_url, download_dir)
assert downloaded_files() == [dandiset_yaml, file_in_version]
assert file_in_version.read_text() == "This is test text.\n"

42 changes: 42 additions & 0 deletions dandi/tests/test_dandiarchive.py
@@ -109,6 +109,22 @@
),
marks=mark.skipif_no_network,
),
(
"http://localhost:8000/api/dandisets/000002/",
DandisetURL(
api_url="http://localhost:8000/api",
dandiset_id="000002",
version_id=None,
),
),
(
"http://localhost:8000/api/dandisets/000002",
DandisetURL(
api_url="http://localhost:8000/api",
dandiset_id="000002",
version_id=None,
),
),
(
"http://localhost:8000/api/dandisets/000002/versions/draft",
DandisetURL(
@@ -117,6 +133,14 @@
version_id="draft",
),
),
(
"http://localhost:8000/api/dandisets/000002/versions/draft/",
DandisetURL(
api_url="http://localhost:8000/api",
dandiset_id="000002",
version_id="draft",
),
),
(
"https://gui.dandiarchive.org/#/dandiset/000001/files"
"?location=%2Fsub-anm369962",
@@ -193,6 +217,16 @@
asset_id="0a748f90-d497-4a9c-822e-9c63811db412",
),
),
(
"https://api.dandiarchive.org/api/dandisets/000003/versions/draft"
"/assets/0a748f90-d497-4a9c-822e-9c63811db412/download",
AssetIDURL(
api_url="https://api.dandiarchive.org/api",
dandiset_id="000003",
version_id="draft",
asset_id="0a748f90-d497-4a9c-822e-9c63811db412",
),
),
(
"https://api.dandiarchive.org/api"
"/assets/0a748f90-d497-4a9c-822e-9c63811db412/download/",
@@ -201,6 +235,14 @@
asset_id="0a748f90-d497-4a9c-822e-9c63811db412",
),
),
(
"https://api.dandiarchive.org/api"
"/assets/0a748f90-d497-4a9c-822e-9c63811db412/download",
BaseAssetIDURL(
api_url="https://api.dandiarchive.org/api",
asset_id="0a748f90-d497-4a9c-822e-9c63811db412",
),
),
(
"https://api.dandiarchive.org/api/dandisets/000003/versions/draft"
"/assets/?path=sub-YutaMouse20",
15 changes: 3 additions & 12 deletions dandi/tests/test_delete.py
@@ -69,10 +69,7 @@ def test_delete_paths(
force=True,
)
delete_spy.assert_called()
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft",
tmp_path,
)
download(text_dandiset["dandiset"].version_api_url, tmp_path)
files = sorted(map(Path, find_files(r".*", paths=[tmp_path])))
assert files == [tmp_path / dandiset_id / f for f in ["dandiset.yaml"] + remainder]

@@ -278,10 +275,7 @@ def test_delete_nonexistent_asset_skip_missing(
skip_missing=True,
)
delete_spy.assert_called()
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft",
tmp_path,
)
download(text_dandiset["dandiset"].version_api_url, tmp_path)
files = sorted(map(Path, find_files(r".*", paths=[tmp_path])))
assert files == [
tmp_path / dandiset_id / "dandiset.yaml",
@@ -333,10 +327,7 @@ def test_delete_nonexistent_asset_folder_skip_missing(
skip_missing=True,
)
delete_spy.assert_called()
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions/draft",
tmp_path,
)
download(text_dandiset["dandiset"].version_api_url, tmp_path)
files = sorted(map(Path, find_files(r".*", paths=[tmp_path])))
assert files == [
tmp_path / dandiset_id / "dandiset.yaml",
34 changes: 13 additions & 21 deletions dandi/tests/test_download.py
@@ -117,16 +117,17 @@ def test_download_000027_resume(tmp_path, resizer, version):
assert digester(str(nwb)) == digests


def test_download_newest_version(local_dandi_api, text_dandiset, tmp_path):
def test_download_newest_version(text_dandiset, tmp_path):
dandiset = text_dandiset["dandiset"]
dandiset_id = text_dandiset["dandiset_id"]
download(f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}", tmp_path)
download(dandiset.api_url, tmp_path)
assert (tmp_path / dandiset_id / "file.txt").read_text() == "This is test text.\n"
text_dandiset["dandiset"].wait_until_valid()
text_dandiset["dandiset"].publish()
dandiset.wait_until_valid()
dandiset.publish()
(text_dandiset["dspath"] / "file.txt").write_text("This is different text.\n")
text_dandiset["reupload"]()
rmtree(tmp_path / dandiset_id)
download(f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}", tmp_path)
download(dandiset.api_url, tmp_path)
assert (tmp_path / dandiset_id / "file.txt").read_text() == "This is test text.\n"


@@ -156,26 +157,18 @@ def test_download_item(local_dandi_api, text_dandiset, tmp_path):
assert (tmp_path / "coconut.txt").read_text() == "Coconut\n"


def test_download_asset_id(local_dandi_api, text_dandiset, tmp_path):
dandiset_id = text_dandiset["dandiset_id"]
def test_download_asset_id(text_dandiset, tmp_path):
asset = text_dandiset["dandiset"].get_asset_by_path("subdir2/coconut.txt")
download(
f"{local_dandi_api['instance'].api}/dandisets/{dandiset_id}/versions"
f"/draft/assets/{asset.identifier}/download/",
tmp_path,
)
download(asset.download_url, tmp_path)
assert list(map(Path, find_files(r".*", paths=[tmp_path], dirs=True))) == [
tmp_path / "coconut.txt"
]
assert (tmp_path / "coconut.txt").read_text() == "Coconut\n"


def test_download_asset_id_only(local_dandi_api, text_dandiset, tmp_path):
def test_download_asset_id_only(text_dandiset, tmp_path):
asset = text_dandiset["dandiset"].get_asset_by_path("subdir2/coconut.txt")
download(
f"{local_dandi_api['instance'].api}/assets/{asset.identifier}/download/",
tmp_path,
)
download(asset.base_download_url, tmp_path)
assert list(map(Path, find_files(r".*", paths=[tmp_path], dirs=True))) == [
tmp_path / "coconut.txt"
]
@@ -238,13 +231,12 @@ def test_download_sync_list(capsys, local_dandi_api, mocker, text_dandiset, tmp_


@responses.activate
def test_download_no_blobDateModified(local_dandi_api, text_dandiset, tmp_path):
def test_download_no_blobDateModified(text_dandiset, tmp_path):
# Regression test for #806
responses.add_passthru(re.compile("^http"))
client = text_dandiset["client"]
dandiset = text_dandiset["dandiset"]
asset = dandiset.get_asset_by_path("file.txt")
metadata = asset.get_raw_metadata()
del metadata["blobDateModified"]
responses.add(responses.GET, client.get_url(asset.api_path), json=metadata)
download(client.get_url(dandiset.api_path), tmp_path)
responses.add(responses.GET, asset.api_url, json=metadata)
download(dandiset.api_url, tmp_path)
