Added readme listing and removed some diagnostic print calls
TheChymera committed Nov 28, 2022
1 parent 41c7401 commit 18397b8
Showing 3 changed files with 28 additions and 50 deletions.
4 changes: 2 additions & 2 deletions dandi/cli/cmd_ls.py
@@ -341,7 +341,7 @@ def fn():
rec = {}
# No need to call get_metadata if no keys are needed from it
print(path)
print("jsfkfjskl")
print("QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ")
if keys is None or list(keys) != ["nwb_version"]:
try:
if schema is not None:
@@ -360,7 +360,7 @@ def fn():
digest=Digest.dandi_etag(digest),
).json_dict()
else:
print("99999999999")
print("000000000000000000000000000000000")
rec = get_metadata(path)
except Exception as exc:
_add_exc_error(path, rec, errors, exc)
22 changes: 15 additions & 7 deletions dandi/files/bids.py
@@ -65,6 +65,20 @@ def _validate(self) -> None:
bids_paths = [str(self.filepath)] + [
str(asset.filepath) for asset in self.dataset_files
]
# This is an ad-hoc fix which should be removed once a bidsschematools
# release newer than 0.6.0 is out. It will cause no trouble afterwards,
# but it will no longer serve any purpose. The issue is that README* is
# still required, and if we do not include it explicitly in the listing,
# validation will implicitly fail even if the file is present.
readme_extensions = ["", "md", "rst", "txt"]

jwodder (Member) commented on Nov 28, 2022:

    @TheChymera Shouldn't the nonempty strings start with periods?
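A quick illustration of jwodder's point: string concatenation with Path only produces the intended filenames when the nonempty extensions carry their leading periods. A minimal sketch (the filenames are hypothetical, not part of the diff):

    from pathlib import Path

    # As committed: the nonempty extensions lack periods, so the
    # candidates are names like "READMEmd", which never exist on disk.
    for ext in ["", "md", "rst", "txt"]:
        print(Path("README" + ext))   # README, READMEmd, READMErst, READMEtxt

    # With leading periods, the intended candidates come out:
    for ext in ["", ".md", ".rst", ".txt"]:
        print(Path("README" + ext))   # README, README.md, README.rst, README.txt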

for ext in readme_extensions:
ds_root = self.filepath.parent
readme_candidate = ds_root / Path("README" + ext)
if readme_candidate.exists():
bids_paths += [readme_candidate]
# end of ad-hoc fix.

results = validate_bids(*bids_paths)
self._dataset_errors: list[ValidationResult] = []
self._asset_errors: dict[str, list[ValidationResult]] = defaultdict(
@@ -79,9 +93,7 @@ def _validate(self) -> None:
self._dataset_errors.append(result)
elif result.id == "BIDS.MATCH":
assert result.path
print("mimimimimim")
bids_path = result.path.relative_to(self.bids_root).as_posix()
print("lililililil")
assert result.metadata is not None
self._asset_metadata[bids_path] = prepare_metadata(
result.metadata
@@ -156,11 +168,7 @@ def bids_path(self) -> str:
"""
``/``-separated path to the asset from the root of the BIDS dataset
"""
print("111111")
a = self.filepath.absolute().relative_to(self.bids_root).as_posix()
print("222222")
return a
# return self.filepath.relative_to(self.bids_root).as_posix()
return self.filepath.relative_to(self.bids_root).as_posix()

def get_validation_errors(
self,
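For reference, the same README scan as a self-contained sketch with the leading periods applied; find_readme is a hypothetical helper written for illustration, not code from this commit:

    from pathlib import Path
    from typing import Optional

    def find_readme(ds_root: Path) -> Optional[Path]:
        # Probe the dataset root for a README in any of the accepted formats,
        # returning the first match (or None if no README is present).
        for ext in ["", ".md", ".rst", ".txt"]:
            candidate = ds_root / ("README" + ext)
            if candidate.exists():
                return candidate
        return None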
52 changes: 11 additions & 41 deletions dandi/metadata.py
@@ -65,11 +65,13 @@ def get_metadata(path: Union[str, Path]) -> Optional[dict]:
dict
"""
# from .files import dandi_file, find_dandi_files
import os

from .files import dandi_file, find_bids_dataset_description

# when we run in parallel, these annoying warnings appear
ignore_benign_pynwb_warnings()
path = str(path) # for Path
path = os.path.abspath(str(path)) # for Path
meta = dict()

print("00000000000000000")
@@ -89,13 +91,13 @@ def get_metadata(path: Union[str, Path]) -> Optional[dict]:
)

# First read out possibly available versions of specifications for NWB(:N)
print("99999999999")
print(meta)
print("1111111111")
print(meta)
print("2222222222")
meta["nwb_version"] = get_nwb_version(path)
print("99999999999")
print("3333333333")
print(meta)
print("1111111111")
print("4444444444")

# PyNWB might fail to load because of missing extensions.
# There is a new initiative to establish a registry of such extensions.
@@ -134,52 +136,20 @@ def get_metadata(path: Union[str, Path]) -> Optional[dict]:

meta["nd_types"] = get_neurodata_types(path)
else:
dataset_path = find_parent_directory_containing(
"dataset_description.json", path
)
# dataset_path = find_parent_directory_containing(
# "dataset_description.json", path
# )
dandiset_path = find_parent_directory_containing("dandiset.yaml", path)
print("ßßßßßßßßßßßßßßßßßßßßßßßß")
bids_dataset_description = find_bids_dataset_description(path)
print("ſſſſſſſſſſſſſſſſſſſſ")
print(path)
# df = list(
# find_dandi_files(
# path,
# dataset_path,
# dandiset_path=dandiset_path,
# allow_all=True,
# )
# )
print(type(path))
p = Path(path)
print(type(dataset_path))
# df = dandi_file(pathlib.PosixPath(path), dandiset_path,
# bids_dataset_description=dataset_path,
# )
print("łłłłłłłłłłłłłłłłłłłłłłłłłłłłłłłłłłł")
print(p, type(p))
print(dandiset_path, type(p))
print(bids_dataset_description)
# df = list(
# dandi_file(
# p,
# dandiset_path,
# bids_dataset_description=bids_dataset_description,
# )
# )
df = dandi_file(
p,
dandiset_path,
bids_dataset_description=bids_dataset_description,
)
print("ăăăăăăăăăăăăăăăăă")
# for i in df:
# print(i)
# assert len(df) == 1
# df = df[0]
# print("aaaaaaaaaaaaa")
# a = df.get_metadata()
a = df.get_metadata()
print("ßßßßßßßßßßßßßßßßßßßßßßßß")
print(a)

return meta
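Stripped of the debug prints, the BIDS branch added to get_metadata() boils down to the following sketch. It assumes the dandi package internals named in the diff (dandi_file, find_bids_dataset_description, find_parent_directory_containing) and is not runnable outside that package; bids_asset_metadata is a hypothetical wrapper name:

    import os
    from pathlib import Path

    from .files import dandi_file, find_bids_dataset_description
    from .utils import find_parent_directory_containing  # assumed import location

    def bids_asset_metadata(path) -> dict:
        # Absolutize first so the parent-directory searches start from
        # a real location rather than a bare relative path.
        path = os.path.abspath(str(path))
        dandiset_path = find_parent_directory_containing("dandiset.yaml", path)
        bids_dataset_description = find_bids_dataset_description(path)
        df = dandi_file(
            Path(path),
            dandiset_path,
            bids_dataset_description=bids_dataset_description,
        )
        return df.get_metadata()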
