GeoDataset: allow a mix of str and pathlib paths (#2270)
* GeoDataset: allow a mix of str and pathlib paths

* Update type hints

* Simpler str conversion
adamjstewart authored Sep 3, 2024
1 parent da6ff04 commit 6576b40
Showing 2 changed files with 11 additions and 3 deletions.
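
In practice, the change means a call like the one below now works end to end. A minimal usage sketch, assuming some NAIP tiles already on disk (NAIP is just a convenient RasterDataset subclass here, and the file paths are placeholders):

import pathlib

from torchgeo.datasets import NAIP

# `paths` may now mix plain strings and pathlib objects; the `files`
# property coerces every match to str before returning a sorted list.
mixed_paths = [
    '/data/naip/tile_a.tif',                # str
    pathlib.Path('/data/naip/tile_b.tif'),  # pathlib.Path
]
ds = NAIP(paths=mixed_paths)
print(ds.files)  # every entry is a str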
8 changes: 8 additions & 0 deletions tests/datasets/test_geo.py
@@ -205,6 +205,14 @@ def test_files_property_deterministic(self) -> None:
             CustomGeoDataset(paths=paths1).files == CustomGeoDataset(paths=paths2).files
         )
 
+    def test_files_property_mix_str_and_pathlib(self, tmp_path: Path) -> None:
+        foo = tmp_path / 'foo.txt'
+        bar = tmp_path / 'bar.txt'
+        foo.touch()
+        bar.touch()
+        ds = CustomGeoDataset(paths=[str(foo), bar])
+        assert ds.files == [str(bar), str(foo)]
+
 
 class TestRasterDataset:
     naip_dir = os.path.join('tests', 'data', 'naip')
6 changes: 3 additions & 3 deletions torchgeo/datasets/geo.py
@@ -291,7 +291,7 @@ def res(self, new_res: float) -> None:
         self._res = new_res
 
     @property
-    def files(self) -> list[Path]:
+    def files(self) -> list[str]:
         """A list of all files in the dataset.
 
         Returns:
@@ -306,15 +306,15 @@ def files(self) -> list[Path]:
             paths = self.paths
 
         # Using set to remove any duplicates if directories are overlapping
-        files: set[Path] = set()
+        files: set[str] = set()
         for path in paths:
             if os.path.isdir(path):
                 pathname = os.path.join(path, '**', self.filename_glob)
                 files |= set(glob.iglob(pathname, recursive=True))
             elif (os.path.isfile(path) or path_is_vsi(path)) and fnmatch.fnmatch(
                 str(path), f'*{self.filename_glob}'
             ):
-                files.add(path)
+                files.add(str(path))
             elif not hasattr(self, 'download'):
                 warnings.warn(
                     f"Could not find any relevant files for provided path '{path}'. "
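
The "Simpler str conversion" bullet in the commit message comes down to the pattern below. This is a standalone sketch of the idea rather than the actual torchgeo code (the normalize_paths helper and the '*.tif' default are made up for illustration):

import glob
import os


def normalize_paths(paths, pattern='*.tif'):
    """Toy version of GeoDataset.files: coerce every match to a plain str."""
    files: set[str] = set()
    for path in paths:
        if os.path.isdir(path):
            # glob.iglob already yields str, even when `path` is a pathlib.Path
            files |= set(glob.iglob(os.path.join(path, '**', pattern), recursive=True))
        elif os.path.isfile(path):
            # str() flattens pathlib.Path (or any os.PathLike) to its string form
            files.add(str(path))
    return sorted(files)

Because every entry ends up as a str, the set deduplicates identical files regardless of how they were spelled, and the sorted result is deterministic, which is what the new test asserts.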
