Skip to content

Commit

Permalink
#1 initial implementation of /file_formats
Browse files Browse the repository at this point in the history
  • Loading branch information
soxofaan committed Aug 16, 2021
1 parent 43aab3d commit 02bc7b2
Show file tree
Hide file tree
Showing 3 changed files with 114 additions and 0 deletions.
19 changes: 19 additions & 0 deletions src/openeo_aggregator/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,6 +313,25 @@ def __init__(self, backends: MultiBackendConnection, config: AggregatorConfig):
batch_jobs=batch_jobs,
user_defined_processes=None,
)
self._cache = TtlCache(default_ttl=CACHE_TTL_DEFAULT)

def oidc_providers(self) -> List[OidcProvider]:
    """List the OIDC providers supported across the aggregated backends."""
    providers = self._backends.get_oidc_providers()
    return providers

def file_formats(self) -> dict:
    """Return the merged file format listing, served from the TTL cache."""
    # Cache hit returns the stored listing; a miss computes it via `_file_formats`.
    return self._cache.get_or_call(
        key="file_formats",
        callback=self._file_formats,
    )

def _file_formats(self) -> dict:
    """Fetch `/file_formats` from every backend and merge them (best effort).

    Backends that fail to respond are logged and skipped; for duplicate
    format names, the entry from the last responding backend wins.
    """
    merged = {"input": {}, "output": {}}
    for connection in self._backends:
        try:
            file_formats = connection.get("/file_formats").json()
        except Exception:
            # TODO: fail instead of warn?
            _log.warning(f"Failed to get file_formats from {connection.id}", exc_info=True)
            continue
        # TODO smarter merging: case insensitive format name handling, parameter differences?
        for direction in ("input", "output"):
            merged[direction].update(file_formats.get(direction, {}))
    return merged
3 changes: 3 additions & 0 deletions src/openeo_aggregator/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,3 +63,6 @@ def get_or_call(self, key, callback, ttl=None):
res = callback()
self.set(key, res, ttl=ttl)
return res

def flush_all(self):
    """Discard every cached entry by starting from a fresh, empty store."""
    self._cache = dict()
92 changes: 92 additions & 0 deletions tests/test_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,98 @@ def test_oidc_providers(self, multi_backend_connection, config, backend1, backen
expected = {"id": "y", "issuer": "https://y.test", "title": "YY", "scopes": ["openid"]}
assert provider.prepare_for_json() == expected

def test_file_formats_simple(self, multi_backend_connection, config, backend1, backend2, requests_mock):
    """When all backends expose the same single format, the aggregate equals it."""
    geotiff_only = {
        direction: {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}
        for direction in ("input", "output")
    }
    for backend in (backend1, backend2):
        requests_mock.get(backend + "/file_formats", json=geotiff_only)
    aggregator = AggregatorBackendImplementation(backends=multi_backend_connection, config=config)
    assert aggregator.file_formats() == geotiff_only

def test_file_formats_caching(self, multi_backend_connection, config, backend1, backend2, requests_mock):
    """`file_formats` hits each backend once, then serves from cache until flushed."""
    geotiff_only = {
        direction: {"GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"}}
        for direction in ("input", "output")
    }
    mocks = [
        requests_mock.get(backend + "/file_formats", json=geotiff_only)
        for backend in (backend1, backend2)
    ]
    aggregator = AggregatorBackendImplementation(backends=multi_backend_connection, config=config)
    assert aggregator.file_formats() == geotiff_only
    assert [m.call_count for m in mocks] == [1, 1]
    # A second call must be answered from the cache: no extra backend requests.
    _ = aggregator.file_formats()
    assert [m.call_count for m in mocks] == [1, 1]
    # Flushing the cache forces a fresh round-trip to every backend.
    aggregator._cache.flush_all()
    _ = aggregator.file_formats()
    assert [m.call_count for m in mocks] == [2, 2]

def test_file_formats_merging(self, multi_backend_connection, config, backend1, backend2, requests_mock):
    """Formats from multiple backends are merged; on a name clash the later backend wins."""
    # backend1: richer listing, including a "GTiff" output entry with non-trivial parameters.
    requests_mock.get(backend1 + "/file_formats", json={
        "input": {
            "GeoJSON": {"gis_data_types": ["vector"], "parameters": {}}},
        "output": {
            "CSV": {"gis_data_types": ["raster"], "parameters": {}, "title": "Comma Separated Values"},
            "GTiff": {
                "gis_data_types": ["raster"],
                "parameters": {
                    "ZLEVEL": {"type": "string", "default": "6"},
                    "tile_grid": {"type": "string", "enum": ["none", "wgs84", "utm-10km"], "default": "none"}
                },
                "title": "GeoTiff"
            },
            "JSON": {"gis_data_types": ["raster"], "parameters": {}},
            "NetCDF": {
                "gis_data_types": ["other", "raster"],
                "parameters": {
                    "feature_id_property": {"type": "string", "default": None, "description": "..."},
                },
                "title": "Network Common Data Form",
            },
        }
    })
    # backend2: overlapping "GTiff" (bare parameters) and a differently-cased "netCDF".
    requests_mock.get(backend2 + "/file_formats", json={
        "input": {
            "GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"},
        },
        "output": {
            "GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"},
            "netCDF": {"gis_data_types": ["raster"], "parameters": {}, "title": "netCDF"},
        }
    })
    implementation = AggregatorBackendImplementation(backends=multi_backend_connection, config=config)
    file_formats = implementation.file_formats()
    # NOTE: backend2's "GTiff" entry replaces backend1's richer one wholesale
    # (dict.update semantics), so the merged "GTiff" ends up with empty parameters.
    assert file_formats == {
        "input": {
            "GeoJSON": {"gis_data_types": ["vector"], "parameters": {}},
            "GTiff": {"gis_data_types": ["raster"], "parameters": {}, "title": "GeoTiff"},
        },
        "output": {
            "CSV": {"gis_data_types": ["raster"], "parameters": {}, "title": "Comma Separated Values"},
            "GTiff": {
                "gis_data_types": ["raster"],
                # TODO: merge parameters of backend1 and backend2?
                "parameters": {},
                "title": "GeoTiff"
            },
            "JSON": {"gis_data_types": ["raster"], "parameters": {}},
            "NetCDF": {
                "gis_data_types": ["other", "raster"],
                "parameters": {
                    "feature_id_property": {"type": "string", "default": None, "description": "..."},
                },
                "title": "Network Common Data Form",
            },
            # TODO: merge "NetCDF" and "netCDF"?
            "netCDF": {"gis_data_types": ["raster"], "parameters": {}, "title": "netCDF"},
        }
    }


class TestAggregatorCollectionCatalog:

Expand Down

0 comments on commit 02bc7b2

Please sign in to comment.