Skip to content

Commit

Permalink
all lint permissions
Browse files Browse the repository at this point in the history
  • Loading branch information
Hgherzog committed Oct 21, 2024
1 parent e66dfaf commit 22a0e2b
Show file tree
Hide file tree
Showing 25 changed files with 3,949 additions and 366 deletions.
5 changes: 2 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
exclude_types: ["*.geojson"]
exclude: ^one_off_projects/
exclude: ^(one_off_projects|landsat|platforms|sentinel2_postprocess_revamp|amazon_conservation|convert_satlas_webmercator_to_rslearn)/ # Excluding projects that have not been migrated to rslp
ci:
autofix_prs: false
autoupdate_branch: ""
Expand Down Expand Up @@ -56,8 +56,7 @@ repos:
exclude: ^tests/
args:
- -s
- B101
- B311
- B101,B311,B324,B108,B104
- repo: local
hooks:
- id: interrogate
Expand Down
4 changes: 3 additions & 1 deletion amazon_conservation/make_dataset/lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
sentinel2_url = "https://se-tile-api.allen.ai/image_mosaic/sentinel2/[LABEL]/tci/[ZOOM]/[COL]/[ROW].png"
chip_size = 512

TIMEOUT = 10000


def get_sentinel2_callback(label):
def callback(tile):
Expand All @@ -19,7 +21,7 @@ def callback(tile):
cur_url = cur_url.replace("[COL]", str(tile[0]))
cur_url = cur_url.replace("[ROW]", str(tile[1]))

response = requests.get(cur_url)
response = requests.get(cur_url, timeout=TIMEOUT)
if response.status_code != 200:
print(f"got status_code={response.status_code} url={cur_url}")
if response.status_code == 404:
Expand Down
3,937 changes: 3,726 additions & 211 deletions data/forest_loss_driver/config_planet.json

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions landsat/existing_dataset_to_utm/prepare_windows.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""This script prepares UTM rslearn windows corresponding to the existing WebMercator landsat windows.
But it also produces:
1. File containing four corners of rectangle of original window in the new coordinate system.
The image should be blacked out outside of this quadrilateral.
Expand Down Expand Up @@ -54,6 +55,7 @@


def handle(example_id):
"""Handle a single example."""
# Extract polygon in source projection coordinates from the example folder name.
parts = example_id.split("_")
col = int(parts[0]) - total_pixels // 2
Expand Down
84 changes: 52 additions & 32 deletions one_off_projects/super_resolution/global_windows/config.json
Original file line number Diff line number Diff line change
@@ -1,34 +1,54 @@
{
"layers": {
"sentinel2": {
"type": "raster",
"band_sets": [{
"dtype": "uint16",
"bands": ["B02", "B03", "B04", "B08"],
"format": "geotiff"
}, {
"dtype": "uint16",
"bands": ["B05", "B06", "B07", "B8A", "B11", "B12"],
"format": "geotiff",
"zoom_offset": -1
}, {
"dtype": "uint16",
"bands": ["B01", "B09", "B10"],
"zoom_offset": -2
}],
"data_source": {
"name": "rslearn.data_sources.gcp_public_data.Sentinel2",
"index_cache_dir": "/data/favyenb/rslearn_superres_non_us/cache/sentinel2_gcp/",
"max_time_delta": "1d",
"sort_by": "cloud_cover",
"query_config": {
"max_matches": 32
}
}
}
},
"tile_store": {
"name": "file",
"root_dir": "/data/favyenb/rslearn_superres_non_us/tiles"
}
"layers": {
"sentinel2": {
"band_sets": [
{
"bands": [
"B02",
"B03",
"B04",
"B08"
],
"dtype": "uint16",
"format": "geotiff"
},
{
"bands": [
"B05",
"B06",
"B07",
"B8A",
"B11",
"B12"
],
"dtype": "uint16",
"format": "geotiff",
"zoom_offset": -1
},
{
"bands": [
"B01",
"B09",
"B10"
],
"dtype": "uint16",
"zoom_offset": -2
}
],
"data_source": {
"index_cache_dir": "/data/favyenb/rslearn_superres_non_us/cache/sentinel2_gcp/",
"max_time_delta": "1d",
"name": "rslearn.data_sources.gcp_public_data.Sentinel2",
"query_config": {
"max_matches": 32
},
"sort_by": "cloud_cover"
},
"type": "raster"
}
},
"tile_store": {
"name": "file",
"root_dir": "/data/favyenb/rslearn_superres_non_us/tiles"
}
}
80 changes: 48 additions & 32 deletions one_off_projects/super_resolution/us_dataset/config_landsat.json
Original file line number Diff line number Diff line change
@@ -1,34 +1,50 @@
{
"layers": {
"sentinel2": {
"type": "raster",
"band_sets": [{
"dtype": "uint16",
"bands": ["B1", "B2", "B3", "B4", "B5", "B6", "B7", "B9", "B10", "B11"],
"format": "geotiff",
"zoom_offset": -1
}, {
"dtype": "uint16",
"bands": ["B8"],
"format": "geotiff"
}],
"data_source": {
"name": "rslearn.data_sources.aws_landsat.LandsatOliTirs",
"metadata_cache_dir": "cache/landsat/",
"max_time_delta": "1d",
"query_config": {
"max_matches": 4
},
"sort_by": "cloud_cover"
}
}
},
"tile_store": {
"name": "file",
"root_dir": "tiles",
"raster_format": {
"name": "image_tile",
"format": "geotiff"
}
}
"layers": {
"sentinel2": {
"band_sets": [
{
"bands": [
"B1",
"B2",
"B3",
"B4",
"B5",
"B6",
"B7",
"B9",
"B10",
"B11"
],
"dtype": "uint16",
"format": "geotiff",
"zoom_offset": -1
},
{
"bands": [
"B8"
],
"dtype": "uint16",
"format": "geotiff"
}
],
"data_source": {
"max_time_delta": "1d",
"metadata_cache_dir": "cache/landsat/",
"name": "rslearn.data_sources.aws_landsat.LandsatOliTirs",
"query_config": {
"max_matches": 4
},
"sort_by": "cloud_cover"
},
"type": "raster"
}
},
"tile_store": {
"name": "file",
"raster_format": {
"format": "geotiff",
"name": "image_tile"
},
"root_dir": "tiles"
}
}
4 changes: 2 additions & 2 deletions rslp/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
class BaseDataPipelineConfig:
"""Base configuration for a data pipeline."""

def __init__(self, ds_root: str, workers: int = 1):
def __init__(self, ds_root: str, workers: int = 1) -> None:
"""Create a new BaseDataPipelineConfig.
Args:
Expand All @@ -18,6 +18,6 @@ def __init__(self, ds_root: str, workers: int = 1):
class BaseTrainPipelineConfig:
"""Base configuration for a model training pipeline."""

def __init__(self):
def __init__(self) -> None:
"""Create a new BaseTrainPipelineConfig."""
pass
2 changes: 1 addition & 1 deletion rslp/docker_entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import multiprocessing


def main():
def main() -> None:
"""Docker entrypoint for rslp.
Downloads the code from GCS before running the job.
Expand Down
24 changes: 14 additions & 10 deletions rslp/landsat_vessels/predict_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
LANDSAT_RESOLUTION = 15

CLASSIFY_WINDOW_SIZE = 64
"""The size of windows expected by the classifier."""


class VesselDetection:
Expand All @@ -38,7 +37,7 @@ def __init__(
projection: Projection,
score: float,
crop_window_dir: UPath | None = None,
):
) -> None:
"""Create a new VesselDetection.
Args:
Expand Down Expand Up @@ -186,7 +185,7 @@ def predict_pipeline(
crop_path: str,
image_files: dict[str, str] | None = None,
scene_id: str | None = None,
):
) -> None:
"""Run the Landsat vessel prediction pipeline.
This inputs a Landsat scene (consisting of per-band GeoTIFFs) and produces the
Expand All @@ -209,7 +208,7 @@ def predict_pipeline(
# Setup the dataset configuration file with the provided image files.
with open(LOCAL_FILES_DATASET_CONFIG) as f:
cfg = json.load(f)
item_spec = {
item_spec: dict = {
"fnames": [],
"bands": [],
}
Expand Down Expand Up @@ -262,18 +261,23 @@ def predict_pipeline(

# Run pipeline.
detections = get_vessel_detections(
ds_path, projection, scene_bounds, time_range=time_range
ds_path,
projection,
scene_bounds, # type: ignore
time_range=time_range,
)
detections = run_classifier(ds_path, detections, time_range=time_range)

# Write JSON and crops.
json_path = UPath(json_path)
crop_path = UPath(crop_path)
json_upath = UPath(json_path)
crop_upath = UPath(crop_path)

json_data = []
for idx, detection in enumerate(detections):
# Load crops from the window directory.
images = {}
if detection.crop_window_dir is None:
raise ValueError("Crop window directory is None")
for band in ["B2", "B3", "B4", "B8"]:
image_fname = (
detection.crop_window_dir / "layers" / "landsat" / band / "geotiff.tif"
Expand All @@ -300,11 +304,11 @@ def predict_pipeline(
[images["B4_sharp"], images["B3_sharp"], images["B2_sharp"]], axis=2
)

rgb_fname = crop_path / f"{idx}_rgb.png"
rgb_fname = crop_upath / f"{idx}_rgb.png"
with rgb_fname.open("wb") as f:
Image.fromarray(rgb).save(f, format="PNG")

b8_fname = crop_path / f"{idx}_b8.png"
b8_fname = crop_upath / f"{idx}_b8.png"
with b8_fname.open("wb") as f:
Image.fromarray(images["B8"]).save(f, format="PNG")

Expand All @@ -326,5 +330,5 @@ def predict_pipeline(
)
)

with json_path.open("w") as f:
with json_upath.open("w") as f:
json.dump(json_data, f)
28 changes: 14 additions & 14 deletions rslp/launch_beaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def launch_job(
mode: str,
workspace: str = DEFAULT_WORKSPACE,
username: str | None = None,
):
) -> None:
"""Launch training for the specified config on Beaker.
Args:
Expand All @@ -45,27 +45,27 @@ def launch_job(
with beaker.session():
env_vars = [
EnvVar(
name="WANDB_API_KEY",
secret="RSLEARN_WANDB_API_KEY",
name="WANDB_API_KEY", # nosec
secret="RSLEARN_WANDB_API_KEY", # nosec
),
EnvVar(
name="GOOGLE_APPLICATION_CREDENTIALS",
value="/etc/credentials/gcp_credentials.json",
name="GOOGLE_APPLICATION_CREDENTIALS", # nosec
value="/etc/credentials/gcp_credentials.json", # nosec
),
EnvVar(
name="GCLOUD_PROJECT",
value="prior-satlas",
name="GCLOUD_PROJECT", # nosec
value="prior-satlas", # nosec
),
EnvVar(
name="S3_ACCESS_KEY_ID",
secret="RSLEARN_WEKA_KEY",
name="S3_ACCESS_KEY_ID", # nosec
secret="RSLEARN_WEKA_KEY", # nosec
),
EnvVar(
name="S3_SECRET_ACCESS_KEY",
secret="RSLEARN_WEKA_SECRET",
name="S3_SECRET_ACCESS_KEY", # nosec
secret="RSLEARN_WEKA_SECRET", # nosec
),
EnvVar(
name="RSLP_PROJECT",
name="RSLP_PROJECT", # nosec
value=project_id,
),
EnvVar(
Expand Down Expand Up @@ -100,8 +100,8 @@ def launch_job(
preemptible=True,
datasets=[
DataMount(
source=DataSource(secret="RSLEARN_GCP_CREDENTIALS"),
mount_path="/etc/credentials/gcp_credentials.json",
source=DataSource(secret="RSLEARN_GCP_CREDENTIALS"), # nosec
mount_path="/etc/credentials/gcp_credentials.json", # nosec
),
],
env_vars=env_vars,
Expand Down
Loading

0 comments on commit 22a0e2b

Please sign in to comment.