geo-inference call returns mask name #20

Merged: 3 commits, Sep 5, 2024
geo_inference/geo_inference.py: 16 changes (12 additions, 4 deletions)
@@ -1,6 +1,7 @@
import os
import gc
import re
import sys
import time
import torch
import pystac
@@ -40,6 +41,7 @@
logger = logging.getLogger(__name__)



class GeoInference:

"""
@@ -114,7 +116,7 @@ async def run_async():
# Start the periodic garbage collection task
self.gc_task = asyncio.create_task(self.constant_gc(5)) # Calls gc.collect() every 5 seconds
# Run the main computation asynchronously
await self.async_run_inference(
self.mask_layer_name = await self.async_run_inference(
inference_input=inference_input,
bands_requested=bands_requested,
patch_size=patch_size,
@@ -126,9 +128,11 @@
try:
await self.gc_task
except asyncio.CancelledError:
logger.info("The End of Inference")
pass

asyncio.run(run_async())
return self.mask_layer_name
Review comment (Collaborator):
The return type of the __call__() function should be str instead of None
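
A minimal sketch of what this comment asks for: since the nested coroutine now stores its result on the instance and the synchronous wrapper returns it, __call__() can be annotated as returning str. The parameter list below is inferred from the call site in main() further down and may not match the file exactly:

import asyncio
from pathlib import Path
from typing import List, Union


class GeoInference:
    # ... __init__ and the other methods as in the file ...

    def __call__(
        self,
        inference_input: Union[Path, str],
        bands_requested: List[str],
        patch_size: int,
        workers: int,
        bbox: str,
    ) -> str:  # annotated str: the call now hands back the mask layer name
        async def run_async() -> None:
            # (Periodic gc task from the real method omitted for brevity.)
            # Store the coroutine's result on the instance so the
            # synchronous wrapper below can return it to the caller.
            self.mask_layer_name = await self.async_run_inference(
                inference_input=inference_input,
                bands_requested=bands_requested,
                patch_size=patch_size,
                workers=workers,
                bbox=bbox,
            )

        asyncio.run(run_async())
        return self.mask_layer_name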



async def async_run_inference(self,
inference_input: Union[Path, str],
@@ -189,6 +193,8 @@ async def async_run_inference(self,
yolo_csv_path = self.work_dir.joinpath(prefix_base_name + "_yolo.csv")
coco_json_path = self.work_dir.joinpath(prefix_base_name + "_coco.json")
stride_patch_size = int(patch_size / 2)


""" Processing starts"""
start_time = time.time()
try:
@@ -332,6 +338,7 @@ async def async_run_inference(self,
)
)
torch.cuda.empty_cache()
return mask_path.name

except Exception as e:
print(f"Processing on the Dask cluster failed due to: {e}")
@@ -356,14 +363,15 @@ def main() -> None:
num_classes=arguments["classes"],
prediction_threshold=arguments["prediction_threshold"]
)
geo_inference(
inference_mask_layer_name = geo_inference(
inference_input=arguments["image"],
bands_requested=arguments["bands_requested"],
patch_size=arguments["patch_size"],
workers=arguments["workers"],
bbox=arguments["bbox"],
)



if __name__ == "__main__":
main()
main()
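
In the part of main() visible here, inference_mask_layer_name is assigned but not used further. A purely illustrative follow-up, not part of this changeset, would be to surface the name so wrapper scripts calling the CLI can capture it:

def report_mask_name(inference_mask_layer_name: str) -> None:
    """Hypothetical helper, not in the PR: echo the returned mask file name."""
    print(f"Inference mask written as: {inference_mask_layer_name}")


# e.g., at the end of main():
# report_mask_name(inference_mask_layer_name)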