
Commit

avoid prints, replace with logger.info
pattonw committed Feb 18, 2025
1 parent a9c6eda commit f5c26fd
Showing 18 changed files with 56 additions and 1,128 deletions.
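The change swaps bare print() calls for a module-level logger. For reference, a minimal sketch of that pattern, assuming a standalone script; the function and run name below are illustrative, not taken from the repository:

import logging

logger = logging.getLogger(__name__)  # one logger per module, named after the module


def load_run(run_name: str) -> None:
    # messages go through the logging framework instead of stdout,
    # so callers control verbosity, formatting, and destination
    logger.info(f"Loading run {run_name}")


if __name__ == "__main__":
    # without a configured handler, INFO messages are dropped by default;
    # basicConfig attaches a stderr handler at the chosen level
    logging.basicConfig(level=logging.INFO)
    load_run("example_run")

This is the same setup the datasplit.py hunk below introduces alongside its print replacement.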
14 changes: 7 additions & 7 deletions dacapo/apply.py
@@ -91,7 +91,7 @@ def apply(
), "Either validation_dataset and criterion, or iteration must be provided."

# retrieving run
print(f"Loading run {run_name}")
logger.info(f"Loading run {run_name}")
config_store = create_config_store()
run_config = config_store.retrieve_run_config(run_name)
run = Run(run_config)
@@ -102,7 +102,7 @@ def apply(
# load weights
if iteration is None:
iteration = weights_store.retrieve_best(run_name, validation_dataset, criterion) # type: ignore
print(f"Loading weights for iteration {iteration}")
logger.info(f"Loading weights for iteration {iteration}")
weights_store.retrieve_weights(run_name, iteration)

if parameters is None:
@@ -121,7 +121,7 @@ def apply(
raise ValueError(
"validation_dataset must be a dataset name or a Dataset object, or parameters must be provided explicitly."
)
print(f"Finding best parameters for validation dataset {_validation_dataset}")
logger.info(f"Finding best parameters for validation dataset {_validation_dataset}")
parameters = run.task.evaluator.get_overall_best_parameters(
_validation_dataset, criterion
)
@@ -183,7 +183,7 @@ def apply(
output_container, f"output_{run_name}_{iteration}_{parameters}"
)

-print(
+logger.info(
f"Applying best results from run {run.name} at iteration {iteration} to dataset {Path(input_container, input_dataset)}"
)
return apply_run(
@@ -243,7 +243,7 @@ def apply_run(
... )
"""
# render prediction dataset
print(f"Predicting on dataset {prediction_array_identifier}")
logger.info(f"Predicting on dataset {prediction_array_identifier}")
predict(
run.name,
iteration,
@@ -257,13 +257,13 @@
)

# post-process the output
-print(
+logger.info(
f"Post-processing output to dataset {output_array_identifier}",
output_array_identifier,
)
post_processor = run.task.post_processor
post_processor.set_prediction(prediction_array_identifier)
post_processor.process(parameters, output_array_identifier, num_workers=num_workers)

print("Done")
logger.info("Done")
return
8 changes: 4 additions & 4 deletions dacapo/cli.py
@@ -680,7 +680,7 @@ def segment_blockwise(
overwrite=overwrite,
write_size=write_roi.shape,
)
-print(
+logger.info(
f"Created output array {output_array_identifier.container}:{output_array_identifier.dataset} with ROI {_total_roi}."
)

@@ -791,7 +791,7 @@ def config():
def generate_dacapo_yaml(config):
with open("dacapo.yaml", "w") as f:
yaml.dump(config.serialize(), f, default_flow_style=False)
print("dacapo.yaml has been created.")
logger.info("dacapo.yaml has been created.")


def generate_config(
@@ -832,7 +832,7 @@ def unpack_ctx(ctx):
Example:
>>> ctx = ...
>>> kwargs = unpack_ctx(ctx)
->>> print(kwargs)
+>>> logger.info(kwargs)
{'arg1': value1, 'arg2': value2, ...}
"""
kwargs = {
@@ -843,7 +843,7 @@ def unpack_ctx(ctx):
kwargs[k] = int(v)
elif v.replace(".", "").isnumeric():
kwargs[k] = float(v)
print(f"{k}: {kwargs[k]}")
logger.info(f"{k}: {kwargs[k]}")
return kwargs


5 changes: 4 additions & 1 deletion dacapo/experiments/datasplits/datasplit.py
@@ -5,6 +5,9 @@
import json
import itertools

+import logging
+
+logger = logging.getLogger(__name__)

class DataSplit(ABC):
"""
@@ -84,7 +87,7 @@ def _neuroglancer(self, embedded=False, bind_address="0.0.0.0", bind_port=0):
neuroglancer.LayerGroupViewer(layers=list(validate_layers.keys())),
]
)
print(f"Neuroglancer link: {viewer}")
logger.info(f"Neuroglancer link: {viewer}")
if embedded:
from IPython.display import IFrame
