Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use debug for loggers #655

Merged
merged 2 commits
Aug 31, 2022
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 16 additions & 16 deletions src/accelerate/tracking.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,8 +134,8 @@ def __init__(self, run_name: str, logging_dir: Optional[Union[str, os.PathLike]]
self.run_name = run_name
self.logging_dir = os.path.join(logging_dir, run_name)
self.writer = tensorboard.SummaryWriter(self.logging_dir, **kwargs)
logger.info(f"Initialized TensorBoard project {self.run_name} logging to {self.logging_dir}")
logger.info(
logger.debug(f"Initialized TensorBoard project {self.run_name} logging to {self.logging_dir}")
logger.debug(
"Make sure to log any initial configurations with `self.store_init_configuration` before training!"
)

Expand Down Expand Up @@ -164,7 +164,7 @@ def store_init_configuration(self, values: dict):
except yaml.representer.RepresenterError:
logger.error("Serialization to store hyperparameters failed")
raise
logger.info("Stored initial configuration hyperparameters to TensorBoard and hparams yaml file")
logger.debug("Stored initial configuration hyperparameters to TensorBoard and hparams yaml file")

def log(self, values: dict, step: Optional[int] = None, **kwargs):
"""
Expand All @@ -188,14 +188,14 @@ def log(self, values: dict, step: Optional[int] = None, **kwargs):
elif isinstance(v, dict):
self.writer.add_scalars(k, v, global_step=step, **kwargs)
self.writer.flush()
logger.info("Successfully logged to TensorBoard")
logger.debug("Successfully logged to TensorBoard")

def finish(self):
"""
Closes `TensorBoard` writer
"""
self.writer.close()
logger.info("TensorBoard writer closed")
logger.debug("TensorBoard writer closed")


class WandBTracker(GeneralTracker):
Expand All @@ -215,8 +215,8 @@ class WandBTracker(GeneralTracker):
def __init__(self, run_name: str, **kwargs):
self.run_name = run_name
self.run = wandb.init(project=self.run_name, **kwargs)
logger.info(f"Initialized WandB project {self.run_name}")
logger.info(
logger.debug(f"Initialized WandB project {self.run_name}")
logger.debug(
"Make sure to log any initial configurations with `self.store_init_configuration` before training!"
)

Expand All @@ -234,7 +234,7 @@ def store_init_configuration(self, values: dict):
`str`, `float`, `int`, or `None`.
"""
wandb.config.update(values)
logger.info("Stored initial configuration hyperparameters to WandB")
logger.debug("Stored initial configuration hyperparameters to WandB")

def log(self, values: dict, step: Optional[int] = None, **kwargs):
"""
Expand All @@ -250,14 +250,14 @@ def log(self, values: dict, step: Optional[int] = None, **kwargs):
Additional key word arguments passed along to the `wandb.log` method.
"""
self.run.log(values, step=step, **kwargs)
logger.info("Successfully logged to WandB")
logger.debug("Successfully logged to WandB")

def finish(self):
"""
Closes `wandb` writer
"""
self.run.finish()
logger.info("WandB run closed")
logger.debug("WandB run closed")


class CometMLTracker(GeneralTracker):
Expand All @@ -279,8 +279,8 @@ class CometMLTracker(GeneralTracker):
def __init__(self, run_name: str, **kwargs):
self.run_name = run_name
self.writer = Experiment(project_name=run_name, **kwargs)
logger.info(f"Initialized CometML project {self.run_name}")
logger.info(
logger.debug(f"Initialized CometML project {self.run_name}")
logger.debug(
"Make sure to log any initial configurations with `self.store_init_configuration` before training!"
)

Expand All @@ -298,7 +298,7 @@ def store_init_configuration(self, values: dict):
`str`, `float`, `int`, or `None`.
"""
self.writer.log_parameters(values)
logger.info("Stored initial configuration hyperparameters to CometML")
logger.debug("Stored initial configuration hyperparameters to CometML")

def log(self, values: dict, step: Optional[int] = None, **kwargs):
"""
Expand All @@ -323,14 +323,14 @@ def log(self, values: dict, step: Optional[int] = None, **kwargs):
self.writer.log_other(k, v, **kwargs)
elif isinstance(v, dict):
self.writer.log_metrics(v, step=step, **kwargs)
logger.info("Successfully logged to CometML")
logger.debug("Successfully logged to CometML")

def finish(self):
"""
Closes `comet-ml` writer
"""
self.writer.end()
logger.info("CometML run closed")
logger.debug("CometML run closed")


LOGGER_TYPE_TO_CLASS = {"tensorboard": TensorBoardTracker, "wandb": WandBTracker, "comet_ml": CometMLTracker}
Expand Down Expand Up @@ -384,6 +384,6 @@ def filter_trackers(
)
loggers.append(log_type)
else:
logger.info(f"Tried adding logger {log_type}, but package is unavailable in the system.")
logger.debug(f"Tried adding logger {log_type}, but package is unavailable in the system.")

return loggers