From dd6fd3d5a52abe02de44f2147ccf734d30367cd8 Mon Sep 17 00:00:00 2001
From: Jack Roberts
Date: Sun, 4 Aug 2024 07:58:12 +0100
Subject: [PATCH] logging

---
 src/arcsf/models/model.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/arcsf/models/model.py b/src/arcsf/models/model.py
index e602d87..d51f22d 100644
--- a/src/arcsf/models/model.py
+++ b/src/arcsf/models/model.py
@@ -34,7 +34,9 @@ def load_maybe_peft_model(
         model = AutoPeftModelForCausalLM.from_pretrained(
             model_path_or_id, **model_kwargs, torch_dtype="auto"
         )
+        logger.info("Loaded PEFT model")
         if merge:
+            logger.info("Merging PEFT adapters")
             model = model.merge_and_unload()
     except ValueError as err:
         if "Can't find 'adapter_config.json'" not in str(err):
@@ -42,6 +44,8 @@ def load_maybe_peft_model(
         model = AutoModelForCausalLM.from_pretrained(
             model_path_or_id, **model_kwargs, torch_dtype="auto"
         )
+        logger.info("Loaded model normally without PEFT")
+
     return model
 
 
@@ -76,7 +80,7 @@ def load_model_and_tokenizer(
     # Optionally add padding token
     if tokenizer.pad_token is None:
         if add_padding_token:
-            logger.info("Adding pad token tok tokenizer and model.")
+            logger.info("Adding pad token to tokenizer and model.")
             tokenizer.add_special_tokens({"pad_token": "<|padding|>"})
             add_token_to_model = True
         else:
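
Note for reviewers: below is a minimal sketch of how load_maybe_peft_model reads with this patch applied. The imports, the logger setup, the function signature (including the default for merge), the docstring, and the bare raise inside the except branch are assumptions inferred from the hunk context; only the lines visible in the diff above are taken from the actual file.

    import logging

    from peft import AutoPeftModelForCausalLM
    from transformers import AutoModelForCausalLM

    logger = logging.getLogger(__name__)


    def load_maybe_peft_model(model_path_or_id, merge=False, **model_kwargs):
        """Load a PEFT model if adapter weights are found, else a plain causal LM."""
        try:
            # Attempt to load as a PEFT model; this requires adapter_config.json.
            model = AutoPeftModelForCausalLM.from_pretrained(
                model_path_or_id, **model_kwargs, torch_dtype="auto"
            )
            logger.info("Loaded PEFT model")
            if merge:
                logger.info("Merging PEFT adapters")
                model = model.merge_and_unload()
        except ValueError as err:
            # Only swallow the "missing adapter config" error; re-raise anything else
            # (assumption: the elided line between the two hunks is a bare raise).
            if "Can't find 'adapter_config.json'" not in str(err):
                raise
            model = AutoModelForCausalLM.from_pretrained(
                model_path_or_id, **model_kwargs, torch_dtype="auto"
            )
            logger.info("Loaded model normally without PEFT")

        return model

The added logger.info calls record which branch was taken (PEFT load, adapter merge, or plain fallback), which is otherwise invisible because the ValueError fallback fails silently.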