diff --git a/src/llmcompressor/transformers/finetune/session_mixin.py b/src/llmcompressor/transformers/finetune/session_mixin.py
index 8764c8854..d1ea22efb 100644
--- a/src/llmcompressor/transformers/finetune/session_mixin.py
+++ b/src/llmcompressor/transformers/finetune/session_mixin.py
@@ -88,11 +88,13 @@ def __init__(
         # inherits HuggingFace's `TrainingArguments`
         training_args_dict = training_args.to_dict()
         if "max_seq_length" in training_args_dict:
-            training_args_dict["training_args_max_seq_length"] = training_args_dict.pop("max_seq_length")
+            training_args_dict["training_args_max_seq_length"] = (
+                training_args_dict.pop("max_seq_length")
+            )
             logger.warning(
                 "Detected `max_seq_length` in both data_args ",
                 "and training_args. This is expected for TRL in distillation. ",
-                "Updating metadata to `training_args_max_seq_length`"
+                "Updating metadata to `training_args_max_seq_length`",
             )

         self.metadata = self._extract_metadata(
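
For context on what the reformatted hunk preserves: the key is renamed so that the `max_seq_length` coming from `TrainingArguments` does not clobber the `max_seq_length` that data_args already supplies when both dicts feed the session metadata. Below is a minimal, self-contained sketch of that collision-avoidance pattern; the dict contents are illustrative placeholders, not the real argument objects or the internals of `_extract_metadata`.

```python
# Minimal sketch: rename a colliding key before merging two config dicts.
# The values here are illustrative placeholders, not the actual
# data_args / TrainingArguments contents.
data_args_dict = {"max_seq_length": 2048}
training_args_dict = {"max_seq_length": 512, "learning_rate": 2e-5}

if "max_seq_length" in training_args_dict:
    # Move the training-args value under a prefixed key so the
    # data_args value survives the merge untouched.
    training_args_dict["training_args_max_seq_length"] = training_args_dict.pop(
        "max_seq_length"
    )

metadata = {**data_args_dict, **training_args_dict}
assert metadata["max_seq_length"] == 2048  # from data_args
assert metadata["training_args_max_seq_length"] == 512  # renamed training value
```

Renaming at the source dict, rather than special-casing the merge, keeps the eventual merge a plain dict union and leaves both values recoverable from the metadata.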