
Commit

rename to training_args_max_seq_length
Signed-off-by: George Ohashi <[email protected]>
horheynm committed Feb 18, 2025
1 parent fada8f0 commit 52a4d85
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions src/llmcompressor/transformers/finetune/session_mixin.py
@@ -88,12 +88,11 @@ def __init__(
         # inherits HuggingFace's `TrainingArguments`
         training_args_dict = training_args.to_dict()
         if "max_seq_length" in training_args_dict:
-            training_args_dict.pop("max_seq_length")
+            training_args_dict["training_args_max_seq_length"] = training_args_dict.pop("max_seq_length")
             logger.warning(
                 "Detected `max_seq_length` in both data_args ",
-                "and training_args. This is expected for TRL in distillation. ",
-                "Updating metadata with "
-                f"`max_seq_length`: {data_args.max_seq_length} from data_args.",
+                "andx training_args. This is expected for TRL in distillation. ",
+                "Updating metadata to `training_args_max_seq_length`"
             )

         self.metadata = self._extract_metadata(
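
For readers skimming the diff: the rename keeps the training_args value of max_seq_length in the metadata under a non-colliding key instead of discarding it, so it no longer clashes with data_args.max_seq_length. Below is a minimal, hypothetical sketch of the effect, using plain dicts as stand-ins for the real data_args / TrainingArguments objects and a simple dict merge in place of self._extract_metadata(...):

# Hypothetical stand-ins for data_args / TrainingArguments; the real code in
# session_mixin.py builds training_args_dict via training_args.to_dict().
data_args_dict = {"max_seq_length": 2048}
training_args_dict = {"max_seq_length": 1024, "learning_rate": 2e-5}

if "max_seq_length" in training_args_dict:
    # Before this commit the training_args value was simply dropped:
    #     training_args_dict.pop("max_seq_length")
    # Now it is preserved under a key that cannot collide with data_args:
    training_args_dict["training_args_max_seq_length"] = training_args_dict.pop(
        "max_seq_length"
    )

# Stand-in for the merge performed when metadata is extracted; the real
# _extract_metadata selects specific keys rather than merging whole dicts.
metadata = {**data_args_dict, **training_args_dict}
print(metadata)
# {'max_seq_length': 2048, 'learning_rate': 2e-05, 'training_args_max_seq_length': 1024}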

