diff --git a/src/llmcompressor/transformers/finetune/runner.py b/src/llmcompressor/transformers/finetune/runner.py
index 769b84248..d3a7515f6 100644
--- a/src/llmcompressor/transformers/finetune/runner.py
+++ b/src/llmcompressor/transformers/finetune/runner.py
@@ -288,3 +288,4 @@ def run_sequential_stages(self, checkpoint: Optional[str] = None):
             torch.cuda.empty_cache()
             self.trainer.accelerator.free_memory()
             self.trainer.accelerator.wait_for_everyone()
+
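
The context lines in this hunk show the cleanup pattern `run_sequential_stages` applies between stages: drop PyTorch's cached CUDA allocations, let Accelerate release its internal references, and synchronize all processes before continuing. Below is a minimal standalone sketch of that pattern, not the library's actual runner code; the function name `cleanup_between_stages` is hypothetical, while `torch.cuda.empty_cache()`, `Accelerator.free_memory()`, and `Accelerator.wait_for_everyone()` are the real APIs used in the diff above.

```python
# Hypothetical sketch of the between-stage cleanup pattern shown in the hunk above.
import torch
from accelerate import Accelerator


def cleanup_between_stages(accelerator: Accelerator) -> None:
    """Release GPU memory and synchronize ranks before starting the next stage."""
    # Return cached CUDA blocks to the driver so the next stage starts from a clean pool.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
    # Drop references Accelerate holds to prepared models, optimizers, and dataloaders.
    accelerator.free_memory()
    # Block until every process reaches this point, so no rank races ahead.
    accelerator.wait_for_everyone()


if __name__ == "__main__":
    cleanup_between_stages(Accelerator())
```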