Skip to content

Commit

Permalink
merge main
Browse files Browse the repository at this point in the history
  • Loading branch information
horheynm committed Mar 6, 2025
1 parent 5edf461 commit 0ce00ad
Show file tree
Hide file tree
Showing 9 changed files with 9 additions and 16 deletions.
4 changes: 1 addition & 3 deletions src/llmcompressor/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,6 @@
active_session,
callbacks,
create_session,
finalize,
initialize,
reset_session,
)
from llmcompressor.entrypoints import Oneshot, oneshot
from llmcompressor.entrypoints import Oneshot, oneshot, train
4 changes: 0 additions & 4 deletions src/llmcompressor/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@
active_session,
callbacks,
create_session,
finalize,
initialize,
reset_session,
)
from llmcompressor.core.state import Data, Hardware, ModifiedState, State
Expand All @@ -35,8 +33,6 @@
"create_session",
"active_session",
"reset_session",
"initialize",
"finalize",
"apply",
"callbacks",
"LifecycleCallbacks",
Expand Down
4 changes: 2 additions & 2 deletions src/llmcompressor/transformers/finetune/session_mixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from transformers.trainer_callback import TrainerState
from transformers.trainer_utils import get_last_checkpoint

from llmcompressor.core import active_session, callbacks, create_session, finalize
from llmcompressor.core import active_session, callbacks, create_session
from llmcompressor.metrics import LoggerManager
from llmcompressor.modifiers.distillation.utils.pytorch.model_wrapper import (
KDModelWrapper,
Expand Down Expand Up @@ -182,7 +182,7 @@ def finalize_session(self):

with summon_full_params_context(self.model, offload_to_cpu=True):
        # in order to update each layer we need to gather all of its parameters
finalize()
active_session().finalize()
logger.info("Finalized LLM Compressor session")
model = get_session_model()
self.model = model
Expand Down
2 changes: 1 addition & 1 deletion src/llmcompressor/transformers/finetune/text_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def oneshot(**kwargs) -> None:

@deprecated(
message=(
"`from llmcompressor.transformers import train` is deprecated, "
"`from llmcompressor import train` is deprecated, "
"please use `from llmcompressor import train`."
)
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

class TestFinetuneNoRecipeCustomDataset(unittest.TestCase):
def _test_finetune_wout_recipe_custom_dataset(self):
from llmcompressor.transformers import train
from llmcompressor import train

dataset_path = Path(tempfile.mkdtemp())

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def setUp(self):
self.output = "./finetune_output"

def test_finetune_without_recipe(self):
from llmcompressor.transformers import train
from llmcompressor import train

recipe_str = None
device = "cuda:0"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,9 @@
from transformers import AutoModelForCausalLM
from transformers.utils.quantization_config import CompressedTensorsConfig

from llmcompressor import oneshot
from llmcompressor import oneshot, train
from llmcompressor.core import create_session
from llmcompressor.modifiers.quantization import QuantizationModifier
from llmcompressor.transformers import train


@pytest.mark.unit
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def setUp(self):
self.output = Path("./finetune_output")

def test_safetensors(self):
from llmcompressor.transformers import train
from llmcompressor import train

device = "cuda:0"
output_dir = self.output / "output1"
Expand Down
2 changes: 1 addition & 1 deletion tests/llmcompressor/transformers/test_clear_ml.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
except Exception:
is_clearml = False

from llmcompressor.transformers import train
from llmcompressor import train


@pytest.mark.skipif(not is_clearml, reason="clearML not installed")
Expand Down

0 comments on commit 0ce00ad

Please sign in to comment.