Skip to content

Commit

Permalink
Bug Fix: recipe stages were not being concatenated (#150)
Browse files Browse the repository at this point in the history
* Bug Fix: recipe stages were not being concatenated

* Remove extraneous comment
  • Loading branch information
rahul-tuli authored Sep 9, 2024
1 parent c4f7778 commit 108afb0
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 1 deletion.
10 changes: 9 additions & 1 deletion src/llmcompressor/transformers/sparsification/sparse_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,14 @@
from torch.nn import Module
from transformers import AutoModelForCausalLM, PreTrainedModel

from llmcompressor.pytorch.model_load.helpers import initialize_recipe
from llmcompressor.transformers.sparsification.compressed_tensors_utils import (
modify_save_pretrained,
)
from llmcompressor.transformers.utils.helpers import download_model_directory
from llmcompressor.transformers.utils.helpers import (
download_model_directory,
resolve_recipe,
)

__all__ = ["SparseAutoModel", "SparseAutoModelForCausalLM", "get_shared_tokenizer_src"]

Expand Down Expand Up @@ -142,6 +146,10 @@ def skip(*args, **kwargs):
compressor.decompress(
model_path=pretrained_model_name_or_path, model=model
)
recipe = resolve_recipe(recipe=recipe, model_path=pretrained_model_name_or_path)

if recipe:
initialize_recipe(model=model, recipe_path=recipe)

return model

Expand Down
1 change: 1 addition & 0 deletions tests/testing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@ def _parse_configs_dir(current_config_dir):
logging.info(
f"Skipping testing model: {file} for cadence: {expected_cadence}"
)

if isinstance(configs_directory, list):
for config in configs_directory:
_parse_configs_dir(config)
Expand Down

0 comments on commit 108afb0

Please sign in to comment.