Skip to content

Commit

Permalink
Revert ":package: Update transformers to min of 4.36.0 to get new caching module"
Browse files Browse the repository at this point in the history
  • Loading branch information
gkumbhat authored Feb 9, 2024
1 parent aa066f8 commit d64c316
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 1,902 deletions.
5 changes: 2 additions & 3 deletions caikit_nlp/modules/text_generation/peft_prompt_tuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,7 @@ def train(

# Remove _name_or_path field as a model can be
# saved in different location but still same
base_model_config.pop("_name_or_path", None)
del base_model_config["_name_or_path"]
error.value_check(
"<NLP07232147E>",
"_name_or_path" not in base_model_config,
Expand Down Expand Up @@ -571,8 +571,7 @@ def load(
if peft_config.task_type == "CAUSAL_LM":
# get the transformers Causal LM model
base_model = AutoModelForCausalLM.from_pretrained(
peft_config.base_model_name_or_path,
torch_dtype=torch_dtype,
peft_config.base_model_name_or_path
)
# get the PEFT causal LM model
model = PeftModel.from_pretrained(base_model, model_config)
Expand Down
5 changes: 0 additions & 5 deletions caikit_nlp/resources/pretrained_model/hf_auto_causal_lm.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,6 @@

# Local
from ...data_model import GenerationTrainRecord, PromptOutputModelType

# Note: Below module is imported to allow loading of fm stack sphinx models
from ...toolkit.text_generation import ( # pylint: disable=unused-import
granite_modeling_llama,
)
from ...toolkit.verbalizer_utils import render_verbalizer
from .base import PretrainedModelBase

Expand Down
Loading

0 comments on commit d64c316

Please sign in to comment.