Skip to content

Commit

Permalink
no need to set the lora_model_dir on resume
Browse files · Browse the repository at this point in the history
  • Branch information
winglian committed Jan 31, 2024
1 parent e58f030 commit 8882d32
Showing 1 changed file with 0 additions and 4 deletions.
4 changes: 0 additions & 4 deletions src/axolotl/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,6 @@ def train(
)
resume_from_checkpoint = cfg.resume_from_checkpoint

if cfg.adapter and cfg.resume_from_checkpoint and not cfg.lora_model_dir:
LOG.info(f"setting lora_model_dir to use {cfg.resume_from_checkpoint}")
cfg.lora_model_dir = cfg.resume_from_checkpoint

# Load the model and tokenizer
msg = "loading model"
if cfg.adapter:
Expand Down

0 comments on commit 8882d32

Please sign in to comment.