a monkey patch for lora_target
hiyouga committed Jul 17, 2023
1 parent f8193e8 commit 262252d
Showing 2 changed files with 11 additions and 0 deletions.

src/llmtuner/extras/constants.py (9 additions, 0 deletions)

@@ -29,3 +29,12 @@
     "InternLM-7B-Base": "internlm/internlm-7b",
     "InternLM-7B-Chat": "internlm/internlm-chat-7b"
 }
+
+DEFAULT_MODULE = { # will be deprecated
+    "LLaMA": "q_proj,v_proj",
+    "BLOOM": "query_key_value",
+    "BLOOMZ": "query_key_value",
+    "Falcon": "query_key_value",
+    "Baichuan": "W_pack",
+    "InternLM": "q_proj,v_proj"
+}
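
Each key in DEFAULT_MODULE matches the prefix of a model's display name (the token before the first "-") in the registry above, and each value is the comma-separated list of modules into which LoRA adapters should be injected for that architecture. Below is a minimal sketch of the prefix lookup this mapping is built for, assuming the same naming convention; the helper name is hypothetical and not part of this commit.

# Illustration only; lookup_default_module is not in this commit.
from typing import Optional

DEFAULT_MODULE = {
    "LLaMA": "q_proj,v_proj",
    "BLOOM": "query_key_value",
    "BLOOMZ": "query_key_value",
    "Falcon": "query_key_value",
    "Baichuan": "W_pack",
    "InternLM": "q_proj,v_proj"
}

def lookup_default_module(model_name: str) -> Optional[str]:
    # "InternLM-7B-Chat" -> "InternLM" -> "q_proj,v_proj"
    return DEFAULT_MODULE.get(model_name.split("-")[0])

print(lookup_default_module("Baichuan-13B-Base"))  # W_pack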

src/llmtuner/webui/runner.py (2 additions, 0 deletions)

@@ -6,6 +6,7 @@
 from typing import Optional, Tuple

 from llmtuner.extras.callbacks import LogCallback
+from llmtuner.extras.constants import DEFAULT_MODULE # will be deprecated
 from llmtuner.extras.logging import LoggerHandler
 from llmtuner.extras.misc import torch_gc
 from llmtuner.tuner import get_train_args, run_sft

@@ -79,6 +80,7 @@ def run_train(
             model_name_or_path=model_name_or_path,
             do_train=True,
             finetuning_type=finetuning_type,
+            lora_target=DEFAULT_MODULE.get(model_name.split("-")[0], None) or "q_proj,v_proj",
             prompt_template=template,
             dataset=",".join(dataset),
             dataset_dir=dataset_dir,
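
The patched run_train derives lora_target from the selected model's display name: the prefix before the first "-" is looked up in DEFAULT_MODULE, and when the prefix is unknown (or maps to None) the trailing "or" falls back to the LLaMA-style default "q_proj,v_proj". A small sketch of that fallback chain, assuming a DEFAULT_MODULE that mirrors the dict added in constants.py; the function name is hypothetical.

# Illustration of the lora_target fallback; not code from this commit.
DEFAULT_MODULE = {"LLaMA": "q_proj,v_proj", "Baichuan": "W_pack"}  # abridged

def resolve_lora_target(model_name: str) -> str:
    # Known prefix -> architecture-specific modules; anything else -> "q_proj,v_proj".
    return DEFAULT_MODULE.get(model_name.split("-")[0], None) or "q_proj,v_proj"

print(resolve_lora_target("Baichuan-7B"))        # W_pack
print(resolve_lora_target("SomeOtherModel-7B"))  # q_proj,v_proj (fallback)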
