Skip to content

Commit

Permalink
add guanaco-33b-api (#106)
Browse files Browse the repository at this point in the history
  • Loading branch information
YannDubs authored Jul 31, 2023
1 parent e990412 commit bb5018f
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 2 deletions.
4 changes: 2 additions & 2 deletions src/alpaca_eval/decoders/huggingface_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def huggingface_api_completions(
model_name: str,
gpu: bool = False,
do_sample: bool = False,
num_procs: int = 8,
num_procs: int = 1,
**kwargs,
) -> dict[str, list]:
"""Decode with the API from hugging face hub.
Expand Down Expand Up @@ -96,7 +96,7 @@ def inference_helper(prompt: str, inference, params, n_retries=100, waiting_time
elif "Input validation error" in error and "max_new_tokens" in error:
params["max_new_tokens"] = int(params["max_new_tokens"] * 0.8)
logging.warning(
f"`max_new_tokens` too large. Reducing target length to {params['max_tokens']}, " f"Retrying..."
f"`max_new_tokens` too large. Reducing target length to {params['max_new_tokens']}, " f"Retrying..."
)
if params["max_new_tokens"] == 0:
raise ValueError(f"Error in inference. Full error: {error}")
Expand Down
5 changes: 5 additions & 0 deletions src/alpaca_eval/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,11 @@ def evaluate_from_model(

if chunksize is not None and not is_load_outputs:
logging.info("`is_load_outputs` has to be true to use chunksize. Setting it to True.")
is_load_outputs = True

if chunksize is not None and max_instances is not None:
logging.info("cannot use `chunksize` with max_instances. Setting `chunksize` to None.")
chunksize = None

model_configs = utils.load_configs(model_configs, relative_to=constants.MODELS_CONFIG_DIR)
if reference_model_configs is not None:
Expand Down
9 changes: 9 additions & 0 deletions src/alpaca_eval/models_configs/guanaco-33b-api/configs.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Model config for evaluating Guanaco-33B through the Hugging Face Inference
# API (added alongside the `huggingface_api_completions` decoder in this diff).
guanaco-33b-api:
  # Reuses the guanaco-7b prompt file — presumably the 7B and 33B models share
  # the same chat template; verify against guanaco-7b/prompt.txt.
  prompt_template: "guanaco-7b/prompt.txt"
  # Decoder entry point; resolved to src/alpaca_eval/decoders/huggingface_api.py
  # (the function whose `num_procs` default changes to 1 in this same commit).
  fn_completions: "huggingface_api_completions"
  # Keyword arguments forwarded to the decoder function.
  completions_kwargs:
    # Hub repo id of the merged 33B checkpoint to query.
    model_name: "timdettmers/guanaco-33b-merged"
    # Generation cap; the decoder shrinks this by 0.8x and retries on
    # "Input validation error" responses from the API.
    max_new_tokens: 256
    temperature: 0.7
    do_sample: True
    # Request GPU-backed inference — NOTE(review): likely required for a 33B
    # model on the hosted API; confirm endpoint availability.
    gpu: True

0 comments on commit bb5018f

Please sign in to comment.