zephyr-7b-beta:
  prompt_template: "/mnt/bn/xuruijie-llm/eval/dpo/prompt.txt"
  fn_completions: "huggingface_local_completions"
  completions_kwargs:
    model_name: "/mnt/bn/xuruijie-llm/checkpoints/dpo/zephyr-7b-dpo-full-different-0.05-0.04-dpop/"
    model_kwargs:
      torch_dtype: 'bfloat16'
    max_new_tokens: 2048
    temperature: 0.7
    top_p: 1.0
    do_sample: True
  pretty_name: "Zephyr 7B Beta"
  link: "https://huggingface.co/HuggingFaceH4/zephyr-7b-beta"