# AutoL2S-7b / examples / inference.py
# Author: Feng Luo
# (example usage; commit e73d95b)
from vllm import SamplingParams
from prefixLLM import PrefixLLM
from template import SYSTEM_PROMPT, SHORT_TRIGGER
if __name__ == "__main__":
    # Load the AutoL2S-7b model behind the prefix-routing wrapper.
    llm = PrefixLLM(model="amandaa/AutoL2S-7b")

    max_tokens, temp = 32768, 0.7

    # Routing pass: generation stops as soon as the model emits the
    # "<specialLong>" routing token (kept in the output so the router
    # can detect it and decide whether long-form thinking is needed).
    sampling_params_route = SamplingParams(
        max_tokens=max_tokens,
        temperature=temp,
        stop=["<specialLong>"],
        include_stop_str_in_output=True,
    )
    # Forced-thinking pass: no stop token, so the model can produce the
    # full long-form reasoning trace.
    sampling_params_force_think = SamplingParams(
        max_tokens=max_tokens,
        temperature=temp,
    )

    question = "Convert the point $(0,3)$ in rectangular coordinates to polar coordinates. Enter your answer in the form $(r,\\theta),$ where $r > 0$ and $0 \\le \\theta < 2 \\pi.$"
    messages = [
        {"role": "system", "content": SYSTEM_PROMPT},
        {"role": "user", "content": question},
    ]

    # route_chat first runs the routing pass; if the trigger word fires,
    # it re-generates with the forced-thinking parameters.
    responses = llm.route_chat(
        messages=messages,
        sampling_params_route=sampling_params_route,
        sampling_params_force_think=sampling_params_force_think,
        use_tqdm=True,
        trigger_word=SHORT_TRIGGER,
    )
    # Prepend the trigger word since the generated text continues after it.
    print(SHORT_TRIGGER + responses[0].outputs[0].text)