Spaces: Running on Zero
Locutusque committed
Commit · 724a6c2
1 Parent(s): fc61ac0
Update app.py
app.py CHANGED
@@ -26,7 +26,7 @@ def generate(
     prompt = f"<|im_start|>system\nYou are an AI assistant that follows instruction extremely well. Help as much as you can.<|im_end|>\n<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
     streamer = TextIteratorStreamer(pipe.tokenizer, timeout=240.0, skip_prompt=True, skip_special_tokens=True)
     generation_kwargs = dict(text_inputs=prompt, streamer=streamer, max_new_tokens=max_new_tokens, do_sample=True, top_p=top_p, top_k=top_k,
-                             temperature=temperature, num_beams=1, repetition_penalty=
+                             temperature=temperature, num_beams=1, repetition_penalty=1.1)
     t = Thread(target=pipe.__call__, kwargs=generation_kwargs)
     t.start()
     outputs = []
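For context, the removed line ended with a dangling repetition_penalty= (a syntax error); the commit completes it with repetition_penalty=1.1. Below is a minimal, self-contained sketch of the streaming-generation pattern the diff touches. The placeholder model name, the function signature defaults, and the final consumption loop are assumptions for illustration; only the streamer/background-thread pattern and the generation kwargs mirror the diff.

from threading import Thread

from transformers import TextIteratorStreamer, pipeline

pipe = pipeline("text-generation", model="gpt2")  # placeholder model, not the Space's actual checkpoint

def generate(user_input, max_new_tokens=256, temperature=0.7, top_p=0.9, top_k=50):
    prompt = (
        "<|im_start|>system\nYou are an AI assistant that follows instruction "
        "extremely well. Help as much as you can.<|im_end|>\n"
        f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
    )
    # The streamer yields decoded text chunks as the pipeline produces tokens.
    streamer = TextIteratorStreamer(
        pipe.tokenizer, timeout=240.0, skip_prompt=True, skip_special_tokens=True
    )
    generation_kwargs = dict(
        text_inputs=prompt,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        top_p=top_p,
        top_k=top_k,
        temperature=temperature,
        num_beams=1,
        repetition_penalty=1.1,  # the value this commit fills in
    )
    # Run generation in a background thread so the streamer can be consumed here.
    t = Thread(target=pipe.__call__, kwargs=generation_kwargs)
    t.start()
    outputs = []
    # Assumed continuation: accumulate streamed chunks and yield partial output.
    for chunk in streamer:
        outputs.append(chunk)
        yield "".join(outputs)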