NGUYEN, Xuan Phi committed
Commit 390b3ef
Parent(s): a273e68
update
multipurpose_chatbot/demos/text_completion.py
CHANGED
@@ -87,7 +87,7 @@ def generate_text_completion_stream_engine(
     stop_strings = [x.strip() for x in stop_strings.strip().split(",")]
     stop_strings = list(set(stop_strings + ['</s>', '<|im_start|>', '<|im_end|>']))
     if message.strip() != message:
-        gr.Warning(f'There are preceding/trailing spaces in the message
+        gr.Warning(f'There are preceding/trailing spaces in the message.')
     if len(message) == 0:
         raise gr.Error("The message cannot be empty!")
     num_tokens = len(MODEL_ENGINE.tokenizer.encode(message))
@@ -131,7 +131,7 @@ class TextCompletionDemo(BaseDemo):
         model_name = kwargs.get("model_name", MODEL_NAME)
         # frequence_penalty = FREQUENCE_PENALTY
         # presence_penalty = PRESENCE_PENALTY
-        max_tokens = max_tokens //
+        max_tokens = max_tokens // 4

         description = description or f"""Put any context string (like few-shot prompts)"""

@@ -166,7 +166,8 @@ class TextCompletionDemo(BaseDemo):
             stop_strings = gr.Textbox(value="<eos>,<s>,</s>,<|im_start|>,<|im_end|>", label='Stop strings', info='Comma-separated string to stop generation only in FEW-SHOT mode', lines=1)
             examples = gr.Examples(
                 examples=[
-                    ["The following is the recite the declaration of independence:",]
+                    ["The following is the recite the declaration of independence:",],
+                    ["<|im_start|>system\nYou are a helpful assistant.<eos>\n<|im_start|>user\nTell me a joke.<eos>\n<|im_start|>assistant\n",]
                 ],
                 inputs=[txt, temp_input, length_input, stop_strings],
                 # outputs=[txt]
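For context, the demo builds its stop list by merging the comma-separated textbox value with a few hard-coded special tokens, and this commit also shrinks the requested generation budget to max_tokens // 4. Below is a minimal standalone sketch of that parsing, assuming only plain Python; the real code runs inside the Gradio handler with MODEL_ENGINE and gr.Warning available.

def parse_stop_strings(raw: str) -> list[str]:
    # Split the comma-separated textbox value and drop surrounding whitespace.
    stops = [x.strip() for x in raw.strip().split(",")]
    # The handler always appends these special tokens before deduplicating.
    return list(set(stops + ['</s>', '<|im_start|>', '<|im_end|>']))

if __name__ == "__main__":
    print(parse_stop_strings("<eos>,<s>,</s>,<|im_start|>,<|im_end|>"))
    max_tokens = 2048
    print(max_tokens // 4)  # 512, mirroring the new max_tokens // 4 cap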
multipurpose_chatbot/engines/transformers_engine.py
CHANGED
@@ -216,7 +216,6 @@ class NewGenerationMixin(GenerationMixin):
         ['Today is a beautiful day, and we must do everything possible to make it a day of celebration.']
         ```"""
         # init values
-        print(f'Streaming tokens...')
         from transformers.generation.utils import (
             validate_stopping_criteria, GenerateEncoderDecoderOutput, GenerateDecoderOnlyOutput
         )
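The removed print was a leftover debug statement in the streaming generate override; the surrounding code pulls in transformers' stopping-criteria utilities. As a hedged illustration only (not this repository's implementation), stop strings like the ones configured in the demo are commonly enforced with a custom StoppingCriteria:

from transformers import StoppingCriteria, StoppingCriteriaList

class StopOnStrings(StoppingCriteria):
    """Stop generation once any stop string appears in the generated suffix."""

    def __init__(self, tokenizer, stop_strings, prompt_len):
        self.tokenizer = tokenizer
        self.stop_strings = stop_strings
        self.prompt_len = prompt_len  # number of prompt tokens to skip when decoding

    def __call__(self, input_ids, scores, **kwargs):
        # Decode only the newly generated tokens of the first sequence.
        text = self.tokenizer.decode(input_ids[0, self.prompt_len:])
        return any(s in text for s in self.stop_strings)

# Hypothetical usage, assuming `model`, `tokenizer`, and tokenized `inputs` exist:
# criteria = StoppingCriteriaList([
#     StopOnStrings(tokenizer, ['</s>', '<|im_start|>', '<|im_end|>'],
#                   inputs["input_ids"].shape[1]),
# ])
# model.generate(**inputs, stopping_criteria=criteria)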