Spaces:
Running
Running
Commit
•
0feb131
1
Parent(s):
5726c7d
Update app.py
Browse files
app.py
CHANGED
@@ -4,22 +4,22 @@ import gradio as gr
|
|
4 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
5 |
|
6 |
def format_prompt(message, history, system_prompt=None):
    """Assemble a Mixtral-instruct-style prompt string from a chat history.

    Args:
        message: The current user message to append last.
        history: Iterable of (user_prompt, bot_response) pairs from prior turns.
        system_prompt: Optional system text; when truthy it is inserted in
            [SYS]...[/SYS] tags after the history and before the final message.

    Returns:
        The full prompt string, starting with the "<s>" BOS marker.
    """
    segments = ["<s>"]
    for past_user, past_bot in history:
        segments.append(f"[INST] {past_user} [/INST]")
        segments.append(f" {past_bot}</s> ")
    if system_prompt:
        segments.append(f"[SYS] {system_prompt} [/SYS]")
    segments.append(f"[INST] {message} [/INST]")
    return "".join(segments)
|
15 |
|
16 |
def generate(
|
17 |
-
prompt, history, system_prompt=None, temperature=0.
|
18 |
):
|
19 |
-
temperature = float(temperature)
|
20 |
-
if temperature < 1e-2:
|
21 |
-
temperature = 1e-2
|
22 |
-
top_p = float(top_p)
|
23 |
|
24 |
generate_kwargs = dict(
|
25 |
temperature=temperature,
|
|
|
4 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
5 |
|
6 |
def format_prompt(message, history, system_prompt=None):
    """Build the instruction-formatted prompt expected by the Mixtral client.

    Each prior (user, bot) turn from ``history`` is rendered as
    ``[INST] user [/INST] bot</s> ``; an optional ``system_prompt`` is wrapped
    in ``[SYS]``/``[/SYS]`` tags, and ``message`` closes the prompt as the
    final ``[INST]`` block.

    Args:
        message: Latest user message.
        history: Iterable of (user_prompt, bot_response) tuples.
        system_prompt: Optional system instruction; skipped when falsy.

    Returns:
        The concatenated prompt string beginning with "<s>".
    """
    parts = ["<s>"]
    parts.extend(
        f"[INST] {user_turn} [/INST] {bot_turn}</s> "
        for user_turn, bot_turn in history
    )
    if system_prompt:
        parts.append(f"[SYS] {system_prompt} [/SYS]")
    parts.append(f"[INST] {message} [/INST]")
    return "".join(parts)
|
15 |
|
16 |
def generate(
|
17 |
+
prompt, history, system_prompt=None, temperature=0.2, max_new_tokens=512, top_p=0.95, repetition_penalty=1.0,
|
18 |
):
|
19 |
+
temperature = float(temperature)
|
20 |
+
if temperature < 1e-2:
|
21 |
+
temperature = 1e-2
|
22 |
+
top_p = float(top_p)
|
23 |
|
24 |
generate_kwargs = dict(
|
25 |
temperature=temperature,
|