fix: parameters were not updating correctly
app.py
CHANGED
@@ -101,8 +101,13 @@ PUSH_FREQUENCY = 60 # every minute
 
 HISTORY = ""
 PROMPT = ""
-USERNAME = ""
 NAME = ""
+SYSTEM_PROMPT = ""
+TEMPERATURE = ""
+MAX_NEW_TOKENS = ""
+TOP_P = ""
+TOP_K = ""
+REPETITION_PENALTY = ""
 
 REVISION = "" #"4f68dc3740b410835da71811d92bd3b2d690e79c"
 
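The new module-level variables extend a pattern app.py already uses for HISTORY and PROMPT: each global is a one-slot cache holding whatever the most recent chat call passed in, so a later, unrelated callback can read it back. A minimal standalone sketch of that pattern, with illustrative names that are not the Space's real code:

# Illustrative one-slot cache, mirroring the globals added in this commit.
PROMPT = ""
TEMPERATURE = ""

def remember_call(prompt, temperature=0.9):
    # The chat handler stashes its latest arguments in module globals...
    global PROMPT, TEMPERATURE
    PROMPT = prompt
    TEMPERATURE = temperature
    return f"echo: {prompt}"

def last_call_settings():
    # ...so a later callback (e.g. a like/dislike logger) can still see
    # which settings produced the response being rated.
    return {"prompt": PROMPT, "temperature": TEMPERATURE}

remember_call("سلام", temperature=0.7)
print(last_call_settings())  # {'prompt': 'سلام', 'temperature': 0.7}

One trade-off worth noting: module-level globals in a Gradio app are shared by every connected session, so under concurrent use the values logged by the feedback handler may come from another user's most recent request.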
@@ -570,14 +575,28 @@ def format_prompt(message, history, system_prompt):
     return prompt.replace('\n','؛').replace('\t','/').replace(' * ','/').replace('\u200c',' ').strip()
 
 def generate(
-    prompt, history,
+    prompt, history, name, system_prompt,
     temperature=0.9, max_new_tokens=100, top_p=0.95, top_k=100,
     repetition_penalty=1.0, seed=42,
 ):
-    global HISTORY
-    HISTORY = history
     global PROMPT
     PROMPT = prompt
+    global HISTORY
+    HISTORY = history
+    global NAME
+    NAME = name
+    global SYSTEM_PROMPT
+    SYSTEM_PROMPT = system_prompt
+    global TEMPERATURE
+    TEMPERATURE = temperature
+    global MAX_NEW_TOKENS
+    MAX_NEW_TOKENS = max_new_tokens
+    global TOP_P
+    TOP_P = top_p
+    global TOP_K
+    TOP_K = top_k
+    global REPETITION_PENALTY
+    REPETITION_PENALTY = repetition_penalty
 
     temperature = float(temperature)
     if temperature < 1e-2:
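The widened generate() signature only helps if the UI actually passes the extra values. The diff does not show the Space's Gradio wiring; a common way to feed a chat function extra controls is gr.ChatInterface with additional_inputs, which are appended after the (message, history) pair in order, matching the new parameter order here (name, system_prompt, then the sampling knobs). A hedged sketch with a stub function and made-up labels and ranges:

import gradio as gr

def generate(prompt, history, name, system_prompt,
             temperature=0.9, max_new_tokens=100, top_p=0.95, top_k=100,
             repetition_penalty=1.0, seed=42):
    # Stub standing in for the Space's real generate(); it only shows that
    # the additional inputs arrive in the expected positions.
    return f"[{name} | T={temperature}] {prompt}"

demo = gr.ChatInterface(
    fn=generate,
    additional_inputs=[
        gr.Textbox(label="Name"),
        gr.Textbox(label="System prompt"),
        gr.Slider(0.0, 2.0, value=0.9, label="temperature"),
        gr.Slider(1, 1024, value=100, step=1, label="max_new_tokens"),
        gr.Slider(0.0, 1.0, value=0.95, label="top_p"),
        gr.Slider(1, 500, value=100, step=1, label="top_k"),
        gr.Slider(0.5, 2.0, value=1.0, label="repetition_penalty"),
    ],
)

if __name__ == "__main__":
    demo.launch()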
@@ -1065,14 +1084,14 @@ def vote(data: gr.LikeData):
         {
             "time_stamp": time.time(),
             "model_version":MODEL_VERSION,
-            "
+            "name":NAME,
             "prompt": PROMPT.replace('\n','؛').replace('\t','/').replace(' * ','/').replace('\u200c',' ').strip(),
-            "system prompt":
-            "temperature":
-            "max_new_tokens":
-            "top_p":
-            "top_k":
-            "repetition_penalty":
+            "system prompt": SYSTEM_PROMPT,
+            "temperature": TEMPERATURE,
+            "max_new_tokens": MAX_NEW_TOKENS,
+            "top_p": TOP_P,
+            "top_k": TOP_K,
+            "repetition_penalty": REPETITION_PENALTY,
             "response": data.value.replace('\n','؛').replace('\t','/').replace(' * ','/').replace('\u200c',' ').strip(),
             "label": data.liked,
         }, ensure_ascii=False
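For context on where these fields end up: the dict above is serialized inside the Space's vote() handler, which Gradio calls with a gr.LikeData payload when a user clicks thumbs-up or thumbs-down on a chatbot message. A self-contained sketch of that flow; the placeholder globals and the local votes.jsonl sink are assumptions for illustration only (the real app collects records for periodic pushing, per the PUSH_FREQUENCY comment in the hunk context above):

import json
import time
import gradio as gr

# Hypothetical placeholders for the module globals populated by generate().
MODEL_VERSION = "demo"
NAME = PROMPT = SYSTEM_PROMPT = ""
TEMPERATURE = MAX_NEW_TOKENS = TOP_P = TOP_K = REPETITION_PENALTY = ""

def vote(data: gr.LikeData):
    # Gradio fills data.value with the rated message and data.liked with
    # True (thumbs-up) or False (thumbs-down).
    record = {
        "time_stamp": time.time(),
        "model_version": MODEL_VERSION,
        "name": NAME,
        "prompt": PROMPT,
        "system prompt": SYSTEM_PROMPT,
        "temperature": TEMPERATURE,
        "max_new_tokens": MAX_NEW_TOKENS,
        "top_p": TOP_P,
        "top_k": TOP_K,
        "repetition_penalty": REPETITION_PENALTY,
        "response": data.value,
        "label": data.liked,
    }
    # ensure_ascii=False keeps Persian text readable in the log file.
    with open("votes.jsonl", "a", encoding="utf-8") as f:
        f.write(json.dumps(record, ensure_ascii=False) + "\n")

# Wiring sketch: attach the handler to a Chatbot's like event.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    chatbot.like(vote, None, None)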