yentinglin committed · Commit 1996a44 · Parent(s): f76c0d7

Update app.py
app.py
CHANGED
@@ -44,10 +44,9 @@ Taiwan-LLaMa is a fine-tuned model specifically designed for traditional mandari
 
 Different versions of Taiwan-LLaMa are available:
 
-- **Taiwan-LLM
-- **Taiwan-LLM
-- **Taiwan-LLM
-- **Taiwan-LLM v0.0**: No Traditional Mandarin pretraining
+- **Taiwan-LLM v3.0 (This demo)**
+- **Taiwan-LLM v2.0**
+- **Taiwan-LLM v1.0**
 
 The models can be accessed from the provided links in the Hugging Face repository.
 
@@ -126,13 +125,6 @@ with gr.Blocks() as demo:
                     step=0.05,
                     value=0.95,
                 )
-                top_k = gr.Slider(
-                    label='Top-k',
-                    minimum=1,
-                    maximum=1000,
-                    step=1,
-                    value=50,
-                )
 
     def user(user_message, history):
         return "", history + [[user_message, None]]
@@ -179,7 +171,6 @@ with gr.Blocks() as demo:
             max_new_tokens,
             temperature,
             top_p,
-            top_k,
             system_prompt,
         ],
         outputs=chatbot
@@ -193,7 +184,6 @@ with gr.Blocks() as demo:
            max_new_tokens,
            temperature,
            top_p,
-           top_k,
            system_prompt,
        ],
        outputs=chatbot
@@ -233,7 +223,6 @@ with gr.Blocks() as demo:
            max_new_tokens,
            temperature,
            top_p,
-           top_k,
            system_prompt,
        ],
        outputs=chatbot,
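For context, here is a minimal sketch (not the Space's actual app.py) of how Gradio wires slider values into a chat handler. It illustrates why dropping the Top-k control also means removing `top_k` from every `inputs=[...]` list and from the handler's signature, as this commit does. The handler name `bot`, the slider ranges, and the layout are illustrative assumptions, not taken from the repository.

import gradio as gr

def user(user_message, history):
    # Append the user's turn with an empty bot slot, as in the original app.
    return "", history + [[user_message, None]]

def bot(history, max_new_tokens, temperature, top_p, system_prompt):
    # Placeholder response; a real app would call the model here with the
    # sampling parameters received from the sliders. (Hypothetical handler.)
    history[-1][1] = f"(echo) {history[-1][0]}"
    return history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Message")
    system_prompt = gr.Textbox(label="System prompt", value="You are a helpful assistant.")
    max_new_tokens = gr.Slider(label="Max new tokens", minimum=1, maximum=1024, step=1, value=256)
    temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=2.0, step=0.05, value=0.7)
    top_p = gr.Slider(label="Top-p", minimum=0.05, maximum=1.0, step=0.05, value=0.95)

    # The handler receives exactly the components listed in inputs, in order;
    # this mirrors the inputs=[...] lists edited in the diff above. A removed
    # slider must disappear from both the UI and these lists, or the argument
    # counts no longer match.
    msg.submit(user, [msg, chatbot], [msg, chatbot]).then(
        bot,
        inputs=[chatbot, max_new_tokens, temperature, top_p, system_prompt],
        outputs=chatbot,
    )

if __name__ == "__main__":
    demo.launch()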