Update app/main.py
app/main.py (+7, -1)
@@ -6,7 +6,13 @@ def main(args):
     demo = gr.ChatInterface(
         fn=chat,
         examples=["hello", "how are you?", "What is Large Language Model?"],
-        title="
+        title="Gradio 🤗 TGI",
+        description="This space is a template that you can fork/duplicate for your own usage. "
+                    "This space let you build LLM powered idea on top of [Gradio](https://www.gradio.app/) "
+                    "and open LLM served locally by [TGI(Text Generation Inference)](https://huggingface.co/docs/text-generation-inference/en/index). "
+                    "To use this space, [duplicate]() this space, set which model you want to use (i.e. mistralai/Mistral-7B-Instruct-v0.2), then "
+                    "you are all good to go. Just focus on the implementation of your idea 💡. For your convenience, this space also provides "
+                    "some handy [utility functions](https://huggingface.co/spaces/chansung/gradio_together_tgi/blob/main/app/gen/openllm.py) to aynchronously generate text by interacting with the locally served LLM.",
         multimodal=False
     )
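For context, a minimal runnable sketch of what the resulting gr.ChatInterface block in app/main.py looks like after this change is shown below. The chat stub, the shortened description text, and the demo.launch() call are assumptions added for illustration; in the actual Space, chat interacts with the locally served TGI model through the utility functions in app/gen/openllm.py.

# Minimal sketch of the interface configured in this commit.
# The chat function here is a placeholder; the real Space queries the
# locally served TGI model (see app/gen/openllm.py) instead.
import gradio as gr

def chat(message, history):
    # Placeholder: echo the user message back.
    return f"You said: {message}"

demo = gr.ChatInterface(
    fn=chat,
    examples=["hello", "how are you?", "What is Large Language Model?"],
    title="Gradio 🤗 TGI",
    description="Template Space for building LLM-powered ideas on top of Gradio "
                "and an open LLM served locally by TGI (Text Generation Inference).",
    multimodal=False,
)

if __name__ == "__main__":
    demo.launch()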