Spaces:
Running
Running
prithivMLmods
committed on
Commit
•
68c5b4c
1
Parent(s):
0ba7801
Update app.py
Browse files
app.py
CHANGED
@@ -39,7 +39,7 @@ def respond(
|
|
39 |
|
40 |
response = ""
|
41 |
|
42 |
-
for message in
|
43 |
model="meta-llama/Meta-Llama-3.1-8B-Instruct",
|
44 |
max_tokens=max_tokens,
|
45 |
stream=True,
|
@@ -48,7 +48,6 @@ def respond(
|
|
48 |
messages=messages,
|
49 |
):
|
50 |
token = message.choices[0].delta.content
|
51 |
-
|
52 |
response += token
|
53 |
yield response
|
54 |
|
@@ -81,46 +80,38 @@ def save_to_file(history, file_format):
|
|
81 |
|
82 |
return file_name
|
83 |
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
)
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
handle_save,
|
113 |
-
|
114 |
-
outputs=output_file
|
115 |
-
)
|
116 |
-
|
117 |
-
demo = gr.Blocks()
|
118 |
|
119 |
-
|
120 |
-
with gr.Column():
|
121 |
-
demo.render()
|
122 |
-
save_button.render()
|
123 |
-
output_file.render()
|
124 |
|
125 |
if __name__ == "__main__":
|
126 |
demo.launch()
|
|
|
39 |
|
40 |
response = ""
|
41 |
|
42 |
+
for message in client.chat.completions.create(
|
43 |
model="meta-llama/Meta-Llama-3.1-8B-Instruct",
|
44 |
max_tokens=max_tokens,
|
45 |
stream=True,
|
|
|
48 |
messages=messages,
|
49 |
):
|
50 |
token = message.choices[0].delta.content
|
|
|
51 |
response += token
|
52 |
yield response
|
53 |
|
|
|
80 |
|
81 |
return file_name
|
82 |
|
83 |
+
# Gradio Interface Setup
|
84 |
+
with gr.Blocks(css=css) as demo:
|
85 |
+
system_message = gr.Textbox(value="", label="System message")
|
86 |
+
max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
|
87 |
+
temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
|
88 |
+
top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")
|
89 |
+
save_as = gr.Radio(["PDF", "DOCX", "TXT"], label="Save As")
|
90 |
+
|
91 |
+
chat = gr.Chatbot()
|
92 |
+
msg = gr.Textbox(label="Your message")
|
93 |
+
|
94 |
+
def respond_wrapper(message, history):
|
95 |
+
response_generator = respond(
|
96 |
+
message,
|
97 |
+
history,
|
98 |
+
system_message.value,
|
99 |
+
max_tokens.value,
|
100 |
+
temperature.value,
|
101 |
+
top_p.value
|
102 |
+
)
|
103 |
+
response = next(response_generator)
|
104 |
+
return history + [(message, response)]
|
105 |
+
|
106 |
+
msg.submit(respond_wrapper, [msg, chat], [chat])
|
107 |
+
|
108 |
+
save_button = gr.Button("Save Conversation")
|
109 |
+
output_file = gr.File(label="Download File")
|
110 |
+
|
111 |
+
def handle_save(history, file_format):
|
112 |
+
return save_to_file(history, file_format)
|
|
|
|
|
|
|
|
|
113 |
|
114 |
+
save_button.click(handle_save, inputs=[chat, save_as], outputs=output_file)
|
|
|
|
|
|
|
|
|
115 |
|
116 |
# Launch the Gradio app only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()
|