Hugging Face Spaces — build status: Runtime error
Commit: "Update app.py" (Browse files)
File changed: app.py
@@ -2,6 +2,8 @@ import gradio as gr
|
|
2 |
import random
|
3 |
import time
|
4 |
from ctransformers import AutoModelForCausalLM
|
|
|
|
|
5 |
|
6 |
params = {
|
7 |
"max_new_tokens":512,
|
@@ -12,9 +14,19 @@ params = {
|
|
12 |
"batch_size": 8}
|
13 |
|
14 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
15 |
llm = AutoModelForCausalLM.from_pretrained("Aspik101/Llama-2-7b-chat-hf-pl-lora_GGML", model_type="llama")
|
16 |
|
17 |
-
with gr.Blocks() as demo:
|
18 |
chatbot = gr.Chatbot()
|
19 |
msg = gr.Textbox()
|
20 |
clear = gr.Button("Clear")
|
@@ -23,13 +35,17 @@ with gr.Blocks() as demo:
|
|
23 |
return "", history + [[user_message, None]]
|
24 |
|
25 |
# (pre-change version shown by the diff) Generator: sends the whole chat
# history to the model and streams the reply character-by-character into
# the last history turn, yielding after each character.
def bot(history):
|
|
|
26 |
stream = llm(prompt = f"Jesteś AI assystentem. Odpowiadaj po polsku. <user>: {history}. <assistant>:", **params)
|
27 |
history[-1][1] = ""
|
|
|
28 |
for character in stream:
|
29 |
history[-1][1] += character
|
|
|
30 |
# small delay so the UI visibly animates the stream
time.sleep(0.005)
|
31 |
yield history
|
32 |
|
|
|
33 |
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
|
34 |
bot, chatbot, chatbot
|
35 |
)
|
|
|
2 |
import random
|
3 |
import time
|
4 |
from ctransformers import AutoModelForCausalLM
|
5 |
+
import datetime
|
6 |
+
|
7 |
|
8 |
params = {
|
9 |
"max_new_tokens":512,
|
|
|
14 |
"batch_size": 8}
|
15 |
|
16 |
|
17 |
+
log_file_path = "gradio_logs.txt"
|
18 |
+
with open(log_file_path, "a") as log_file:
|
19 |
+
log_file.write("------- Logi Gradio -------\n")
|
20 |
+
|
21 |
+
def save_log(task, to_save):
|
22 |
+
with open(log_file_path, "a") as log_file:
|
23 |
+
current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
24 |
+
log_file.write(f"[{current_time}] - {task}: {to_save}\n")
|
25 |
+
|
26 |
+
|
27 |
llm = AutoModelForCausalLM.from_pretrained("Aspik101/Llama-2-7b-chat-hf-pl-lora_GGML", model_type="llama")
|
28 |
|
29 |
+
# FIX: gr.Blocks() does not accept an ``allow_flagging`` argument — that
# option belongs to gr.Interface. Passing it raises TypeError at startup,
# which matches the "Runtime error" status shown for this Space.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()      # conversation display
    msg = gr.Textbox()          # user input box
    clear = gr.Button("Clear")  # NOTE(review): no .click() wiring is visible in this diff chunk
|
|
|
35 |
return "", history + [[user_message, None]]
|
36 |
|
37 |
def bot(history):
    """Stream the model's reply into the last chat turn, logging both sides.

    Yields the updated ``history`` after every generated character so the
    Gradio chatbot animates the response. NOTE(review): the entire ``history``
    list (not only the latest message) is interpolated into the prompt and
    into the "question" log entry — preserved exactly from the original.
    """
    save_log("question", history)
    stream = llm(prompt=f"Jesteś AI assystentem. Odpowiadaj po polsku. <user>: {history}. <assistant>:", **params)
    history[-1][1] = ""
    answer_save = ""
    for token in stream:
        history[-1][1] = history[-1][1] + token
        answer_save = answer_save + token
        # tiny delay so the streaming is visible in the UI
        time.sleep(0.005)
        yield history
    save_log("answer", answer_save)
|
49 |
# Submit pipeline: ``user`` echoes the typed message into the chat
# immediately (queue=False for instant feedback), then ``bot`` streams the
# model's reply into that same turn.
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
|
50 |
bot, chatbot, chatbot
|
51 |
)
|