alonsosilva committed • Commit 4478f21 • 1 Parent(s): 38c7b49

Make it compliant with blog
app.py
CHANGED
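The hunks below use tokenizer, model, streamer, and Thread, which are defined near the top of app.py and are not touched by this commit. A minimal sketch of what that setup plausibly looks like, purely for context; the checkpoint name and the streamer options are assumptions, not taken from the commit:

from threading import Thread
from typing import List, TypedDict

import solara
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Placeholder checkpoint: the model actually used by the Space is not visible in this diff.
checkpoint = "Qwen/Qwen2.5-0.5B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)
# skip_prompt keeps the prompt out of the stream; skip_special_tokens cleans the decoded chunks.
streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)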
@@ -14,6 +14,28 @@ class MessageDict(TypedDict):
     role: str
     content: str
 
+def response_generator(message):
+    text = tokenizer.apply_chat_template(
+        [{"role": "user", "content": message}],
+        tokenize=False,
+        add_generation_prompt=True
+    )
+    inputs = tokenizer(text, return_tensors="pt")
+    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=512)
+    thread = Thread(target=model.generate, kwargs=generation_kwargs)
+    thread.start()
+    for chunk in streamer:
+        yield chunk
+
+def add_chunk_to_ai_message(chunk: str):
+    messages.value = [
+        *messages.value[:-1],
+        {
+            "role": "assistant",
+            "content": messages.value[-1]["content"] + chunk,
+        },
+    ]
+
 messages: solara.Reactive[List[MessageDict]] = solara.reactive([])
 @solara.component
 def Page():
@@ -30,23 +52,8 @@ def Page():
         messages.value = [*messages.value, {"role": "user", "content": message}]
     def response(message):
         messages.value = [*messages.value, {"role": "assistant", "content": ""}]
-        text = tokenizer.apply_chat_template(
-            [{"role": "user", "content": message}],
-            tokenize=False,
-            add_generation_prompt=True
-        )
-        inputs = tokenizer(text, return_tensors="pt")
-        generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=512)
-        thread = Thread(target=model.generate, kwargs=generation_kwargs)
-        thread.start()
-        for text in streamer:
-            messages.value = [
-                *messages.value[:-1],
-                {
-                    "role": "assistant",
-                    "content": messages.value[-1]["content"] + text,
-                },
-            ]
+        for chunk in response_generator(message):
+            add_chunk_to_ai_message(chunk)
     def result():
         if messages.value != []:
             response(messages.value[-1]["content"])
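In short, the commit lifts the streaming logic out of the component: response_generator runs model.generate in a background Thread and yields decoded chunks from the TextIteratorStreamer as they arrive, while add_chunk_to_ai_message folds each chunk into the last assistant message by assigning a fresh list to the reactive (Solara re-renders on assignment to messages.value, not on in-place mutation of the list). A quick way to exercise the generator outside the UI, illustrative only and assuming the setup sketched above:

# Prints chunks as the model produces them; not part of the commit.
if __name__ == "__main__":
    for chunk in response_generator("Write one sentence about token streaming."):
        print(chunk, end="", flush=True)
    print()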