Spaces:
Runtime error
Runtime error
Sethblocks
committed on
Commit
•
85e4c9e
1
Parent(s):
f123edb
update v1
Browse files
app.py
CHANGED
@@ -2,20 +2,82 @@ import gradio as gr
|
|
2 |
from threading import Thread
|
3 |
import random
|
4 |
import llama_cpp
|
|
|
5 |
randtxt = ""
|
6 |
-
|
|
|
|
|
7 |
|
8 |
def randomize():
|
9 |
global randtxt
|
10 |
-
while
|
11 |
-
|
12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
14 |
def watch(name):
|
15 |
-
|
16 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
|
18 |
-
|
|
|
19 |
|
20 |
if __name__ == "__main__":
|
|
|
|
|
21 |
demo.launch()
|
|
|
import gradio as gr
from threading import Thread
import random
import llama_cpp
import os

# Single source of truth for the model filename.  BUG FIX: the original
# downloaded "...Q3_K_L.gguf" with wget but then passed "...Q3_K_S.gguf"
# to Llama(), so the load always pointed at a file that was never fetched.
MODEL_FILE = "Meta-Llama-3-8B-Instruct.Q3_K_L.gguf"

randtxt = ""

print("downloading!")
# NOTE(review): shelling out to wget keeps the original behavior; a
# urllib/requests download would avoid the shell dependency entirely.
os.system(
    "wget https://huggingface.co/QuantFactory/Meta-Llama-3-8B-Instruct-GGUF/resolve/main/"
    + MODEL_FILE
)
llama = llama_cpp.Llama(MODEL_FILE, chat_format="llama-3")
def randomize():
    """Background worker loop: generate chat turns forever.

    Runs in a daemon-less Thread started from __main__ and never returns.
    NOTE(review): the original declared `global randtxt` here, but randtxt
    is never read or written in this function — the dead declaration has
    been removed.
    """
    while True:
        print("generating")
        genTurn()
# Conversation state shared between the generator thread and the UI.
# POV: llama is "assistant"; Berry speaks through "user" messages.
chat = [{"role": "system", "content": "The following is a never-ending chat between Berry and Llama. Berry is the personification of a raspberry. Llama is Berry's best friend. They already know each other well. The chat will not end but may cut to a later date after a chat."},
        {"role": "user", "content": "berry: Good morning"}]  # POV: llama is "assistant"
# Idiomatic negative index instead of chat[len(chat)-1].
print(chat[-1]["content"])
21 |
+
|
22 |
+
|
23 |
+
#llama
|
24 |
+
|
25 |
+
def reversechat(chat):
|
26 |
+
nchat = []
|
27 |
+
nchat.append(chat[0])
|
28 |
+
for msg in chat:
|
29 |
+
nmsg = msg
|
30 |
+
if nmsg["role"] == "user":
|
31 |
+
nmsg["role"] = "assistant"
|
32 |
+
else:
|
33 |
+
nmsg["role"] = "user"
|
34 |
+
if msg["role"] != "system":
|
35 |
+
nchat.append(nmsg)
|
36 |
+
return nchat[1:]
|
# NOTE(review): the return value of this call is discarded — it looks like
# leftover debugging and is a candidate for deletion; verify before removing.
reversechat(chat)
# Turn marker: "a" means Llama's side generates the next message.
# NOTE(review): nothing in the visible code ever sets chara to a value other
# than "a", so genTurn's non-"a" branch appears unreachable — confirm intent.
chara = "a"
def genTurn():
    """Generate one chat turn with the local llama and append it to `chat`.

    When chara == "a" the model answers the chat as-is (Llama is the
    "assistant"); otherwise the roles are mirrored via reversechat() so the
    model can speak for Berry.  On any generation failure the conversation
    is reset to its opening state.
    """
    global chara
    global chat
    try:
        if chara == "a":
            msg = llama.create_chat_completion(chat, max_tokens=50)["choices"][0]["message"]["content"]
            chat.append({"role": "assistant", "content": msg.removesuffix("<|eot_id|>")})
        else:
            #Arteex
            msg = llama.create_chat_completion(reversechat(chat), max_tokens=50)["choices"][0]["message"]["content"]
            chat.append({"role": "user", "content": msg.removesuffix("<|eot_id|>")})
        print(msg)
    except Exception:
        # BUG FIX: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit, making the worker thread
        # impossible to stop cleanly.
        print("this chat is over now :(")
        chara = "a"
        chat = [{"role": "system", "content": "The following is a never-ending chat between Berry and Llama. Berry is the personification of a raspberry. Llama is Berry's best friend. They already know each other well. The chat will not end but may cut to a later date after a chat."},
                {"role": "user", "content": "berry: Good morning"}]
57 |
+
|
58 |
+
|
59 |
+
|
60 |
|
def watch(name):
    """Gradio live callback: render `chat` as Chatbot (left, right) pairs.

    `name` is unused but required by the gr.Interface signature.
    Messages whose content starts with "llama:" (case-insensitive) go on
    the right (bot side); everything else on the left.

    NOTE(review): the original declared `global chara` here but never used
    chara in the body — the dead declaration has been removed.
    """
    msgsview = []
    for msg in chat:
        if msg["role"] == "system":
            continue  # the system prompt is not shown in the UI
        if msg["content"].lower().startswith("llama:"):
            msgsview.append((None, msg["content"]))
        else:
            msgsview.append((msg["content"], None))
    yield msgsview
74 |
+
|
75 |
+
demo = gr.Interface(watch,inputs=None, outputs=gr.Chatbot(), live=True, description="click generate to show latest chat!", title="LlamaLive, watch an llm conversation!")
|
76 |
|
77 |
+
#randomize()
|
78 |
+
print(chat)
|
79 |
|
80 |
if __name__ == "__main__":
|
81 |
+
|
82 |
+
Thread(target=randomize).start()
|
83 |
demo.launch()
|