from fasthtml.common import *
from fasthtml_hf import setup_hf_backup
import uvicorn
from transformers import pipeline

# Set up the app, including daisyui and tailwind for the chat component
tlink = Script(src="https://cdn.tailwindcss.com")
dlink = Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/daisyui@4.11.1/dist/full.min.css")
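# picolink (exported by fasthtml.common) adds FastHTML's default Pico CSS header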
app = FastHTML(hdrs=(tlink, dlink, picolink))

# Set up the text generation pipeline
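# Note: UCCIX-Llama2-13B-Instruct is a 13B-parameter model, so this assumes the
# host has enough GPU/CPU memory; passing device_map="auto" and a torch_dtype to
# pipeline() (with accelerate installed) is one option to reduce the footprint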
pipe = pipeline("text-generation", model="ReliableAI/UCCIX-Llama2-13B-Instruct")

# In-memory chat history, shared by all clients and reset when the app restarts
messages = []

# Chat message component (renders a chat bubble)
def ChatMessage(msg):
    bubble_class = f"chat-bubble-{'primary' if msg['role'] == 'user' else 'secondary'}"
    chat_class = f"chat-{'end' if msg['role'] == 'user' else 'start'}"
    return Div(Div(msg['role'], cls="chat-header"),
               Div(msg['content'], cls=f"chat-bubble {bubble_class}"),
               cls=f"chat {chat_class}")

# The input field for the user message. Also used to clear the
# input field after sending a message via an OOB swap
def ChatInput():
    return Input(type="text", name='msg', id='msg-input',
                  placeholder="Type a message",
                  cls="input input-bordered w-full", hx_swap_oob='true')

# The main screen
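# Shows the chat history plus an input form; the form posts to "/" via htmx and
# appends the returned chat bubbles to #chatlist (hx_swap="beforeend")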
@app.route("/")
def get():
    page = Body(H1('Chatbot Demo'),
                Div(*[ChatMessage(msg) for msg in messages],
                    id="chatlist", cls="chat-box h-[73vh] overflow-y-auto"),
                Form(Group(ChatInput(), Button("Send", cls="btn btn-primary")),
                    hx_post="/", hx_target="#chatlist", hx_swap="beforeend",
                    cls="flex space-x-2 mt-2",
                ), cls="p-4 max-w-lg mx-auto")
    return Title('Chatbot Demo'), page

# Handle the form submission
@app.post("/")
def post(msg:str):
    messages.append({"role":"user", "content":msg})
    
    # Build a plain-text prompt from the chat history; the "Assistant: " cue is
    # appended once, after the full history, not after every turn
    full_prompt = "You are a helpful and concise assistant.\n\n"
    for m in messages:
        full_prompt += f"{m['role'].capitalize()}: {m['content']}\n"
    full_prompt += "Assistant: "

    # Generate a completion (max_length counts prompt + completion tokens)
    response = pipe(full_prompt, max_length=2048, num_return_sequences=1)

    # generated_text echoes the prompt, so the reply is everything after the
    # final "Assistant: " cue
    assistant_msg = response[0]['generated_text'].split("Assistant: ")[-1].strip()
    messages.append({"role":"assistant", "content":assistant_msg})
    
    return (ChatMessage(messages[-2]), # The user's message
            ChatMessage(messages[-1]), # The chatbot's response
            ChatInput()) # And clear the input field via an OOB swap

if __name__ == "__main__":
    # setup_hf_backup periodically backs up the app's data to a Hugging Face
    # dataset; 7860 is the default port for Hugging Face Spaces
    setup_hf_backup(app)
    uvicorn.run(app, host="0.0.0.0", port=7860)