from huggingface_hub import InferenceClient
import gradio as gr
import random
from html2image import Html2Image
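# html2image renders HTML strings to image files via a headless browser.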
hti = Html2Image()
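# Inference API client for the Mixtral-8x7B-Instruct chat model on Hugging Face.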
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")


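# HTML templates for user messages, bot messages, and the card that wraps the whole chat.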
html_user="""
<div class="user_chat">
{chat}
</div>
"""
html_bot="""
<div class="bot_chat">
{chat}
</div>
"""
html_card="""
<div class="chat_card">
{body}
</div>
"""


def get_screenshot(chat, height, width, chatblocks, theme, wait_time):
    # Render each (user, bot) pair of the Chatbot history into the card template.
    html_body = ""
    for user, bot in chat:
        html_body += html_user.format(chat=user)
        html_body += html_bot.format(chat=bot)
    html = html_card.format(body=html_body)
    # chatblocks, theme and wait_time come from the UI controls but are not applied here yet.
    hti.screenshot(html_str=html, save_as='red_page.png', size=(int(width), int(height)))
    return 'red_page.png'


def format_prompt(message, history):
    # Mixtral-Instruct format: each past turn becomes "[INST] user [/INST] bot</s>",
    # then the new message is appended as a final [INST] block.
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt


def chat_inf(system_prompt, prompt, history):
    # Stream a response from Mixtral and update the Chatbot token by token.
    if not history:
        history = []
    seed = random.randint(1, 1111111111111111)
    generate_kwargs = dict(
        temperature=0.9,
        max_new_tokens=10480,
        top_p=0.95,
        repetition_penalty=1.0,
        do_sample=True,
        seed=seed,
    )

    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""

    # Keep earlier turns visible while the new reply streams in.
    for response in stream:
        output += response.token.text
        yield history + [(prompt, output)]
    history.append((prompt, output))
    yield history
        
chat=[('user','bot'),('user','bot')]

#get_screenshot(chat=[('user','bot'),('user','bot')])
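# Gradio layout: chat column (Chatbot, prompt boxes, Chat/Stop/Clear) on the left;
# screenshot size, theme and chat-block controls plus the image preview on the right.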
with gr.Blocks() as app:
    with gr.Row():
        with gr.Column(scale=3):
            with gr.Group():
                chat_b = gr.Chatbot()
                with gr.Row():
                    with gr.Column(scale=3):
                        inp = gr.Textbox(label="Prompt")
                        sys_inp = gr.Textbox(label="System Prompt (optional)")
                        btn = gr.Button("Chat")
                        
                    with gr.Column(scale=1):
                        with gr.Group():
                            stop_btn=gr.Button("Stop")
                            clear_btn=gr.Button("Clear")
        with gr.Column(scale=1):
            with gr.Group():
                with gr.Row():
                    im_height=gr.Number(label="Height",value=5000)
                    im_width=gr.Number(label="Width",value=500)
                wait_time=gr.Number(label="Wait Time",value=3000)
                theme=gr.Radio(label="Theme", choices=["light","dark"],value="light")
                chatblock=gr.Dropdown(label="Chatblocks",choices=[c for c in range(1,40)],multiselect=True)
                
                im_btn=gr.Button("Screenshot")
                img=gr.Image(type='filepath')
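    # Event wiring: Chat streams chat_inf into the Chatbot; Screenshot renders the
    # current history to an image. Stop and Clear are not connected to any handler.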
    btn.click(chat_inf,[sys_inp,inp,chat_b],chat_b)
    im_btn.click(get_screenshot,[chat_b,im_height,im_width,chatblock,theme,wait_time],img)
    #app.load(get_screenshot,inp,img)
app.launch()