File size: 2,693 Bytes
8311a6d
d4cc99b
 
8311a6d
 
62d2e21
d4cc99b
8311a6d
 
 
62d2e21
 
8311a6d
 
 
 
 
3380901
62d2e21
8311a6d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3380901
62d2e21
c491e45
62d2e21
 
 
 
 
 
8311a6d
62d2e21
 
 
 
3380901
62d2e21
 
 
3380901
c491e45
62d2e21
c491e45
d4cc99b
8311a6d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import gradio as gr
import json
import asyncio
import os
import re
from EdgeGPT import Chatbot, ConversationStyle

# Read Bing authentication cookies exported from the browser.
# A missing cookies.json fails fast here at startup, since the
# chatbot cannot authenticate without it.  Encoding is pinned to
# UTF-8 so the load does not depend on the platform default.
with open('./cookies.json', 'r', encoding='utf-8') as f:
    cookies = json.load(f)

# Async function to get model reply
async def get_model_reply(prompt, style, context=None):
    """Send *prompt* (plus prior turns) to Bing and return the chat log.

    Args:
        prompt: The new user message.
        style: A conversation style accepted by EdgeGPT
            ("creative", "balanced" or "precise").
        context: Alternating user/bot messages from earlier turns.
            Mutated in place with the new prompt and reply.  Defaults
            to a fresh list per call (fixes the original shared
            mutable-default-argument bug).

    Returns:
        A tuple ``(responses, context)`` where ``responses`` is a list
        of ``(user, bot)`` pairs suitable for ``gr.Chatbot`` and
        ``context`` is the updated flat history.
    """
    if context is None:
        context = []
    context += [prompt]
    bot = Chatbot(cookies=cookies)
    # Bing limits the prompt size; send the joined history, truncated.
    prompt2 = '\n\n'.join(context)[:4096]
    try:
        raw_data = await bot.ask(prompt2, conversation_style=style)
    finally:
        # Always release the connection, even if ask() raises.
        await bot.close()

    try:
        try:
            # Normal replies carry plain text ...
            response = raw_data["item"]["messages"][1]["text"]
        except (KeyError, IndexError, TypeError):
            # ... but some replies only populate the adaptive card body.
            response = raw_data["item"]["messages"][1]["adaptiveCards"][0]["body"][0]["text"]
        # Strip the "^" citation markers Bing embeds in answers.
        response = re.sub(r'\^', '', response)
        response = response.rstrip()
        context += [response]
        responses = [(u, b) for u, b in zip(context[::2], context[1::2])]
        return responses, context
    except (KeyError, IndexError, TypeError):
        # No message text at all: distinguish the known throttling
        # shapes, then fall back to a generic error so the caller
        # always gets a (responses, context) tuple instead of None.
        item = raw_data.get("item", {}) if isinstance(raw_data, dict) else {}
        throttling = item.get("throttling", {})
        if throttling.get("numUserMessagesInConversation", 0) > \
                throttling.get("maxNumUserMessagesInConversation", float('inf')):
            response = "> **Oops, I think we've reached the end of this conversation. Please reset the bot!**"
        elif item.get("result", {}).get("value") == "Throttled":
            response = "> **Error: We're sorry, but you've reached the maximum number of messages you can send to Bing in a 24-hour period. Check back later!**"
        else:
            # NOTE(review): unexpected payload shape — surface it rather
            # than crash the Gradio handler with a None unpack.
            response = "> **Error: Unexpected response from Bing. Please try again later.**"
        context += [response]
        responses = [(u, b) for u, b in zip(context[::2], context[1::2])]
        return responses, context

# Function to send user input to model and get reply
def get_reply(inputs, style, state):
    """Gradio event handler: run one chat turn and refresh the UI.

    Args:
        inputs: Text from the question textbox.
        style: Selected conversation style from the dropdown.
        state: Accumulated alternating user/bot history (gr.State list).

    Returns:
        ``(responses, context)`` — the (user, bot) pair list for the
        Chatbot component and the updated flat history for State.
    """
    responses, context = asyncio.run(get_model_reply(inputs, style, state))
    # Bug fix: gr.Chatbot has no append_message() method.  Gradio
    # handlers must *return* new component values, which are routed
    # through the outputs= lists on .submit()/.click() below.  The old
    # state.append(responses) also nested the pair-list into the flat
    # history; returning context keeps the history flat.
    return responses, context

# Define Gradio interface
with gr.Blocks() as dialog_app:
    gr.Markdown("# A Simple Web to use New Bing Without Magic")
    chatbot = gr.Chatbot()
    state = gr.State([])
    markdown = gr.Markdown(label="Output")

    with gr.Row():
        inputs = gr.Textbox(label="Enter question", placeholder="Enter text and press enter")
        style = gr.Dropdown(label="Answer tendency", choices=["creative", "balanced", "precise"], multiselect=False, value="balanced", type="value")

    # Route the handler's return values back into the UI components;
    # without outputs= the chat window never updated.
    inputs.submit(get_reply, [inputs, style, state], [chatbot, state])
    send = gr.Button("Send")
    send.click(get_reply, [inputs, style, state], [chatbot, state])

dialog_app.launch()