# NOTE: removed web-scrape artifacts that preceded this file (a "File size"
# caption, git commit short-hashes, and a rendered line-number gutter). They
# were not part of the program and made the file invalid Python.
import os
import openai
import gradio as gr

# SECURITY FIX: the previous revision hard-coded a live OpenAI secret key in
# source. That key is now part of the repository history and must be revoked;
# the key is read from the environment instead of being committed.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")

# Turn delimiters used when assembling the completion-style prompt.
start_sequence = "\nAI:"
restart_sequence = "\nHuman:"

def predict(input, history=None):
    """Run one chat turn: send the conversation to the OpenAI completion
    endpoint and return the updated history.

    Args:
        input: The user's latest message (from the Gradio textbox).
        history: List of ``(user, bot)`` message pairs from earlier turns;
            Gradio passes this in via its ``state`` input.

    Returns:
        ``(history, history)`` — the updated conversation, duplicated because
        the Interface wires one copy to the "chatbot" output and one to the
        "state" output.
    """
    # BUG FIX: the original default was `history=[]`, a mutable default that
    # is shared across calls and would leak conversation state between
    # independent sessions.
    if history is None:
        history = []

    # Flatten [(u1, b1), (u2, b2), ...] into [u1, b1, u2, b2, ...] and append
    # the new message so the model sees the whole conversation.
    transcript = list(sum(history, ()))
    transcript.append(input)

    # NOTE(review): "gpt-3.5-turbo" is a chat model; the Completions endpoint
    # may reject it — consider migrating to the ChatCompletion API. Confirm
    # against the installed openai package version.
    response = openai.Completion.create(
        # BUG FIX: the keyword is `model`, not `model_name`; the original
        # raised TypeError on every call.
        model="gpt-3.5-turbo",
        prompt=str(transcript),
        temperature=0.9,
        max_tokens=1050,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0.6,
        stop=[" Human:", " AI:"],
    )

    reply = response["choices"][0]["text"]
    history.append((input, reply))

    return history, history
    
# Wire the predict function into a simple chat UI and start the server.
# FIX: dropped the original `gr.Markdown("heyoo")` call — a Markdown component
# created outside any Blocks/Interface context is never rendered, so it was a
# no-op.
gr.Interface(
    fn=predict,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
).launch()


#def get_model_reply(user_input, context=[]):
 #   context+=[user_input]

#    completion = openai.Completion.create(
#        engine="gpt-3.5-turbo", # one of the most capable models available
#        prompt='\\n'.join([f"I am {role}.", *context])[:4096],
#        max_tokens = 1048,
#        temperature = 0.9,
#        top_p = 1,
#        frequency_penalty=0,
#        presence_penalty=0.6,
#    )
    
    # append response to context
#    response = completion.choices[0].text.strip('\\n')
#    context += [response]
    
    # list of (user, bot) responses. We will use this format later
#    responses = [(u,b) for u,b in zip(context[::2], context[1::2])]
    
#    return responses, context
   # ```
    # defines a basic dialog interface using Gradio
#with gr.Blocks() as dialog_app:
 #   chatbot = gr.Chatbot() # dedicated "chatbot" component
 #   state = gr.State([]) # session state that persists across multiple submits
    
 #   with gr.Row():
 #       txt = gr.Textbox(
 #           show_label=False, 
 #           placeholder="Enter text and press enter"
 #       ).style(container=False)

 #   txt.submit(get_model_reply, [txt, state], [chatbot, state])

#dialog_app.launch()