Spaces:
PhilSpiel
/
Running

File size: 2,243 Bytes
ef731f5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import hmac
import os
import os.path

import gradio as gr
from openai import OpenAI

################# Start PERSONA-SPECIFIC VALUES ######################
# Identity of the chat persona presented by the UI.
coach_code = "gp"
coach_name_short = "General Patton"
coach_name_upper = "GENERAL PATTON"
coach_name_long = "General George S. Patton"
# System-prompt text for the persona, injected from the environment.
# NOTE(review): os.getenv returns None when PROMPT_NEW is unset, which would
# make the "IDENTITY: " + sys_prompt_new concatenation in predict() raise
# TypeError — confirm the Space always defines this secret.
sys_prompt_new = os.getenv("PROMPT_NEW")
################# End PERSONA-SPECIFIC VALUES ######################

################# Start OpenAI-SPECIFIC VALUES ######################
# Initialize OpenAI API client with API key
# NOTE(review): a missing OPENAI_API_KEY defers failure to the first API call.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# OpenAI model
openai_model = "gpt-3.5-turbo-0125"
################# End OpenAI-SPECIFIC VALUES ######################

################# Log in ######################
# Shared secret appended to the username by same_auth() to form the
# expected password. NOTE(review): may be None if AUTH is unset — verify.
security = os.getenv("AUTH")

###############  CHAT  ###################
def predict(user_input, history):
    """Stream an assistant reply for *user_input* given the chat *history*.

    Args:
        user_input: The latest user message (rejected if over 500 chars).
        history: Gradio tuple-format history, a list of
            (user_message, assistant_message) pairs.

    Yields:
        The progressively accumulated assistant reply, so Gradio can render
        the response token-by-token as chunks arrive.

    Raises:
        gr.Error: If the input exceeds the length limit.
    """
    max_length = 500
    if len(user_input) > max_length:
        raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")

    # Rebuild the conversation in the OpenAI messages format, with the
    # persona system prompt first.
    # NOTE(review): sys_prompt_new is None if PROMPT_NEW is unset, making
    # this concatenation raise TypeError — confirm the env var is set.
    history_openai_format = [
        {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
    ]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model=openai_model,
        messages=history_openai_format,
        temperature=1.2,
        frequency_penalty=0.4,
        presence_penalty=0.1,
        stream=True,
    )

    # Accumulate streamed deltas and yield the running text; Gradio replaces
    # the displayed message with each yielded value. The original ended with
    # `return message_content`, but a generator's return value only travels
    # via StopIteration and Gradio never reads it — dead code, removed.
    output_stream = ""
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            output_stream += delta
            yield output_stream

def same_auth(username, password):
    """Gradio auth callback: accept iff password equals username + AUTH secret.

    Args:
        username: Login name entered by the visitor.
        password: Password entered by the visitor; must be the username with
            the AUTH environment-variable secret appended.

    Returns:
        bool: True when the credentials match.
    """
    # Use a constant-time comparison so the login check does not leak the
    # expected password's content through response-timing differences.
    # NOTE(review): `security` is None if AUTH is unset, which would make
    # the concatenation raise TypeError at login time — confirm it is set.
    return hmac.compare_digest(username + security, password)

#GUI
# Build the UI with Gradio's default theme.
theme = gr.themes.Default()

with gr.Blocks(theme) as demo:

    # Single chat panel wired to predict(); retry/undo/clear buttons removed,
    # textbox focused on load.
    # NOTE(review): retry_btn/undo_btn/clear_btn are legacy ChatInterface
    # kwargs removed in newer Gradio releases — confirm the pinned version.
    gr.ChatInterface(predict, submit_btn="Chat with "+ coach_name_short, retry_btn=None, undo_btn=None, clear_btn=None, autofocus=True)
            
# Launch with the REST API hidden and username/password auth enforced
# by same_auth().
demo.launch(show_api=False, auth=same_auth)