import gradio as gr
import os
from openai import OpenAI
from datetime import datetime

################# Start PERSONA-SPECIFIC VALUES ######################
coach_code = os.getenv("COACH_CODE")
coach_name_short = os.getenv("COACH_NAME_SHORT")
coach_name_upper = os.getenv("COACH_NAME_UPPER")
sys_prompt_new = os.getenv("PROMPT_NEW")
theme = ""  # optional Gradio theme name; empty string uses the default theme
################# End PERSONA-SPECIFIC VALUES ######################
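
# The persona values above are read from environment variables (on Hugging Face Spaces these are
# typically configured as Space secrets/variables). Illustrative values only, not taken from this
# Space's actual configuration: COACH_CODE="phil", COACH_NAME_SHORT="Phil",
# COACH_NAME_UPPER="PHIL", PROMPT_NEW="You are a supportive running coach ...".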

################# Start OpenAI-SPECIFIC VALUES ######################
# Initialize OpenAI API client with API key
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# OpenAI model
openai_model = "gpt-3.5-turbo-0125"
################# End OpenAI-SPECIFIC VALUES ######################

tx = os.getenv("TX")
prefix = ""  #  "data/" if local or "/data/" if persistent in HF

############### CHAT ###################
def predict(user_input, history):
    max_length = 500
    transcript_file_path = f"{prefix}{coach_code}-transcript.txt"
    transcript = ""  # Initialize the transcript variable

    if user_input == tx:
        # Special command: return the saved transcript instead of querying the model
        if os.path.exists(transcript_file_path):
            with open(transcript_file_path, "r", encoding="UTF-8") as file:
                transcript = file.read()
            return transcript
        return f"File '{transcript_file_path}' not found."
    elif len(user_input) > max_length:
        raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")

    history_openai_format = [
        {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
    ]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model=openai_model,
        messages=history_openai_format,
        temperature=1.2,
        frequency_penalty=0.4,
        presence_penalty=0.1,
        stream=True
    )

    # Accumulate the streamed chunks into the full reply
    # (the stream is a regular synchronous iterator, so no StopAsyncIteration handling is needed)
    message_content = ""
    for chunk in completion:
        if chunk.choices[0].delta.content is not None:
            message_content += chunk.choices[0].delta.content

    # Append latest user and assistant messages to the transcript
    transcript += "Date/Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n\n"
    transcript += f"YOU: {user_input}\n\n"
    transcript += f"{coach_name_upper}: {message_content}\n\n\n"
    # Write the updated transcript to the file
    with open(transcript_file_path, "a", encoding="UTF-8") as file:
        file.write(transcript)

    return message_content

# GUI
with gr.Blocks(theme=theme) as demo:
    gr.ChatInterface(predict, submit_btn="Chat with " + coach_name_short,
                     retry_btn=None, undo_btn=None, clear_btn=None, autofocus=True)
demo.launch(show_api=False)
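
# To run this file outside of Spaces (an assumption, not documented here): set OPENAI_API_KEY, TX,
# and the persona variables above in the environment, then run the script with Python; Gradio
# serves the interface on http://127.0.0.1:7860 by default.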