|
import gradio as gr |
|
import os |
|
from openai import OpenAI |
|
import os.path |
|
from datetime import datetime |
|
|
|
|
|
# --- Runtime configuration -------------------------------------------------
# All settings come from environment variables. Nothing is validated here:
# a missing variable yields None and only fails later at its use site
# (e.g. string concatenation in predict()).
# NOTE(review): confirm the deployment environment always sets these.

coach_code = os.getenv("COACH_CODE")  # short code identifying this coach instance

coach_name_short = os.getenv("COACH_NAME_SHORT")  # name shown on the submit button

coach_name_upper = os.getenv("COACH_NAME_UPPER")  # speaker label written to the transcript

sys_prompt_new = os.getenv("PROMPT_NEW")  # system prompt ("IDENTITY: ..." prefix added later)

theme=os.getenv("THEME")  # passed to gr.Blocks; presumably a Gradio theme name — TODO confirm


# OpenAI client; the API key is read from the environment.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

openai_model = os.getenv("OPENAI_MODEL")  # model name passed to chat.completions.create


tx = os.getenv("TX")  # secret prefix of the "dump transcript" command (tx + coach_code)

prefix = os.getenv("PREFIX")  # path prefix for the transcript file

file_name = os.getenv("FILE_NAME")  # transcript file name suffix
|
|
|
|
|
def predict(user_input, history):
    """Chat handler for the Gradio ChatInterface.

    Sends the conversation history plus the new user message to the OpenAI
    chat-completions API (streamed), appends the exchange to an on-disk
    transcript file, and returns the assistant's full reply.

    A special input — the TX secret followed by the coach code — returns the
    raw transcript file contents instead of calling the API.

    Args:
        user_input: The user's latest message.
        history: List of (user, assistant) message pairs supplied by Gradio.

    Returns:
        The assistant's reply text, or the transcript contents (or a
        "not found" message) for the transcript-request command.
    """
    max_length = 3000  # messages longer than this are replaced with ""
    transcript_file_path = f"{prefix}{coach_code}-{file_name}"
    transcript = ""

    # Secret command: return the stored transcript instead of chatting.
    if user_input == tx + coach_code:
        # EAFP: open directly and handle the failure. The original guarded
        # with os.path.exists(), which made the FileNotFoundError handler
        # unreachable and silently returned "" for a missing file.
        try:
            with open(transcript_file_path, "r", encoding="UTF-8") as file:
                return file.read()
        except FileNotFoundError:
            return "File '" + file_name + "' not found."
    elif len(user_input) > max_length:
        # Over-long messages are dropped: an empty prompt is sent instead.
        user_input = ""

    # Rebuild the conversation in the OpenAI chat-message format.
    history_openai_format = [
        {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
    ]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model=openai_model,
        messages=history_openai_format,
        temperature=0.8,
        frequency_penalty=0.4,
        presence_penalty=0.1,
        stream=True
    )

    # Accumulate the streamed delta chunks into the full reply.
    # join() avoids the quadratic `+=` loop, and starting from an empty
    # list guarantees message_content is defined even if the stream yields
    # no content chunks (the original raised NameError in that case).
    # The original's `except StopAsyncIteration` was dead code — a
    # synchronous for-loop never raises it — so it is removed.
    parts = []
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            parts.append(delta)
    message_content = "".join(parts)

    # Append this exchange (timestamped) to the transcript file.
    transcript += "Date/Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n"
    transcript += f"YOU: {user_input}\n\n"
    transcript += f"{coach_name_upper}: {message_content}\n\n\n"

    with open(transcript_file_path, "a", encoding="UTF-8") as file:
        file.write(transcript)

    return message_content
|
|
|
|
|
# Build and launch the UI: a single ChatInterface wired to predict(), with
# the page footer hidden via CSS. Retry/undo/clear buttons are disabled and
# the input box gets focus on load.
# NOTE(review): retry_btn/undo_btn/clear_btn kwargs exist only in Gradio 4.x
# ChatInterface (removed in Gradio 5) — confirm the pinned gradio version.
# NOTE(review): gr.Blocks' first positional argument is the theme; THEME from
# the environment is assumed to name a valid Gradio theme — TODO confirm.
with gr.Blocks(theme, css="footer {visibility: hidden}") as demo:

    gr.ChatInterface(predict, submit_btn="Chat with "+ coach_name_short, retry_btn=None, undo_btn=None, clear_btn=None, autofocus=True)

demo.launch(show_api=False)  # show_api=False hides the "Use via API" link