Spaces:
PhilSpiel
/
Running

PhilSpiel committed on
Commit
4e214c0
1 Parent(s): 8a09f57

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -14
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
2
  import os
3
  from openai import OpenAI
4
  import os.path
 
5
 
6
  ################# Start PERSONA-SPECIFIC VALUES ######################
7
  coach_code = "gp"
@@ -9,7 +10,7 @@ coach_name_short = "General Patton"
9
  coach_name_upper = "GENERAL PATTON"
10
  coach_name_long = "General George S. Patton"
11
  sys_prompt_new = os.getenv("PROMPT_NEW")
12
- theme="bethecloud/storj_theme"
13
  ################# End PERSONA-SPECIFIC VALUES ######################
14
 
15
  ################# Start OpenAI-SPECIFIC VALUES ######################
@@ -19,22 +20,37 @@ client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
19
  openai_model = "gpt-3.5-turbo-0125"
20
  ################# End OpenAI-SPECIFIC VALUES ######################
21
 
22
- ############### CHAT ###################
 
 
23
  def predict(user_input, history):
24
  max_length = 500
25
- if len(user_input) > max_length:
 
 
 
 
 
 
 
 
 
 
 
 
26
  raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")
 
27
  history_openai_format = [
28
- {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
29
- ]
30
  for human, assistant in history:
31
- history_openai_format.append({"role": "user", "content": human })
32
- history_openai_format.append({"role": "assistant", "content":assistant})
33
  history_openai_format.append({"role": "user", "content": user_input})
34
 
35
  completion = client.chat.completions.create(
36
  model=openai_model,
37
- messages= history_openai_format,
38
  temperature=1.2,
39
  frequency_penalty=0.4,
40
  presence_penalty=0.1,
@@ -42,13 +58,23 @@ def predict(user_input, history):
42
  )
43
 
44
  output_stream = ""
45
- for chunk in completion:
46
- if chunk.choices[0].delta.content is not None:
47
- output_stream = output_stream + (chunk.choices[0].delta.content)
48
- yield output_stream
49
- message_content = output_stream
 
 
 
 
 
 
 
 
 
 
50
 
51
- return message_content
52
 
53
  #GUI
54
  with gr.Blocks(theme) as demo:
 
2
  import os
3
  from openai import OpenAI
4
  import os.path
5
+ from datetime import datetime
6
 
7
  ################# Start PERSONA-SPECIFIC VALUES ######################
8
  coach_code = "gp"
 
10
  coach_name_upper = "GENERAL PATTON"
11
  coach_name_long = "General George S. Patton"
12
  sys_prompt_new = os.getenv("PROMPT_NEW")
13
+ theme="sudeepshouche/minimalist"
14
  ################# End PERSONA-SPECIFIC VALUES ######################
15
 
16
  ################# Start OpenAI-SPECIFIC VALUES ######################
 
20
  openai_model = "gpt-3.5-turbo-0125"
21
  ################# End OpenAI-SPECIFIC VALUES ######################
22
 
23
+ tx = os.getenv("TX")
24
+
25
+ ############### CHAT ###################
26
def predict(user_input, history):
    """Chat handler for the Gradio UI: query the OpenAI model and log a transcript.

    Args:
        user_input: The latest message typed by the user.
        history: List of (user_message, assistant_message) pairs from the
            Gradio chat component.

    Returns:
        The assistant's reply text — or, when the secret export keyword
        (env var ``TX``) is entered, the accumulated transcript file contents.

    Raises:
        gr.Error: When the input exceeds the maximum allowed length.
    """
    max_length = 500
    transcript_file_path = "transcript.txt"

    # Secret keyword (value of the TX env var) returns the saved transcript
    # instead of querying the model.
    if user_input == tx:
        # NOTE(review): the exists() guard makes the not-found fallback nearly
        # unreachable, but it is kept for safety (e.g. a race with deletion).
        if os.path.exists(transcript_file_path):
            with open(transcript_file_path, "r", encoding="UTF-8") as file:
                return file.read()
        return "File 'transcript.txt' not found."

    if len(user_input) > max_length:
        raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")

    # Rebuild the conversation in the OpenAI chat-completions message format,
    # with the persona prompt as the system message.
    history_openai_format = [
        {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
    ]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model=openai_model,
        messages=history_openai_format,
        temperature=1.2,
        frequency_penalty=0.4,
        presence_penalty=0.1,
        # assumes stream=True is set on the diff line hidden by the hunk
        # boundary — the chunk/delta loop below only makes sense for a
        # streaming response; TODO confirm against the full file.
        stream=True,
    )

    # BUG FIX: the original wrapped this loop in `except StopAsyncIteration`,
    # which a synchronous iterator never raises (a sync for-loop consumes
    # StopIteration itself), and `message_content` was left unbound when the
    # stream produced no content chunks, causing a NameError below.
    message_content = ""
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            message_content += delta

    # Append the latest exchange (timestamped) to the transcript file.
    entry = (
        "Date/Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n\n"
        + f"YOU: {user_input}\n\n"
        + f"{coach_name_upper}: {message_content}\n\n\n"
    )
    with open(transcript_file_path, "a", encoding="UTF-8") as file:
        file.write(entry)

    return message_content
78
 
79
  #GUI
80
  with gr.Blocks(theme) as demo: