Spaces: PhilSpiel (Running)

PhilSpiel committed
Commit e8aeb51 (1 parent: 7030eb1)

Update app.py

Files changed (1)
  1. app.py +106 -25
app.py CHANGED
@@ -1,8 +1,9 @@
 import gradio as gr
 import os
 from openai import OpenAI
-import os.path
-from datetime import datetime
+from datetime import datetime, timezone, timedelta
+import hashlib
+import hmac
 
 ################# Start PERSONA-SPECIFIC VALUES ######################
 coach_code = os.getenv("COACH_CODE")
@@ -23,24 +24,49 @@ tx = os.getenv("TX")
 prefix = os.getenv("PREFIX") # "/data/" if in HF or "data/" if local
 file_name = os.getenv("FILE_NAME")
 
+############### VERIFY USER ###################
+def generate_access_code(time):
+    secret = os.getenv("SHARED_SECRET_KEY")
+
+    time_block = time.replace(minute=(time.minute // 10) * 10, second=0, microsecond=0)
+    time_string = time_block.strftime('%Y%m%d%H%M')
+
+    hmac_obj = hmac.new(secret.encode(), time_string.encode(), hashlib.sha256)
+    hmac_digest = hmac_obj.hexdigest()
+
+    xor_result = bytes(int(hmac_digest[i], 16) ^ int(hmac_digest[-4+i], 16) for i in range(4))
+
+    return xor_result.hex()[:4]
+
+def verify_code(code, access_granted):
+    now = datetime.now(timezone.utc)
+
+    codes = [generate_access_code(now + timedelta(minutes=offset))
+             for offset in [-20, -10, 0, 10, 20]]
+
+    if code in codes:
+        return True, gr.update(interactive=True), gr.update(interactive=True), "Access granted. Please proceed to the Chat tab."
+    else:
+        return False, gr.update(interactive=False), gr.update(interactive=False), "Incorrect code. Please try again."
+
 ############### CHAT ###################
-def predict(user_input, history):
-    max_length = 3000
+def predict(user_input, history, access_granted):
+    if not access_granted:
+        return history, "Access not granted. Please enter the correct code in the Access tab."
+
+    max_length = 1000
+    if len(user_input) > max_length:
+        user_input = ""
+
     transcript_file_path = f"{prefix}{coach_code}-{file_name}"
-    transcript = "" # Initialize the transcript variable
 
     if user_input == tx + coach_code:
        try:
-            # Prepare the transcript for the Textbox output
            if os.path.exists(transcript_file_path):
                with open(transcript_file_path, "r", encoding="UTF-8") as file:
-                    transcript = file.read()
-            return transcript
+                    return history, file.read()
        except FileNotFoundError:
-            return "File '" + file_name + "' not found."
-    elif len(user_input) > max_length:
-        user_input = ""
-        # raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")
+            return history, "File '" + file_name + "' not found."
 
     history_openai_format = [
         {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
@@ -59,26 +85,81 @@ def predict(user_input, history):
         stream=True
     )
 
-    output_stream = ""
-    try:
-        for chunk in completion:
-            if chunk.choices[0].delta.content is not None:
-                output_stream = output_stream + (chunk.choices[0].delta.content)
-                message_content = output_stream
-    except StopAsyncIteration:
-        pass
+    message_content = ""
+    for chunk in completion:
+        if chunk.choices[0].delta.content is not None:
+            message_content += chunk.choices[0].delta.content
 
     # Append latest user and assistant messages to the transcript
-    transcript += "Date/Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n"
+    transcript = f"Date/Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n"
     transcript += f"YOU: {user_input}\n\n"
     transcript += f"{coach_name_upper}: {message_content}\n\n\n"
+
     # Write the updated transcript to the file
     with open(transcript_file_path, "a", encoding="UTF-8") as file:
         file.write(transcript)
 
-    return message_content
+    history.append((user_input, message_content))
+    return history, ""
+
+with gr.Blocks(theme, css="""
+#chatbot { flex-grow: 1; height: 460px; overflow-y: auto; }
+.gradio-container { height: 700px; max-width: 100% !important; padding: 0 !important; }
+#component-0 { height: 100%; }
+#component-3 { height: calc(100% - 250px); }
+footer { display: none !important; }
+#submit-btn { margin-top: 10px; }
+#code_submit {
+    height: 50px !important;
+    font-size: 1.2em !important;
+}
+#code_message {
+    font-size: 1.4em !important;
+    font-weight: bold !important;
+    padding: 15px !important;
+    margin-top: 15px !important;
+    border-radius: 5px !important;
+    text-align: center !important;
+}
+#code_message.error { color: #d32f2f !important; background-color: #ffcdd2 !important; }
+#code_message.success { color: #388e3c !important; background-color: #c8e6c9 !important; }
+@media (max-width: 600px) {
+    #code_submit {
+        height: 60px !important;
+        font-size: 1.3em !important;
+    }
+    #code_message {
+        font-size: 1.2em !important;
+        padding: 10px !important;
+    }
+}
+""") as demo:
+    access_granted = gr.State(False)
+
+    with gr.Tab("Access"):
+        gr.Markdown("Enter the Access Code displayed in the upper-left corner.")
+        code_input = gr.Textbox(label="Access Code", type="text", placeholder="Enter CODE here...")
+        code_submit = gr.Button("Submit Code", elem_id="code_submit")
+        code_message = gr.Markdown(elem_id="code_message")
+
+    with gr.Tab("Chat"):
+        chatbot = gr.Chatbot(label="Conversation")
+        msg = gr.Textbox(
+            label=f"Chat with {coach_name_short}",
+            placeholder="Type your message here... (MAX: 1000 characters)",
+            interactive=False
+        )
+        submit = gr.Button("Submit Message", interactive=False)
+
+    def submit_code(code, access_granted):
+        success, _, _, message = verify_code(code, access_granted)
+        css_class = "success" if success else "error"
+        return success, gr.update(interactive=success), gr.update(interactive=success), gr.update(value=message, elem_classes=[css_class])
+
+    code_input.submit(submit_code, inputs=[code_input, access_granted], outputs=[access_granted, msg, submit, code_message])
+    code_submit.click(submit_code, inputs=[code_input, access_granted], outputs=[access_granted, msg, submit, code_message])
+
+    msg.submit(predict, [msg, chatbot, access_granted], [chatbot, msg])
+    submit.click(predict, [msg, chatbot, access_granted], [chatbot, msg])
 
-#GUI
-with gr.Blocks(theme, css="footer {visibility: hidden}") as demo:
-    gr.ChatInterface(predict, submit_btn="Chat with "+ coach_name_short, retry_btn=None, undo_btn=None, clear_btn=None, autofocus=True)
 demo.launch(show_api=False)
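
Note on the access-code scheme: verify_code accepts the code for the current 10-minute UTC block plus the two blocks on either side (offsets of -20 to +20 minutes), so a displayed code keeps working for roughly 20 to 30 minutes after it first appears. As a minimal illustrative sketch (not part of this commit), the 4-character code shown "in the upper-left corner" could be produced with the same logic as generate_access_code above, assuming the displaying side shares the same SHARED_SECRET_KEY; the helper name current_access_code below is hypothetical.

import hashlib
import hmac
import os
from datetime import datetime, timezone

def current_access_code():
    # Hypothetical helper mirroring generate_access_code in app.py; assumes the
    # same SHARED_SECRET_KEY environment variable is set here as in the Space.
    secret = os.getenv("SHARED_SECRET_KEY")
    now = datetime.now(timezone.utc)

    # Round down to the start of the current 10-minute block (UTC).
    time_block = now.replace(minute=(now.minute // 10) * 10, second=0, microsecond=0)
    time_string = time_block.strftime('%Y%m%d%H%M')

    # HMAC-SHA256 over the time string, keyed by the shared secret.
    digest = hmac.new(secret.encode(), time_string.encode(), hashlib.sha256).hexdigest()

    # XOR the first four hex digits with the last four and keep four hex characters,
    # exactly as generate_access_code does.
    xor_result = bytes(int(digest[i], 16) ^ int(digest[-4 + i], 16) for i in range(4))
    return xor_result.hex()[:4]

if __name__ == "__main__":
    print(current_access_code())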