Courtney Ford committed on
Commit
3c3b103
·
1 Parent(s): 61bea24

With logging

Browse files
Files changed (1) hide show
  1. app.py +60 -33
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import os
2
  import json
 
3
  from datetime import datetime
4
  import gradio as gr
5
  import openai
@@ -9,30 +10,35 @@ from pathlib import Path
9
  log_dir = Path("usage_logs")
10
  log_dir.mkdir(exist_ok=True)
11
 
12
- def log_interaction(interaction_type, data):
13
- """Log each interaction with timestamp"""
 
 
 
 
 
 
14
  timestamp = datetime.now().isoformat()
15
  log_entry = {
16
  "timestamp": timestamp,
 
17
  "interaction_type": interaction_type,
18
  "data": data
19
  }
20
 
21
- # Use date as filename for easier analysis
22
  date_str = datetime.now().strftime("%Y-%m-%d")
23
  log_file = log_dir / f"usage_log_{date_str}.jsonl"
24
 
25
- # Append to log file
26
  with open(log_file, "a") as f:
27
  json.dump(log_entry, f)
28
  f.write("\n")
29
 
30
- def generate_with_temperature(input_text, temperature):
31
  try:
32
- # Log the attempt
33
- log_interaction("temperature_generation", {
34
  "input_text": input_text,
35
- "temperature": temperature
 
36
  })
37
 
38
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
@@ -54,8 +60,7 @@ def generate_with_temperature(input_text, temperature):
54
  return response.choices[0].message.content
55
  except Exception as e:
56
  error_msg = str(e)
57
- # Log the error
58
- log_interaction("error", {
59
  "function": "generate_with_temperature",
60
  "error": error_msg,
61
  "input_text": input_text,
@@ -63,12 +68,12 @@ def generate_with_temperature(input_text, temperature):
63
  })
64
  return f"Error: {error_msg}"
65
 
66
- def generate_with_system_prompt(input_text, system_prompt):
67
  try:
68
- # Log the attempt
69
- log_interaction("system_prompt_generation", {
70
  "input_text": input_text,
71
- "system_prompt": system_prompt
 
72
  })
73
 
74
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
@@ -81,10 +86,18 @@ def generate_with_system_prompt(input_text, system_prompt):
81
  temperature=0.7,
82
  max_tokens=300
83
  )
 
 
 
 
 
 
 
 
84
  return response.choices[0].message.content
85
  except Exception as e:
86
  error_msg = str(e)
87
- log_interaction("error", {
88
  "function": "generate_with_system_prompt",
89
  "error": error_msg,
90
  "input_text": input_text,
@@ -92,12 +105,12 @@ def generate_with_system_prompt(input_text, system_prompt):
92
  })
93
  return f"Error: {error_msg}"
94
 
95
- def generate_with_examples(input_text, approach_type):
96
  try:
97
- # Log the attempt
98
- log_interaction("reasoning_approach_generation", {
99
  "input_text": input_text,
100
- "approach_type": approach_type
 
101
  })
102
 
103
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
@@ -120,10 +133,18 @@ def generate_with_examples(input_text, approach_type):
120
  temperature=0.7,
121
  max_tokens=300
122
  )
 
 
 
 
 
 
 
 
123
  return response.choices[0].message.content
124
  except Exception as e:
125
  error_msg = str(e)
126
- log_interaction("error", {
127
  "function": "generate_with_examples",
128
  "error": error_msg,
129
  "input_text": input_text,
@@ -131,15 +152,13 @@ def generate_with_examples(input_text, approach_type):
131
  })
132
  return f"Error: {error_msg}"
133
 
134
- # Tab selection tracking
135
- def on_tab_select(tab_name):
136
- log_interaction("tab_selection", {"tab": tab_name})
137
-
138
  with gr.Blocks() as demo:
 
 
139
  gr.Markdown("# Language Models & Methods Lab Interface")
140
 
141
  with gr.Tabs() as tabs:
142
- with gr.Tab("Temperature Effects", id="temperature_tab"):
143
  with gr.Row():
144
  temp_input = gr.Textbox(
145
  label="Enter your prompt",
@@ -158,7 +177,7 @@ with gr.Blocks() as demo:
158
  focused_output = gr.Textbox(label="Focused Output (Low Temperature)", lines=5)
159
  creative_output = gr.Textbox(label="Creative Output (High Temperature)", lines=5)
160
 
161
- with gr.Tab("System Prompts", id="system_tab"):
162
  with gr.Row():
163
  system_input = gr.Textbox(
164
  label="Enter your prompt",
@@ -183,7 +202,7 @@ with gr.Blocks() as demo:
183
  with gr.Row():
184
  system_output = gr.Textbox(label="Output", lines=5)
185
 
186
- with gr.Tab("Reasoning Approaches", id="reasoning_tab"):
187
  with gr.Row():
188
  shot_input = gr.Textbox(
189
  label="Enter your task/question",
@@ -203,26 +222,34 @@ with gr.Blocks() as demo:
203
  with gr.Row():
204
  shot_output = gr.Textbox(label="Output", lines=8)
205
 
206
- # Event handlers with logging
 
 
 
 
 
207
  generate_temp.click(
208
- lambda x, t1, t2: [generate_with_temperature(x, t1), generate_with_temperature(x, t2)],
209
- inputs=[temp_input, temp_slider1, temp_slider2],
210
  outputs=[focused_output, creative_output]
211
  )
212
 
213
  generate_system.click(
214
  generate_with_system_prompt,
215
- inputs=[system_input, system_prompt],
216
  outputs=system_output
217
  )
218
 
219
  generate_shot.click(
220
  generate_with_examples,
221
- inputs=[shot_input, approach_type],
222
  outputs=shot_output
223
  )
224
 
225
  # Tab selection logging
226
- tabs.select(lambda evt: on_tab_select(evt), None, None)
 
 
 
227
 
228
  demo.launch()
 
1
  import os
2
  import json
3
+ import uuid
4
  from datetime import datetime
5
  import gradio as gr
6
  import openai
 
10
  log_dir = Path("usage_logs")
11
  log_dir.mkdir(exist_ok=True)
12
 
13
+ OPENAI_TOKEN = os.getenv("openAI_TOKEN")
14
+
15
def generate_session_id():
    """Return a short opaque identifier (8 hex chars) for one UI session."""
    # uuid4 is cryptographically random; the first 8 hex digits are plenty
    # to distinguish concurrent demo sessions in the usage logs.
    return uuid.uuid4().hex[:8]
18
+
19
def log_interaction(session_id, interaction_type, data):
    """Append one usage record as a JSON line to today's log file.

    Args:
        session_id: Short opaque ID tying related interactions together.
        interaction_type: Category label, e.g. "temperature_generation".
        data: JSON-serializable payload describing the interaction.
    """
    # Capture "now" exactly once so the "timestamp" field and the dated
    # filename can never disagree (e.g. a call straddling midnight).
    now = datetime.now()
    log_entry = {
        "timestamp": now.isoformat(),
        "session_id": session_id,
        "interaction_type": interaction_type,
        "data": data,
    }

    # One JSONL file per date keeps per-day analysis simple.
    date_str = now.strftime("%Y-%m-%d")
    log_file = log_dir / f"usage_log_{date_str}.jsonl"

    # Append mode so successive calls accumulate records; explicit UTF-8 and
    # ensure_ascii=False keep non-ASCII prompt text readable in the logs.
    with open(log_file, "a", encoding="utf-8") as f:
        json.dump(log_entry, f, ensure_ascii=False)
        f.write("\n")
35
 
36
+ def generate_with_temperature(input_text, temperature, session_id):
37
  try:
38
+ log_interaction(session_id, "temperature_generation", {
 
39
  "input_text": input_text,
40
+ "temperature": temperature,
41
+ "type": "low" if temperature <= 0.4 else "high"
42
  })
43
 
44
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
 
60
  return response.choices[0].message.content
61
  except Exception as e:
62
  error_msg = str(e)
63
+ log_interaction(session_id, "error", {
 
64
  "function": "generate_with_temperature",
65
  "error": error_msg,
66
  "input_text": input_text,
 
68
  })
69
  return f"Error: {error_msg}"
70
 
71
+ def generate_with_system_prompt(input_text, system_prompt, session_id):
72
  try:
73
+ log_interaction(session_id, "system_prompt_generation", {
 
74
  "input_text": input_text,
75
+ "system_prompt": system_prompt,
76
+ "timestamp_start": datetime.now().isoformat()
77
  })
78
 
79
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
 
86
  temperature=0.7,
87
  max_tokens=300
88
  )
89
+
90
+ # Log completion
91
+ log_interaction(session_id, "system_prompt_completion", {
92
+ "input_text": input_text,
93
+ "system_prompt": system_prompt,
94
+ "success": True
95
+ })
96
+
97
  return response.choices[0].message.content
98
  except Exception as e:
99
  error_msg = str(e)
100
+ log_interaction(session_id, "error", {
101
  "function": "generate_with_system_prompt",
102
  "error": error_msg,
103
  "input_text": input_text,
 
105
  })
106
  return f"Error: {error_msg}"
107
 
108
+ def generate_with_examples(input_text, approach_type, session_id):
109
  try:
110
+ log_interaction(session_id, "reasoning_approach_generation", {
 
111
  "input_text": input_text,
112
+ "approach_type": approach_type,
113
+ "timestamp_start": datetime.now().isoformat()
114
  })
115
 
116
  openai_client = openai.OpenAI(api_key=OPENAI_TOKEN)
 
133
  temperature=0.7,
134
  max_tokens=300
135
  )
136
+
137
+ # Log completion
138
+ log_interaction(session_id, "reasoning_approach_completion", {
139
+ "input_text": input_text,
140
+ "approach_type": approach_type,
141
+ "success": True
142
+ })
143
+
144
  return response.choices[0].message.content
145
  except Exception as e:
146
  error_msg = str(e)
147
+ log_interaction(session_id, "error", {
148
  "function": "generate_with_examples",
149
  "error": error_msg,
150
  "input_text": input_text,
 
152
  })
153
  return f"Error: {error_msg}"
154
 
 
 
 
 
155
  with gr.Blocks() as demo:
156
+ session_id = gr.State(lambda: generate_session_id())
157
+
158
  gr.Markdown("# Language Models & Methods Lab Interface")
159
 
160
  with gr.Tabs() as tabs:
161
+ with gr.Tab("Temperature Effects"):
162
  with gr.Row():
163
  temp_input = gr.Textbox(
164
  label="Enter your prompt",
 
177
  focused_output = gr.Textbox(label="Focused Output (Low Temperature)", lines=5)
178
  creative_output = gr.Textbox(label="Creative Output (High Temperature)", lines=5)
179
 
180
+ with gr.Tab("System Prompts"):
181
  with gr.Row():
182
  system_input = gr.Textbox(
183
  label="Enter your prompt",
 
202
  with gr.Row():
203
  system_output = gr.Textbox(label="Output", lines=5)
204
 
205
+ with gr.Tab("Reasoning Approaches"):
206
  with gr.Row():
207
  shot_input = gr.Textbox(
208
  label="Enter your task/question",
 
222
  with gr.Row():
223
  shot_output = gr.Textbox(label="Output", lines=8)
224
 
225
+ # Event handlers with session logging
226
+ def handle_temperature_generation(input_text, temp1, temp2, session_id):
227
+ output1 = generate_with_temperature(input_text, temp1, session_id)
228
+ output2 = generate_with_temperature(input_text, temp2, session_id)
229
+ return [output1, output2]
230
+
231
  generate_temp.click(
232
+ handle_temperature_generation,
233
+ inputs=[temp_input, temp_slider1, temp_slider2, session_id],
234
  outputs=[focused_output, creative_output]
235
  )
236
 
237
  generate_system.click(
238
  generate_with_system_prompt,
239
+ inputs=[system_input, system_prompt, session_id],
240
  outputs=system_output
241
  )
242
 
243
  generate_shot.click(
244
  generate_with_examples,
245
+ inputs=[shot_input, approach_type, session_id],
246
  outputs=shot_output
247
  )
248
 
249
  # Tab selection logging
250
+ def on_tab_select(tab_name, session_id):
251
+ log_interaction(session_id, "tab_selection", {"tab": tab_name})
252
+
253
+ tabs.select(lambda evt, sid: on_tab_select(evt, sid), [None, session_id], None)
254
 
255
  demo.launch()