|
import json |
|
from typing import Generator, List |
|
|
|
import gradio as gr |
|
from openai import OpenAI |
|
|
|
from crop_utils import get_image_crop |
|
from prompts import ( |
|
get_chat_system_prompt, |
|
get_live_event_system_prompt, |
|
get_live_event_user_prompt, |
|
get_street_interview_prompt, |
|
get_street_interview_system_prompt, |
|
) |
|
from transcript import TranscriptProcessor |
|
from utils import css, get_transcript_for_url, head, js |
|
from utils import openai_tools as tools |
|
from utils import setup_openai_key |
|
|
|
# Module-level OpenAI client; note the handler functions below also create
# their own local clients, so this mostly serves as an import-time check.
client = OpenAI()
|
|
|
|
|
def get_initial_analysis(
    transcript_processor: "TranscriptProcessor",
    cid,
    rsid,
    origin,
    ct,
    uid,
    chunk_delay: float = 0.05,
) -> Generator[str, None, None]:
    """Stream an initial analysis of the transcript.

    For a small set of known (cid, rsid) sessions a prewritten summary is
    streamed back in fixed-size chunks to mimic a live completion; for every
    other session the transcript is sent to OpenAI and the streamed completion
    is yielded as it arrives.

    Args:
        transcript_processor: Source of the transcript text and speaker mapping.
        cid: Call id; used in generated links and for the hardcoded lookup.
        rsid: Recording/session id; used in links and for the hardcoded lookup.
        origin: Caller origin; "localhost" selects http links, otherwise https.
        ct: Call type; "si" selects the street-interview prompts.
        uid: User id appended to generated links.
        chunk_delay: Seconds to sleep between chunks of a hardcoded message
            (pass 0 to disable the artificial delay).

    Yields:
        The cumulative message text after each streamed chunk.
    """
    import time

    # Prewritten analyses for specific known sessions, keyed by (cid, rsid).
    # The message bodies are markdown with deep links into the recording.
    hardcoded_messages = {
        (
            "9v3b-j426-kxxv_2024-11-19T204924",
            "2024-11-19T223131",
        ): f"""**Mala Ramakrishnan**
1. [Introduction and Event Overview <div id='topic' style="display: inline"> 40s at 03:25 </div>]({origin}/collab/{cid}/{rsid}?st={205}&et={240}&uid={uid})
2. [Advice for Startup Founders <div id='topic' style="display: inline"> 30s at 26:10 </div>]({origin}/collab/{cid}/{rsid}?st={1570}&et={1600}&uid={uid})

**Raymond Lee**
1. [Event Introduction and Agenda <div id='topic' style="display: inline"> 120s at 00:39 </div>]({origin}/collab/{cid}/{rsid}?st={39}&et={159}&uid={uid})
2. [Introduction of Mala Ramakrishnan <div id='topic' style="display: inline"> 20s at 02:51 </div>]({origin}/collab/{cid}/{rsid}?st={171}&et={191}&uid={uid})

**Vince Lane**
1. [Introduction and Background <div id='topic' style="display: inline"> 60s at 04:42 </div>]({origin}/collab/{cid}/{rsid}?st={282}&et={342}&uid={uid})
2. [Advice for Founders <div id='topic' style="display: inline"> 60s at 19:48 </div>]({origin}/collab/{cid}/{rsid}?st={1188}&et={1248}&uid={uid})

**Marriott Wharton**
1. [Introduction and Investment Focus <div id='topic' style="display: inline"> 60s at 06:36 </div>]({origin}/collab/{cid}/{rsid}?st={396}&et={456}&uid={uid})
2. [AI as a Fundamental Tool <div id='topic' style="display: inline"> 60s at 08:39 </div>]({origin}/collab/{cid}/{rsid}?st={519}&et={579}&uid={uid})

**spk_2**
1. [Introduction and Investment Focus <div id='topic' style="display: inline"> 60s at 05:56 </div>]({origin}/collab/{cid}/{rsid}?st={356}&et={416}&uid={uid})
2. [Caution in AI Investments <div id='topic' style="display: inline"> 60s at 10:50 </div>]({origin}/collab/{cid}/{rsid}?st={650}&et={710}&uid={uid})
""",
        (
            "9v3b-j426-kxxv_2024-11-19T204924",
            "2024-11-19T230912",
        ): f"""**Napoleon Paxton**
1. [Introduction and Background <div id='topic' style="display: inline"> 68s at 00:49 </div>](/collab/{cid}/{rsid}?st=49&et=117&uid={uid})
2. [AI Squared's Business Model <div id='topic' style="display: inline"> 52s at 15:18 </div>](/collab/{cid}/{rsid}?st=918&et=970&uid={uid})
3. [Federal Space and Networking <div id='topic' style="display: inline"> 88s at 24:35 </div>](/collab/{cid}/{rsid}?st=1475&et=1563&uid={uid})

**Lauren Hidalgo**
1. [Introduction and Experience <div id='topic' style="display: inline"> 77s at 03:01 </div>](/collab/{cid}/{rsid}?st=181&et=258&uid={uid})
2. [AI Implementation Approach <div id='topic' style="display: inline"> 108s at 11:50 </div>](/collab/{cid}/{rsid}?st=710&et=818&uid={uid})

**Priti Padmanaban**
1. [Introduction and AI Marketing <div id='topic' style="display: inline"> 66s at 06:17 </div>](/collab/{cid}/{rsid}?st=377&et=443&uid={uid})
2. [Responsible AI Framework <div id='topic' style="display: inline"> 109s at 08:15 </div>](/collab/{cid}/{rsid}?st=495&et=604&uid={uid})
3. [AI in Climate Tech <div id='topic' style="display: inline"> 72s at 31:30 </div>](/collab/{cid}/{rsid}?st=1890&et=1962&uid={uid})

**Rishi Sawane**
1. [Introduction and Background <div id='topic' style="display: inline"> 98s at 04:17 </div>](/collab/{cid}/{rsid}?st=257&et=355&uid={uid})
2. [AI and Recruitment Automation <div id='topic' style="display: inline"> 56s at 32:52 </div>](/collab/{cid}/{rsid}?st=1972&et=2028&uid={uid})""",
        (
            "9v3b-j426-kxxv_2024-10-10T145749",
            "2024-10-10T160643",
        ): f"""**Mahesh**
1. [Zoom's AI Adoption Journey <div id='topic' style="display: inline"> 60s at 05:42 </div>](/collab/{cid}/{rsid}?st=342&et=402&uid={uid})
2. [AI's Impact on Business Metrics <div id='topic' style="display: inline"> 60s at 07:49 </div>](/collab/{cid}/{rsid}?st=469&et=529&uid={uid})
3. [AI's Role in Enterprise Adoption <div id='topic' style="display: inline"> 60s at 13:02 </div>](/collab/{cid}/{rsid}?st=782&et=842&uid={uid})

**Ben**
1. [AI in Enterprise Content Management <div id='topic' style="display: inline"> 60s at 04:18 </div>](/collab/{cid}/{rsid}?st=258&et=318&uid={uid})
2. [Challenges in AI Adoption <div id='topic' style="display: inline"> 60s at 11:00 </div>](/collab/{cid}/{rsid}?st=660&et=720&uid={uid})
3. [Trust and AI Implementation <div id='topic' style="display: inline"> 60s at 31:02 </div>](/collab/{cid}/{rsid}?st=1862&et=1922&uid={uid})

**Jennifer Lee**
1. [Introduction to Enterprise AI <div id='topic' style="display: inline"> 60s at 01:49 </div>](/collab/{cid}/{rsid}?st=109&et=169&uid={uid})
2. [Investor's Perspective on AI <div id='topic' style="display: inline"> 60s at 17:18 </div>](/collab/{cid}/{rsid}?st=1038&et=1098&uid={uid})
3. [Closing Remarks and Thanks <div id='topic' style="display: inline"> 60s at 58:57 </div>](/collab/{cid}/{rsid}?st=3537&et=3597&uid={uid})

**Robert**
1. [AI's Role in Customer Support <div id='topic' style="display: inline"> 60s at 08:34 </div>](/collab/{cid}/{rsid}?st=514&et=574&uid={uid})
2. [Challenges in AI Implementation <div id='topic' style="display: inline"> 60s at 32:11 </div>](/collab/{cid}/{rsid}?st=1931&et=1991&uid={uid})
3. [AI's Impact on Business Processes <div id='topic' style="display: inline"> 60s at 54:01 </div>](/collab/{cid}/{rsid}?st=3241&et=3301&uid={uid})""",
        (
            "9v3b-j426-kxxv_2025-01-08T195932",
            "2025-01-08T201511",
        ): f"""**Paul Sutchman**
1. [Introduction and Purpose of the Panel <div id='topic' style="display: inline"> 46s at 00:11 </div>](/collab/{cid}/{rsid}?st=11&et=57&uid={uid})
2. [Closing Remarks and Excitement for 2025 <div id='topic' style="display: inline"> 60s at 30:05 </div>](/collab/{cid}/{rsid}?st=1805&et=1865&uid={uid})

**Tomas**
1. [Introduction to Alembic Platform <div id='topic' style="display: inline"> 106s at 01:31 </div>](/collab/{cid}/{rsid}?st=91&et=197&uid={uid})
2. [Challenges in Marketing Measurement <div id='topic' style="display: inline"> 84s at 15:15 </div>](/collab/{cid}/{rsid}?st=915&et=999&uid={uid})
3. [Data Analysis and Customization <div id='topic' style="display: inline"> 112s at 23:16 </div>](/collab/{cid}/{rsid}?st=1396&et=1508&uid={uid})

**Jeffrey**
1. [Investment Perspective on Alembic <div id='topic' style="display: inline"> 130s at 03:37 </div>](/collab/{cid}/{rsid}?st=217&et=347&uid={uid})
2. [Delta's Strategic Importance <div id='topic' style="display: inline"> 69s at 04:57 </div>](/collab/{cid}/{rsid}?st=297&et=366&uid={uid})

**Alicia**
1. [Importance of Measurement in Marketing <div id='topic' style="display: inline"> 120s at 09:36 </div>](/collab/{cid}/{rsid}?st=576&et=696&uid={uid})
2. [Pilot with Alembic and Results <div id='topic' style="display: inline"> 120s at 12:10 </div>](/collab/{cid}/{rsid}?st=730&et=850&uid={uid})
3. [Collaboration and Building Together <div id='topic' style="display: inline"> 120s at 27:13 </div>](/collab/{cid}/{rsid}?st=1633&et=1740&uid={uid})""",
    }

    if (cid, rsid) in hardcoded_messages:
        # Stream the canned message in 10-character chunks, yielding the
        # growing prefix each time so the UI sees a live "typing" effect.
        # (Fix: removed a dead `link_start` assignment that was never used
        # on this branch, and stream directly instead of pre-building a
        # throwaway list of chunks.)
        hardcoded_message = hardcoded_messages[(cid, rsid)]
        collected_message = ""
        for start in range(0, len(hardcoded_message), 10):
            collected_message += hardcoded_message[start : start + 10]
            yield collected_message
            time.sleep(chunk_delay)
        return

    try:
        transcript = transcript_processor.get_transcript()
        speaker_mapping = transcript_processor.speaker_mapping
        client = OpenAI()
        # "localhost" origins are served over plain http; everything else https.
        link_start = "http" if "localhost" in origin else "https"

        if ct == "si":
            # Street interview: interview-specific prompt pair.
            user_prompt = get_street_interview_prompt(transcript, uid, rsid, link_start)
            system_prompt = get_street_interview_system_prompt(cid, rsid, origin, ct)
        else:
            # Live event (default): prompts built from the speaker mapping.
            system_prompt = get_live_event_system_prompt(
                cid, rsid, origin, ct, speaker_mapping, transcript
            )
            user_prompt = get_live_event_user_prompt(uid, link_start)

        # Fix: the original duplicated this identical call in both branches.
        completion = client.chat.completions.create(
            model="gpt-4o",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ],
            stream=True,
            temperature=0.1,
        )

        collected_messages = []
        for chunk in completion:
            if chunk.choices[0].delta.content is not None:
                collected_messages.append(chunk.choices[0].delta.content)
                yield "".join(collected_messages)

    except Exception as e:
        print(f"Error in initial analysis: {str(e)}")
        yield "An error occurred during initial analysis. Please check your API key and file path."
|
|
|
|
|
def chat(
    message: str,
    chat_history: List,
    transcript_processor: "TranscriptProcessor",
    cid,
    rsid,
    origin,
    ct,
    uid,
):
    """Stream a chat reply for *message*, handling OpenAI tool calls.

    Builds the message list (system prompt + prior turns + new message) and
    streams a completion. If the model starts a tool call, the request is
    re-issued without streaming to obtain the full call, which is then
    dispatched to get_image / correct_speaker_name_with_url /
    correct_call_type handlers.

    Args:
        message: The new user message.
        chat_history: Prior (user, assistant) turn tuples.
        transcript_processor: Transcript/speaker-mapping provider.
        cid, rsid, origin, ct, uid: Session identifiers, as in
            get_initial_analysis.

    Yields:
        Cumulative response text (or image-crop data for the get_image tool).
    """
    try:
        client = OpenAI()

        # "localhost" origins are served over plain http; everything else https.
        link_start = "http" if "localhost" in origin else "https"
        speaker_mapping = transcript_processor.speaker_mapping
        system_prompt = get_chat_system_prompt(
            cid=cid,
            rsid=rsid,
            origin=origin,
            ct=ct,
            speaker_mapping=speaker_mapping,
            transcript=transcript_processor.get_transcript(),
            link_start=link_start,
        )

        # Replay prior turns so the model has full conversation context.
        messages = [{"role": "system", "content": system_prompt}]
        for user_msg, assistant_msg in chat_history:
            if user_msg is not None:
                messages.append({"role": "user", "content": user_msg})
            if assistant_msg is not None:
                messages.append({"role": "assistant", "content": assistant_msg})
        messages.append({"role": "user", "content": message})

        completion = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            tools=tools,
            stream=True,
            temperature=0.3,
        )
        collected_messages = []
        tool_calls_detected = False

        for chunk in completion:
            if chunk.choices[0].delta.tool_calls:
                tool_calls_detected = True

                # Streaming delivers tool-call arguments piecemeal; re-issue
                # the request unstreamed to get the complete tool call.
                response = client.chat.completions.create(
                    model="gpt-4o",
                    messages=messages,
                    tools=tools,
                )

                if response.choices[0].message.tool_calls:
                    tool_call = response.choices[0].message.tool_calls[0]

                    if tool_call.function.name == "get_image":
                        image_data = get_image_crop(cid, rsid, uid, ct)
                        messages.append(response.choices[0].message)
                        function_call_result_message = {
                            "role": "tool",
                            "content": "Here are the Image Crops",
                            "name": tool_call.function.name,
                            "tool_call_id": tool_call.id,
                        }
                        messages.append(function_call_result_message)
                        yield image_data
                        return

                    if tool_call.function.name == "correct_speaker_name_with_url":
                        # Fix: tool-call arguments are model-produced JSON —
                        # parse with json.loads instead of eval()-ing
                        # attacker-influenceable text.
                        args = json.loads(tool_call.function.arguments)
                        url = args.get("url", None)
                        if url:
                            transcript_processor.correct_speaker_mapping_with_agenda(
                                url
                            )
                            corrected_speaker_mapping = (
                                transcript_processor.speaker_mapping
                            )
                            messages.append(response.choices[0].message)
                            function_call_result_message = {
                                "role": "tool",
                                "content": json.dumps(
                                    {
                                        "speaker_mapping": f"Corrected Speaker Mapping... {corrected_speaker_mapping}"
                                    }
                                ),
                                "name": tool_call.function.name,
                                "tool_call_id": tool_call.id,
                            }
                            messages.append(function_call_result_message)

                            # Ask the model to phrase a final reply that
                            # incorporates the corrected mapping.
                            final_response = client.chat.completions.create(
                                model="gpt-4o",
                                messages=messages,
                                stream=True,
                            )
                            collected_chunk = ""
                            for final_chunk in final_response:
                                if final_chunk.choices[0].delta.content:
                                    collected_chunk += final_chunk.choices[
                                        0
                                    ].delta.content
                                    yield collected_chunk
                            return
                        else:
                            # NOTE(review): this tool result is built but never
                            # appended/sent before the loop breaks; preserved
                            # as-is from the original.
                            function_call_result_message = {
                                "role": "tool",
                                "content": "No URL Provided",
                                "name": tool_call.function.name,
                                "tool_call_id": tool_call.id,
                            }

                    elif tool_call.function.name == "correct_call_type":
                        args = json.loads(tool_call.function.arguments)
                        call_type = args.get("call_type", None)
                        if call_type:
                            # Re-run the initial analysis with the corrected
                            # call type. Fix: the original passed call_type in
                            # the cid position as well; cid must stay cid.
                            for content in get_initial_analysis(
                                transcript_processor,
                                cid,
                                rsid,
                                origin,
                                call_type,
                                uid,
                            ):
                                yield content
                            return
                break

            if not tool_calls_detected and chunk.choices[0].delta.content is not None:
                collected_messages.append(chunk.choices[0].delta.content)
                yield "".join(collected_messages)

    except Exception as e:
        print(f"Unexpected error in chat: {str(e)}")
        import traceback

        print(f"Traceback: {traceback.format_exc()}")
        yield "Sorry, there was an error processing your request."
|
|
|
|
|
def create_chat_interface():
    """Create and configure the chat interface.

    Builds the Gradio Blocks app: the chatbot widget, per-session state, the
    message-submit handler, and the load-time chain that fetches the
    transcript and streams the initial analysis.
    """

    # NOTE(review): `css` is imported from utils at module level but not passed
    # to gr.Blocks here — confirm whether css=css was intended.
    with gr.Blocks(
        fill_height=True,
        fill_width=True,
        head=head,
        js=js,
        theme=gr.themes.Default(
            font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"]
        ).set(
            background_fill_secondary="#181818",
            background_fill_secondary_dark="#181818",
            block_background_fill="#181818",
        ),
    ) as demo:
        chatbot = gr.Chatbot(
            elem_id="chatbot_box",
            layout="bubble",
            show_label=False,
            show_share_button=False,
            show_copy_all_button=False,
            show_copy_button=False,
            render=True,
        )
        msg = gr.Textbox(elem_id="chatbot_textbox", show_label=False)
        # Per-session state threaded between the load and submit handlers.
        transcript_processor_state = gr.State()  # TranscriptProcessor instance
        call_id_state = gr.State()  # cid query param
        colab_id_state = gr.State()  # rsid query param
        origin_state = gr.State()  # origin query param
        ct_state = gr.State()  # call type query param
        turl_state = gr.State()  # transcript URL(s) query param
        uid_state = gr.State()  # uid query param
        iframe_html = "<iframe id='link-frame'></iframe>"
        gr.HTML(value=iframe_html)

        def respond(
            message: str,
            chat_history: List,
            transcript_processor,
            cid,
            rsid,
            origin,
            ct,
            uid,
        ):
            """Submit handler: append the user message and stream the reply
            from chat() into the last history slot."""
            if not transcript_processor:
                bot_message = "Transcript processor not initialized."
                chat_history.append((message, bot_message))
                # NOTE(review): respond is a generator (it yields below), so
                # this plain `return` value is discarded by Gradio and the
                # error message is never shown — confirm whether a yield was
                # intended here.
                return "", chat_history

            chat_history.append((message, ""))
            # Pass history minus the just-added placeholder turn to chat();
            # overwrite that placeholder with each streamed chunk.
            for chunk in chat(
                message,
                chat_history[:-1],
                transcript_processor,
                cid,
                rsid,
                origin,
                ct,
                uid,
            ):
                chat_history[-1] = (message, chunk)
                yield "", chat_history

        msg.submit(
            respond,
            [
                msg,
                chatbot,
                transcript_processor_state,
                call_id_state,
                colab_id_state,
                origin_state,
                ct_state,
                uid_state,
            ],
            [msg, chatbot],
        )

        def on_app_load(request: gr.Request):
            """Load handler: read query params, fetch transcript(s), build the
            TranscriptProcessor, and seed all session state.

            Returns a list matching demo.load's outputs: [chatbot value,
            transcript_processor, cid, rsid, origin, ct, turl, uid].
            """
            turls = None
            cid = request.query_params.get("cid", None)
            rsid = request.query_params.get("rsid", None)
            origin = request.query_params.get("origin", None)
            ct = request.query_params.get("ct", None)
            turl = request.query_params.get("turl", None)
            uid = request.query_params.get("uid", None)
            pnames = request.query_params.get("pnames", None)

            required_params = ["cid", "rsid", "origin", "ct", "turl", "uid"]
            missing_params = [
                param
                for param in required_params
                if request.query_params.get(param) is None
            ]

            if missing_params:
                error_message = (
                    f"Missing required parameters: {', '.join(missing_params)}"
                )
                chatbot_value = [(None, error_message)]
                return [chatbot_value, None, None, None, None, None, None, None]

            # "rp" call type carries comma-separated transcript URLs and
            # participant names (underscores standing in for spaces).
            # NOTE(review): if pnames is absent while ct == "rp", this raises
            # outside the try below — confirm intended.
            if ct == "rp":
                turls = turl.split(",")
                pnames = [pname.replace("_", " ") for pname in pnames.split(",")]

            try:
                if turls:
                    # Multiple transcripts: fetch each and build one processor.
                    transcript_data = []
                    for turl in turls:
                        print("Getting Transcript for URL")
                        transcript_data.append(get_transcript_for_url(turl))
                    print("Now creating Processor")
                    transcript_processor = TranscriptProcessor(
                        transcript_data=transcript_data,
                        call_type=ct,
                        person_names=pnames,
                    )
                else:
                    # Single transcript URL.
                    transcript_data = get_transcript_for_url(turl)
                    transcript_processor = TranscriptProcessor(
                        transcript_data=transcript_data, call_type=ct
                    )

                chatbot_value = [(None, "")]

                return [
                    chatbot_value,
                    transcript_processor,
                    cid,
                    rsid,
                    origin,
                    ct,
                    turl,
                    uid,
                ]
            except Exception as e:
                print(e)
                error_message = f"Error processing call_id {cid}: {str(e)}"
                chatbot_value = [(None, error_message)]
                return [chatbot_value, None, None, None, None, None, None, None]

        def display_processing_message(chatbot_value):
            """Display the processing message while maintaining state."""

            new_chatbot_value = [
                (None, "Video is being processed. Please wait for the results...")
            ]

            return new_chatbot_value

        def stream_initial_analysis(
            chatbot_value, transcript_processor, cid, rsid, origin, ct, uid
        ):
            """Stream get_initial_analysis() output into the first chatbot slot."""
            if not transcript_processor:
                # NOTE(review): plain return inside a generator — the chatbot
                # is left showing the "processing" message; confirm intended.
                return chatbot_value

            try:
                for chunk in get_initial_analysis(
                    transcript_processor, cid, rsid, origin, ct, uid
                ):
                    chatbot_value[0] = (None, chunk)
                    yield chatbot_value
            except Exception as e:
                chatbot_value[0] = (None, f"Error during analysis: {str(e)}")
                yield chatbot_value

        # On page load: seed state -> show "processing" -> stream the analysis.
        demo.load(
            on_app_load,
            inputs=None,
            outputs=[
                chatbot,
                transcript_processor_state,
                call_id_state,
                colab_id_state,
                origin_state,
                ct_state,
                turl_state,
                uid_state,
            ],
        ).then(
            display_processing_message,
            inputs=[chatbot],
            outputs=[chatbot],
        ).then(
            stream_initial_analysis,
            inputs=[
                chatbot,
                transcript_processor_state,
                call_id_state,
                colab_id_state,
                origin_state,
                ct_state,
                uid_state,
            ],
            outputs=[chatbot],
        )

    return demo
|
|
|
|
|
def main():
    """Entry point: configure the OpenAI key, build the UI, and launch it.

    Any startup failure is logged and re-raised so the process exits
    with a visible error.
    """
    try:
        setup_openai_key()
        create_chat_interface().launch(share=True)
    except Exception as e:
        print(f"Error starting application: {str(e)}")
        raise
|
|
|
|
|
# Launch the app only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
|