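# Personal "chat with me" agent: answers website questions as Ram Shah using a PDF
# LinkedIn profile plus a text summary, and records leads / unanswered questions via
# Pushover notifications triggered through OpenAI-style tool calls.
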
import os
import json
import requests
import dotenv
from openai import OpenAI
from pypdf import PdfReader
import gradio as gr

dotenv.load_dotenv(override=True)

openai_api_key = os.getenv("OPENAI_API_KEY")
pushover_user = os.getenv("PUSHOVER_USER")
pushover_token = os.getenv("PUSHOVER_TOKEN")

if pushover_user and pushover_token:
    print("Pushover user and token found")
else:
    print("Pushover user and token not found")
def send_pushover_notification(message):
    url = "https://api.pushover.net/1/messages.json"
    data = {
        "token": os.getenv("PUSHOVER_TOKEN"),
        "user": os.getenv("PUSHOVER_USER"),
        "message": message,
    }
    response = requests.post(url, data=data)
    if response.status_code == 200:
        print("Pushover notification sent successfully")
    else:
        print("Failed to send Pushover notification")
def get_pdf_text(pdf_path):
    reader = PdfReader(pdf_path)
    text = ""
    for page in reader.pages:
        # extract_text() can return None for pages with no extractable text
        text += page.extract_text() or ""
    return text
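
# Tool: log a visitor's contact details and forward them to Pushover.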
def record_user_details(email, name="Not provided", notes="Not provided"):
    print(f"User details recorded: Name: {name}, Email: {email}, Notes: {notes}")
    send_pushover_notification(f"Recording interest from: Name: {name}, Email: {email}, Notes: {notes}")
    return {"recorded": "ok"}
def record_unknown_question(question):
    print(f"Unknown question recorded: {question}")
    send_pushover_notification(f"Unknown question recorded: {question}")
    return {"recorded": "ok"}
record_user_details_json = {
    "name": "record_user_details",
    "description": "Record a user's contact details when they express interest in getting in touch",
    "parameters": {
        "type": "object",
        "properties": {
            "email": {"type": "string", "description": "The email address of the user"},
            "name": {"type": "string", "description": "The name of the user, if they provided it"},
            "notes": {"type": "string", "description": "Any additional information about the conversation that is worth recording for context"}
        },
        "required": ["email"],
        "additionalProperties": False
    }
}
record_unknown_question_json = {
    "name": "record_unknown_question",
    "description": "Record any question that could not be answered",
    "parameters": {
        "type": "object",
        "properties": {
            "question": {"type": "string", "description": "The question that the user asked"}
        },
        "required": ["question"],
        "additionalProperties": False
    }
}
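
# Tool list passed to every chat.completions.create call.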
tools = [
    {"type": "function", "function": record_user_details_json},
    {"type": "function", "function": record_unknown_question_json},
]
class Me:
    def __init__(self):
        self.name = "Ram Shah"
        self.linked_profile = get_pdf_text("RamShah_Profile.pdf")
        with open("Ram_summary.txt", "r", encoding="utf-8") as file:
            self.summary = file.read()
        # OpenAI-compatible client pointed at Gemini's endpoint, so OPENAI_API_KEY is
        # expected to hold a Gemini API key here
        self.client = OpenAI(api_key=openai_api_key, base_url="https://generativelanguage.googleapis.com/v1beta/openai/")
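
    # Execute each requested tool by looking its name up in globals() and
    # return the results as "tool"-role messages.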
    def handle_tool_calls(self, tool_calls):
        results = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            print(f"Tool called: {tool_name}", flush=True)
            tool = globals().get(tool_name)
            arguments = json.loads(tool_call.function.arguments)
            result = tool(**arguments) if tool else {}
            results.append({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})
        return results
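
    # Build the system prompt: persona instructions plus the summary and LinkedIn profile text.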
    def system_prompt(self):
        system_prompt = (
            "You are acting as Ram Shah. You are answering questions on Ram Shah's website, "
            "particularly questions related to Ram Shah's career, background, skills and experience. "
            "Your responsibility is to represent Ram Shah for interactions on the website as faithfully as possible. "
            "You are given a summary of Ram Shah's background and LinkedIn profile which you can use to answer questions. "
            "Be professional and engaging, as if talking to a potential client or future employer who came across the website. "
            "If you don't know the answer to any question, use your record_unknown_question tool to record the question that you couldn't answer, even if it's about something trivial or unrelated to career. "
            "If the user is engaging in discussion, try to steer them towards getting in touch via email; ask for their email and record it using your record_user_details tool. "
        )
        system_prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linked_profile}\n\n"
        system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return system_prompt
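
    # Gradio chat handler: keeps calling the model until it stops requesting tool calls.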
    def chat_with_me(self, message, history):
        messages = [{"role": "system", "content": self.system_prompt()}] + history + [{"role": "user", "content": message}]
        done = False
        while not done:
            # this is the call to the LLM - passing the tool JSON schemas
            response = self.client.chat.completions.create(model="gemini-1.5-flash", messages=messages, tools=tools)
            finish_reason = response.choices[0].finish_reason
            if finish_reason == "tool_calls":
                message = response.choices[0].message
                tool_calls = message.tool_calls
                results = self.handle_tool_calls(tool_calls)
                messages.append(message)
                messages.extend(results)
            else:
                done = True
        return response.choices[0].message.content
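
# Launch the Gradio chat UI (share=True creates a public link).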
if __name__ == "__main__":
    me = Me()
    gr.ChatInterface(me.chat_with_me, type="messages").launch(share=True)