File size: 5,797 Bytes
b18bbba
a31a270
 
 
9499cff
a31a270
 
 
 
 
034c5da
140cb01
b18bbba
 
a31a270
034c5da
 
 
a31a270
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13d258a
 
 
a31a270
13d258a
a31a270
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b18bbba
 
 
 
a31a270
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9499cff
 
 
a31a270
7b52735
a31a270
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
import dotenv
from openai import OpenAI
import os
from pypdf import PdfReader
import gradio as gr
import json
import requests

# Load .env values into the environment, overriding any already-set variables.
dotenv.load_dotenv(override=True)

# Credentials are read once at import time. os.getenv returns None when a
# variable is unset, so a missing Pushover token/user surfaces later as a
# failed (non-200) API response rather than an exception here.
openai_api_key = os.getenv("OPENAI_API_KEY")
pushover_user = os.getenv("PUSHOVER_USER")
pushover_token = os.getenv("PUSHOVER_TOKEN")

def send_pushover_notification(message):
    """Send *message* through the Pushover API and print the outcome.

    Reads PUSHOVER_TOKEN / PUSHOVER_USER from the environment at call time.
    Best-effort: a non-200 response is reported on stdout, not raised.
    """
    url = "https://api.pushover.net/1/messages.json"
    data = {
        "token": os.getenv("PUSHOVER_TOKEN"),
        "user": os.getenv("PUSHOVER_USER"),
        "message": message,
    }
    # requests has no default timeout — without one a stalled connection
    # would hang the chat loop indefinitely.
    response = requests.post(url, data=data, timeout=10)
    if response.status_code == 200:
        print("Pushover notification sent successfully")
    else:
        print("Failed to send Pushover notification")

def get_pdf_text(pdf_path):
    """Return the concatenated text of every page of the PDF at *pdf_path*.

    pypdf's ``extract_text()`` can return None for pages with no extractable
    text; those are treated as empty strings instead of raising TypeError.
    Uses ``str.join`` rather than repeated ``+=`` to avoid quadratic copying.
    """
    reader = PdfReader(pdf_path)
    return "".join(page.extract_text() or "" for page in reader.pages)

def record_user_details(email, name="Not provided", notes="Not provided" ):
    """Record a prospective contact's details and push a notification.

    Returns a small status dict so the tool result can be serialized back
    to the model.
    """
    details = f"Name: {name}, Email: {email}, Notes: {notes}"
    print(f"User details recorded: {details}")
    send_pushover_notification(f"Recording interest from : {details}")
    return {"recorded": "ok"}

def record_unknown_question(question):
    """Record a question the chatbot could not answer and push a notification."""
    note = f"Unknown question recorded: {question}"
    print(note)
    send_pushover_notification(note)
    return {"recorded": "ok"}

# OpenAI function-calling schema for record_user_details. Only "email" is
# required; name/notes fall back to the function's defaults.
record_user_details_json = {
    "name": "record_user_details",
    "description": "Record user details",
    "parameters": {
        "type": "object",
        "properties": {
            "email": {"type": "string", "description": "The email of the user"},
            "name": {"type": "string", "description": "The name of the user, if they provided it"},
            # Description reworded: the original "that worth's recording to
            # given context" was garbled and this text is read by the model.
            "notes": {"type": "string", "description": "Any additional information from the conversation that is worth recording for context"},
        },
        "required": ["email"],
        "additionalProperties": False,
    },
}

# OpenAI function-calling schema for record_unknown_question. Declares
# "required" and "additionalProperties" (both missing before) so the schema
# is validated the same way as the record_user_details tool.
record_unknown_question_json = {
    "name": "record_unknown_question",
    "description": "Record unknown question",
    "parameters": {
        "type": "object",
        "properties": {
            "question": {"type": "string", "description": "The question that the user asked"},
        },
        "required": ["question"],
        "additionalProperties": False,
    },
}

# Tool list passed to chat.completions.create, one entry per callable schema.
tools = [
    {"type": "function", "function": record_user_details_json},
    {"type": "function", "function": record_unknown_question_json},
]

class Me:
    """Chatbot persona that answers questions as Ram Shah on his website.

    Context (a LinkedIn-profile PDF and a text summary) is loaded once at
    construction time; chat turns go to Gemini through its OpenAI-compatible
    endpoint, looping while the model requests tool calls.
    """

    def __init__(self):
        # NOTE(review): this client is never used below, and constructing it
        # requires OPENAI_API_KEY in the environment — candidate for removal.
        self.openai = OpenAI()
        self.name = "Ram Shah"
        self.linked_profile = get_pdf_text("RamShah_Profile.pdf")
        with open("Ram_summary.txt", "r", encoding="utf-8") as file:
            self.summary = file.read()
        # Gemini exposed through its OpenAI-compatible API surface.
        self.client = OpenAI(api_key=openai_api_key, base_url="https://generativelanguage.googleapis.com/v1beta/openai/")

    def handle_tool_calls(self, tool_calls):
        """Execute each requested tool and return the tool-result messages.

        Tools are resolved by name from module globals; an unknown tool name
        produces an empty result dict instead of raising.
        """
        results = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            print(f"Tool called: {tool_name}", flush=True)

            tool = globals().get(tool_name)
            arguments = json.loads(tool_call.function.arguments)

            result = tool(**arguments) if tool else {}
            results.append({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})
        return results

    def system_prompt(self):
        """Build the system prompt, embedding the summary and LinkedIn text."""
        system_prompt = f"You are acting as Ram Shah. You are answering questions on Ram Shah's website, \
            particularly questions related to Ram Shah's career, background, skills and experience. \
            Your responsibility is to represent Ram Shah for interactions on the website as faithfully as possible. \
            You are given a summary of Ram Shah's background and LinkedIn profile which you can use to answer questions. \
            Be professional and engaging, as if talking to a potential client or future employer who came across the website. \
            If you don't know the answer to any question, use your record_unknown_question tool to record the question that you couldn't answer, even if it's about something trivial or unrelated to career. \
            If the user is engaging in discussion, try to steer them towards getting in touch via email; ask for their email and record it using your record_user_details tool. "

        system_prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linked_profile}\n\n"
        system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return system_prompt

    def chat_with_me(self, message, history):
        """Run one chat turn; loops while the model keeps requesting tools.

        *history* is the Gradio "messages"-format history (list of role/content
        dicts). Returns the model's final text reply.
        """
        messages = [{"role" : "system", "content" : self.system_prompt()}] + history + [{"role" : "user","content" : message}]
        done = False
        while not done:
            # Call the LLM, advertising the tool schemas.
            response = self.client.chat.completions.create(model="gemini-1.5-flash", messages=messages, tools=tools)

            finish_reason = response.choices[0].finish_reason

            if finish_reason == "tool_calls":
                # Use a distinct name: the original rebinding shadowed the
                # *message* parameter.
                assistant_message = response.choices[0].message
                results = self.handle_tool_calls(assistant_message.tool_calls)
                messages.append(assistant_message)
                messages.extend(results)
            else:
                done = True
        return response.choices[0].message.content


if __name__ == "__main__":
    # Launch the Gradio chat UI with a public share link.
    persona = Me()
    chat_ui = gr.ChatInterface(persona.chat_with_me, type="messages")
    chat_ui.launch(share=True)