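# Personal assistant chatbot: a Gradio chat UI backed by gpt-3.5-turbo, with
# long-term memory implemented by storing past (user, assistant) exchanges in a
# Pinecone index and retrieving the most similar ones for each new prompt.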
import json

import gradio as gr
import openai
import pinecone
from sentence_transformers import SentenceTransformer

# Pinecone index used as long-term memory for past exchanges.
pinecone.init(api_key="60422c7f-218d-4343-8158-5b4df9559628", environment="us-east1-gcp")
index = pinecone.Index("chatgpt-memory")

openai.api_key = "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"

# Sentence embedding model used to encode messages for Pinecone queries and upserts.
embmodel = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')


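# get_response answers one user message with retrieval-augmented memory:
# (1) embed the message, (2) query Pinecone for similar past exchanges,
# (3) prepend the decoded exchanges to the prompt, and (4) upsert the new
# (user, assistant) pair back into the index after the model replies.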
def get_response(user_message, conversation):
    # Start with the system prompt; retrieved memories are appended next.
    messages = [{"role": "system", "content": "You are the assistant of 22 year old Henry Leonardi. You have access to memories from previous conversations."}]

    # Embed the new user message and look up the most similar stored exchanges.
    embed = embmodel.encode([user_message]).tolist()[0]
    matches = index.query(vector=embed, top_k=5, include_metadata=True)["matches"]
    try:
        # Each stored vector ID is a JSON-encoded [user, assistant] message pair;
        # decode the top 3 and add them to the prompt as memories.
        best = [m["id"] for m in matches][:3]
        for b in best:
            messages.extend(json.loads(b))
    except Exception:
        # If a stored ID cannot be decoded, continue with the system prompt alone.
        pass

    # Append the new user turn and send the most recent turns to the model.
    conversation.append({"role": "user", "content": user_message})
    messages.extend(conversation[-6:])
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo", messages=messages
    )["choices"][0]["message"]["content"]
    conversation.append({"role": "assistant", "content": response})

    # Persist the (truncated) exchange as a new memory unless the user said "bye".
    # The JSON string doubles as the vector ID and as the text that gets embedded.
    if user_message != "bye":
        pair = [
            {"role": "user", "content": user_message[:200]},
            {"role": "assistant", "content": response[:200]},
        ]
        pair_str = json.dumps(pair)
        index.upsert([(pair_str, embmodel.encode([pair_str]).tolist()[0])])
    return response

def conv(user_input, history=[]):
    # Rebuild the message list from Gradio's (user, assistant) history tuples,
    # get a reply, and return the updated history for the Chatbot and the State.
    conversation = []
    for user_turn, assistant_turn in history:
        conversation.append({"role": "user", "content": user_turn})
        conversation.append({"role": "assistant", "content": assistant_turn})
    output = get_response(user_input, conversation)
    history.append((user_input, output))
    return history, history

# Minimal Gradio chat UI: a Chatbot display, a prompt box, and a SEND button
# that routes each message through conv() while keeping history in gr.State.
block = gr.Blocks()

with block:
    gr.Markdown("""<h1><center>Your Personal Assistant</center></h1>""")
    chatbot = gr.Chatbot()
    message = gr.Textbox(placeholder="prompt")
    state = gr.State([])
    submit = gr.Button("SEND")
    submit.click(conv, inputs=[message, state], outputs=[chatbot, state])

block.launch(inline=True)