wholewhale committed
Commit d8804c0
1 Parent(s): e8c47c5
Files changed (1)
  1. app.py +45 -24
app.py CHANGED
@@ -9,28 +9,53 @@ from langchain.embeddings import OpenAIEmbeddings
 from langchain.vectorstores import Chroma
 from langchain.chains import ConversationalRetrievalChain

+
+os.environ['ANTHROPIC_API_KEY'] = os.getenv("Your_Anthropic_API_Key")
 os.environ['OPENAI_API_KEY'] = os.getenv("Your_API_Key")

 # Global variable for tracking last interaction time
 last_interaction_time = 0

+# Initialize the Anthropic model instead of OpenAI
+from anthropic import LanguageModel
+
+anthropic_model = LanguageModel(api_key=os.environ['ANTHROPIC_API_KEY'], model="some_model")
+
+
+
 def loading_pdf():
     return "Working on the upload. Also, pondering the usefulness of sporks..."

 def pdf_changes(pdf_doc):
-    loader = OnlinePDFLoader(pdf_doc.name)
-    documents = loader.load()
-    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
-    texts = text_splitter.split_documents(documents)
-    embeddings = OpenAIEmbeddings()
-    db = Chroma.from_documents(texts, embeddings)
-    retriever = db.as_retriever()
-    global qa
-    qa = ConversationalRetrievalChain.from_llm(
-        llm=OpenAI(temperature=0.2, model_name="gpt-3.5-turbo"),
-        retriever=retriever,
-        return_source_documents=False)
-    return "Ready"
+    try:
+        if pdf_doc is None:
+            return "No PDF uploaded."
+
+        loader = OnlinePDFLoader(pdf_doc.name)
+        documents = loader.load()
+
+        text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
+        texts = text_splitter.split_documents(documents)
+
+        # Replace this with your appropriate embeddings class
+        embeddings = OpenAIEmbeddings()
+
+        global db
+        db = Chroma.from_documents(texts, embeddings)
+        retriever = db.as_retriever()
+
+        global qa
+        qa = ConversationalRetrievalChain.from_llm(
+            llm=OpenAI(temperature=0.2, model_name="gpt-3.5-turbo"),
+            retriever=retriever,
+            return_source_documents=False
+        )
+
+        return "Ready"
+
+    except Exception as e:
+        return f"Error processing PDF: {e}"
+

 def clear_data():
     global qa
@@ -44,21 +69,17 @@ def add_text(history, text):
     return history, ""

 def bot(history):
-    response = infer(history[-1][0], history)
-    formatted_response = "**Bot:** \n" + ' \n'.join(response.split('. '))
+    response = infer_anthropic(history[-1][0], history)  # Call the new infer function
+    sentences = ' \n'.join(response.split('. '))
+    formatted_response = f"**Bot:**\n\n{sentences}"
     history[-1][1] = formatted_response
     return history

-def infer(question, history):
-    res = []
-    for human, ai in history[:-1]:
-        pair = (human, ai)
-        res.append(pair)
-
-    chat_history = res
+def infer_anthropic(question, history):
+    chat_history = [(human, ai) for human, ai in history[:-1]]
     query = question
-    result = qa({"question": query, "chat_history": chat_history, "system:":"This is a world-class summarizing AI, be helpful."})
-    return result["answer"]
+    result = anthropic_model.query(query, context=chat_history)
+    return result['answer']

 def auto_clear_data():
     global qa, last_interaction_time
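Note on the Anthropic swap: the added import "from anthropic import LanguageModel" and the call anthropic_model.query(...) do not appear to match the published anthropic Python SDK, so the new infer_anthropic path would fail at import time as committed. A minimal sketch of the same LLM swap using LangChain's ChatAnthropic wrapper follows; the model name "claude-2", the helper names build_qa_chain and infer, and keeping the existing retriever from pdf_changes are assumptions, not part of this commit.

# Sketch only (not part of this commit): the same LLM swap done through
# LangChain's ChatAnthropic wrapper, which plugs into ConversationalRetrievalChain.
# Assumptions: model "claude-2", ANTHROPIC_API_KEY already set in the environment,
# and the retriever built in pdf_changes() is passed in unchanged.
from langchain.chat_models import ChatAnthropic
from langchain.chains import ConversationalRetrievalChain

def build_qa_chain(retriever):
    # ChatAnthropic reads ANTHROPIC_API_KEY from the environment by default.
    llm = ChatAnthropic(model="claude-2", temperature=0.2)
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        return_source_documents=False,
    )

def infer(qa, question, history):
    # Same chat-history shape as infer_anthropic in app.py: list of (human, ai) pairs.
    chat_history = [(human, ai) for human, ai in history[:-1]]
    result = qa({"question": question, "chat_history": chat_history})
    return result["answer"]

In app.py this would correspond to replacing the ConversationalRetrievalChain.from_llm(...) call inside pdf_changes with build_qa_chain(retriever), and having bot() call this infer() with the global qa chain instead of infer_anthropic().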