DaoAdvocate committed
Commit 65932a8 · 1 Parent(s): 3c4778e
Files changed (1)
  1. app.py +44 -3
app.py CHANGED
@@ -1,7 +1,48 @@
 import gradio as gr
+import os
+import getpass
 
-def greet(name):
-    return "Hello " + name + "!!"
+from langchain.embeddings.openai import OpenAIEmbeddings
+from langchain.chat_models import ChatOpenAI
+from langchain.chains import ConversationalRetrievalChain
+from langchain.vectorstores import DeepLake
 
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
+from dotenv import load_dotenv
+
+load_dotenv()
+
+os.environ.get("ACTIVELOOP_TOKEN")
+username = "rihp" # replace with your username from app.activeloop.ai
+projectname = "polywrap5" # replace with your project name from app.activeloop.ai
+
+embeddings = OpenAIEmbeddings(disallowed_special=())
+
+db = DeepLake(dataset_path=f"hub://{username}/{projectname}", read_only=True, embedding_function=embeddings)
+
+retriever = db.as_retriever()
+retriever.search_kwargs['distance_metric'] = 'cos'
+retriever.search_kwargs['fetch_k'] = 100
+retriever.search_kwargs['maximal_marginal_relevance'] = True
+retriever.search_kwargs['k'] = 10
+
+
+model = ChatOpenAI(model_name='gpt-3.5-turbo') # switch to 'gpt-4'
+qa = ConversationalRetrievalChain.from_llm(model, retriever=retriever)
+
+
+def model(prompt):
+    questions = [
+        prompt
+    ]
+    chat_history = []
+
+    for question in questions:
+        result = qa({"question": question, "chat_history": chat_history})
+        chat_history.append((question, result['answer']))
+        print(f"-> **Question**: {question} \n")
+        print(f"**Answer**: {result['answer']} \n")
+    return result['answer']
+
+
+iface = gr.Interface(fn=model, inputs="text", outputs="text")
 iface.launch()
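
Two small points about the new app.py are worth flagging. The bare os.environ.get("ACTIVELOOP_TOKEN") call only reads the variable and discards the result; Deep Lake picks the token up from the environment, which load_dotenv() has already populated. More importantly, chat_history is re-created inside model() on every request, and the function name shadows the ChatOpenAI instance bound two lines earlier (harmless only because qa already holds a reference to it), so each Gradio call is answered with no memory of earlier turns. The sketch below is one way to carry history across calls; it assumes the qa chain constructed in app.py above, and the history list and chat function name are illustrative, not part of this commit.

import gradio as gr

history = []  # accumulated (question, answer) pairs, shared across requests

def chat(prompt):
    # Hand the accumulated history to the chain so follow-up questions keep their context.
    result = qa({"question": prompt, "chat_history": history})
    history.append((prompt, result["answer"]))
    return result["answer"]

iface = gr.Interface(fn=chat, inputs="text", outputs="text")
iface.launch()

A module-level list keeps a single history shared by every visitor to the Space, which is fine for a one-person demo; per-session memory would need Gradio's session state (for example gr.State) instead.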