devmit committed
Commit bd9c957
1 Parent(s): ab2bd9d
Files changed (2)
  1. app.py +41 -2
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,7 +1,46 @@
 import gradio as gr
+import os
+import pinecone
+from dotenv import load_dotenv
+from langchain.chains import RetrievalQA
+from langchain.vectorstores import Pinecone
+from langchain.chat_models import ChatOpenAI
+from langchain.embeddings import HuggingFaceInferenceAPIEmbeddings
 
-def greet(name):
-    return "Hello " + name + "!!"
+
+
+load_dotenv()
+
+
+# API keys and model settings, read from the environment
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
+PINECONE_ENV = os.getenv("PINECONE_ENV")
+PINECONE_INDEX = os.getenv("PINECONE_INDEX")
+TEXT_EMBEDDING_MODEL = os.getenv("TEXT_EMBEDDING_MODEL")
+HF_MODEL = os.getenv("HF_MODEL")
+HF_API = os.getenv("HF_API")
+
+
+def model(query):
+    pinecone.init(
+        api_key=PINECONE_API_KEY,  # find at app.pinecone.io
+        environment=PINECONE_ENV,  # next to API key in console
+    )
+
+    embeddings = HuggingFaceInferenceAPIEmbeddings(api_key=HF_API, model_name=HF_MODEL)  # query embeddings via the HF Inference API
+    vectorstore = Pinecone.from_existing_index(PINECONE_INDEX, embeddings)  # reuse the pre-built Pinecone index
+
+    docs = vectorstore.similarity_search(query, k=5)  # top-5 matches (the chain below uses its own retriever)
+
+    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.76, max_tokens=100, model_kwargs={"seed": 235, "top_p": 0.01})
+
+    chain = RetrievalQA.from_chain_type(llm, chain_type="stuff", retriever=vectorstore.as_retriever())
+    answer = chain.run({"query": query + " You are a therapist who helps people with personal development and self-improvement. You can only make conversations related to the provided context. If a response cannot be formed strictly using the context, politely say you don't have knowledge about that topic. [strictly within 75 words]"})
+    return answer
+
+def greet(query):
+    return model(query)
 
 iface = gr.Interface(fn=greet, inputs="text", outputs="text")
 iface.launch()
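
The new model() path only works when every variable it reads is actually present at startup. As a minimal sketch (not part of this commit; the script name and structure are illustrative), the same python-dotenv mechanism can be used to pre-flight the variables app.py expects:

# check_env.py - hypothetical pre-flight script, not part of this commit.
# It only verifies that the variables app.py reads are present in .env.
import os

from dotenv import load_dotenv

REQUIRED_VARS = [
    "OPENAI_API_KEY",
    "PINECONE_API_KEY",
    "PINECONE_ENV",
    "PINECONE_INDEX",
    "TEXT_EMBEDDING_MODEL",
    "HF_MODEL",
    "HF_API",
]

load_dotenv()  # same loading mechanism app.py uses
missing = [name for name in REQUIRED_VARS if not os.getenv(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")
print("All required environment variables are set.")

Running this locally before pushing to the Space surfaces a missing key earlier than a failed chain call would.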
requirements.txt CHANGED
@@ -4,4 +4,5 @@ pdfminer.six
 tiktoken
 pinecone-client
 python-dotenv
-gradio
+gradio
+setuptools
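
As a quick sanity check on the updated dependency list, a small script can confirm that each package in requirements.txt (including the newly added setuptools) is importable in the runtime. This is a hypothetical helper, not part of the commit; the mapping simply reflects that pinecone-client and python-dotenv install under different import names:

# verify_requirements.py - hypothetical helper, not part of this commit.
# Confirms the packages listed in requirements.txt are importable.
from importlib import import_module

# requirements.txt name -> import name (where they differ)
PACKAGES = {
    "tiktoken": "tiktoken",
    "pinecone-client": "pinecone",
    "python-dotenv": "dotenv",
    "gradio": "gradio",
    "setuptools": "setuptools",
}

for requirement, module in PACKAGES.items():
    try:
        import_module(module)
        print(f"{requirement}: OK")
    except ImportError as exc:
        print(f"{requirement}: MISSING ({exc})")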