Warblade committed
Commit 4ddfb92
Parent: 3927093

Revert "resolve dependency issues"


This reverts commit 392709392804c11cb7c318ed25400c861a71caa1.

Files changed (7)
  1. .env +0 -3
  2. .gitattributes +1 -1
  3. .gitignore +0 -2
  4. __pycache__/utils.cpython-310.pyc +0 -0
  5. app.py +0 -81
  6. requirements.txt +0 -0
  7. utils.py +0 -10
.env DELETED
@@ -1,3 +0,0 @@
- OPENAI_API_KEY = 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
- QDRANT_URL = https://63f1dc5d-951d-4539-be73-94d4f725452f.us-east4-0.gcp.cloud.qdrant.io:6333
- QDRANT_API_KEY = x_KgU_SErG-GhKnhGaoi94S4Lrna83s-rwFdcqOyUJbHGVAtgmPflQ
 
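For reference, the reverted app.py below does not hard-code these values; it reads them from the environment with python-dotenv. A minimal sketch of that loading pattern, assuming python-dotenv is installed and a .env file with the variable names shown above sits next to the script:

    import os
    import dotenv

    # Read KEY=value pairs from .env into the process environment.
    dotenv.load_dotenv()

    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")  # picked up implicitly by LangChain's OpenAI wrappers
    QDRANT_URL = os.getenv("QDRANT_URL")
    QDRANT_API_KEY = os.getenv("QDRANT_API_KEY")

    if not QDRANT_URL or not QDRANT_API_KEY:
        raise ValueError("QDRANT_URL and QDRANT_API_KEY must be set in the environment")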
.gitattributes CHANGED
@@ -32,4 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.xz filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore DELETED
@@ -1,2 +0,0 @@
- .ipynb
- virt/
 
__pycache__/utils.cpython-310.pyc DELETED
Binary file (1.23 kB)
 
app.py DELETED
@@ -1,81 +0,0 @@
- import gradio as gr
- from langchain.vectorstores import Qdrant
- import qdrant_client
- from langchain.schema.runnable import RunnablePassthrough
- from langchain.schema.output_parser import StrOutputParser
- from langchain.prompts import ChatPromptTemplate
- from langchain.chat_models import ChatOpenAI
- from langchain.embeddings import OpenAIEmbeddings
- import dotenv
- import os
- from utils import template
- import time
-
- # Load environment variables and validate
- dotenv.load_dotenv()
- QDRANT_URL = os.getenv("QDRANT_URL")
- QDRANT_API_KEY = os.getenv("QDRANT_API_KEY")
- if not QDRANT_URL or not QDRANT_API_KEY:
-     raise ValueError("QDRANT_URL and QDRANT_API_KEY must be set in the environment")
-
- # Initialize the vector store
- def initiliaze_vector_store():
-     """
-     Initialize and return the vector store.
-     Only run this on launch.
-     """
-     embeddings = OpenAIEmbeddings()
-     client = qdrant_client.QdrantClient(url=QDRANT_URL, api_key=QDRANT_API_KEY)
-     vectorstore = Qdrant(client=client, collection_name="doc_datategy", embeddings=embeddings)
-     return vectorstore
-
- # Initialize the retriever
- def initiliaze_retriever(vectorstore):
-     """
-     Initialize and return the retriever using the given vectorstore.
-     """
-     return vectorstore.as_retriever()
-
- # Initialize the chatbot
- def initiliaze_chatbot(template, model_name="gpt-3.5-turbo-1106", temperature=0):
-     """
-     Initialize and return the chatbot components: prompt and language model.
-     """
-     prompt = ChatPromptTemplate.from_template(template)
-     llm = ChatOpenAI(model_name=model_name, temperature=temperature)
-     return prompt, llm
-
- # Initialize the RAG chain
- def initiliaze_RAG(retriever, prompt, llm):
-     """
-     Initialize and return the RAG chain.
-     """
-     context_function = {"context": retriever, "question": RunnablePassthrough()}
-     rag_chain = context_function | prompt | llm | StrOutputParser()
-     return rag_chain
-
- # Launch Gradio app
- vectorstore = initiliaze_vector_store()
- retriever = initiliaze_retriever(vectorstore)
-
- with gr.Blocks() as demo:
-     chatbot = gr.Chatbot(label="PapAI custom chatbot")
-     msg = gr.Textbox(label="Prompt", value='PapAI?', interactive=True)
-     clear = gr.Button("Clear")
-     template_user = gr.Textbox(label="Template", value=template, interactive=True)
-
-     def change_template(template_user_str):
-         prompt, llm = initiliaze_chatbot(template_user_str)
-         return initiliaze_RAG(retriever, prompt, llm)
-
-     def RAG_answer(query, chat_history, template_user_str):
-         rag_chain = change_template(template_user_str)
-         answer = rag_chain.invoke(query)
-         chat_history.append((query, answer))
-         time.sleep(1.3)  # Consider optimizing or dynamic handling
-         return "", chat_history
-
-     msg.submit(RAG_answer, [msg, chatbot, template_user], [msg, chatbot])
-
- demo.queue()
- demo.launch(share=False, debug=True)
 
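For context on what the revert removes: the deleted app.py wires a standard LCEL pipeline (retriever plus RunnablePassthrough feeding a prompt, chat model, and string parser). A minimal sketch of invoking that same chain outside the Gradio UI, assuming the same environment variables, Qdrant collection, and utils.template as above; the question string is hypothetical:

    import os
    import dotenv
    import qdrant_client
    from langchain.chat_models import ChatOpenAI
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.prompts import ChatPromptTemplate
    from langchain.schema.output_parser import StrOutputParser
    from langchain.schema.runnable import RunnablePassthrough
    from langchain.vectorstores import Qdrant
    from utils import template  # the prompt string deleted below in utils.py

    dotenv.load_dotenv()
    client = qdrant_client.QdrantClient(url=os.getenv("QDRANT_URL"), api_key=os.getenv("QDRANT_API_KEY"))
    retriever = Qdrant(client=client, collection_name="doc_datategy",
                       embeddings=OpenAIEmbeddings()).as_retriever()

    # Same composition as initiliaze_RAG in the deleted app.py.
    chain = (
        {"context": retriever, "question": RunnablePassthrough()}
        | ChatPromptTemplate.from_template(template)
        | ChatOpenAI(model_name="gpt-3.5-turbo-1106", temperature=0)
        | StrOutputParser()
    )
    print(chain.invoke("What is PapAI?"))  # hypothetical question; retrieved docs fill {context}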
requirements.txt DELETED
Binary file (4.43 kB)
 
utils.py DELETED
@@ -1,10 +0,0 @@
- template = """You are an assistant specialized in answering (specify type, e.g., data analysis/statistics/machine learning) questions for Data Science projects. Follow these steps to answer:
- 1. Verify if the provided context is relevant to the question. If not, request more specific information or additional context.
- 2. Use the following pieces of retrieved context to answer the question. Incorporate data-driven insights or references when possible.
- 3. If the answer requires external data science tools or datasets, mention this in your response.
- 4. If you don't know the answer or if the information is insufficient to formulate an answer, clearly state that.
- 5. Keep your answer concise, using a maximum of three sentences.
- Question: {question}
- Context: {context}
- Answer:
- """