Stanlito committed
Commit
8776ad9
1 Parent(s): 6f486ad

Delete streamlit_app.py

Files changed (1)
  1. streamlit_app.py +0 -99
streamlit_app.py DELETED
@@ -1,99 +0,0 @@
- import os
- import pathlib
- import re
- import streamlit as st
- from streamlit_chat import message
- from langchain.docstore.document import Document
- from langchain.document_loaders import TextLoader
- from langchain.text_splitter import CharacterTextSplitter
- from langchain.embeddings.openai import OpenAIEmbeddings
- from langchain.vectorstores import FAISS
-
- st.set_page_config(
-     page_title="LLM Chatbot"
- )
- st.header(" LLM Chatbot on Custom data")
- st.sidebar.header("Instructions")
- st.sidebar.info(
-     '''This is a web application that allows you to interact with
-     your custom data
-     '''
- )
- st.sidebar.info('''Enter a query in the text box and press enter
-     to receive a response''')
-
- st.sidebar.info('''
-     This project works perfectly even on your own data
- ''')
-
- os.environ["OPENAI_API_KEY"] = "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"
-
- from langchain.prompts.chat import (
-     ChatPromptTemplate,
-     SystemMessagePromptTemplate,
-     HumanMessagePromptTemplate,
- )
- from langchain.chat_models import ChatOpenAI
- from langchain.chains import RetrievalQAWithSourcesChain
-
- # Initialize Streamlit
- st.title("Stanlito AI Chatbot")
-
- # Set the data store directory
- DATA_STORE_DIR = "data_store"
-
- # Upload the files `$DATA_STORE_DIR/index.faiss` and `$DATA_STORE_DIR/index.pkl` to local
- if os.path.exists(DATA_STORE_DIR):
-     vector_store = FAISS.load_local(
-         DATA_STORE_DIR,
-         OpenAIEmbeddings()
-     )
- else:
-     st.write(f"Missing files. Upload index.faiss and index.pkl files to {DATA_STORE_DIR} directory first")
-
- # Define system template
- system_template = """Use the following pieces of context to answer the user's question.
- Take note of the sources and include them in the answer in the format: "SOURCES: source1", use "SOURCES" in capital letters regardless of the number of sources.
- If you don't know the answer, just say "I don't know", don't try to make up an answer.
- ----------------
- {summaries}"""
-
- # Create the prompt
- messages = [
-     SystemMessagePromptTemplate.from_template(system_template),
-     HumanMessagePromptTemplate.from_template("{question}")
- ]
- prompt = ChatPromptTemplate.from_messages(messages)
-
- # Load the language model
- llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0,
-                  max_tokens=256)  # Modify model_name if you have access to GPT-4
-
- # Create the chain
- chain_type_kwargs = {"prompt": prompt}
- chain = RetrievalQAWithSourcesChain.from_chain_type(
-     llm=llm,
-     chain_type="stuff",
-     retriever=vector_store.as_retriever(),
-     return_source_documents=True,
-     chain_type_kwargs=chain_type_kwargs
- )
-
-
- # Define function to print the result
- def print_result(result):
-     output_text = f"""### Question:
- {query}
- Answer:
- {result['answer']}
- """
-     st.markdown(output_text)
-
-
- # Get user input
- query = st.text_input("Ask a question")
-
- # Process user input
- if query:
-     result = chain(query)
-     print_result(result)
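
For context on the file being removed: it loads a prebuilt FAISS index (`index.faiss` and `index.pkl`) from `data_store` but never creates one, even though it imports `TextLoader` and `CharacterTextSplitter`. A minimal sketch of how such an index could be produced with the same LangChain APIs is shown below; it is not part of this commit, and the source file name `my_docs.txt` is a hypothetical placeholder.

    # Illustrative sketch only: build data_store/index.faiss + index.pkl for the app above.
    # Assumes a plain-text file "my_docs.txt" (hypothetical) and OPENAI_API_KEY set in the env.
    from langchain.document_loaders import TextLoader
    from langchain.text_splitter import CharacterTextSplitter
    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.vectorstores import FAISS

    # Load the custom data and split it into chunks small enough to embed
    documents = TextLoader("my_docs.txt").load()
    chunks = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(documents)

    # Embed the chunks and persist the FAISS index to the directory the app reads from
    vector_store = FAISS.from_documents(chunks, OpenAIEmbeddings())
    vector_store.save_local("data_store")

`save_local("data_store")` writes exactly the two files the deleted app's `FAISS.load_local(DATA_STORE_DIR, OpenAIEmbeddings())` call expects.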