import streamlit as st
import pandas as pd
from datetime import datetime as dt

from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceInstructEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain.chat_models import ChatOpenAI
from langchain import VectorDBQA
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    AIMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)

from variables import *
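
# NOTE: `variables` (not shown here) is assumed to provide shared configuration
# plus the `create_vectorstore` and `embed_tweets` helpers used further down.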
system_template="""Use the following pieces of context to answer the users question. | |
If you don't know the answer, just say that you don't know, don't try to make up an answer. | |
ALWAYS return a "SOURCES" part in your answer. | |
The "SOURCES" part should be a reference to the source of the document from which you got your answer. | |
Example of your response should be: | |
``` | |
The answer is foo | |
SOURCES: xyz | |
``` | |
Begin! | |
---------------- | |
{context} | |
""" | |

messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]

prompt = ChatPromptTemplate.from_messages(messages)

current_time = dt.today().strftime('%d_%m_%Y_%H_%M')
st.markdown("## Financial Tweets GPT Search") | |
twitter_link = """ | |
[![](https://img.shields.io/twitter/follow/nickmuchi?label=@nickmuchi&style=social)](https://twitter.com/nickmuchi) | |
""" | |
st.markdown(twitter_link) | |

# Embedding model options: display name -> Hugging Face model id
bi_enc_dict = {
    'mpnet-base-v2': "sentence-transformers/all-mpnet-base-v2",
    'instructor-base': 'hkunlp/instructor-base',
}

search_input = st.text_input(
    label='Enter Your Search Query',
    value="What is the latest update on central banks?",
    key='search',
)

sbert_model_name = st.sidebar.selectbox("Embedding Model", options=list(bi_enc_dict.keys()), key='sbox')

# Tweets and metadata previously stored in Streamlit session state
tweets = st.session_state['tlist']
topic = st.session_state['topic']
user = st.session_state['user']
cr_time = st.session_state['time']
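
# `create_vectorstore` and `embed_tweets` come from `variables`. Judging from how
# they are used below, `create_vectorstore` presumably embeds the tweets with the
# selected model and builds a vector index, and `embed_tweets` presumably runs a
# retrieval QA chain with the chat prompt, returning a dict with 'result' (the
# answer) and 'source_documents' (the retrieved tweets).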

try:
    if search_input:
        model = bi_enc_dict[sbert_model_name]

        with st.spinner(
            text=f"Loading {model} embedding model and Generating Response..."
        ):
            # Build the vector store from the tweets, then run the QA chain
            vectorstore = create_vectorstore(tweets, model, user, topic, cr_time)
            tweets = embed_tweets(search_input, prompt, vectorstore)

        references = [doc.page_content for doc in tweets['source_documents']]
        answer = tweets['result']

        ##### Semantic Search #####
        with st.expander(label='Query Result', expanded=True):
            st.write(answer)

        with st.expander(label='References from Corpus used to Generate Result'):
            for ref in references:
                st.write(ref)

    else:
        st.write('Please enter a search query to generate a response.')

except RuntimeError:
    st.write('Something went wrong while generating the response, please check your query and try again.')