import streamlit as st

from haystack import Document, Pipeline
from haystack.components.builders.answer_builder import AnswerBuilder
from haystack.components.builders.prompt_builder import PromptBuilder
from haystack_integrations.components.embedders.cohere import CohereTextEmbedder
from haystack_integrations.components.generators.cohere import CohereGenerator
from haystack_integrations.components.retrievers.pinecone import PineconeEmbeddingRetriever
from haystack_integrations.document_stores.pinecone import PineconeDocumentStore
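
# Assumed install (not part of the original listing): Haystack 2.x plus the
# Cohere and Pinecone integration packages.
#   pip install haystack-ai cohere-haystack pinecone-haystack streamlit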


def start_haystack(openai_key):
    # Cohere and Pinecone credentials are read from environment variables
    # (typically COHERE_API_KEY and PINECONE_API_KEY); the openai_key argument
    # is not used by this pipeline.
    # The 1024 dimension matches Cohere's embed-english-v3.0 embeddings.
    document_store = PineconeDocumentStore(dimension=1024, index="zen", environment="gcp-starter")

    template = """
    You are a support agent replying to customers' messages. Use the context to answer the customer, starting by greeting them and ending with goodbyes.

    DO NOT TRY TO GUESS INFORMATION. If the context doesn't provide you with the answer, ONLY say this: [].

    Context:
    {% for document in documents %}
    {{ document.content }}
    {% endfor %}

    Customer's message: {{ query }}?
    """

    # Mark the pipeline as initialized in Streamlit's session state.
    st.session_state["haystack_started"] = True

    pipe = Pipeline()

    # Register the RAG stages: query embedding, vector retrieval, prompt
    # construction, generation, and answer packaging.
    pipe.add_component("text_embedder", CohereTextEmbedder(model="embed-english-v3.0"))
    pipe.add_component("retriever", PineconeEmbeddingRetriever(document_store=document_store, top_k=3))
    pipe.add_component("prompt_builder", PromptBuilder(template=template))
    pipe.add_component("llm", CohereGenerator(model="command-nightly"))
    pipe.add_component("answer_builder", AnswerBuilder())

    # Wire the graph: the embedded query drives retrieval; retrieved documents
    # feed both the prompt and the final answer; the LLM's replies and metadata
    # go to the answer builder.
    pipe.connect("text_embedder.embedding", "retriever.query_embedding")
    pipe.connect("retriever", "prompt_builder.documents")
    pipe.connect("prompt_builder", "llm")
    pipe.connect("llm.replies", "answer_builder.replies")
    pipe.connect("llm.meta", "answer_builder.meta")
    pipe.connect("retriever", "answer_builder.documents")

    return pipe
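
# A quick way to exercise the pipeline outside Streamlit (a sketch, assuming
# the Cohere and Pinecone environment variables are set; the example question
# is illustrative):
#
#   pipe = start_haystack(openai_key=None)
#   out = pipe.run({
#       "text_embedder": {"text": "How do I reset my password?"},
#       "prompt_builder": {"query": "How do I reset my password?"},
#       "answer_builder": {"query": "How do I reset my password?"},
#   })
#   print(out["answer_builder"]["answers"][0].data)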


@st.cache_data(show_spinner=True)
def query(prompt, _pipe):
    # The leading underscore in `_pipe` tells st.cache_data not to hash the
    # pipeline object, so responses are cached by prompt only.
    with st.spinner('Processing'):
        try:
            # Every component that takes external input receives the user's
            # prompt; everything else flows through the connections above.
            replies = _pipe.run({
                "text_embedder": {"text": prompt},
                "prompt_builder": {"query": prompt},
                "answer_builder": {"query": prompt},
            })

            raw = replies['answer_builder']['answers'][0]
            print("Raw:")
            print(raw)
            # Append the top retrieved document as a source reference.
            result = raw.data + "\n\n -- Source: " + raw.documents[0].content + " --"
            print("Result:")
            print(raw.data)
            st.success('Completed!')
        except Exception as e:
            print("Error:")
            print(e)
            # Use a plain string so both code paths return the same type.
            result = "Something went wrong!"
            st.error('Failed!')
    return result
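

# Minimal front-end sketch (an assumption, not part of the original file):
# build the pipeline once per session, then answer whatever the user types.
# The widget label and session key here are illustrative.
if "pipeline" not in st.session_state:
    st.session_state["pipeline"] = start_haystack(openai_key=None)

user_message = st.text_input("How can we help you?")
if user_message:
    st.write(query(user_message, st.session_state["pipeline"]))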