import streamlit as st
from haystack import Pipeline
from haystack.components.builders.prompt_builder import PromptBuilder
from haystack.components.generators import HuggingFaceTGIGenerator, OpenAIGenerator
from .hackernews_fetcher import HackernewsFetcher

# Cache the pipeline so it is built only once per (key, model) combination.
@st.cache_resource
def start_haystack(key, model):
    prompt_template = """
You will be provided with one or more top HackerNews posts, each followed by its URL.
For each post, provide a short summary followed by the URL where the post can be found.

Posts:
{% for article in articles %}
  Post content: {{article.content}}
  Post URL: {{article.meta['url']}}
{% endfor %}
Summaries:
"""

    prompt_builder = PromptBuilder(template=prompt_template)
    if model == "Mistral":
        llm = HuggingFaceTGIGenerator("mistralai/Mistral-7B-Instruct-v0.2", token=key)
    elif model == "GPT-4":
        llm = OpenAIGenerator(api_key=key, model="gpt-4")
    else:
        # Fail early instead of raising a NameError below when llm is unbound.
        raise ValueError(f"Unsupported model: {model}")
    fetcher = HackernewsFetcher()

    pipe = Pipeline()
    pipe.add_component("hackernews_fetcher", fetcher)
    pipe.add_component("prompt_builder", prompt_builder)
    pipe.add_component("llm", llm)

    pipe.connect("hackernews_fetcher.articles", "prompt_builder.articles")
    pipe.connect("prompt_builder.prompt", "llm.prompt")
    return pipe


def query(top_k, _pipeline):
    try:
        # By default only the fetcher needs input: how many top posts to pull.
        run_args = {"hackernews_fetcher": {"top_k": top_k}}

        # Mistral via TGI defaults to a short completion, so raise the token limit.
        if st.session_state.get("model") == "Mistral":
            run_args = {
                "hackernews_fetcher": {"top_k": top_k},
                "llm": {"generation_kwargs": {"max_new_tokens": 600}},
            }

        replies = _pipeline.run(data=run_args)
        result = replies["llm"]["replies"]
    except Exception:
        result = ["Sorry, there seems to be an issue here 😔"]
    return result