File size: 1,670 Bytes
99e744f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
from langchain.chains.summarize import load_summarize_chain
from langchain.prompts import PromptTemplate


def get_refine_chain(pipeline_or_llm, model_type):
    """Build a LangChain "refine"-style summarization chain.

    Args:
        pipeline_or_llm: When ``model_type == "openai"`` this is used
            directly as the LLM; otherwise it is treated as a HuggingFace
            ``pipeline`` object and wrapped in :class:`HuggingFacePipeline`.
        model_type: ``"openai"`` selects the verbose summarization prompts;
            any other value selects minimal pass-through prompts.

    Returns:
        A refine summarize chain that reads ``input_documents`` and writes
        its result under the ``output_text`` key.
    """
    if model_type == "openai":
        # Use the supplied LLM as-is, with explicit summarize/refine prompts.
        llm = pipeline_or_llm
        initial_prompt = PromptTemplate.from_template(
            """Write a concise summary of the following:
                        {text}
                        CONCISE SUMMARY:"""
        )
        followup_prompt = PromptTemplate.from_template(
            """Your job is to produce a final summary
                We have provided an existing summary up to a certain point: {existing_answer}
                We have the opportunity to refine the existing summary (only if needed) with some more context below.
                ------------
                {text}
                ------------
                Given the new context, refine the original summary in bullets. If the context isn't useful return the original summary."""
        )
    else:
        # Non-OpenAI path: wrap the raw HF pipeline and use bare pass-through
        # templates (the model sees the text/previous answer verbatim).
        llm = HuggingFacePipeline(pipeline=pipeline_or_llm)
        initial_prompt = PromptTemplate.from_template(template="{text}")
        followup_prompt = PromptTemplate.from_template(template="{existing_answer}\n{text}")

    return load_summarize_chain(
        llm=llm,
        chain_type="refine",
        question_prompt=initial_prompt,
        refine_prompt=followup_prompt,
        return_intermediate_steps=False,
        input_key="input_documents",
        output_key="output_text",
        verbose=True,
    )