import os

from langchain.chains.summarize import load_summarize_chain
from langchain_community.document_loaders import UnstructuredFileLoader
from langchain_openai import ChatOpenAI
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Load the source text file.
loader = UnstructuredFileLoader("../test.txt")

# Convert the file contents into a list of Document objects.
document = loader.load()
print(f"Document: {len(document)}")

# Initialize the text splitter.
# chunk_size: maximum size of each chunk (in characters)
# chunk_overlap: number of overlapping characters between consecutive chunks
text_splitter = RecursiveCharacterTextSplitter(chunk_size=200, chunk_overlap=0)

# Split the documents into chunks.
split_documents = text_splitter.split_documents(document)
print(f'Split documents: {len(split_documents)}')

# Initialize the chat model.
# SECURITY: never hardcode API keys in source — read from the environment.
llm = ChatOpenAI(
    openai_api_key=os.getenv("MOONSHOT_API_KEY", "key"),
    openai_api_base="https://api.moonshot.cn/v1",
    model="moonshot-v1-8k",
    temperature=0,
    request_timeout=60,
    max_retries=3,
)

# Create the summarization chain.
# chain_type options: stuff, map_reduce, refine, map_rerank
chain = load_summarize_chain(llm, chain_type="refine", verbose=True)

# Run the summarization chain; `invoke` replaces the deprecated `Chain.run`.
# load_summarize_chain expects its documents under the "input_documents" key
# and returns the summary under "output_text".
result = chain.invoke({"input_documents": split_documents})
print(result["output_text"])