from langchain.document_loaders import UnstructuredFileLoader
from langchain.chains.summarize import load_summarize_chain
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI

import os

# Security fix: the API key was previously hard-coded in source, which leaks
# the credential to anyone with repo access. Require it from the environment
# instead and fail fast with an actionable message.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Export it before running, e.g. "
        "`export OPENAI_API_KEY=sk-...`"
    )

# Load the raw text file into a list of LangChain Document objects.
loader = UnstructuredFileLoader("./test.txt")
document = loader.load()
print(f'documents loaded: {len(document)}')

# Split into ~500-character chunks (no overlap) so each chunk fits
# comfortably in the model's context window for the map step.
splitter = RecursiveCharacterTextSplitter(
    chunk_size=500,
    chunk_overlap=0,
)

split_doc = splitter.split_documents(document)
print(f'chunks after split: {len(split_doc)}')

llm = OpenAI(model_name="text-davinci-003")

# map_reduce: summarize each chunk independently (map), then combine the
# partial summaries into one final summary (reduce).
chain = load_summarize_chain(
    llm=llm,
    chain_type="map_reduce",
    verbose=True
)

# Summarize only the first 3 chunks to limit API cost; print the result
# (previously the return value was silently discarded).
summary = chain.run(split_doc[:3])
print(summary)
