from langchain.chains.llm import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.docstore.document import Document
import time
from typing import List
from langchain.chains.summarize import load_summarize_chain
|
|
|
|
async def async_generate_llmchain(
    llm: ChatOpenAI, docs: List[Document], llm_kwargs: dict, k: str
) -> dict:
    """Asynchronously summarize *docs* and return the result keyed by *k*.

    NOTE(review): despite the historical name, this builds a chain via
    ``load_summarize_chain`` rather than a bare ``LLMChain``.

    Args:
        llm (ChatOpenAI): Language model to use.
        docs (List[Document]): Documents to summarize.
        llm_kwargs (dict): Keyword arguments forwarded to
            ``load_summarize_chain`` (e.g. ``chain_type``).
        k (str): Key under which the summary is stored in the returned dict.

    Returns:
        dict: ``{k: summary_text}``.
    """
    print(f"Starting summarization for {k}")
    start = time.time()

    # Build the summarization chain with the caller-supplied options.
    summarize_chain = load_summarize_chain(llm=llm, **llm_kwargs)
    summary = await summarize_chain.arun(docs)

    print(f"Time taken for {k}: ", time.time() - start)
    return {k: summary}
|
|
|
|
async def async_generate_summary_chain(
    llm: ChatOpenAI, docs: List[Document], summarization_kwargs: dict, k: str
) -> dict:
    """Asynchronously run a summarize chain over *docs*, timing the call.

    Args:
        llm (ChatOpenAI): Language model to use.
        docs (List[Document]): Documents to summarize.
        summarization_kwargs (dict): Keyword arguments forwarded to
            ``load_summarize_chain`` (e.g. ``chain_type``).
        k (str): Key under which the summary is stored in the returned dict.

    Returns:
        dict: ``{k: summary_text}``.
    """
    print(f"Starting summarization for {k}")
    t0 = time.time()
    result = await load_summarize_chain(llm=llm, **summarization_kwargs).arun(docs)
    print(f"Time taken for {k}: ", time.time() - t0)
    return {k: result}
|
|