								"""
LightRAG meets Amazon Bedrock ⛰️
"""
import os
import asyncio
import logging

import nest_asyncio

from lightrag import LightRAG, QueryParam
from lightrag.llm.bedrock import bedrock_complete, bedrock_embed
from lightrag.utils import EmbeddingFunc
from lightrag.kg.shared_storage import initialize_pipeline_status

# Allow nested event loops so the synchronous helpers below can run even if an
# event loop is already active (e.g. inside a notebook).
nest_asyncio.apply()

# The aiobotocore client used by the Bedrock bindings logs verbosely; keep it quiet.
logging.getLogger("aiobotocore").setLevel(logging.WARNING)
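
# NOTE: bedrock_complete / bedrock_embed are expected to resolve AWS credentials and a
# region through the standard AWS configuration chain, for example:
#
#   export AWS_ACCESS_KEY_ID=...
#   export AWS_SECRET_ACCESS_KEY=...
#   export AWS_REGION=us-east-1
#
# Model access for Claude 3 Haiku and the embedding model must also be enabled in the
# target Bedrock account/region.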
# Directory where LightRAG keeps its working data (key-value stores, vector index, graph).
WORKING_DIR = "./dickens"
if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)


async def initialize_rag():
    """Build a LightRAG instance backed by Amazon Bedrock and initialize its storages."""
    rag = LightRAG(
        working_dir=WORKING_DIR,
        llm_model_func=bedrock_complete,  # Claude 3 Haiku served through Amazon Bedrock
        llm_model_name="Anthropic Claude 3 Haiku // Amazon Bedrock",
        embedding_func=EmbeddingFunc(
            # Bedrock embeddings: 1024-dimensional vectors, up to 8192 tokens per call.
            embedding_dim=1024, max_token_size=8192, func=bedrock_embed
        ),
    )
    await rag.initialize_storages()
    await initialize_pipeline_status()
    return rag


def main():
    # Initialize storages and the pipeline status before using the instance.
    rag = asyncio.run(initialize_rag())

    # Index the source document.
    with open("./book.txt", "r", encoding="utf-8") as f:
        rag.insert(f.read())

    # Ask the same question in each retrieval mode and print the answers.
    for mode in ["naive", "local", "global", "hybrid"]:
        print("\n+-" + "-" * len(mode) + "-+")
        print(f"| {mode.capitalize()} |")
        print("+-" + "-" * len(mode) + "-+\n")
        print(
            rag.query(
                "What are the top themes in this story?", param=QueryParam(mode=mode)
            )
        )
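

# Note: recent LightRAG versions also expose async counterparts (e.g. ``rag.ainsert``
# and ``rag.aquery``) that can be awaited directly from an async entry point instead
# of the synchronous wrappers used in main() above.
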
if __name__ == "__main__":
    main()