import os

from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS


def parse_knowledge(file_path: str, category: str,
                    store_root: str = 'sales_chatbot/vector_store') -> str:
    """Split a knowledge text file into chunks, embed them, and persist
    a FAISS vector store for the given category.

    Args:
        file_path: Path to a UTF-8 text file whose entries appear to be
            separated by numbered markers like ``1.``, ``2.`` (the regex
            separator below) — confirm against the knowledge files.
        category: Category name; the store is written to
            ``<store_root>/<category>``.
        store_root: Base directory for vector stores (defaults to the
            previously hard-coded location, so existing callers are
            unaffected).

    Returns:
        The path the vector store was saved to.
    """
    # Keep the file open only for the read; processing happens after.
    with open(file_path, encoding='utf-8') as f:
        raw_data = f.read()

    # Split on numbered-list markers such as "1." / "23." (regex separator).
    text_splitter = CharacterTextSplitter(
        separator=r'\d+\.',
        chunk_size=100,
        chunk_overlap=0,
        length_function=len,
        is_separator_regex=True,
    )
    docs = text_splitter.create_documents([raw_data])

    # Embed every chunk with OpenAI embeddings and persist locally.
    db = FAISS.from_documents(docs, OpenAIEmbeddings())
    store_path = f"{store_root}/{category}"
    db.save_local(store_path)
    print('success store path: ', store_path)
    return store_path


if __name__ == '__main__':
    # NOTE(review): 'XXX' is a placeholder — provide a real key through the
    # OPENAI_API_KEY environment variable; never commit a real key here.
    # setdefault only fills the value when the env var is not already set.
    os.environ.setdefault(
        'OPENAI_API_KEY', 'XXX')

    # Project-local module listing the knowledge categories to ingest.
    import category

    # Build one vector store per category from its knowledge text file.
    for cate in category.category:
        parse_knowledge(
            f'sales_chatbot/chat_knowledge/{cate.key}.txt', cate.key)
