import os
import yaml
from pathlib import Path
from app.document_processor import DocumentProcessor
from app.vector_store import VectorStore
from app.model_loader import model_loader

def load_config(config_path="config/config.yaml"):
    """Load and parse the YAML configuration file.

    Args:
        config_path: Path to the YAML config file (default: config/config.yaml).

    Returns:
        The parsed configuration (typically a dict), as produced by yaml.safe_load.

    Raises:
        FileNotFoundError: If the config file does not exist.
        yaml.YAMLError: If the file is not valid YAML.
    """
    # Explicit UTF-8: the platform default encoding (e.g. cp936 on Chinese
    # Windows, cp1252 on Western Windows) would corrupt or reject a UTF-8
    # config containing non-ASCII text.
    with open(config_path, 'r', encoding='utf-8') as f:
        return yaml.safe_load(f)

def process_txt_files(docs_dir, document_processor, extension=".txt"):
    """Process all text files under a directory tree into text chunks.

    Recursively walks ``docs_dir``, reads every file whose name ends with
    ``extension`` as UTF-8, and passes its content to
    ``document_processor.process_text(content, source=path)``.

    Args:
        docs_dir: Root directory to search recursively.
        document_processor: Object exposing ``process_text(content, source=...)``
            that returns an iterable of chunks.
        extension: File-name suffix to match (default ``".txt"``, preserving
            the original behavior).

    Returns:
        A flat list of all chunks from all matched files.

    Raises:
        UnicodeDecodeError: If a matched file is not valid UTF-8.
        OSError: If a file cannot be read.
    """
    matched_files = []
    for root, _, files in os.walk(docs_dir):
        for file in files:
            if file.endswith(extension):
                matched_files.append(os.path.join(root, file))

    # Sort for a deterministic processing order: os.walk yields entries in
    # filesystem-dependent order, which would make the built index vary
    # between runs/machines.
    matched_files.sort()

    all_chunks = []
    for txt_file in matched_files:
        print(f"处理文件: {txt_file}")
        with open(txt_file, 'r', encoding='utf-8') as f:
            content = f.read()
        chunks = document_processor.process_text(content, source=txt_file)
        all_chunks.extend(chunks)

    return all_chunks

def main():
    """Build the document vector index end to end.

    Pipeline: load config -> init document processor -> load embedding
    model -> init vector store -> chunk all txt files -> index and
    persist the vectors.
    """
    config_file = "config/config.yaml"
    load_config()  # parse the YAML configuration

    # Set up the processing components from the shared config file.
    document_processor = DocumentProcessor(config_path=config_file)
    embedding_model = model_loader.load_embedding_model()
    vector_store = VectorStore(embedding_model, config_path=config_file)

    # Chunk every txt document under the data directory.
    print("开始处理txt文件...")
    chunks = process_txt_files("data/documents", document_processor)
    print(f"共处理了 {len(chunks)} 个文本块")

    # Embed the chunks, build the index, and persist it to disk.
    print("开始构建向量索引...")
    vector_store.add_documents(chunks)
    print("保存向量索引...")
    vector_store.save()
    print("向量索引构建完成！")

# Script entry point: run the full index-building pipeline when executed directly.
if __name__ == "__main__":
    main() 