# from retriever.vectordb import search_documents
# from retriever.vectordb_rerank import search_documents
from retriever.vectordb_rerank_law import search_documents as search_law
from retriever.vectordb_rerank_exam import search_documents as search_exam
from generator.prompt_builder import build_prompt
from generator.llm_inference import generate_answer


def rag_pipeline(query: str, top_k: int = 5) -> str:
    """
    1. Retrieve documents relevant to the user's query
    2. Build a prompt from the retrieved documents
    3. Generate an answer from the prompt
    """
    # 1. Retrieve law documents and exam questions separately
    # context_docs = search_documents(query, top_k=top_k)
    laws_docs = search_law(query, top_k=top_k)
    exam_docs = search_exam(query, top_k=top_k)

    # 2. Build the prompt
    # prompt = build_prompt(query, context_docs)
    prompt = build_prompt(query, laws_docs, exam_docs)

    # 3. Generate questions with the LLM
    # output = generate_answer(prompt)
    questions = generate_answer(prompt)

    # 4. Save the results (not yet implemented)
    # save_to_exam_vector_db(questions)

    return questions
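

# Minimal usage sketch: running this module directly calls the pipeline once.
# The sample query string and top_k value below are hypothetical, chosen only
# to illustrate the call; they are not part of the pipeline itself.
if __name__ == "__main__":
    sample_query = "Generate a question about annual paid leave rules"  # hypothetical example
    print(rag_pipeline(sample_query, top_k=3))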