from flask import Flask, request, jsonify
from flask_cors import CORS
import json

# Import project-local retrieval and LLM helpers
from QueryProcessor import QP, llm_chat

# Flask application; CORS is enabled for all routes so browser front-ends
# served from other origins can call this API.
app = Flask(__name__)
CORS(app)
@app.route('/process_query', methods=['POST'])
def process_query():
    """Run retrieval for a query and return the hits plus an LLM answer.

    Expects a JSON body: {"query": str, "top_k": int (optional, default 3)}.
    Returns JSON {"qc_results": [{content, source}], "es_results": [...],
    "gpt_answer": str}. Responds 400 when the body is missing, is not JSON,
    or lacks a "query" field (previously this crashed with a 500).
    """
    # silent=True yields None instead of raising on a non-JSON body,
    # letting us return a clean 400 rather than an unhandled 500.
    data = request.get_json(silent=True)
    if not data or 'query' not in data:
        return jsonify({'error': 'JSON body with a "query" field is required'}), 400

    query = data['query']
    top_k = data.get('top_k', 3)

    # The retriever takes a list of queries even for a single query.
    query_list = [query]
    # NOTE(review): QP() is built per request; if it loads models/indexes
    # this is expensive — consider a module-level singleton. Confirm cost.
    query_processor = QP()
    # modes=["q-c"] runs query-to-chunk retrieval; the first two returned
    # values are unused here.
    _, _, q_c_list, q_c_source, e_s_result, es_source = query_processor.muti_linkup(
        query_list,
        top_k=top_k,
        modes=["q-c"]
    )
    # Debug-level logging instead of a bare print in the request path.
    app.logger.debug("retrieval: qc=%s qc_src=%s es=%s es_src=%s",
                     q_c_list, q_c_source, e_s_result, es_source)

    # Concatenate both retrieval channels into one context string for the LLM.
    context = ""
    if q_c_list:
        context += "\nQC检索结果:\n" + "\n".join(str(item) for item in q_c_list)
    if e_s_result:
        context += "\nES检索结果:\n" + "\n".join(str(item) for item in e_s_result)

    gpt_answer = llm_chat(query, context)

    # Pair each retrieved chunk with its source for the response payload.
    qc_results = [{'content': c, 'source': s}
                  for c, s in zip(q_c_list, q_c_source)]
    es_results = [{'content': c, 'source': s}
                  for c, s in zip(e_s_result, es_source)]

    return jsonify({
        'qc_results': qc_results,
        'es_results': es_results,
        'gpt_answer': gpt_answer
    })

if __name__ == '__main__':
    # Flask development server; binds to all interfaces (0.0.0.0), port 5000.
    app.run(host='0.0.0.0', port=5000)