from flask import Blueprint, request, jsonify
from langchain_ollama import OllamaLLM
from langchain_community.vectorstores import Chroma
from langchain_ollama.embeddings import OllamaEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.chains import RetrievalQA

import os
from env import (
    model_url,
    model_name,
)
from .utils import init_chroma

# Blueprint for the question-answering endpoint; registered by the app factory.
ask_bp = Blueprint("ask", __name__)

# Initialize the embedding model (served by an Ollama instance at model_url).
# NOTE(review): the same model name is used for both embeddings and generation
# below — confirm model_name is suitable for embedding, not just chat.
embedding_model = OllamaEmbeddings(base_url=model_url, model=model_name)

# LLM used for answer generation.
ollama = OllamaLLM(base_url=model_url, model=model_name)

# Initialize the Chroma collection used as the retrieval backend.

chroma_collection = init_chroma(embedding_model)


@ask_bp.route("/ask", methods=["POST"])
def ask():
    """Answer a question via retrieval-augmented generation.

    Expects a JSON body ``{"question": "..."}``.

    Returns:
        200 with ``{"answer": <text>}`` on success,
        400 if the body is not JSON or lacks a "question" field,
        500 with ``{"error": <message>}`` on any downstream failure.
    """
    # get_json(silent=True) returns None instead of raising on a non-JSON
    # body, so malformed requests become a 400 rather than a 500.
    data = request.get_json(silent=True)
    if not data or "question" not in data:
        return jsonify({"error": "JSON body with a 'question' field is required"}), 400
    question = data["question"]

    print(f"question:{question}")
    try:
        # chain_type alternatives: stuff, map_reduce, refine
        qachain = RetrievalQA.from_chain_type(
            ollama, chain_type="refine", retriever=chroma_collection.as_retriever()
        )
        res = qachain.invoke({"query": question})
        # RetrievalQA returns {"query": ..., "result": ...}; expose the answer
        # text itself, falling back to the raw result if the key is absent.
        return jsonify({"answer": res.get("result", res)}), 200
    except Exception as e:
        # Top-level boundary for the route: report the failure as JSON.
        return jsonify({"error": str(e)}), 500
