import json
from fastapi import APIRouter, Request, Query
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from typing import List, Optional
from ollama import Client

# NOTE: once the Ollama server is running, this module talks to it directly
# via the `ollama` Client (a langchain-based loader was considered earlier).
router = APIRouter()

# Connection to the remote Ollama service.
# NOTE(review): host is hard-coded — replace with your own Ollama server
# address (or move to configuration) before deploying.
client = Client(
    host='http://172.16.21.38:11436',  # replace with your Ollama server address
    headers={'x-some-header': 'some-value'}
)

@router.get("/test")
async def test():
    """Health-check endpoint; responds with a static greeting payload."""
    payload = {"message": "Hello World"}
    return JSONResponse(content=payload)


class chatTextV1Schema(BaseModel):
    """Request body for POST /chatTextV1."""
    # User prompt text; the endpoint substitutes a default when omitted/empty.
    inputText: Optional[str] = None

# Calls the Ollama HTTP API directly through the `ollama` Client.
@router.post("/chatTextV1")
async def chatTextV1(request: chatTextV1Schema):
    """POST endpoint: forward the user's text to the Ollama chat model.

    Body: chatTextV1Schema; `inputText` falls back to "你是谁" when
    missing or empty.
    Returns: {"success": True, "response": <model reply>} on success, or
    {"success": False, "error": <message>} with HTTP 500 on failure.
    """
    inputText = request.inputText or "你是谁"
    try:
        response = client.chat(
            model="qwen3:0.6b",
            messages=[{"role": "user", "content": inputText}]
        )
    except Exception as e:
        # Fix: errors were previously returned with HTTP 200, so callers
        # could not detect failure from the status code alone.
        return JSONResponse(
            status_code=500,
            content={"success": False, "error": str(e)}
        )
    return JSONResponse(content={"success": True, "response": response["message"]["content"]})

@router.get("/chatTextV1")
async def chat_get(
    message: str = Query(..., description="用户输入消息"),
    model: str = Query("qwen3:0.6b", description="模型名称")
):
    """GET endpoint: send a single chat message to an Ollama model.

    Query params:
        message: user input message (required).
        model: model name, defaults to "qwen3:0.6b".
    Returns: {"success": True, "response": <model reply>} on success, or
    {"success": False, "error": <message>} with HTTP 500 on failure.
    """
    try:
        response = client.chat(
            model=model,
            messages=[{"role": "user", "content": message}]
        )
    except Exception as e:
        # Fix: errors were previously returned with HTTP 200, so callers
        # could not detect failure from the status code alone.
        return JSONResponse(
            status_code=500,
            content={"success": False, "error": str(e)}
        )
    return JSONResponse(content={"success": True, "response": response["message"]["content"]})