import asyncio
import json
from typing import List, Optional

from fastapi import APIRouter, Query
from fastapi.responses import JSONResponse
from langchain.prompts import PromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector
from langchain.prompts.few_shot import FewShotPromptTemplate
from langchain.schema import HumanMessage, SystemMessage
from langchain_community.llms import Ollama

router = APIRouter()

# Ollama LLM 实例
ollama = Ollama(base_url='http://172.16.21.38:11436', model="qwen3:0.6b")

# 示例数据
examples = [
    {"input":"happy","output":"sad"},
    {"input":"tall","output":"short"},
    {"input":"sunny","output":"gloomy"},
    {"input":"windy","output":"calm"},
    {"input":"高兴","output":"悲伤"}
]

# 构造示例模板
example_prompt = PromptTemplate(
    input_variables=["input","output"],
    template="原词：{input}\n反义：{output}"
)

# 长度选择器
example_selector = LengthBasedExampleSelector(
    examples=examples,
    example_prompt=example_prompt,
    max_length=25  # 控制示例总长度
)

# FewShot Prompt Template
dynamic_prompt = FewShotPromptTemplate(
    example_selector=example_selector,
    example_prompt=example_prompt,
    prefix="给出每个输入词的反义词",
    suffix="原词：{adjective}\n反义：",
    input_variables=["adjective"]
)

@router.get("/test")
async def test():
    return {"message": "Hello, World!"}

# FastAPI 接口
@router.get("/generate_antonym")
async def generate_antonym(adjective: str = Query(..., description="输入的形容词")):
    # 构建完整提示词
    prompt_text = dynamic_prompt.format(adjective=adjective)
    # 调用 Ollama （注意这里传入字符串，而不是 HumanMessage 列表）
    response = ollama.invoke(prompt_text)
    return JSONResponse(content={
        "input": adjective,
        "prompt": prompt_text,
        "output": response
    })

