import os

from fastapi import FastAPI
from langchain.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langserve import add_routes

# FastAPI application that exposes the news-generation chain as an HTTP API.
# NOTE: title/description are user-facing Chinese strings and are kept as-is.
app = FastAPI(
    title="新闻生成API",
    description="通过LangChain生成中文新闻的接口",
    version="1.0.0"
)
from pydantic import BaseModel, Field
from typing import Annotated


class JokeBatchRequest(BaseModel):
    """Request body for batch generation: a list of at most 3 topics.

    Note: this model is currently not wired into any route; it documents
    the intended batch-request shape.
    """

    # `pydantic.Field` (not `fastapi.Body`) is the correct way to constrain
    # a model field; max_length=3 caps the list at three topics. The field
    # has no default, so it stays required — same contract as Body(...).
    topics: Annotated[list[str], Field(max_length=3)]

# Chat model behind SiliconFlow's OpenAI-compatible endpoint.
# SECURITY: the API key was hard-coded in source (a leaked secret); read it
# from the environment instead so it never lands in version control.
# The previously committed key should be revoked and rotated.
model = ChatOpenAI(
    openai_api_base="https://api.siliconflow.cn/v1/",
    openai_api_key=os.getenv("SILICONFLOW_API_KEY", ""),
    model_name="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
)
# Prompt template fills {topic} into a Chinese "tell me news about ..." prompt,
# then pipes the formatted messages into the model (LCEL `|` composition).
prompt = ChatPromptTemplate.from_template("用中文跟我讲一个关于 {topic}的新闻")
chain = prompt | model

# Mount the chain via LangServe under /joke (invoke/batch/stream endpoints).
# The debug `print(app.openapi())` that ran at import time has been removed;
# the schema is served at /openapi.json and will raise there if it is broken.
add_routes(app, chain, path="/joke")

if __name__ == "__main__":
    # Entry point for local development: serve the app on localhost:9000.
    import uvicorn

    uvicorn.run(app, host="localhost", port=9000)