from typing import Union
import uvicorn
from fastapi import FastAPI
from langchain_community.llms.ollama import Ollama
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse
from pydantic import BaseModel
import re
import logging

# FastAPI application instance; routes and CORS middleware are registered on it below.
app = FastAPI()


class TextRequest(BaseModel):
    """Request body for ``POST /ai-translate``: the English text to translate."""

    # Raw English source text submitted by the client.
    text: str


@app.get("/")
async def read_root():
    """Root endpoint returning a static greeting (simple liveness check)."""
    return {"Hello": "World"}


@app.get("/items/{item_id}")
async def read_item(item_id: int, q: Union[str, None] = None):
    """Echo the path parameter ``item_id`` and the optional query string ``q``."""
    return {"item_id": item_id, "q": q}


# Names of locally available Ollama models; index 2 ('dolphin-llama3') is the
# one used by the /ai-translate endpoint below.
model = ['phi3', 'llava-phi3', 'dolphin-llama3']
# Dev frontend origin (Vite's default port) permitted for cross-origin requests.
origins = ['http://localhost:5173']
# Allow the SPA dev server to call this API cross-origin, with credentials,
# using any method and any headers.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.post("/ai-translate")
async def ollama(request: TextRequest):
    """Stream a Chinese translation of ``request.text`` as server-sent events.

    Wraps the submitted English text in a translation prompt, sends it to a
    local Ollama model, and forwards the model's output chunks to the client
    as they are produced.

    Parameters:
        request: TextRequest carrying the English source text.

    Returns:
        StreamingResponse with media type ``text/event-stream`` yielding raw
        model output chunks.
    """
    logging.info("Received a request on the /ai-translate endpoint")
    # Renamed from ``ollama``: the local previously shadowed this endpoint
    # function's own name.
    llm = Ollama(
        base_url='http://localhost:11434',
        model=model[2]  # dolphin-llama3
    )

    # Generate the Ollama output as an async stream of text chunks.
    async def generate_ollama_output():
        # Translation prompt (kept verbatim, surrounding whitespace included):
        # "Please translate the following English source text for me:"
        prompt = f"""
请帮我翻译以下英文原文:

{request.text}


        """
        # Use the async streaming API and pass the prompt directly: the
        # original code iterated the *synchronous* ``stream()`` inside this
        # async generator (blocking the event loop for the whole generation)
        # and re-wrapped the prompt in a second f-string template, injecting
        # stray indentation into the text sent to the model.
        async for chunk in llm.astream(prompt):
            yield chunk

    return StreamingResponse(content=generate_ollama_output(), media_type="text/event-stream")

