from typing import Dict, Optional

from fastapi import APIRouter, Depends, File, HTTPException, UploadFile
from pydantic import BaseModel, Field

from models.dependencies import get_dependencies
from utils.file_handler import remove_temp_file, save_temp_file
from utils.logger import setup_logger

logger = setup_logger()

# ModelHandler and ModelConfig are assumed to be defined in your codebase
from data_handler.data_model_process import ModelHandler
from config import ModelConfig  # replace with the actual module path
# Ollama streaming router
router_ollama = APIRouter(prefix="/ollama", tags=["Ollama"])



class OllamaChatConfig(BaseModel):
    model_name: str = "deepseek-r1:7b"
    model: str = "ollama"
    temperature: float = 0.7
    max_tokens: int = 512
    stream: bool = True


class ChatRequest(BaseModel):
    prompt: str
    config: Optional[OllamaChatConfig] = OllamaChatConfig()


@router_ollama.post("/stream/chat")
async def ollama_stream_chat(
    request: ChatRequest,
    dependencies: Dict = Depends(get_dependencies)
):
    try:
        handler = ModelHandler()
        config = ModelConfig(
            model = request.config.model,
            model_name=request.config.model_name,
            temperature=request.config.temperature,
            max_tokens=request.config.max_tokens,
            stream=True
        )
        result = await handler.call_model(
            model=request.config.model,
            model_type="ollama_chat",
            data=request.prompt,
            streaming=True,
            config=config,
            dependencies=dependencies
        )
        return result # StreamingResponse
    except Exception as e:
        logger.error(f"Ollama 流式处理失败: {str(e)}")
        raise HTTPException(status_code=500, detail=f"流式处理失败: {str(e)}")
