package xyz.ylx.apirotation.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.http.server.reactive.ServerHttpResponse;
import org.springframework.web.bind.annotation.*;

import xyz.ylx.apirotation.service.AIModelService;
import xyz.ylx.apirotation.service.AIModelServiceFactory;

import java.util.Map;
import java.util.Optional;

import lombok.extern.slf4j.Slf4j;

@Slf4j
@RestController
@RequestMapping("/v1")
public class UnifiedApiController {

    private final AIModelServiceFactory serviceFactory;

    @Autowired
    public UnifiedApiController(AIModelServiceFactory serviceFactory) {
        this.serviceFactory = serviceFactory;
    }

    /**
     * Unified chat-completions endpoint supporting both OpenAI and Gemini models.
     *
     * <p>Reads the {@code model} field from the request body, resolves the backend
     * service that supports that model via {@link AIModelServiceFactory}, and
     * delegates the request to it. When {@code stream} is the JSON boolean
     * {@code true}, SSE response headers are set before delegating.
     *
     * @param requestBody parsed JSON body; {@code model} selects the backend,
     *                    {@code stream} (boolean) requests a streaming response
     * @param headers     all incoming HTTP headers, forwarded to the backend service
     * @param request     the reactive server request (currently unused; kept for
     *                    interface compatibility)
     * @param response    the reactive server response, used to set streaming headers
     * @return whatever the selected {@link AIModelService} produces (typically a
     *         reactive type for WebFlux to render)
     * @throws IllegalArgumentException if no registered service supports the model
     */
    @PostMapping("/chat/completions")
    public Object chatCompletions(
            @RequestBody Map<String, Object> requestBody,
            @RequestHeader Map<String, String> headers,
            ServerHttpRequest request,
            ServerHttpResponse response) {

        // Extract the model name. Guard against a present-but-null "model" value:
        // the previous containsKey + get().toString() combination threw an NPE
        // for a body like {"model": null}.
        Object modelValue = requestBody.get("model");
        String model = modelValue != null ? modelValue.toString() : "";
        log.info("接收到聊天请求，模型: {}", model);

        // Streaming is requested only when "stream" is the JSON boolean true.
        // Boolean.TRUE.equals is null-safe, so no separate containsKey check is needed.
        boolean isStream = Boolean.TRUE.equals(requestBody.get("stream"));

        // For SSE streaming, headers must be set before the body starts flowing.
        // X-Accel-Buffering: no disables reverse-proxy (nginx) buffering so each
        // chunk reaches the client immediately.
        if (isStream) {
            response.getHeaders().setContentType(MediaType.TEXT_EVENT_STREAM);
            response.getHeaders().setCacheControl("no-cache");
            response.getHeaders().setConnection("keep-alive");
            response.getHeaders().set("X-Accel-Buffering", "no");
        }

        // Route to whichever backend service claims this model, or fail fast
        // with a client-facing error for unsupported models.
        AIModelService service = serviceFactory.getServiceForModel(model)
                .orElseThrow(() -> {
                    log.error("未找到支持模型 '{}' 的服务", model);
                    return new IllegalArgumentException("不支持的模型类型: " + model);
                });
        log.info("为模型 '{}' 选择服务: {}", model, service.getClass().getSimpleName());
        return service.handleChatCompletions(requestBody, headers, isStream);
    }
}