package com.example.backspring.controller;

import com.example.backspring.Service.LLMService;
import com.example.backspring.Service.impl.LLMServiceImpl;
import com.example.backspring.config.Message;
import com.example.backspring.entity.LLMRequest;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import java.nio.charset.StandardCharsets;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

// NOTE(review): wildcard CORS permits requests from any origin — confirm this is
// acceptable for production, or restrict to the known frontend origin(s).
@CrossOrigin(origins = "*")
@RestController
@RequestMapping("/deepseek")
@Tag(name = "deepSeek", description = "调用深度学习模型v3或r1")
@Slf4j
public class DeepSeek {

    /** LLM query service, injected by the container. */
    @Resource
    private LLMService service;

    /**
     * Forwards the caller's question to the LLM and streams the answer back
     * as raw bytes.
     *
     * @param request carries the question ({@code asker}) and the model name
     *                ({@code moxing}) to query
     * @return a 200 response whose streaming body is the model's answer,
     *         UTF-8 encoded, served as {@code application/octet-stream}
     */
    @Operation(summary = "调用模型", description = "传入模型名称和问题，返回模型的回答")
    @PostMapping("/ask")
    public ResponseEntity<StreamingResponseBody> askQuestion(@RequestBody LLMRequest request) {
        // Parameterized logging: no concatenation cost when INFO is disabled.
        log.info("DeepSeek: askQuestion: {}", request);
        StreamingResponseBody stream = outputStream -> {
            // Endpoint, region and temperature are fixed here; the trailing null's
            // meaning depends on LLMService.queryLLM — see that interface.
            // NOTE(review): if the service fails, message or getDeepseekData() may be
            // null and this lambda would throw NPE mid-stream — confirm the contract.
            Message message = service.queryLLM(request.getAsker(), request.getMoxing(),
                    "lkeap.tencentcloudapi.com", "ap-guangzhou", 0.6, null);
            // Explicit UTF-8: bare getBytes() uses the platform default charset
            // (pre-Java 18), which corrupts Chinese output on non-UTF-8 hosts.
            outputStream.write(message.getDeepseekData().getBytes(StandardCharsets.UTF_8));
            outputStream.flush();
        };
        return ResponseEntity.ok()
                .contentType(MediaType.APPLICATION_OCTET_STREAM)
                .body(stream);
    }
}
