package com.hoppinzq.controller;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.hoppinzq.api.DeleteResult;
import com.hoppinzq.api.completion.CompletionRequest;
import com.hoppinzq.api.completion.CompletionResult;
import com.hoppinzq.api.completion.chat.ChatCompletionRequest;
import com.hoppinzq.api.completion.chat.ChatCompletionResult;
import com.hoppinzq.api.completion.chat.ChatMessage;
import com.hoppinzq.api.completion.chat.ChatMessageRole;
import com.hoppinzq.api.edit.EditRequest;
import com.hoppinzq.api.edit.EditResult;
import com.hoppinzq.api.embedding.EmbeddingRequest;
import com.hoppinzq.api.embedding.EmbeddingResult;
import com.hoppinzq.api.file.File;
import com.hoppinzq.api.finetune.FineTuneEvent;
import com.hoppinzq.api.finetune.FineTuneRequest;
import com.hoppinzq.api.finetune.FineTuneResult;
import com.hoppinzq.api.image.CreateImageEditRequest;
import com.hoppinzq.api.image.CreateImageRequest;
import com.hoppinzq.api.image.CreateImageVariationRequest;
import com.hoppinzq.api.image.ImageResult;
import com.hoppinzq.api.model.Model;
import com.hoppinzq.api.moderation.ModerationRequest;
import com.hoppinzq.api.moderation.ModerationResult;
import com.hoppinzq.config.WebSocketProcess;
import com.hoppinzq.service.ChatGptService;
import com.hoppinzq.service.OpenAiService;
import com.hoppinzq.service.util.UUIDUtil;
import io.reactivex.functions.Action;
import io.reactivex.schedulers.Schedulers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * REST controller that proxies the OpenAI HTTP API (models, completions, chat,
 * edits, embeddings, files, fine-tunes, images, moderations) and exposes
 * streaming chat over Server-Sent Events and WebSocket.
 */
@RestController
public class ChatGptController {

    // Default OpenAI API key used when the caller does not supply one.
    // NOTE(review): inject this from configuration (e.g. @Value) instead of
    // hard-coding it; a real key must never be committed to source control.
    private static final String token = "sk-";
    // Default chat model id. (Fixed defect: the old constant held
    // "gpt-3.5.turbo" — an invalid model id — and was never used; every
    // endpoint hard-coded the literal instead.)
    private static final String MODEL = "gpt-3.5-turbo";
    // Default number of context messages replayed into a conversation.
    private static final int contextNumber = 6;
    private static final Logger logger = LoggerFactory.getLogger(ChatGptController.class);

    @Autowired
    private WebSocketProcess webSocketProcess;
    @Autowired
    private ChatGptService chatGptService;

    /**
     * Resolves the effective API key. Some front-ends send the literal string
     * "null" when the user has not configured a key; treat that (and a real
     * null) as "use the server default".
     */
    private static String resolveApiKey(String apikey) {
        return (apikey == null || "null".equals(apikey)) ? token : apikey;
    }

    /**
     * Copies an uploaded file into a temporary file and returns it.
     * Fixed defect: the previous code wrote into the process working directory
     * under the client-supplied original filename, which risks path traversal
     * and name collisions between concurrent uploads.
     */
    private static java.io.File toTempFile(MultipartFile upload) throws IOException {
        String original = upload.getOriginalFilename();
        // Strip any client-supplied directory components before reusing the name.
        String safeName = (original == null || original.isEmpty())
                ? "file" : new java.io.File(original).getName();
        java.io.File tmp = java.io.File.createTempFile("upload-", "-" + safeName);
        upload.transferTo(tmp);
        return tmp;
    }

    /**
     * Lists all models available to the configured API key.
     */
    @GetMapping("v1/models")
    List<Model> listModels() {
        return new OpenAiService(token, Duration.ofSeconds(50)).listModels();
    }

    /**
     * Retrieves a single model by id.
     *
     * @param modelId OpenAI model id
     */
    @GetMapping("/v1/models/{model_id}")
    Model getModel(@PathVariable("model_id") String modelId) {
        return new OpenAiService(token).getModel(modelId);
    }

    /**
     * Creates a (non-chat) text completion.
     */
    @PostMapping("/v1/completions")
    CompletionResult createCompletion(@RequestBody CompletionRequest request) {
        return new OpenAiService(token).createCompletion(request);
    }

    /**
     * Legacy engine-scoped completion endpoint. The engine id is accepted for
     * URL compatibility only; the request body determines the model.
     */
    @PostMapping("/v1/engines/{engine_id}/completions")
    CompletionResult createCompletion(@PathVariable("engine_id") String engineId, @RequestBody CompletionRequest request) {
        return new OpenAiService(token).createCompletion(request);
    }

    /**
     * Creates a chat completion and returns the whole answer in one response.
     */
    @PostMapping("/v1/chat/completions")
    ChatCompletionResult createChatCompletion(@RequestBody ChatCompletionRequest request) {
        // 60-second client timeout: full chat answers can take a while.
        return new OpenAiService(token, Duration.ofSeconds(60)).createChatCompletion(request);
    }

    /**
     * Streams a single-turn chat answer over SSE.
     * Fixed defects: the emitter is now completed when the stream ends (it
     * previously stayed open until the client timed out) and the underlying
     * HTTP executor is shut down on every path, not just on success.
     *
     * @param message the user message (path segment)
     */
    @GetMapping("/v1/chat/completions/getstream/{message}")
    public SseEmitter streamSSE(@PathVariable("message") String message) throws IOException {
        SseEmitter emitter = new SseEmitter();
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), message));
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(60));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(MODEL)
                .messages(messages)
                .build();
        try {
            service.streamChatCompletion(chatCompletionRequest)
                    .blockingForEach(msg -> emitter.send(SseEmitter.event().name("message").data(msg)));
            emitter.complete();
        } catch (Exception ex) {
            logger.error("SSE stream failed", ex);
            emitter.completeWithError(ex);
        } finally {
            service.shutdownExecutor();
        }
        return emitter;
    }

    /**
     * Streams a chat answer over SSE, optionally replaying recent conversation
     * history identified by chatId.
     *
     * @param message   current user message (path segment)
     * @param chatId    optional conversation id; when present, up to the 5 most
     *                  recent stored messages are replayed as context
     * @param isContent accepted for API compatibility; currently unused
     */
    @GetMapping("/stream-sse/{message}")
    public SseEmitter streamSSE2(@PathVariable("message") String message, String chatId, boolean isContent, HttpServletResponse response) {
        SseEmitter emitter = new SseEmitter(300000L); // keep the connection alive for up to 5 minutes
        List<ChatMessage> messages = new ArrayList<>();
        if (chatId != null) {
            List<com.hoppinzq.bean.ChatMessage> history = chatGptService.getChatMessageByChatId(chatId);
            // Replay only the tail of the history: the last 5 stored messages.
            int start = Math.max(0, history.size() - 5);
            for (int i = start; i < history.size(); i++) {
                com.hoppinzq.bean.ChatMessage stored = history.get(i);
                messages.add(new ChatMessage(stored.getMessage_role(), stored.getMessage()));
            }
        }
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), message));
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(60));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(MODEL)
                .messages(messages)
                .build();
        service.streamChatCompletion(chatCompletionRequest)
                .doOnError(err -> {
                    logger.error("SSE stream error", err);
                    service.shutdownExecutor();
                })
                .subscribeOn(Schedulers.io())
                .observeOn(Schedulers.computation())
                .doOnCancel(() -> {
                    logger.info("chat stream cancelled");
                    service.shutdownExecutor();
                })
                .subscribe(
                        msg -> emitter.send(SseEmitter.event().name("message").data(msg)),
                        err -> {
                            logger.error("SSE stream error", err);
                            emitter.completeWithError(err);
                        },
                        () -> {
                            logger.debug("stream completed");
                            emitter.complete();
                            service.shutdownExecutor(); // was leaked on the success path
                        });
        return emitter;
    }

    /**
     * Streams a chat answer over SSE. Unlike {@link #streamSSE2} the message is
     * passed as a query parameter, which avoids user content being mis-parsed
     * as part of the request path.
     *
     * @param message        current user message
     * @param chatId         optional conversation id for context replay
     * @param apikey         optional caller API key; "null"/null falls back to the server default
     * @param context_number requested context window size (2..18); falls back to
     *                       the default of 6 when absent or not a number
     * @param system         optional system prompt prepended to the context
     */
    @GetMapping("/stream-sse3")
    public SseEmitter streamSSE3(String message, String chatId, String apikey, String context_number, String system) throws IOException {
        SseEmitter emitter = new SseEmitter(300000L); // keep the connection alive for up to 5 minutes
        String logid = UUIDUtil.getUUID();
        logger.debug("request start: {}", logid);
        List<ChatMessage> messages = new ArrayList<>();
        if (chatId != null) {
            // context_number and system could be looked up from chatId; this
            // endpoint takes them as request parameters instead.
            int contextNum;
            try {
                contextNum = Integer.parseInt(context_number);
                if (contextNum < 2) {
                    emitter.send("500sse_error_zq_no:" + logid + ",上下文的数目不能小于2");
                    return emitter;
                }
                if (contextNum > 18) {
                    emitter.send("500sse_error_zq_no:" + logid + ",上下文的数目不能大于18");
                    return emitter;
                }
            } catch (Exception ex) {
                contextNum = contextNumber;
            }
            List<com.hoppinzq.bean.ChatMessage> history = chatGptService.getChatMessageByChatId(chatId);
            // Fixed defect: the old check ( !"".equals(system) ) let a null
            // system prompt through and created a message with null content.
            if (system != null && !system.isEmpty()) {
                messages.add(new ChatMessage("system", system));
            }
            // Replay only the last (contextNum - 1) stored messages; the
            // current user message appended below fills the remaining slot.
            int start = Math.max(0, history.size() - contextNum + 1);
            for (int i = start; i < history.size(); i++) {
                com.hoppinzq.bean.ChatMessage stored = history.get(i);
                messages.add(new ChatMessage(stored.getMessage_role(), stored.getMessage()));
            }
        }
        try {
            messages.add(new ChatMessage(ChatMessageRole.USER.value(), message));
            OpenAiService service = new OpenAiService(resolveApiKey(apikey), Duration.ofSeconds(60));
            ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                    .builder()
                    .model(MODEL)
                    .messages(messages)
                    .build();
            service.streamChatCompletion(chatCompletionRequest)
                    .doOnError(err -> {
                        logger.error("SSE stream error, logid {}", logid, err);
                        service.shutdownExecutor();
                    })
                    .subscribeOn(Schedulers.io())
                    .observeOn(Schedulers.computation())
                    .doOnCancel(() -> {
                        logger.info("chat stream cancelled, logid {}", logid);
                        service.shutdownExecutor();
                    })
                    .subscribe(
                            msg -> emitter.send(SseEmitter.event().name("message").data(msg)),
                            err -> {
                                logger.error("SSE stream error, logid {}", logid, err);
                                emitter.completeWithError(err);
                            },
                            () -> {
                                logger.debug("stream completed, logid {}", logid);
                                emitter.complete();
                                service.shutdownExecutor(); // was leaked on the success path
                            });
        } catch (Exception ex) {
            logger.error("logid {} failed", logid, ex);
            emitter.send("500sse_error_zq:" + logid);
        }
        return emitter;
    }

    /**
     * Streams a chat completion to the caller's WebSocket session; each chunk
     * is pushed as a JSON command message. Blocks until the stream finishes.
     * Fixed defect: the OpenAiService executor was leaked on every call.
     */
    @PostMapping("/v1/chat/completions/poststream/{userno}")
    public void streamWebSocket(@RequestBody ChatCompletionRequest request, @PathVariable("userno") String userno) throws IOException {
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(60));
        try {
            service.streamChatCompletion(request)
                    .doOnError(err -> webSocketProcess.sendCmdMessage(500, err.toString(), "chat", userno))
                    .blockingForEach(msg -> webSocketProcess.sendCmdMessage(200, JSONObject.toJSONString(JSON.toJSON(msg)), "chat", userno));
        } finally {
            service.shutdownExecutor();
        }
    }

    /**
     * Closes the WebSocket stream of the given user.
     */
    @PostMapping("/v1/chat/completions/stopstream/{userno}")
    public Boolean streamStopWebSocket(@PathVariable("userno") String userno) throws IOException {
        return webSocketProcess.closeSocket(userno);
    }

    /**
     * Streams a chat answer as a raw text/event-stream response body.
     * Fixed defects: errors are logged instead of printStackTrace(), debug
     * System.out output removed, the no-op doOnComplete removed, and the
     * executor is shut down when the stream ends.
     */
    @GetMapping("/v1/chat/completions/stream/{message}")
    public ResponseEntity<StreamingResponseBody> stream1(@PathVariable("message") String message) {
        OpenAiService service = new OpenAiService(token);
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), message));
        StreamingResponseBody responseBody = response -> {
            ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .messages(messages)
                    .model(MODEL)
                    .temperature(1.0)
                    .frequencyPenalty(0.3)
                    .presencePenalty(0.3)
                    .build();
            try {
                service.streamChatCompletion(request)
                        .doOnError(e -> logger.error("chat stream error", e))
                        .blockingForEach(chunk -> {
                            response.write(chunk.toString().getBytes());
                            response.flush(); // push each chunk to the client immediately
                        });
            } finally {
                service.shutdownExecutor();
            }
        };
        return ResponseEntity.ok()
                .contentType(MediaType.TEXT_EVENT_STREAM)
                .body(responseBody);
    }

    /**
     * Streams a chat answer over SSE using a GET query parameter.
     * Fixed defects: an explicit error handler was added to subscribe(...) —
     * the old one-argument subscribe crashed with OnErrorNotImplementedException
     * on any upstream failure — and System.out debugging now uses the logger.
     */
    @GetMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public SseEmitter streamEvents(String message) {
        SseEmitter emitter = new SseEmitter();
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), message));
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(60));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(MODEL)
                .messages(messages)
                .build();
        service.streamChatCompletion(chatCompletionRequest)
                .doOnError(err -> service.shutdownExecutor())
                .doOnCancel(() -> {
                    // Clean up when the SSE connection is closed by the client.
                    logger.info("SSE connection closed");
                    service.shutdownExecutor();
                })
                .subscribe(
                        msg -> emitter.send(SseEmitter.event().name("message").data(msg)),
                        err -> {
                            logger.error("SSE stream error", err);
                            service.shutdownExecutor();
                            emitter.completeWithError(err);
                        },
                        () -> {
                            emitter.complete();
                            service.shutdownExecutor();
                        });
        return emitter;
    }

    /**
     * Creates an edit with the model specified in the request body.
     */
    @PostMapping("/v1/edits")
    EditResult createEdit(@RequestBody EditRequest request) {
        return new OpenAiService(token).createEdit(request);
    }

    /**
     * Creates a code edit; the request's model is forced to
     * code-davinci-edit-001 regardless of what the caller sent.
     *
     * @param apikey optional caller API key; "null"/null falls back to the server default
     */
    @PostMapping("/v1/edits/code")
    EditResult createEditCode(@RequestBody EditRequest request, @RequestHeader(name = "apikey") String apikey) {
        request.setModel("code-davinci-edit-001");
        return new OpenAiService(resolveApiKey(apikey), Duration.ofSeconds(60)).createEdit(request);
    }

    /**
     * Legacy engine-scoped edit endpoint; engine id kept for URL compatibility.
     */
    @PostMapping("/v1/engines/{engine_id}/edits")
    EditResult createEdit(@PathVariable("engine_id") String engineId, @RequestBody EditRequest request) {
        return new OpenAiService(token).createEdit(request);
    }

    /**
     * Creates embeddings for the given input.
     */
    @PostMapping("/v1/embeddings")
    EmbeddingResult createEmbeddings(@RequestBody EmbeddingRequest request) {
        return new OpenAiService(token).createEmbeddings(request);
    }

    /**
     * Legacy engine-scoped embeddings endpoint; engine id kept for URL compatibility.
     */
    @PostMapping("/v1/engines/{engine_id}/embeddings")
    EmbeddingResult createEmbeddings(@PathVariable("engine_id") String engineId, @RequestBody EmbeddingRequest request) {
        return new OpenAiService(token).createEmbeddings(request);
    }

    /**
     * Lists files uploaded to the OpenAI account.
     */
    @GetMapping("/v1/files")
    List<File> listFiles() {
        return new OpenAiService(token).listFiles();
    }

    /**
     * Deletes an uploaded file by id.
     */
    @DeleteMapping("/v1/files/{file_id}")
    DeleteResult deleteFile(@PathVariable("file_id") String fileId) {
        return new OpenAiService(token).deleteFile(fileId);
    }

    /**
     * Retrieves metadata of an uploaded file by id.
     */
    @GetMapping("/v1/files/{file_id}")
    File retrieveFile(@PathVariable("file_id") String fileId) {
        return new OpenAiService(token).retrieveFile(fileId);
    }

    /**
     * Starts a fine-tune job.
     */
    @PostMapping("/v1/fine-tunes")
    FineTuneResult createFineTune(@RequestBody FineTuneRequest request) {
        return new OpenAiService(token).createFineTune(request);
    }

    /**
     * Lists all fine-tune jobs.
     */
    @GetMapping("/v1/fine-tunes")
    List<FineTuneResult> listFineTunes() {
        return new OpenAiService(token).listFineTunes();
    }

    /**
     * Retrieves a fine-tune job by id.
     */
    @GetMapping("/v1/fine-tunes/{fine_tune_id}")
    FineTuneResult retrieveFineTune(@PathVariable("fine_tune_id") String fineTuneId) {
        return new OpenAiService(token).retrieveFineTune(fineTuneId);
    }

    /**
     * Cancels a running fine-tune job.
     */
    @PostMapping("/v1/fine-tunes/{fine_tune_id}/cancel")
    FineTuneResult cancelFineTune(@PathVariable("fine_tune_id") String fineTuneId) {
        return new OpenAiService(token).cancelFineTune(fineTuneId);
    }

    /**
     * Lists the events of a fine-tune job.
     */
    @GetMapping("/v1/fine-tunes/{fine_tune_id}/events")
    List<FineTuneEvent> listFineTuneEvents(@PathVariable("fine_tune_id") String fineTuneId) {
        return new OpenAiService(token).listFineTuneEvents(fineTuneId);
    }

    /**
     * Deletes a fine-tuned model.
     */
    @DeleteMapping("/v1/models/{fine_tune_id}")
    DeleteResult deleteFineTune(@PathVariable("fine_tune_id") String fineTuneId) {
        return new OpenAiService(token).deleteFineTune(fineTuneId);
    }

    /**
     * Generates images from a prompt.
     *
     * @param apikey optional caller API key; "null"/null falls back to the server default
     */
    @PostMapping("/v1/images/generations")
    ImageResult createImage(@RequestBody CreateImageRequest request, @RequestHeader(name = "apikey") String apikey) {
        return new OpenAiService(resolveApiKey(apikey)).createImage(request);
    }

    /**
     * Edits an image identified by server-side file paths.
     */
    @PostMapping("/v1/images/editsPath")
    ImageResult createImageEdit(@RequestBody CreateImageEditRequest createImageEditRequest, String imagePath, String maskPath) {
        return new OpenAiService(token).createImageEdit(createImageEditRequest, imagePath, maskPath);
    }

    /**
     * Edits an uploaded image using an uploaded mask.
     */
    @PostMapping("/v1/images/edits")
    ImageResult createImageEdit(@RequestBody CreateImageEditRequest createImageEditRequest, MultipartFile imagePath, MultipartFile maskPath) throws IOException {
        java.io.File image = toTempFile(imagePath);
        java.io.File mask = toTempFile(maskPath);
        return new OpenAiService(token).createImageEdit(createImageEditRequest, image, mask);
    }

    /**
     * Demo endpoint: edits an uploaded image with a fixed prompt and settings.
     */
    @PostMapping("/v1/images/editsTest")
    ImageResult createImageEdit(MultipartFile imagePath, MultipartFile maskPath) throws IOException {
        java.io.File image = toTempFile(imagePath);
        java.io.File mask = toTempFile(maskPath);
        CreateImageEditRequest createImageEditRequest = new CreateImageEditRequest();
        createImageEditRequest.setN(1);
        createImageEditRequest.setPrompt("阳光明媚的室内休息区，游泳池内有一只落水的黑猫");
        createImageEditRequest.setSize("256x256");
        createImageEditRequest.setResponseFormat("url");
        return new OpenAiService(token).createImageEdit(createImageEditRequest, image, mask);
    }

    /**
     * Creates image variations from a server-side file path.
     */
    @PostMapping("/v1/images/variationsPath")
    ImageResult createImageVariation(@RequestBody CreateImageVariationRequest createImageEditRequest, String imagePath) {
        return new OpenAiService(token).createImageVariation(createImageEditRequest, imagePath);
    }

    /**
     * Creates image variations from an uploaded image.
     */
    @PostMapping("/v1/images/variations")
    ImageResult createImageVariation(@RequestBody CreateImageVariationRequest createImageEditRequest, MultipartFile imagePath) throws IOException {
        return new OpenAiService(token).createImageVariation(createImageEditRequest, toTempFile(imagePath));
    }

    /**
     * Demo endpoint: creates a single 256x256 URL variation of an uploaded image.
     */
    @PostMapping("/v1/images/variationsTest")
    ImageResult createImageVariation(MultipartFile imagePath) throws IOException {
        CreateImageVariationRequest createImageVariationRequest = new CreateImageVariationRequest();
        createImageVariationRequest.setN(1);
        createImageVariationRequest.setSize("256x256");
        createImageVariationRequest.setResponseFormat("url");
        return new OpenAiService(token).createImageVariation(createImageVariationRequest, toTempFile(imagePath));
    }

    /**
     * Runs content moderation on the given input.
     */
    @PostMapping("/v1/moderations")
    ModerationResult createModeration(@RequestBody ModerationRequest request) {
        return new OpenAiService(token).createModeration(request);
    }

}
