package cn.seecoder.ai.controller;

import cn.seecoder.ai.enums.LearningTypeEnum;
import cn.seecoder.ai.model.vo.ResultVO;
import cn.seecoder.ai.utils.HdfsHelper;
import cn.seecoder.ai.utils.TestSpark;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;


import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;

/**
 * Smoke-test endpoints only: verifies enum serialization, Spark wiring and
 * HDFS upload/download plumbing. Not intended for production use.
 *
 * @author fanyanpeng
 * @date 2023/4/6 0:24
 */
@Slf4j
@Api
@RestController
@RequestMapping("${apiPrefix}/hello")
public class HelloController {

    // Spark smoke-test helper; readHdfs() reads a file via Spark from HDFS.
    private final TestSpark testSpark;

    // HDFS helper used for upload and (streamed) download of files.
    private final HdfsHelper hdfsHelper;

    /**
     * Constructor injection (preferred over field {@code @Autowired}):
     * dependencies are final and explicit, and the controller can be
     * instantiated in tests without a Spring context. Spring auto-wires a
     * single constructor, so no annotation is required.
     *
     * @param testSpark  Spark test helper
     * @param hdfsHelper HDFS file helper
     */
    public HelloController(TestSpark testSpark, HdfsHelper hdfsHelper) {
        this.testSpark = testSpark;
        this.hdfsHelper = hdfsHelper;
    }

    /**
     * Liveness check.
     *
     * @return the literal string {@code "Hello"}
     */
    @ApiOperation("Liveness check")
    @GetMapping("")
    public String sayHello() {
        return "Hello";
    }

    /**
     * Lists the configuration info of all supported learning types.
     *
     * @author fanyanpeng
     * @date 2023/4/11 0:15
     * @return wrapped list of the supported {@link LearningTypeEnum} values
     */
    @ApiOperation("List learning-type configuration info")
    @GetMapping("/enum")
    public ResultVO<ArrayList<LearningTypeEnum>> getLearningTypeEnums() {
        ArrayList<LearningTypeEnum> arrayList = new ArrayList<>();
        arrayList.add(LearningTypeEnum.SUPERVISED_LEARNING);
        arrayList.add(LearningTypeEnum.UNSUPERVISED_LEARNING);
        // Parameterized SLF4J call; output text is identical to toString().
        log.info("{}", arrayList);
        return ResultVO.buildSuccess(arrayList);
    }

    /**
     * Echoes the configuration info of a single learning type. Spring converts
     * the request parameter to the enum constant by name.
     *
     * @author fanyanpeng
     * @date 2023/4/11 0:16
     * @param enumName name of the enum constant to look up
     * @return wrapped {@link LearningTypeEnum} matching {@code enumName}
     */
    @ApiOperation("Get configuration info for one learning type")
    @PostMapping("/enum")
    public ResultVO<LearningTypeEnum> getLearningTypeEnum(@RequestParam LearningTypeEnum enumName) {
        return ResultVO.buildSuccess(enumName);
    }

    /**
     * Simple Spark read test: reads a fixed file from HDFS.
     *
     * @author fanyanpeng
     * @date 2023/4/11 0:17
     * @return wrapped file content returned by {@code TestSpark#readHdfs()}
     */
    @ApiOperation("Spark smoke test: read a file from HDFS")
    @GetMapping("/spark")
    public ResultVO<String> readFile() {
        return ResultVO.buildSuccess(testSpark.readHdfs());
    }

    /**
     * Uploads a file to HDFS (streamed, no temp copy on local disk).
     *
     * @author fanyanpeng
     * @date 2023/4/11 0:18
     * @param fileName target file name in HDFS
     * @param file     the multipart payload
     * @return wrapped URI of the stored file
     */
    @ApiOperation("Upload a file to HDFS")
    @PostMapping("/upload")
    public ResultVO<String> uploadFile(@RequestParam String fileName, @RequestParam MultipartFile file) {
        String fileUri = hdfsHelper.saveMultipartFile(file, fileName);
        return ResultVO.buildSuccess(fileUri);
    }

    /**
     * Downloads a file by its HDFS URI; the content is written directly to the
     * servlet response, hence the {@code void} return.
     *
     * @param fileUri             URI of the file in HDFS
     * @param httpServletResponse response the file bytes are streamed into
     */
    @ApiOperation("Download a file by its URI")
    @GetMapping("/download")
    public void downloadFile(@RequestParam @ApiParam("文件uri") String fileUri,
                             @ApiParam("请求自带") HttpServletResponse httpServletResponse) {
        // Parameterized SLF4J call; emitted text is identical to the old concatenation.
        log.info("fileUri:{}", fileUri);
        hdfsHelper.downloadFileByFileUri(fileUri, httpServletResponse);
    }

    /**
     * Downloads the single Spark-generated output file (e.g. part-*) of the
     * given format from an HDFS output folder.
     *
     * @param fileFolderUri       URI of the Spark output folder in HDFS
     * @param format              file format / extension selected by the path variable
     * @param httpServletResponse response the file bytes are streamed into
     */
    @ApiOperation("Download the Spark-generated file of a given format from an output folder")
    @GetMapping("/download/{format}")
    public void downloadFile(@RequestParam String fileFolderUri,
                             @PathVariable String format,
                             HttpServletResponse httpServletResponse) {
        hdfsHelper.downloadSingleFileGeneratedBySparkFromFileFolderUri(fileFolderUri, format, httpServletResponse);
    }

}
