package com.sbtr.controller;

/*import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.sbtr.common.PageDto;
import com.sbtr.common.Response;
import com.sbtr.nlp.entity.*;
import com.sbtr.nlp.util.FastTextSentimentModel;
import com.sbtr.nlp.word2vec.WordEntry;
import com.sbtr.business.publishtask.api.IPlatformPredictTaskApi;
import com.sbtr.business.publishtask.dto.*;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.util.*;*/

import io.swagger.annotations.Api;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;

@ApiIgnore
@RestController
@RequestMapping("/platform/predict-task")
//@Api(tags = "NLP自然语言处理（废弃，不启用）")
public class NlpAndOcrController {

    /*@Value("${oss.config.samplePath}")
    private String samplePath;

    @Value("${oss.config.uploadPath}")
    private String uploadPath;

    @Value("${oss.config.httpPath}")
    private String httpPath;

    //    @Value("${oss.config.downloadPath}")
    private String downloadPath = "D:\\workspace\\download";

    public Segment segment;
    public Map<String, String> tag;
    public Map<String, String> nountag;
    public List<String> stopTag;


    @Value("${nlp.sentiment.fasttext}")
    private String modelPath;


    @Autowired
    FastTextSentimentModel fastTextSentimentModel;

    @Autowired
    com.sbtr.nlp.word2vec.WordToVecApi vec;


    @Autowired
    CommentAnalyzeUtils commentAnalyzeUtils;

    @Autowired
    private IPlatformPredictTaskApi platformPredictTaskApi;

    public static final String SPLIT_SENTNETCES = "[。!?？！]+";

    @ApiOperation(value = "上传", notes = "上传", httpMethod = "POST")
    @RequestMapping(value = "/upload", method = RequestMethod.POST, headers = "content-type=multipart/form-data")
    public Response<PlatformPredictTaskDto> uploadPredictFile(
            @RequestParam(value = "taskType") String taskType,
            @RequestParam(value = "taskName") String taskName,
            @RequestParam(value = "creater") String creater,
            @RequestParam(value = "languageType") String languageType ,
            @RequestParam(value = "File") MultipartFile file) {

        if (file.isEmpty() || file.getSize() == 0) {
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "文件不能为空");
        }

        String fileName = file.getOriginalFilename();
        String otherName = UUID.randomUUID().toString().replaceAll("-", "")
                + fileName.substring(fileName.lastIndexOf("."));
        String newName = otherName;
        PlatformPredictTaskDto publishPredictTask = new PlatformPredictTaskDto();

        try {
            InputStream inputStream = null;
            OutputStream os = null;

            inputStream = file.getInputStream();
            // 2、保存到临时文件
            // 1K的数据缓冲
            byte[] bs = new byte[1024];
            // 读取到的数据长度
            int len;
            // 输出的文件流保存到本地文件
            String bucketDir = uploadPath + "/" + taskType;
            File tempFile = new File(bucketDir);
            boolean results = tempFile.mkdirs();
            if (!tempFile.exists()) {
                tempFile.mkdirs();
            }
            os = new FileOutputStream(tempFile.getPath() + File.separator + newName);
            // 开始读取
            while ((len = inputStream.read(bs)) != -1) {
                os.write(bs, 0, len);
            }

            publishPredictTask.setCreateTime(new Date());
            publishPredictTask.setPredictFile(tempFile.getPath() + File.separator + newName);
            publishPredictTask.setTaskName(taskName);
            publishPredictTask.setTaskType(taskType);
            publishPredictTask.setLanguageType(languageType);
            if (taskType.equals("1001")){
                publishPredictTask.setModelName("Ocr");
            }
            publishPredictTask.setCreater(creater);
            publishPredictTask.setStatus(PlatformPredictTaskDto.PREDICT_NOT_START);
        } catch (Exception e) {
            e.printStackTrace();
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "上传失败");
        }
        publishPredictTask = platformPredictTaskApi.save(publishPredictTask);
        return Response.success(publishPredictTask);
    }

    @ApiOperation(value = "分页查询", notes = "分页查询", httpMethod = "POST")
    @PostMapping(value = "/search")
    public Response<PageDto<PlatformPredictTaskDto>> search(@RequestBody PlatformPredictSearchDto dto) {
        return Response.success(platformPredictTaskApi.search(dto));
    }

    @ApiOperation(value = "ocr预测单条", notes = "ocr预测单条", httpMethod = "POST")
    @RequestMapping(value = "/ocr-single", method = RequestMethod.POST, headers = "content-type=multipart/form-data")
    public Response<PredictOcrDto> ocrSingle(
            @RequestParam(value = "File") MultipartFile file) {

        String filePath;
        if (file.isEmpty() || file.getSize() == 0) {
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "文件不能为空");
        }
        if(platformPredictTaskApi.checkFileSize(file.getSize(),2,"M")){
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "文件大小不能超过2M");
        }
        String fileName = file.getOriginalFilename();
        String otherName = UUID.randomUUID().toString().replaceAll("-", "")
                + fileName.substring(fileName.lastIndexOf("."));
        try {
            InputStream inputStream = null;
            OutputStream os = null;
            inputStream = file.getInputStream();
            // 2、保存到临时文件
            // 1K的数据缓冲
            byte[] bs = new byte[1024];
            // 读取到的数据长度
            int len;
            // 输出的文件流保存到本地文件
            String bucketDir = uploadPath + "/" + "platform";
            File tempFile = new File(bucketDir);
            boolean results = tempFile.mkdirs();
            if (!tempFile.exists()) {
                tempFile.mkdirs();
            }
            filePath = tempFile.getPath() + File.separator + otherName;
            os = new FileOutputStream(filePath);
            // 开始读取
            while ((len = inputStream.read(bs)) != -1) {
                os.write(bs, 0, len);
            }
            os.flush();
            os.close();
            inputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "上传失败");
        }
//        String resultPath = "/opt/image_dectection/provider/file/upload/platform/输出.jpg"; //供前端测试用
        PredictOcrDto predictOcrDto = platformPredictTaskApi.startTaskSingle(filePath);
        return Response.success(predictOcrDto);
    }

    @ApiOperation(value = "ocr预测多条", notes = "ocr预测多条", httpMethod = "POST")
    @RequestMapping(value = "/{id}/ocr-multi", method = RequestMethod.POST)
    public Response<Boolean> ocrMulti(@PathVariable("id") Long id) {
        boolean flag = platformPredictTaskApi.ocrMulti(id);
        if(flag){
            return Response.success(flag);
        }else {
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "运行任务失败");
        }
    }

    @ApiOperation(value = "单条评论分析", notes = "单挑评论分析")
    @RequestMapping(value = "/onecommet", method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public Object getAnalyze(@RequestBody PlatformPredictOneDto dto)
            throws JSONException {

        String languageType=dto.getLanguageType();
       if(languageType.equals("chinese")) {
           try {
               JSONObject analyzeResult = null;
               TAResult t = getAnalyzes(dto.getComment());
               if (t != null) {
                   analyzeResult = (JSONObject) JSONObject.toJSON(t);
               }
               if (analyzeResult == null) {
                   return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "分析失败");
               } else {
                   return Response.success(analyzeResult);
               }
           } catch (Exception e) {
               e.printStackTrace();
               return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "分析失败");
           }
       }else{

           PredictNlpOneDto predictOcrDto = platformPredictTaskApi.startLanguageSingle(languageType,dto.getComment());
           //解析单条的数据格式
           JSONObject predictData = predictOcrDto.getPredictData();
           //分词

//          System.out.println(predictData.getString("Tokenization"));// [{you=代词}, {are=动词}, {very=副词}, {good=形容词}]

           List<TANounEntity> nounEntityList = new ArrayList<TANounEntity>();
          if(predictData.getString("EntityExtraction").length()==0){
//              TANounEntity taNounEntity = new TANounEntity();
//              nounEntityList.add(taNounEntity);
          }else {
              String Entity[] = predictData.getString("EntityExtraction").replaceAll("\\}", "")
                      .replaceAll("\\{", "")
                      .replaceAll("\\[", "").replaceAll("\\]", "")
                      .split(",");
              List<String> entitys = Arrays.asList(Entity);
              for (String e : Entity) {
                  if(e.contains("=")) {
                      TANounEntity taNounEntity = new TANounEntity();
                      e = e.replaceAll("\\}", "").replaceAll("\\{", "");
                      taNounEntity.setWord(e.substring(0, e.indexOf("=")));
                      taNounEntity.setType(e.substring(e.indexOf("=") + 1, e.length()));
                      nounEntityList.add(taNounEntity);
                  }
              }
          }

//           System.out.println(predictData.getString("Tokenization"));
           List<TASegmentEntity>taSegmentEntityList=new ArrayList<TASegmentEntity>();
           if(predictData.getString("Tokenization").length()==0){
//               TASegmentEntity taSegmentEntity = new TASegmentEntity();
//               taSegmentEntityList.add(taSegmentEntity);
           }else {
               String Token[] = predictData.getString("Tokenization").
                       replaceAll("\\}", "").replaceAll("\\{", "")
               .replaceAll("\\[", "").replaceAll("\\]", "")
                       .split(",");
               List<String> tokens = Arrays.asList(Token);
               for (String e : Token) {
                   if (e.contains("=")) {
                       TASegmentEntity taSegmentEntity = new TASegmentEntity();
                       e = e.replaceAll("\\}", "").replaceAll("\\{", "");

                      String s=e.substring(0, e.indexOf("="));
                      if(s.length()==0)
                      {
                          taSegmentEntity.setWord(",");
                      }else{
                          taSegmentEntity.setWord(e.substring(0, e.indexOf("=")));
                      }
                       taSegmentEntity.setType(e.substring(e.indexOf("=") + 1, e.length()));
                       taSegmentEntityList.add(taSegmentEntity);
                   }
               }
           }

           Set<TAFrequencyEntity>taFrequencyEntityList=new HashSet<TAFrequencyEntity>();
           if(predictData.getString("WordFrequency").length()==0){
//               TAFrequencyEntity taFrequencyEntity = new TAFrequencyEntity();
//               taFrequencyEntityList.add(taFrequencyEntity);
           }else{
           String wordFre[]=  predictData.getString("WordFrequency").replaceAll("\\}","").replaceAll("\\{","").split(",");
           List<String>words= Arrays.asList(wordFre);
           for(String e:words){
               if(e.contains("=")) {
                   TAFrequencyEntity taFrequencyEntity = new TAFrequencyEntity();
                   taFrequencyEntity.setWord(e.substring(0, e.indexOf("=")));
                   taFrequencyEntity.setCount(Integer.parseInt(e.substring(e.indexOf("=") + 1, e.length())));
                   taFrequencyEntityList.add(taFrequencyEntity);
               }
           }
           }

//           System.out.println(predictData.getString("KeyWord"));//关键词
           List<String> margerSentences=new ArrayList<>();
           if(predictData.getString("KeyWord").length()!=0) {
               String keyWord=predictData.getString("KeyWord").replaceAll("\\[", "")
                       .replaceAll("\\]", "")
                       .replaceAll("\\''", "");

               String keywords[]=keyWord.split(",");
               for(String e:keywords){
                   margerSentences.add( e);
               }
           }

           List<String>simWord=new ArrayList<String>();//近义词
           if(predictData.getString("WordEmbedding").length()!=0) {
//               System.out.println(predictData.getString("WordEmbedding"));
              String wordAll= predictData.getString("WordEmbedding")
                       .replaceAll("\\[", "")
                       .replaceAll("\\]", "")
                       .replaceAll("\\''", "");
               String words[]=wordAll.split(",");
               for(String e:words){
                   simWord.add(e);
               }
           }
           List<TASentimentEntity> sentimentList=new ArrayList<TASentimentEntity>();
           if(predictData.getString("PolarityAnalyze").length()==0){
//               TASentimentEntity entity=new TASentimentEntity();
//               sentimentList.add(entity);
           }
           else {
               String polarityAnalyze[] = predictData.getString("PolarityAnalyze").replaceAll("\\}", "").replaceAll("\\{", "").split(",");
               List<String> polaritys = Arrays.asList(polarityAnalyze);
               for (String e : polaritys) {
                   if(e.contains("=")) {
                       TASentimentEntity entity = new TASentimentEntity();
                       entity.setSubClause(e.substring(0, e.indexOf("=")));
                       entity.setSentiment(e.substring(e.indexOf("=") + 1, e.length()));
                       sentimentList.add(entity);
                   }
               }
           }

           TAResult taResult=new TAResult ();
           taResult.setKeySentenceList(margerSentences);
           taResult.setFrequencySet(taFrequencyEntityList);
           taResult.setSegmentSet(taSegmentEntityList);
           taResult.setNounSet(nounEntityList);
           taResult.setSentimentList(sentimentList);
           taResult.setSimWord(simWord);
           return Response.success(taResult);
       }
    }

    @ApiOperation(value = "多条评论分析", notes = "多条评论分析")
    @RequestMapping(value = "/muticommet", method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public Object muticommet(@RequestBody PlatformPredictTaskDto dto)
            throws JSONException {

        if (dto.getPredictFile() == null) {
            return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "分析文件不能为空");
        }

        String languageType=dto.getLanguageType();
        if(languageType.equals("chinese")) {
            try {
                JSONObject analyzeResult = null;
                PlatformPredictTaskDto dtos = getAnalyzesByType(dto);
                if (dtos != null) {
                    return Response.success(dtos);
                }
                if (analyzeResult == null) {
                    return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "分析失败");
                } else {
                    return Response.success(dtos);
                }
            } catch (Exception e) {
                e.printStackTrace();
                return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "分析失败");
            }
        }//中文
        else{
            boolean flag = platformPredictTaskApi.languageMulti(dto);
            if(flag){
                return Response.success(flag);
            }else {
                return Response.error(HttpStatus.INTERNAL_SERVER_ERROR.value(), "运行任务失败");
            }
        }
    }

    public PlatformPredictTaskDto getAnalyzesByType(PlatformPredictTaskDto dto) {

        String otherName = UUID.randomUUID().toString().replaceAll("-", "")
                + ".txt";
        String taskType = dto.getTaskType();
        File resultFile = null;
        try {
            File tempFile = new File(downloadPath + "/" + "platform/" + taskType);
            boolean results = tempFile.mkdirs();
            if (!tempFile.exists()) {
                tempFile.mkdirs();
            }
            resultFile = new File(downloadPath + "/" + "platform/" + taskType + "/" + otherName);

        } catch (Exception e) {
            e.printStackTrace();
        }
        FileInputStream fis = null;
        InputStreamReader isr = null;
        BufferedReader br = null; //
        Writer out;

        try {
            String str = "";
            String frequency="";
            fis = new FileInputStream(dto.getPredictFile());// FileInputStream
            isr = new InputStreamReader(fis);// InputStreamReader 是字节流通向字符流的桥梁,
            br = new BufferedReader(isr);// 封装了一个new InputStreamReader的对象
            out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(resultFile, true), "utf-8"), 10240);
            dto.setStatus(PlatformPredictTaskDto.PREDICT_RUNNING);

                while ((str = br.readLine()) != null) {
                    if (taskType.equals("frequency")) {
                        frequency+=str+";";
                    }
                else if (taskType.equals("segment")) {
                    List<Term> segmentResult = commentAnalyzeUtils.segments(str);
                    String result = "";
                    for (Term e : segmentResult) {
                        result += e.word + ":" + e.nature + "\t";
                    }
                    out.write(str + '\t' + result + '\n');
                } else if (taskType.equals("keyword")) {
                    List<String> margerSentences = commentAnalyzeUtils.extractKeyword(str);
                    String result = "";
                    for (String e : margerSentences) {
                        result += e + "\t";
                    }
                    out.write(str + '\t' + result + '\n');
                } else if (taskType.equals("sentiment")) {
                    TASentimentEntity entity = fastTextSentimentModel.emotionClassifyReturnTA(str);
                    out.write(str + '\t' + entity.getSentiment().toString() + '\n');
                }
                else if (taskType.equals("ner")) {
                        List<TANounEntity>  results = commentAnalyzeUtils.getEntitys(str);
                        String strs="";
                        for(TANounEntity entity:results){
                            strs+=entity.getType()+":"+entity.getWord()+"\t";
                        }
                        out.write(str + '\t' + strs + '\n');
                }
//                近义词识别
                    else if (taskType.equals("simWord")) {
                        String strs="";
                        Set<WordEntry> result=vec.getSimilarWordsSet(str);
                        for(WordEntry e:result){
                            strs+=(e.name)+ '\t';
                        }
                        out.write(str + ':' + strs + '\n');
                    }
            }
                if(taskType.equals("frequency")){
                    List<FrequencyEntity> Frequency = commentAnalyzeUtils.getFreqWords(frequency);
                    String result = "";
                    for (FrequencyEntity e : Frequency) {
                        result += e.getWord() + "\t" + e.getCount() + "\n";
                    }
                    out.write( result + '\n');
                }
            out.flush();
            out.close();
            dto.setEndTime(new Date());
            dto.setStatus(PlatformPredictTaskDto.PREDICT_COMPLETE);
            dto.setResultFile(resultFile.getAbsolutePath());
        } catch (IOException e) {
            e.printStackTrace();
            dto.setStatus(PlatformPredictTaskDto.PREDICT_ERROR);
            dto.setEndTime(new Date());
        } finally {
            try {
                br.close();
                isr.close();
                fis.close();

            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        platformPredictTaskApi.save(dto);
        return dto;
    }

    public TAResult getAnalyzes( String comment) {
        TAResult t = commentAnalyzeUtils.taAnalyze(comment);
        //情感分析
        List<String> sentences = splitSentence(comment);
        List<TASentimentEntity> sentimentList = new ArrayList<TASentimentEntity>();
        for (int i = 0; i < sentences.size(); i++) {
            try {
                String sentence = sentences.get(i);
                TASentimentEntity entity = fastTextSentimentModel.emotionClassifyReturnTA(sentence);
                entity.setOrderNum(i + 1);
                sentimentList.add(entity);
            } catch (Exception e) {
                // TODO Auto-generated catch bloc
                e.printStackTrace();
                continue;
            }
        }
        t.setSentimentList(sentimentList);
       //近义词识别
        List<Term> segmentResult = commentAnalyzeUtils.segments(comment);
        List<String>simWord=new ArrayList<>();
        if(segmentResult.size()!=1){
            System.out.println("-----");
            t.setSimWord(simWord);
        }else{
            //调用近义词识别python接口
//            System.out.println("+++++");
                    Set<WordEntry>  result=vec.getSimilarWordsSet(comment);
            for(WordEntry e:result){
                simWord.add(e.name);
            }
            t.setSimWord(simWord);
        }
        return t;
    }

    public static List<String> splitSentence(String text) {
        List<String> sentences = Arrays.asList(text.split(SPLIT_SENTNETCES));
        return sentences;
    }

    @ApiOperation(value = "根据id查找获取结果文件", notes = "根据id查找获取结果文件", httpMethod = "GET")
    @RequestMapping(value = "/{id}/downLoadResult", method = RequestMethod.GET)
    @ResponseBody
    public void downLoadResult(@PathVariable("id") Long id,
                               HttpServletResponse resp) {
        OutputStream out;
        InputStream in;
        File file;
        try {
            PlatformPredictTaskDto task = platformPredictTaskApi.findById(id);
            String objectName = task.getResultFile().substring(task.getResultFile().lastIndexOf("/") + 1, task.getResultFile().length());
            file = new File(task.getResultFile());
            in = new FileInputStream(file);
            // 设置响应正文的MIME类型
//            resp.setContentType("Content-Disposition;charset=UTF-8");
            resp.setHeader("Content-Disposition", "attachment;" + " filename=" + objectName);
            // 把本地文件发送给客户端
            out = resp.getOutputStream();
            int byteRead = 0;
            byte[] buffer = new byte[512];
            while ((byteRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, byteRead);
            }
            in.close();
            out.flush();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @ApiOperation(value = "根据id下载源文件", notes = "根据id下载源文件", httpMethod = "GET")
    @RequestMapping(value = "/{id}/downLoadInpuFile", method = RequestMethod.GET)
    @ResponseBody
    public void downLoadInpuFile(@PathVariable("id") Long id,
                               HttpServletResponse resp) {
        OutputStream out;
        InputStream in;
        File file;
        try {
            PlatformPredictTaskDto task = platformPredictTaskApi.findById(id);
            String objectName = task.getPredictFile().substring(task.getPredictFile().lastIndexOf("/") + 1, task.getPredictFile().length());
            file = new File(task.getPredictFile());
            in = new FileInputStream(file);
            // 设置响应正文的MIME类型
//            resp.setContentType("Content-Disposition;charset=UTF-8");
            resp.setHeader("Content-Disposition", "attachment;" + " filename=" + objectName);
            // 把本地文件发送给客户端
            out = resp.getOutputStream();
            int byteRead = 0;
            byte[] buffer = new byte[512];
            while ((byteRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, byteRead);
            }
            in.close();
            out.flush();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @ApiOperation(value = "获取文件模板", notes = "获取文件模板", httpMethod = "POST")
    @RequestMapping(value = "/getFile", method = RequestMethod.POST)
    @ResponseBody
    public void getFile(@RequestParam("projectType") String projectType,
                       HttpServletResponse resp) {
        OutputStream out;
        // 读取本地文件的输入流
        InputStream in;
        if(projectType.equals("ocr_detection")) {
            try {
                File file = new File(samplePath+File.separator +"ocr.zip");
                in = new FileInputStream(file);
                // 设置响应正文的MIME类型
//                resp.setContentType("Content-Disposition;charset=UTF-8");
                resp.setHeader("Content-Disposition", "attachment;" + " filename=" + "ocr.zip");
                // 把本地文件发送给客户端
                out = resp.getOutputStream();
                int byteRead = 0;
                byte[] buffer = new byte[512];
                while ((byteRead = in.read(buffer)) != -1) {
                    out.write(buffer, 0, byteRead);
                }
                in.close();
                out.flush();
                out.close();
            } catch(Exception e) {
                e.printStackTrace();
            }
        }else if(projectType.equals("nlp")){
            try {
                File file = new File(samplePath+File.separator +"nlp.zip");
                in = new FileInputStream(file);
                // 设置响应正文的MIME类型
//                resp.setContentType("Content-Disposition;charset=UTF-8");
                resp.setHeader("Content-Disposition", "attachment;" + " filename=" + "nlp.zip");
                // 把本地文件发送给客户端
                out = resp.getOutputStream();
                int byteRead = 0;
                byte[] buffer = new byte[512];
                while ((byteRead = in.read(buffer)) != -1) {
                    out.write(buffer, 0, byteRead);
                }
                in.close();
                out.flush();
                out.close();
            }catch(Exception e) {
                e.printStackTrace();
            }
        }
    }*/

}