package com.bridgeintelligent.tag.bulkload.service.looklike;

import com.bridgeintelligent.tag.bulkload.api.BulkLoadConfig;
import com.bridgeintelligent.tag.bulkload.api.IndexNameInfo;
import com.bridgeintelligent.tag.bulkload.api.fileload.FileLoadModel;
import com.bridgeintelligent.tag.bulkload.api.fileload.Filter;
import com.bridgeintelligent.tag.bulkload.api.fileload.FilterGroup;
import com.bridgeintelligent.tag.bulkload.service.core.fileload.ConditionBuilder;
import com.bridgeintelligent.tag.bulkload.service.es.ESUtils;
import com.bridgeintelligent.tag.bulkload.service.es.query.EsQueryBuilder;
import com.bridgeintelligent.tag.constants.bulkload.LookLikeMqMsg;
import com.bridgeintelligent.tag.mapper.DimDataMapper;
import com.bridgeintelligent.tag.mapper.LookLikeMapper;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.common.Strings;
import org.elasticsearch.search.SearchHit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.stereotype.Service;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@EnableAsync
@Slf4j
@Service
public class AsyncWriteNewDataService {

    private static final String LOCAL_PATH = "/data/hdfs/upload";

    @Autowired
    private ConditionBuilder conditionBuilder;

    @Autowired
    private LookLikeMapper lookLikeMapper;

    @Autowired
    private DimDataMapper dimDataMapper;


    @Async
    public void lookalikeDateToHdfs(LookLikeMqMsg lookLikeMqMsg) throws Exception {
        boolean flag = true;
        try {
            //TODO 测试环境不能用,暂时不能测试(已测通)
            List<Filter> filters = new ArrayList<>();
            FilterGroup filterGroup = conditionBuilder.findFilterGroup("-", lookLikeMqMsg.getCustomerId(), filters).get(0);
            FileLoadModel fileLoad = createFileLoad(filterGroup);
            IndexNameInfo indexNameInfo = dimDataMapper.findIndexInfoById(lookLikeMqMsg.getIndexId());

            String fileName = "";
            if (lookLikeMqMsg.getType().equals("00")){
                fileName = lookLikeMqMsg.getEsIndex()+"_"+"seed.txt";
            }else if (lookLikeMqMsg.getType().equals("01")){
                fileName = lookLikeMqMsg.getEsIndex()+"_"+"black.txt";
            }else{
                fileName = lookLikeMqMsg.getEsIndex()+"_"+"white.txt";
            }
            long startTime = System.currentTimeMillis();    //读写操作开始
            writeHandler(fileName,fileLoad,indexNameInfo);
            long endTime = System.currentTimeMillis();    //读写操作结束

            log.info("读写操作运行时间：" + (endTime - startTime) + "ms");

        } catch (Exception e) {
            log.error("相似客群："+lookLikeMqMsg.getId()+"-文件类型："+lookLikeMqMsg.getType()+"-写入hdfs失败",e);
            updateStatus(lookLikeMqMsg.getType(),AnalysisEnum.ANALYSIS_FOUR.getCode(),ChoiceEnum.CHOICE_THIRD.getCode(),lookLikeMqMsg.getId(),lookLikeMqMsg.getEsIndex());
            flag =false;
        }
        if(flag){
            //如果成功只需改对应的数据文件状态为成功
            updateStatus(lookLikeMqMsg.getType(),"",ChoiceEnum.CHOICE_TWO.getCode(),lookLikeMqMsg.getId(),lookLikeMqMsg.getEsIndex());
        }
    }


    private FileLoadModel createFileLoad(FilterGroup filterGroup) {
        FileLoadModel fileLoadModel = new FileLoadModel();
        fileLoadModel.setQueryBuilder(EsQueryBuilder.buildFilter(filterGroup));
        fileLoadModel.setExcludes(Strings.EMPTY_ARRAY);
        fileLoadModel.setIncludes(new String[]{BulkLoadConfig.CUST_NO});
        return fileLoadModel;
    }


    private void updateStatus(String type,String isAnalysis,String status,String id,String esIndex){
        Map param = new HashMap<>();
        param.put("id",id);
        param.put("esIndex",esIndex);
        param.put("isAnalysis",isAnalysis);
        if (type.equals("00")){
            param.put("seedStatus",status);
            lookLikeMapper.updateDataStatus(param);
        }else if (type.equals("01")){
            param.put("blackSeedStatus",status);
            lookLikeMapper.updateDataStatus(param);
        }else{//02
            param.put("whiteSeedStatus",status);
            lookLikeMapper.updateDataStatus(param);
        }
    }

    private void writeHandler(String fileName,FileLoadModel fileLoad,IndexNameInfo indexNameInfo) throws IOException {
        File file = new File(LOCAL_PATH+"/"+fileName);
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        try (FileOutputStream fos = new FileOutputStream(file); BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(fos))) {
            ESUtils.scrollSearch(indexNameInfo.getDataDate(), fileLoad, hits -> {
                StringBuffer sb = new StringBuffer();
                int i = 0;
                for (SearchHit hit : hits) {
                    Object s = hit.getSourceAsMap().get(BulkLoadConfig.CUST_NO);
                    if (s == null) {
                        continue;
                    }
                    i++;
                    sb.append(s);
                    sb.append("\n");
                    if(i == 5000){
                        bufferedWriter.write(sb.toString());
                        sb.setLength(0);
                    }
                }
                bufferedWriter.write(sb.toString());
            });
        } catch (IOException e) {
            log.error("hdfs写文件"+fileName+"失败",e);
            throw e;
        }
    }
}
