package com.bridgeintelligent.tag.bulkload.service.staticcustomers;

import com.bridgeintelligent.tag.bulkload.api.BulkLoadConfig;
import com.bridgeintelligent.tag.bulkload.api.IndexNameInfo;
import com.bridgeintelligent.tag.bulkload.api.StaticCustomerStatus;
import com.bridgeintelligent.tag.bulkload.api.fileload.FileLoadModel;
import com.bridgeintelligent.tag.bulkload.api.fileload.Filter;
import com.bridgeintelligent.tag.bulkload.api.fileload.FilterGroup;
import com.bridgeintelligent.tag.bulkload.service.core.fileload.ConditionBuilder;
import com.bridgeintelligent.tag.bulkload.service.es.ESUtils;
import com.bridgeintelligent.tag.bulkload.service.es.HiveSqlHelper;
import com.bridgeintelligent.tag.bulkload.service.es.query.EsQueryBuilder;
import com.bridgeintelligent.tag.bulkload.service.looklike.HdfsService;
import com.bridgeintelligent.tag.bulkload.service.looklike.HdfsUtil;
import com.bridgeintelligent.tag.constants.bulkload.MQConfig;
import com.bridgeintelligent.tag.constants.bulkload.StaticMqMsg;
import com.bridgeintelligent.tag.mapper.DimDataMapper;
import com.bridgeintelligent.tag.mapper.LookLikeMapper;
import com.bridgeintelligent.tag.mapper.StaticCustomerMapper;
import com.bridgeintelligent.tag.utils.DateHelper;
import com.bridgeintelligent.tag.utils.FileUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.wayneleo.quickstart.QuickException;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.search.TotalHits;
import org.apache.rocketmq.spring.annotation.RocketMQMessageListener;
import org.apache.rocketmq.spring.core.RocketMQListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;

import static com.bridgeintelligent.tag.bulkload.api.exception.BulkLoadErrors.*;
import static com.bridgeintelligent.tag.constants.PublicConstant.*;
import static com.bridgeintelligent.tag.utils.DateHelper.PATTERN_DATE_8;
import static com.bridgeintelligent.tag.utils.FieldHandler.CUSTOMER_FLAG;

/**
 * Static customer-group marking handler ("静态客群标记处理类").
 *
 * <p>Consumes a {@link StaticMqMsg} from RocketMQ, rebuilds the persisted filter
 * group for the customer group, exports every matching customer from Elasticsearch
 * into a local text file, uploads that file to HDFS, and records the processing
 * status via {@link StaticCustomerMapper}.
 *
 * @author liwei
 * @since 2023/8/1
 */
@Slf4j
@Component
//@RocketMQMessageListener(topic = MQConfig.STATIC_CUSTOMERS_TOPIC, consumerGroup = MQConfig.STATIC_CUSTOMERS_GROUP,consumeThreadMax=1)
public class StaticCustomersOnlyHdfsListener implements RocketMQListener<String> {
    /** Flush the in-memory write buffer to disk after this many rows. */
    private static final int WRITE_BATCH_SIZE = 5000;

    private final ObjectMapper objectMapper;
    private final ConditionBuilder conditionBuilder;
    private final LookLikeMapper mapper;
    private final StaticCustomerMapper staticCustomerMapper;
    private final DimDataMapper dimDataMapper;
    private final HdfsService hdfsService;
    private final HiveSqlHelper helper;
    /** Local staging root for the export file before the HDFS upload. */
    @Value("${nas.remoteStorage:/data}")
    private String remoteStorage;

    public StaticCustomersOnlyHdfsListener(ObjectMapper objectMapper, ConditionBuilder conditionBuilder, LookLikeMapper mapper, StaticCustomerMapper staticCustomerMapper, DimDataMapper dimDataMapper, HdfsService hdfsService, HiveSqlHelper helper) {
        this.objectMapper = objectMapper;
        this.conditionBuilder = conditionBuilder;
        this.mapper = mapper;
        this.staticCustomerMapper = staticCustomerMapper;
        this.dimDataMapper = dimDataMapper;
        this.hdfsService = hdfsService;
        this.helper = helper;
    }

    /**
     * Entry point for one customer-group message: parse, export to HDFS, and
     * persist the resulting status (TWO = in progress, ONE = success, THREE = failed).
     */
    @Override
    public void onMessage(String s) {
        StaticMqMsg msg = null;
        try {
            msg = objectMapper.readValue(s, StaticMqMsg.class);
            log.info("=====>静态客群MarkOnlyHdfs接收消息：STATIC_CUSTOMERS_TOPIC:{}>", msg.toString());

            // Resolve the ES index backing this customer group.
            String indexId = mapper.findIndexIdByCustomerId(msg.getCustomerId());
            IndexNameInfo indexInfo = dimDataMapper.findIndexInfoById(indexId);

            // TWO: file write in progress.
            this.addStaticCustomer(msg.getCustomerId(), indexId, "", TWO);
            long count = getCount(msg, indexInfo.getDataDate());
            String hdfsPath = this.hdfsWork(msg, indexInfo.getDataDate()); // write file to HDFS
            log.info("=====>当前静态客群[{}]写入HDFS文件数据量为：{}", msg.getCustomerId(), count);
            // ONE: write succeeded. NOTE(review): `count` is only logged here and is
            // never passed to updStatus, despite the original comment claiming the
            // count is updated — confirm whether it should be persisted as well.
            staticCustomerMapper.updStatus(msg.getCustomerId(), ONE, hdfsPath);
        } catch (JsonProcessingException e) {
            // msg is still null when parsing fails, so log the raw payload and the
            // exception itself (the original logged the null msg and dropped `e`).
            log.error("=====>接收静态客群消息[{}]转换失败！！！", s, e);
        } catch (Exception e) {
            // THREE: write failed. Log via SLF4J so the stack trace reaches the
            // application log instead of stderr (was e.printStackTrace()).
            if (msg != null) {
                log.error("=====>当前静态客群[{}]写文件操作失败！！！", msg.getCustomerId(), e);
                staticCustomerMapper.updStatus(msg.getCustomerId(), THREE, null);
            } else {
                log.error("=====>当前静态客群[{}]写文件操作失败！！！", s, e);
            }
        }
    }

    /**
     * Builds the customer-group filter, exports the matching customers into a
     * local staging file, and uploads it to HDFS.
     *
     * @return the full HDFS path of the uploaded file
     */
    private String hdfsWork(StaticMqMsg msg, String dataDate) throws Exception {
        FilterGroup filterGroup = buildFilterGroup(msg.getCustomerId());
        FileLoadModel fileLoad = createFileLoad(EsQueryBuilder.buildFilter(filterGroup));
        // Target file name: <prefix><customerId>_<yyyyMMdd-style date>.txt
        String fileName = HDFS_STATIC_CUSTOMER_FILENAME_PREFIX + msg.getCustomerId() + "_" + DateHelper.currentDateTime(PATTERN_DATE_8) + TXT;
        log.info("=====>静态客群向HDFS写入的文件名：{}", fileName);
        log.info("=====>[{}]静态客群的HIVESQL为：{}", msg.getCustomerId(), helper.hander(filterGroup));
        // Local staging directory (created if missing).
        String localPath = remoteStorage + FILESEPERTOR + STATIC_LOCAL_DIR;
        FileUtil.mkdirss(localPath);
        String hdfsPath = hdfsService.getSaticPath(fileName);
        log.info("=====>静态客群向HDFS写入文件的全路径：{}", hdfsPath);

        String localFile = localPath + FILESEPERTOR + fileName;
        this.doWriteLocal(localFile, dataDate, fileLoad, msg.getCustomerId());
        this.uploadToHdfs(localFile, hdfsPath);
        return hdfsPath;
    }

    /**
     * Rebuilds the persisted filter group for a customer group.
     * Shared by {@link #hdfsWork} and {@link #getCount} (previously duplicated).
     */
    private FilterGroup buildFilterGroup(String customerId) {
        List<Filter> filters = new ArrayList<>();
        return conditionBuilder.findFilterGroupAndOverwriteDataDate("-", customerId, filters).get(0);
    }

    /** Creates the ES export model: fetch only cust_no and the customer-flag field. */
    private FileLoadModel createFileLoad(QueryBuilder queryBuilder) {
        FileLoadModel fileLoadModel = new FileLoadModel();
        fileLoadModel.setQueryBuilder(queryBuilder);
        fileLoadModel.setExcludes(Strings.EMPTY_ARRAY);
        fileLoadModel.setIncludes(new String[]{BulkLoadConfig.CUST_NO, CUSTOMER_FLAG});
        return fileLoadModel;
    }

    /**
     * Uploads the local staging file to HDFS.
     *
     * @throws com.wayneleo.quickstart.QuickException (ERROR_401004) on any upload failure
     */
    private void uploadToHdfs(String localPath, String remotePath) {
        long startTime = System.currentTimeMillis();
        try {
            HdfsUtil.uploadFile(HdfsUtil.getFileSystem(), localPath, remotePath);
        } catch (Exception e) {
            throw QuickException.normal(ERROR_401004.args(remotePath), e);
        }
        long endTime = System.currentTimeMillis();
        log.info("=====>静态客群上传hdfs文件[{}]成功，耗时：{}s", localPath.substring(localPath.lastIndexOf("/") + 1), (endTime - startTime) / 1000);
    }

    /**
     * Scroll-searches ES and appends one line per customer to {@code fileName}:
     * {@code custNo SERPTEROR_1 flag1 SERPTEROR_2 ... SERPTEROR_2 customerId}.
     * The buffer is flushed to disk every {@link #WRITE_BATCH_SIZE} rows.
     *
     * @throws com.wayneleo.quickstart.QuickException (ERROR_401005) on I/O failure
     */
    private void doWriteLocal(String fileName, String dataDate, FileLoadModel fileLoad, String customerId) {
        long startTime = System.currentTimeMillis(); // export start time
        // NOTE(review): FileWriter uses the platform default charset — confirm the
        // downstream consumer of this file expects that encoding.
        try (BufferedWriter out = new BufferedWriter(new FileWriter(fileName, true))) {
            ESUtils.scrollSearch(dataDate, fileLoad, hits -> {
                StringBuilder sb = new StringBuilder();
                int buffered = 0;
                for (SearchHit hit : hits) {
                    Object custNo = hit.getSourceAsMap().get(BulkLoadConfig.CUST_NO);
                    if (custNo == null) {
                        continue; // skip documents without a customer number
                    }
                    List<String> flags = (List<String>) hit.getSourceAsMap().get(CUSTOMER_FLAG);
                    // Guard: a hit without the customer-flag field previously caused an
                    // NPE in String.join that aborted the whole export; treat it as empty.
                    String joinedFlags = (flags == null) ? "" : String.join(SERPTEROR_2, flags);
                    sb.append(custNo);
                    sb.append(SERPTEROR_1);
                    sb.append(joinedFlags).append(SERPTEROR_2).append(customerId);
                    sb.append("\n");
                    // BUGFIX: the original used `if (i == 5000)` without resetting the
                    // counter, so the buffer was flushed at most once per scroll batch
                    // and grew unbounded past 5000 rows. Reset the counter each flush.
                    if (++buffered == WRITE_BATCH_SIZE) {
                        out.write(sb.toString());
                        sb.setLength(0);
                        buffered = 0;
                    }
                }
                out.write(sb.toString());
                out.flush();
            });
        } catch (IOException e) {
            throw QuickException.normal(ERROR_401005.args(fileName), e);
        }
        long endTime = System.currentTimeMillis();
        log.info("=====>静态客群本地写入文件[{}]成功，耗时：{}s", fileName.substring(fileName.lastIndexOf("/") + 1), (endTime - startTime) / 1000);
    }

    /**
     * Inserts a row into the customer-group history/status table.
     *
     * @param cid      customer-group id
     * @param indexId  backing index id
     * @param hdfsPath value stored via setHiveSql — NOTE(review): an HDFS path is
     *                 written into a field named "hiveSql"; confirm this is intended.
     * @param status   processing status code (TWO/ONE/THREE)
     */
    private void addStaticCustomer(String cid, String indexId, String hdfsPath, String status) {
        StaticCustomerStatus entity = new StaticCustomerStatus();
        entity.setCustomerId(cid).setHiveSql(hdfsPath).setIndexId(indexId).setStatus(status).setCreateTime(DateHelper.currentDateTime());
        staticCustomerMapper.add(entity);
    }

    /**
     * Counts the documents matching the customer group's filter in the given index
     * (size 0, trackTotalHits) without fetching any documents.
     */
    private long getCount(StaticMqMsg msg, String index) throws IOException {
        FilterGroup filterGroup = buildFilterGroup(msg.getCustomerId());
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        SearchRequest searchRequest = new SearchRequest(index);
        sourceBuilder.query(EsQueryBuilder.buildFilter(filterGroup)).trackTotalHits(true).size(0);
        searchRequest.source(sourceBuilder);
        SearchResponse searchResponse = ESUtils.getClient().search(searchRequest, RequestOptions.DEFAULT);
        if (searchResponse != null) {
            SearchHits hits = searchResponse.getHits();
            if (hits != null) {
                TotalHits totalHits = hits.getTotalHits();
                if (totalHits != null) {
                    return totalHits.value;
                }
            }
        }
        return 0L;
    }
}
