package com.bridgeintelligent.tag.bulkload.service.staticcustomers;

import com.bridgeintelligent.tag.bulkload.api.BulkLoadConfig;
import com.bridgeintelligent.tag.bulkload.api.IndexNameInfo;
import com.bridgeintelligent.tag.bulkload.api.fileload.FileLoadModel;
import com.bridgeintelligent.tag.bulkload.service.es.ESUtils;
import com.bridgeintelligent.tag.bulkload.service.looklike.HdfsService;
import com.bridgeintelligent.tag.bulkload.service.looklike.HdfsUtil;
import com.bridgeintelligent.tag.constants.bulkload.MQConfig;
import com.bridgeintelligent.tag.constants.bulkload.StaticMqMsg;
import com.bridgeintelligent.tag.mapper.DimDataMapper;
import com.bridgeintelligent.tag.mapper.LookLikeMapper;
import com.bridgeintelligent.tag.mapper.StaticCustomerMapper;
import com.bridgeintelligent.tag.utils.DateHelper;
import com.bridgeintelligent.tag.utils.FieldHandler;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.wayneleo.quickstart.QuickException;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.rocketmq.spring.annotation.RocketMQMessageListener;
import org.apache.rocketmq.spring.core.RocketMQListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static com.bridgeintelligent.tag.bulkload.api.exception.BulkLoadErrors.ERROR_401004;
import static com.bridgeintelligent.tag.constants.PublicConstant.*;
import static com.bridgeintelligent.tag.utils.DateHelper.PATTERN_DATE_8;

/**
 * @Author：liwei
 * @Date：2023/8/17 14:52
 * @Desc： 静态客群写HDFS文件处理类
 */
@AllArgsConstructor
@Slf4j
@Component
@RocketMQMessageListener(topic = MQConfig.STATIC_CUSTOMERS_HDFS_TOPIC, consumerGroup = MQConfig.STATIC_CUSTOMERS_HDFS_GROUP,consumeThreadMax=1)
public class StaticCustomersHdfsListener implements RocketMQListener<String> {
    private ObjectMapper objectMapper;
    private StaticCustomerMapper staticCustomerMapper;
    private HdfsService hdfsService;
    private LookLikeMapper mapper;
    private DimDataMapper dimDataMapper;


    @Override
    @Transactional(rollbackFor = Exception.class)
    public void onMessage(String s) {
        StaticMqMsg msg = null;
        IndexNameInfo indexInfo = null;
        String filePath = "";
        try {
            msg = objectMapper.readValue(s, StaticMqMsg.class);
            log.info("=====静态客群Hdfs接收消息：STATIC_CUSTOMERS_HDFS_TOPIC:{}>",msg.toString());
            //获取当前客群下的索引信息
            indexInfo = dimDataMapper.findIndexInfoById(mapper.findIndexIdByCustomerId(msg.getCustomerId()));
            staticCustomerMapper.updStatus(msg.getCustomerId(),TWO,filePath);//任务正在执行
            filePath = this.writeToHdfs(msg,indexInfo.getDataDate());
            staticCustomerMapper.updStatus(msg.getCustomerId(),ONE,filePath);//任务成功
        } catch (JsonProcessingException e) {
            log.error("=====>接收静态客群写入Hdfs消息[{}]转换失败！！！",msg);
            e.printStackTrace();
        }catch (Exception e){
            log.error("=====>当前静态客群[{}]写入Hdfs文件操作失败！！！",msg.getCustomerId());
            //客群历史规则表更新数据--任务失败
            staticCustomerMapper.updStatus(msg.getCustomerId(),THREE,filePath);//任务失败
            e.printStackTrace();
        }
    }
    //向hdfs服务器写入文件备份保存
    private String writeToHdfs(StaticMqMsg msg,String dataDate) throws Exception {
        FileLoadModel fileLoad = createFileLoad(QueryBuilders.termsQuery(FieldHandler.CUSTOMER_FLAG,msg.getCustomerId()));
        //文件名
        String fileName = HDFS_STATIC_CUSTOMER_FILENAME_PREFIX+msg.getCustomerId()+"_"+DateHelper.currentDateTime(PATTERN_DATE_8)+TXT;
        log.info("=====>静态客群标记后向HDFS写入的文件名：{}",fileName);
        //完整的文件路径
        String filePath = hdfsService.getSaticPath(fileName);
        log.info("=====>静态客群标记后向HDFS写入文件的全路径：{}",filePath);
        //写文件
        this.doWrite(filePath,dataDate,fileLoad,msg.getCustomerId());
        return filePath;

    }
    private FileLoadModel createFileLoad(QueryBuilder queryBuilder) {
        FileLoadModel fileLoadModel = new FileLoadModel();
        fileLoadModel.setQueryBuilder(queryBuilder);
        fileLoadModel.setExcludes(Strings.EMPTY_ARRAY);
        fileLoadModel.setIncludes(new String[]{BulkLoadConfig.CUST_NO});
        return fileLoadModel;
    }
    private void doWrite(String fileName,String dataDate,FileLoadModel fileLoad,String customerId) throws Exception {
            FileSystem fileSystem = HdfsUtil.getFileSystem();
            long startTime = System.currentTimeMillis();    //获取ES查询开始时间
            try (FSDataOutputStream out = fileSystem.create( new Path( fileName ) )){
                ESUtils.scrollSearch(dataDate, fileLoad, hits -> {
                    StringBuffer sb = new StringBuffer();
                    int i = 0;
                    for (SearchHit hit : hits) {
                        Object s = hit.getSourceAsMap().get(BulkLoadConfig.CUST_NO);
                        if (s == null) {
                            continue;
                        }
                        i++;
                        sb.append(s);
                        sb.append(SERPTEROR_1);
                        sb.append(customerId);
                        sb.append("\n");
                        if(i == 5000){
                            out.writeBytes(sb.toString());
                            sb.setLength(0);
                        }
                    }
                    out.writeBytes(sb.toString());
                    out.hflush();
                });
            }catch (IOException e) {
                throw QuickException.normal(ERROR_401004.args(fileName),e);
            }
            long endTime = System.currentTimeMillis();
            log.info("=====>静态客群标记后输出hdfs文件[{}]成功，耗时：{}s",fileName.substring(fileName.lastIndexOf("/")+1),(endTime - startTime)/1000);
    }
}
