package com.lili.collection.mq.plugins;

import cn.hutool.core.map.MapUtil;
import com.lili.collection.core.context.BatchRecordContext;
import com.lili.collection.core.context.StageContext;
import com.lili.collection.core.context.impl.GenericBatchRecordContext;
import com.lili.collection.core.enums.DataFormat;
import com.lili.collection.core.event.StageEvent;
import com.lili.collection.core.plugins.InputPlugin;
import com.lili.collection.core.runner.StageRunner;
import com.lili.collection.core.runner.result.ErrOutputData;
import com.lili.collection.core.runner.result.RecordInfo;
import com.lili.collection.core.runner.result.RunnerResult;
import com.lili.collection.core.utils.HandlerUtil;
import com.lili.collection.mq.config.KafkaInputConfig;
import com.lili.collection.mq.runner.AsyncInputRunner;
import com.lili.collection.mq.utils.MQUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Input plugin that pulls data from a Kafka message queue.
 *
 * @date 2023/3/17 17:26
 * @author lili
 */
@Slf4j
public class KafkaInputPlugin implements InputPlugin<KafkaInputConfig> {

    //topic名称
    private String topicName;
    //消费者
    private KafkaConsumer<String, String> consumer;
    //别名配置
    private Map<String, String> aliasMap;
    private RecordInfo allRecordInfo;
    private int readNum = 1;
    private KafkaInputConfig config;
    private Throwable throwable;
    private DataFormat dataFormat;

    @Override
    public StageContext execute(StageRunner runner) {
        AsyncInputRunner inputRunner = (AsyncInputRunner) runner;
        try {
            AtomicLong waitTime = new AtomicLong(System.currentTimeMillis());
            while (true) {
                if(inputRunner.getRunnerTransfer().isRunError()){
                    throw new IllegalStateException("异步运行错误!");
                }
                //每过100毫秒拉取一次消息
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    this.sendMQToQueue(record, inputRunner,this.config);
                    waitTime.set(System.currentTimeMillis());
                    if(inputRunner.getRunnerTransfer().isRunError()){
                        throw new IllegalStateException("异步运行错误!");
                    }
                    consumer.commitSync();
                }
                if(System.currentTimeMillis() - waitTime.get() > 10000){
                    break;
                }
            }
        } catch (Throwable throwable) {
            runner.getRunnerTransfer().setRunError(true);
            runner.getRunnerTransfer().interruptRunner();
            log.error("获取MQ消息失败!", throwable);
            this.throwable=throwable;
            ErrOutputData errOutputData = allRecordInfo.getErrOutputData();
            errOutputData.setErrorMessage(throwable.toString());
            errOutputData.setType("KAFKA");
        } finally {
            MQUtil.completeStageContext(inputRunner,this.throwable);
        }
        log.info("所有数据读取完成-->读取数据量：{}，异常数：{}，成功数：{}",
                allRecordInfo.getTotalRecordCount(),
                allRecordInfo.getErrorRecordCount(),
                allRecordInfo.getSuccessRecordCount());

        RunnerResult<String> runnerResult = runner.getRunnerResult();
        if(this.throwable==null){
            runnerResult.setTotalRecordCount(allRecordInfo.getTotalRecordCount());
            runnerResult.setSuccessRecordCount(allRecordInfo.getSuccessRecordCount());
            runnerResult.setErrorRecordCount(allRecordInfo.getErrorRecordCount());
        }else {
            runnerResult.setErrOutputData(allRecordInfo.getErrOutputData());
            runnerResult.setSuccessed(false);
        }
        if(inputRunner.getRunnerTransfer()!=null){
            runnerResult.decrementCount(inputRunner.getRunnerTransfer().getDeleteOffsetNum());
        }
        return null;
    }

    private void sendMQToQueue(ConsumerRecord<String, String> record,AsyncInputRunner inputRunner,KafkaInputConfig config) {
        //处理
        try {
            String recordData = record.value();
            if (StringUtils.isBlank(recordData)) {
                return;
            }
            BatchRecordContext recordContext = new GenericBatchRecordContext();
            RecordInfo tempRecordInfo = HandlerUtil.getRecordInfo(config, recordData, recordContext);
            if (tempRecordInfo == null) {
                throw new IllegalStateException("kafka无法解析出当前配置下数据，请核实!");
            }
            allRecordInfo.addTotalCount(tempRecordInfo.getTotalRecordCount());
            allRecordInfo.addErrorCount(tempRecordInfo.getErrorRecordCount());
            allRecordInfo.addSuccessCount(tempRecordInfo.getSuccessRecordCount());
            if(recordContext.getOutput().size()>0){
                if(inputRunner.getRunnerTransfer().isRunError()){
                    throw new IllegalStateException("异步运行错误!");
                }
                inputRunner.getPipeContext().getExchangeStageContext().put(StageRunner.StageType.Input, recordContext);
                inputRunner.triggerStageEvent(StageEvent.STAGE_CONTEXT_CHANGE, inputRunner.getStageType(), recordContext);
                recordContext.setStageContextState(StageContext.StageContextState.CONTINUE);
            }
            log.info("第{}次数据读取完成-->读取数据量：{}，异常数：{}，成功数：{}",
                    readNum++,
                    tempRecordInfo.getTotalRecordCount(),
                    tempRecordInfo.getErrorRecordCount(),
                    tempRecordInfo.getSuccessRecordCount());
        } catch (Throwable throwable) {
            log.info("MQ数据转换失败!", throwable);
            throw throwable;
        }
    }

    @Override
    public void init(KafkaInputConfig config) {
        this.config = config;
        Properties props = createKafkaProperties(config);
        this.topicName = config.getTopicName();
        if (StringUtils.isBlank(this.topicName)) {
            throw new IllegalArgumentException("kafka主题不能为空,初始化失败!");
        }
        this.dataFormat=config.getDataFormat();
        if(this.dataFormat==null){
            throw new IllegalArgumentException("转换数据格式不能为空！");
        }
        this.consumer = new KafkaConsumer<>(props);
        this.consumer.subscribe(Collections.singletonList(topicName));
        this.aliasMap = config.getAliasMap();
        if (MapUtils.isEmpty(this.aliasMap)) {
            throw new IllegalArgumentException("kafka列配置映射字段为空!");
        }
        this.allRecordInfo = new RecordInfo();
    }

    private Properties createKafkaProperties(KafkaInputConfig config) {
        Properties props = config.getProperties();
        if (MapUtil.isEmpty(props)) {
            throw new IllegalArgumentException("kafka队列配置为空!");
        }
        if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
            throw new IllegalArgumentException("kafka地址不存在,初始化失败!");
        }
        if (!props.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {
            props.put(ConsumerConfig.GROUP_ID_CONFIG, config.getGroupName());
        }
        if (!props.containsKey(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG)) {
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        }
        if (!props.containsKey(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG)) {
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        }
        if(props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)){
            props.remove(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG);
        }
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.putIfAbsent(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return props;
    }


    @Override
    public void dispose() {
        if (consumer != null) {
            consumer.close();
        }
    }
}