package com.ideal.lx_mss.utils;

import com.google.gson.Gson;
import com.ideal.lx_mss.common.KafkaParam;
import com.ideal.lx_mss.common.RedisCache;
import com.ideal.lx_mss.common.TableInfo;
import com.ideal.lx_mss.common.entity.MssKafkaEntity;
import com.ideal.lx_mss.entity.CheckMssEntity;
import com.ideal.lx_mss.entity.MssDictEntity;
import com.ideal.lx_mss.entity.PullFileEntity;
import com.ideal.lx_mss.mapper.BasicMapper;
import com.ideal.lx_mss.mapper.MssLogsMapper;
import com.ideal.lx_mss.mapper.PullFileMapper;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.util.PropertiesUtil;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.util.*;

@Component
/**
 * Drains MSS records (cached in Redis by an upstream Kafka consumer) for one topic,
 * routes "RC"/"FC" records to the check table, "DICT" records to the dict table,
 * and batches everything else into data files on disk, recording each pull in
 * the pull-file table.
 *
 * NOTE(review): this bean is a Spring singleton but {@code currentBatch}/{@code count}
 * are mutable instance state — concurrent {@code pullMessages} calls would interleave
 * records. Confirm callers serialize access (the Redis key is read "ByThread",
 * which suggests per-thread isolation upstream — verify).
 */
public class KafkaPullConsumerService {

    // log4j is already on the classpath (see the PropertiesUtil import), so use a
    // real logger instead of printStackTrace().
    private static final Logger log = LogManager.getLogger(KafkaPullConsumerService.class);

    @Autowired
    private MssLogsMapper mssLogsMapper; // NOTE(review): unused in this class — confirm before removing
    @Autowired
    private BasicMapper basicMapper;
    @Autowired
    private TextUtils textUtils;
    @Autowired
    private RedisCache redisCache;
    @Autowired
    private PullFileMapper pullFileMapper;

    // Shared consumer created by setupConsumer(); static, so all bean usages share one
    // instance. KafkaConsumer is NOT thread-safe — setupConsumer must not be called
    // concurrently with consumption.
    private static Consumer<String, String> consumer;

    // Number of data records buffered before flushing to file.
    private static final int BATCH_SIZE = 100000;

    // Data records accumulated for the current file flush.
    private final List<String> currentBatch = new ArrayList<>();

    // Records currently buffered in currentBatch.
    private int count = 0;

    private final PropertiesUtil properties = new PropertiesUtil("application.properties");

    /**
     * Reads Kafka connection settings from application.properties.
     *
     * @return populated {@link KafkaParam}; values are null for missing keys
     */
    public KafkaParam getKafkaParam() {
        KafkaParam kafkaParam = new KafkaParam();
        PropertiesUtil proper = new PropertiesUtil("application.properties");
        kafkaParam.setBootstrapServers(proper.getStringProperty("spring.kafka.bootstrap-servers"));
        kafkaParam.setGroupId(proper.getStringProperty("spring.kafka.consumer.group-id"));
        kafkaParam.setJaasConfig(proper.getStringProperty("spring.kafka.properties.sasl.jaas.config"));
        // NOTE: KafkaParam's "serializer" setters actually carry DESERIALIZER class names
        // (consumer side) — the property keys below are the source of truth.
        kafkaParam.setKeySerializer(proper.getStringProperty("spring.kafka.consumer.key-deserializer"));
        kafkaParam.setValueSerializer(proper.getStringProperty("spring.kafka.consumer.value-deserializer"));
        kafkaParam.setSaslMechanism(proper.getStringProperty("spring.kafka.properties.sasl.mechanism"));
        kafkaParam.setSecurityProtocol(proper.getStringProperty("spring.kafka.properties.security.protocol"));
        return kafkaParam;
    }

    /**
     * Creates the shared consumer, subscribes to {@code topic}, and rewinds every
     * assigned partition to the earliest offset so a pull always starts from the
     * beginning of the topic.
     *
     * @param topic Kafka topic to subscribe to
     */
    public void setupConsumer(String topic) {
        KafkaParam kafkaParam = getKafkaParam();
        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaParam.getBootstrapServers());
        props.put("security.protocol", kafkaParam.getSecurityProtocol());
        props.put("sasl.mechanism", kafkaParam.getSaslMechanism());
        props.put("sasl.jaas.config", kafkaParam.getJaasConfig());
        props.put("group.id", kafkaParam.getGroupId());
        props.put("key.deserializer", kafkaParam.getKeySerializer());
        props.put("value.deserializer", kafkaParam.getValueSerializer());
        props.put("max.poll.records", 5000);

        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topic));
        // poll(0) forces partition assignment so seekToBeginning has something to act on.
        // (poll(long) is deprecated; poll(Duration.ZERO) is the modern equivalent.)
        consumer.poll(0);
        // seekToBeginning accepts the whole assignment at once — no per-partition loop needed.
        consumer.seekToBeginning(consumer.assignment());
    }

    /**
     * Drains the Redis-cached records for {@code topic}, dispatches them by
     * {@code datatype}, flushes buffered data records to {@code fileUrl}, and
     * records the pull.
     *
     * @param topic  topic whose cached records to drain (also the Redis key)
     * @param formId form identifier used in the file path and pull record
     * @return true on success, false if any record failed to process
     */
    public boolean pullMessages(String topic, String formId) {
        String basicPath = properties.getStringProperty("kafka.dataFileUrl");
        Gson gson = new Gson();
        // Local instance: SimpleDateFormat is not thread-safe, so it must not be a shared field.
        SimpleDateFormat sdf = new SimpleDateFormat("yyMMdd");
        String fileData = sdf.format(new Date());
        String fileUrl = basicPath + "/" + topic + "/" + fileData + "/" + formId;

        try {
            List<Map<String, Object>> list = redisCache.getCacheListByThread(topic);
            if (list != null && !list.isEmpty()) {
                for (Map<String, Object> record : list) {
                    JSONObject jsonObject = new JSONObject(record.get("value").toString());
                    String datatype = jsonObject.getString("datatype"); // record type discriminator
                    if ("RC".equals(datatype) || "FC".equals(datatype)) {
                        // Reconciliation/check record -> check table.
                        CheckMssEntity entity = gson.fromJson(jsonObject.toString(), CheckMssEntity.class);
                        entity.setDataloadStr(entity.getDataload().toString());
                        entity.setCreate_time(CommonUtil.getNowDate());
                        basicMapper.insertMssCheck(entity);
                    } else if ("DICT".equals(datatype)) {
                        // Dictionary record -> dict table.
                        MssDictEntity entity = gson.fromJson(jsonObject.toString(), MssDictEntity.class);
                        entity.setDataloadStr(entity.getDataload().toString());
                        entity.setCreate_time(CommonUtil.getNowDate());
                        basicMapper.insertMssDict(entity);
                    } else {
                        // Plain data record -> buffer, flushing to file every BATCH_SIZE records.
                        currentBatch.add(record.get("value").toString());
                        count++;
                        if (count >= BATCH_SIZE) {
                            textUtils.saveText(currentBatch, topic, formId, fileUrl, "info");
                            currentBatch.clear();
                            count = 0;
                        }
                    }
                }
            }
            if (!currentBatch.isEmpty()) {
                // Flush the partial final batch.
                textUtils.saveText(currentBatch, topic, formId, fileUrl, "info");
                // BUGFIX: the final batch was previously never cleared, so a later call
                // re-wrote these stale records into its own file.
                currentBatch.clear();
                count = 0;
            }
        } catch (Exception e) {
            log.error("pullMessages failed for topic={} formId={}", topic, formId, e);
            // Drop the partially-built batch so stale records cannot leak into the next call.
            currentBatch.clear();
            count = 0;
            return false;
        }

        // Record the pull ("0" = initial status — confirm against PullFileEntity usage).
        PullFileEntity pullFileEntity = new PullFileEntity(UUID.randomUUID().toString(), formId,
                fileUrl, CommonUtil.getNowDate(), topic, "0", "info");
        pullFileMapper.insert(pullFileEntity);
        return true;
    }
}
