package com.xdja.kafka.hdfs.sink.handle;

import com.alibaba.fastjson.JSON;
import com.xdja.kafka.hdfs.sink.bean.UserActiveBean;
import com.xdja.kafka.hdfs.sink.writer.manage.AbstractWriterManage;
import org.apache.kafka.connect.sink.SinkRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;

import java.io.IOException;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;

public abstract class AbstractSinkHandle<T extends AbstractWriterManage> {
    private static final Logger log = LoggerFactory.getLogger(AbstractSinkHandle.class);

    /** Day-bucket pattern; DateTimeFormatter is immutable and thread-safe, so cache it once. */
    private static final DateTimeFormatter DAY_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    /**
     * Records read from the Kafka topic, consumed by {@link #doHandle()}.
     */
    protected Collection<SinkRecord> sinkRecords;

    /**
     * Groups the current batch of records by day and writes each day's group to HDFS.
     *
     * @throws Throwable if a record cannot be parsed or the HDFS write fails
     */
    public void doHandle() throws Throwable {
        /*1. Group the records by the day they belong to.*/
        Map<String, List<UserActiveBean>> groupMap = this.groupByDay(sinkRecords);
        if (CollectionUtils.isEmpty(groupMap)) {
            log.info("没有要处理的记录");
            return;
        }
        /*2. Persist each day's records into Hadoop.*/
        for (Map.Entry<String, List<UserActiveBean>> entry : groupMap.entrySet()) {
            this.writeToHdfs(entry.getKey(), entry.getValue());
        }
    }

    /**
     * Groups records by the day they belong to (a single batch may contain data
     * from several days).
     *
     * @param sinkRecords records read from Kafka; may be null or empty
     * @return map of "yyyy-MM-dd" date string to that day's records; never null
     * @throws Throwable if a record's value cannot be parsed into a {@link UserActiveBean}
     */
    protected Map<String, List<UserActiveBean>> groupByDay(Collection<SinkRecord> sinkRecords) throws Throwable {
        Map<String, List<UserActiveBean>> jsonMap = new HashMap<>();
        if (CollectionUtils.isEmpty(sinkRecords)) {
            return jsonMap;
        }
        for (SinkRecord sinkRecord : sinkRecords) {
            // Which day this record belongs to, as "yyyy-MM-dd".
            String date;
            UserActiveBean userActiveBean;
            try {
                userActiveBean = JSON.parseObject(JSON.toJSONString(sinkRecord.value()), UserActiveBean.class);
                // The bean's time field is epoch millis; format it in the system default zone.
                date = DAY_FORMATTER.format(LocalDateTime.ofInstant(
                        Instant.ofEpochMilli(userActiveBean.getTime().longValue()), ZoneId.systemDefault()));
            } catch (Exception e) {
                log.error("处理记录失败，sinkRecord ： {}。失败原因:{}", JSON.toJSONString(sinkRecord), e.getMessage(), e);
                // BUGFIX: was `throw e.getCause()` — getCause() is null for exceptions without a
                // cause (the usual case here), so the throw itself raised an NPE and the original
                // failure plus its stack trace were lost. Rethrow the caught exception instead.
                throw e;
            }
            // Idiomatic replacement for containsKey/put/get: create the day's bucket on demand.
            jsonMap.computeIfAbsent(date, key -> new ArrayList<>()).add(userActiveBean);
        }
        return jsonMap;
    }

    /**
     * Writes one day's records to HDFS; implemented by subclasses.
     *
     * @param date               the day the records belong to, formatted "yyyy-MM-dd"
     * @param userActiveBeanList the records of that day
     * @throws IOException if writing to HDFS fails
     */
    protected abstract void writeToHdfs(String date, List<UserActiveBean> userActiveBeanList) throws IOException;
}
