package com.xdja.kafka.hdfs.sink.handle;

import com.alibaba.fastjson.JSON;
import com.xdja.kafka.hdfs.sink.bean.UserActiveBean;
import com.xdja.kafka.hdfs.sink.writer.manage.TextWriterManage;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.kafka.connect.sink.SinkRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * 对kafka读取到的SinkRecords原样保存到hadoop
 */
public class TextSinkHandle extends AbstractSinkHandle<TextWriterManage>{
    /**
     * "\r\n" record separator appended after each serialized JSON record.
     */
    static final String _R_N = "\r\n";

    private static final Logger log = LoggerFactory.getLogger(TextSinkHandle.class);

    /**
     * Constructor.
     *
     * @param sinkRecords the Kafka Connect records this handle will process
     */
    public TextSinkHandle(Collection<SinkRecord> sinkRecords) {
        super.sinkRecords = sinkRecords;
    }

    /**
     * Serializes each {@link UserActiveBean} as one JSON line (terminated by
     * {@code \r\n}) and appends the batch to the HDFS text file for the given date.
     * Null beans are skipped with an error log entry.
     *
     * @param date               partition key used to select the output file; returns early if empty
     * @param userActiveBeanList records to persist; returns early if null/empty
     * @throws IOException if writing to the HDFS output stream fails
     */
    @Override
    protected void writeToHdfs(String date, List<UserActiveBean> userActiveBeanList) throws IOException {
        if(CollectionUtils.isEmpty(userActiveBeanList) || StringUtils.isEmpty(date)) {
            log.info("date or userActiveBeanList is empty, returning directly");
            return;
        }
        FSDataOutputStream fsDataOutputStream = TextWriterManage.getInstance().getWriter(date);
        // Accumulate the whole batch in memory and issue a single write:
        // per-record write() calls on an HDFS stream are comparatively expensive.
        StringBuilder batch = new StringBuilder();
        for(UserActiveBean userActiveBean : userActiveBeanList) {
            if(userActiveBean == null) {
                log.error("userActiveBean记录为null，忽略");
                continue;
            }
            batch.append(JSON.toJSONString(userActiveBean)).append(_R_N);
        }
        if (batch.length() == 0) {
            // Every record was null — nothing to write.
            return;
        }
        // Fix: use an explicit UTF-8 charset instead of Charset.defaultCharset(),
        // so the on-disk encoding does not depend on the JVM/platform configuration.
        fsDataOutputStream.write(batch.toString().getBytes(StandardCharsets.UTF_8));
    }
}
