package com.xdja.kafka.hdfs.sink.writer.manage;

import com.xdja.kafka.hdfs.sink.writer.definition.WriterDefinition;
import com.xdja.kafka.util.ConnectorUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * Abstract manager for a bounded, date-keyed cache of writers.
 *
 * <p>Holds at most {@code WriterDefinition.maxCacheSize} writers; when the cap is
 * reached the writer for the earliest date is closed and evicted. Subclasses supply
 * the construction ({@link #buildWriter}) and teardown ({@link #clolseWriter}) of the
 * concrete writer type.
 *
 * <p>NOTE(review): this class is not thread-safe — {@code writerMap} is a plain
 * {@code HashMap}; confirm callers are single-threaded or externally synchronized.
 *
 * @param <T> the concrete writer type being managed
 */
public abstract class AbstractWriterManage<T> {
    private static final Logger log = LoggerFactory.getLogger(AbstractWriterManage.class);

    /**
     * Writer cache keyed by date string. Sized so that maxCacheSize entries fit
     * without rehashing: with the default load factor 0.75 a HashMap rehashes once
     * size exceeds capacity * 0.75, so capacity must be >= maxCacheSize * 4/3.
     * (The original used * 3/4, which guaranteed a rehash.)
     */
    private final Map<String, T> writerMap = new HashMap<>(WriterDefinition.maxCacheSize * 4 / 3 + 1);

    /**
     * Returns the date keys of all cached writers.
     *
     * @return a live view of the cache's key set
     */
    public Collection<String> getAllWriterKeys() {
        return writerMap.keySet();
    }

    /**
     * Returns all cached writers.
     *
     * @return a live view of the cache's values
     */
    public Collection<T> getAllWriters() {
        return writerMap.values();
    }

    /**
     * Returns the writer for the given date, creating (and caching) it if absent.
     *
     * <p>If the cache is already at {@code maxCacheSize}, the writer for the earliest
     * cached date is closed and evicted before the new one is created. If creation
     * fails, one more lookup of the cache is attempted (another task may have created
     * the writer concurrently) and its result — possibly {@code null} — is returned.
     *
     * @param date the date key, expected in YYYY-MM-DD form
     * @return the writer for {@code date}, or {@code null} if creation failed and no
     *         cached writer exists
     */
    public T getWriter(String date) {
        T writer = writerMap.get(date);
        if (writer != null) {
            log.info("获取到已存在的writer");
            return writer;
        }
        // Cache is bounded at maxCacheSize: evict the earliest-dated writer first.
        if (writerMap.size() >= WriterDefinition.maxCacheSize) {
            String earliestDate = ConnectorUtil.getEarliestDate(writerMap.keySet());
            T closeWriter = writerMap.get(earliestDate);
            // Subclass-specific teardown of the evicted writer.
            this.clolseWriter(closeWriter);
            writerMap.remove(earliestDate);
            log.info("writer连接池句柄个数超过上限，关闭最早的句柄：{}", earliestDate);
        }
        T newWriter = null;
        try {
            newWriter = buildWriter(date);
        } catch (IOException e) {
            log.error("创建{}天的writer失败, 原因：{}", date, e.getMessage(), e);
        }
        if (newWriter == null) {
            // Creation failed (possibly lost a race with another task): retry the cache.
            log.info("buildWriter()后，若获取到的writer为空，进行重试");
            return writerMap.get(date);
        }
        writerMap.put(date, newWriter);
        // BUGFIX: format string has two placeholders; the date argument was missing.
        log.info("将{}天的writer句柄放入连接池，当前池子大小：{}", date, writerMap.size());
        return newWriter;
    }

    /**
     * Closes every cached writer, clears the cache, and closes the shared
     * {@code WriterDefinition.fileSystem} (best-effort: close failures are logged,
     * not rethrown).
     */
    public void clearWriterMap() {
        if (CollectionUtils.isEmpty(writerMap)) {
            return;
        }
        for (Map.Entry<String, T> entry : writerMap.entrySet()) {
            if (entry == null) {
                continue;
            }
            if (entry.getValue() != null) {
                // BUGFIX: placeholder had no argument; log the date key being closed.
                log.info("关闭{}的writer", entry.getKey());
                clolseWriter(entry.getValue());
            }
        }
        writerMap.clear();

        try {
            if (WriterDefinition.fileSystem != null) {
                WriterDefinition.fileSystem.close();
                log.debug("成功关闭fileSystem");
            }
        } catch (IOException e) {
            log.error("stop(), 关闭fileSystem失败。错误原因：{}", e.getMessage(), e);
        }
    }

    /**
     * Closes the given writer. Implemented by subclasses.
     *
     * <p>NOTE(review): name is a typo of {@code closeWriter}; kept as-is because
     * renaming an abstract method would break existing subclass implementations.
     *
     * @param t the writer to close; implementations should tolerate {@code null}
     */
    protected abstract void clolseWriter(T t);

    /**
     * Builds the writer for the given date. The backing file path is
     * {hdfsUrl}/{hdfsPath}/YYYY-MM/YYYY-MM-DD/YYYY-MM-DD*.
     *
     * @param date the date key, expected in YYYY-MM-DD form
     * @return the newly created writer
     * @throws IOException if the writer cannot be created
     */
    protected abstract T buildWriter(String date) throws IOException;

    /**
     * Returns the HDFS month directory for the given date:
     * {hdfsUrl}/{hdfsPath}/YYYY-MM.
     *
     * @param date a date in YYYY-MM-DD form (the last 3 characters, "-DD", are stripped)
     * @return the month-level directory path
     */
    public static String getHdfsMonthDir(String date) {
        return WriterDefinition.hdfsUrl + WriterDefinition.hdfsPath + File.separator + date.substring(0, date.length() - 3);
    }

    /**
     * Returns the HDFS day directory for the given date:
     * {hdfsUrl}/{hdfsPath}/YYYY-MM/YYYY-MM-DD.
     *
     * @param date a date in YYYY-MM-DD form
     * @return the day-level directory path
     */
    public static String getHdfsDateDir(String date) {
        return getHdfsMonthDir(date) + File.separator + date;
    }
}
