package com.asp.bigdata.flink.kafka2hdfs.utils;

import com.asp.bigdata.flink.kafka2hdfs.pojo.Event;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer;
import org.apache.flink.util.Preconditions;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Properties;

/**
 * 解析csv数据的分区目录
 */
public class CsvBucketAssigner implements BucketAssigner<Event, String> {

    private static final long serialVersionUID = 1L;

    /** Default partition-path pattern: one directory level per year/month/day/hour. */
    private static final String DEFAULT_FORMAT_STRING = "yyyy/MM/dd/HH";

    /** Fallback bucket id for records whose event time cannot be formatted. */
    private static final String EPOCH_PARTITION = "1970/01/01/00";

    /** Pattern handed to {@link DateTimeFormatter} to build the bucket id. */
    private final String formatString;

    /** Timezone used when formatting the event-time instant. */
    private final ZoneId zoneId;

    // Configured via "split.regex"; only surfaced in toString() here — presumably
    // consumed by a related CSV-parsing component. TODO(review): confirm usage.
    private String splitRegex;

    // Configured via "time.index"; index of the timestamp column in the CSV row.
    // Not read by this class's bucketing logic — TODO(review): confirm usage.
    private int timeIdx;

    // Configured via "time.format". NOTE(review): SimpleDateFormat is not
    // thread-safe; it is not used by getBucketId(), but any future use must not
    // share it across threads.
    private SimpleDateFormat eventFormat;

    // DateTimeFormatter is not Serializable, so it is transient and rebuilt
    // lazily after the assigner is deserialized on the task manager.
    private transient DateTimeFormatter dateTimeFormatter;

    /**
     * Creates a new {@code CsvBucketAssigner} with the default format string
     * {@code "yyyy/MM/dd/HH"}.
     */
    public CsvBucketAssigner() {
        this(DEFAULT_FORMAT_STRING);
    }

    /**
     * Creates a new {@code CsvBucketAssigner} with the given date/time format string,
     * using the system default timezone.
     *
     * @param formatString the pattern given to {@code DateTimeFormatter} to determine
     *                     the bucket id; must not be {@code null}
     */
    public CsvBucketAssigner(String formatString) {
        this(formatString, ZoneId.systemDefault());
    }

    /**
     * Creates a new {@code CsvBucketAssigner} with the default format string
     * {@code "yyyy/MM/dd/HH"}, using the given timezone.
     *
     * @param zoneId the timezone used by {@code DateTimeFormatter} when building
     *               the bucket id; must not be {@code null}
     */
    public CsvBucketAssigner(ZoneId zoneId) {
        this(DEFAULT_FORMAT_STRING, zoneId);
    }

    /**
     * Creates a new {@code CsvBucketAssigner} with the given date/time format string
     * and timezone.
     *
     * @param formatString the pattern given to {@code DateTimeFormatter} to determine
     *                     the bucket path; must not be {@code null}
     * @param zoneId       the timezone used by {@code DateTimeFormatter} when building
     *                     the bucket id; must not be {@code null}
     */
    public CsvBucketAssigner(String formatString, ZoneId zoneId) {
        this.formatString = Preconditions.checkNotNull(formatString);
        this.zoneId = Preconditions.checkNotNull(zoneId);
    }

    /**
     * Creates a new {@code CsvBucketAssigner} configured from job properties.
     *
     * <p>Reads {@code split.regex}, {@code time.index}, {@code time.format} and
     * {@code partition.format} (the last one defaulting to {@code "yyyy/MM/dd/HH"}).
     *
     * @param jobConfig the job configuration; {@code time.index} and {@code time.format}
     *                  must be present and valid
     * @throws NumberFormatException if {@code time.index} is missing or not an integer
     */
    public CsvBucketAssigner(Properties jobConfig) {
        this.splitRegex = jobConfig.getProperty("split.regex");
        // parseInt avoids the needless boxing of Integer.valueOf(...).
        this.timeIdx = Integer.parseInt(jobConfig.getProperty("time.index"));
        this.eventFormat = new SimpleDateFormat(jobConfig.getProperty("time.format"));
        this.formatString = jobConfig.getProperty("partition.format", DEFAULT_FORMAT_STRING);
        // ZoneId.systemDefault() never returns null, so no checkNotNull is needed.
        this.zoneId = ZoneId.systemDefault();
    }

    /**
     * Derives the partition directory (bucket id) for a CSV record from its
     * event-time field.
     *
     * @param element the record being written
     * @param context bucketing context (unused)
     * @return the formatted partition path, or the epoch partition
     *         {@code "1970/01/01/00"} when formatting fails
     */
    @Override
    public String getBucketId(Event element, Context context) {
        if (dateTimeFormatter == null) {
            dateTimeFormatter = DateTimeFormatter.ofPattern(formatString).withZone(zoneId);
        }
        try {
            // Partition by the record's event-time field rather than processing time.
            return dateTimeFormatter.format(Instant.ofEpochMilli(element.getTime()));
        } catch (Exception e) {
            // Malformed timestamps are routed to a single epoch partition so the
            // job keeps running on bad data. NOTE(review): replace printStackTrace
            // with a proper logger once one is available in this module.
            e.printStackTrace();
            return EPOCH_PARTITION;
        }
    }

    @Override
    public SimpleVersionedSerializer<String> getSerializer() {
        return SimpleVersionedStringSerializer.INSTANCE;
    }

    @Override
    public String toString() {
        // Fixed stale copy-paste: previously reported "CustomBucketAssigner".
        return "CsvBucketAssigner{" +
                "formatString='" + formatString + '\'' +
                ", zoneId=" + zoneId +
                ", splitRegex=" + splitRegex +
                ", timeIdx=" + timeIdx +
                ", eventFormat=" + eventFormat +
                '}';
    }

}
