package cn.gwm.flink.streaming.factory;

import cn.gwm.flink.streaming.constant.DataHubConstant;
import cn.gwm.flink.streaming.ods.model.ModelCanEnum;
import cn.gwm.flink.streaming.ods.model.ModelGpsEnum;
import cn.gwm.flink.streaming.source.DatahubSourcesFunction;
import cn.gwm.utils.ConfigLoader;
import cn.gwm.utils.DateTimeUtil;
import com.alibaba.ververica.connectors.datahub.source.DatahubSourceFunction;
import com.aliyun.datahub.client.DatahubClient;
import com.aliyun.datahub.client.DatahubClientBuilder;
import com.aliyun.datahub.client.auth.AliyunAccount;
import com.aliyun.datahub.client.common.DatahubConfig;
import com.aliyun.datahub.client.http.HttpConfig;
import com.aliyun.datahub.client.model.GetTopicResult;
import com.aliyun.datahub.client.model.SubscriptionOffset;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Factory for DataHub source connectors used by the streaming jobs.
 *
 * @ClassName DataHubSourceFactory
 * @Author xzh
 * @Date 2022/11/2 10:30
 **/
public class DataHubSourceFactory {

    private static final Logger logger = LoggerFactory.getLogger(DataHubSourceFactory.class);

    /** Request timeout (ms) for the start/stop windowed source variant. */
    private static final int WINDOWED_REQUEST_TIMEOUT_MS = 30 * 1000;
    /** Request timeout (ms) for the stop-time-only source variants. */
    private static final int DEFAULT_REQUEST_TIMEOUT_MS = 10 * 1000;
    /** Fallback look-back window (2 days, ms) used when no usable subscription offset exists. */
    private static final long DEFAULT_LOOKBACK_MS = 2L * 24 * 60 * 60 * 1000;
    /** Rewind (5 minutes, ms) applied to a stored offset so data is consumed at least once. */
    private static final long OFFSET_REWIND_MS = 300_000L;

    /**
     * Returns the DataHub source connector registered for a business model class.
     *
     * <p>The start timestamp is resolved from the subscription's committed offsets
     * (see {@link #getOffsetsTimestamp(String, String)}); the stop timestamp comes
     * from the {@code all.receive.endTime} configuration entry.
     *
     * @param cla business model class; {@link ModelCanEnum} and {@link ModelGpsEnum} are supported
     * @return a configured source function, or {@code null} when the class is not recognized
     */
    public static DatahubSourcesFunction getDatahubSourceFunction(Class<?> cla) {
        // es13 longitude/latitude extraction (CAN signals)
        if (ModelCanEnum.class.getName().equals(cla.getName())) {
            String subId = ConfigLoader.get("dataHub.can.subId.toKafka");
            String canTopic = ConfigLoader.get("dataHub.topic.can");
            return getDatahubSourceFunction(canTopic, subId,
                    getOffsetsTimestamp(canTopic, subId),
                    DateTimeUtil.defaultFormatPare(ConfigLoader.get("all.receive.endTime")));
        } else if (ModelGpsEnum.class.getName().equals(cla.getName())) {
            String subId = ConfigLoader.get("dataHub.gps.subId.toKafka");
            String gpsTopic = ConfigLoader.get("dataHub.topic.gps");
            return getDatahubSourceFunction(gpsTopic, subId,
                    getOffsetsTimestamp(gpsTopic, subId),
                    DateTimeUtil.defaultFormatPare(ConfigLoader.get("all.receive.endTime")));
        }
        logger.warn("No DataHub source registered for class {}", cla.getName());
        return null;
    }

    /**
     * Builds a DataHub source that reads a bounded window of data.
     *
     * @param topic         DataHub topic name
     * @param subId         subscription id of the consuming task
     * @param startTimeInMs read start time (epoch millis)
     * @param stopTimeInMs  read stop time (epoch millis)
     * @return configured source (30s request timeout, fetch/buffer size 2000, exits when done)
     */
    public static DatahubSourcesFunction getDatahubSourceFunction(String topic, String subId,
                                                                 long startTimeInMs, long stopTimeInMs) {
        DatahubSourcesFunction datahubSource =
                new DatahubSourcesFunction(
                        ConfigLoader.get("dataHub.source.endpoint"),
                        ConfigLoader.get("dataHub.source.projectName"),
                        topic,
                        subId,
                        ConfigLoader.get("dataHub.source.accessId"),
                        ConfigLoader.get("dataHub.source.accessKey"),
                        startTimeInMs,
                        stopTimeInMs);
        return configureSource(datahubSource, WINDOWED_REQUEST_TIMEOUT_MS, 2000);
    }

    /**
     * Builds a stop-time-only DataHub source with the default batch size (2000).
     *
     * @param topic        DataHub topic name
     * @param subId        subscription id of the consuming task
     * @param stopTimeInMs read stop time (epoch millis)
     * @return configured source (10s request timeout, exits when done)
     */
    public static DatahubSourcesFunction getDatahubSourceFunction(String topic, String subId, long stopTimeInMs) {
        return createStopTimeSource(topic, subId, stopTimeInMs, 2000);
    }

    /**
     * Builds a stop-time-only DataHub source tuned for high throughput (batch size 50000).
     *
     * @param topic        DataHub topic name
     * @param subId        subscription id of the consuming task
     * @param stopTimeInMs read stop time (epoch millis)
     * @return configured source (10s request timeout, exits when done)
     */
    public static DatahubSourcesFunction getDatahubSourceFunction2(String topic, String subId, long stopTimeInMs) {
        return createStopTimeSource(topic, subId, stopTimeInMs, 50000);
    }

    /**
     * Builds a stop-time-only DataHub source tuned for small batches (batch size 500).
     *
     * @param topic        DataHub topic name
     * @param subId        subscription id of the consuming task
     * @param stopTimeInMs read stop time (epoch millis)
     * @return configured source (10s request timeout, exits when done)
     */
    public static DatahubSourcesFunction getDatahubSourceFunction3(String topic, String subId, long stopTimeInMs) {
        return createStopTimeSource(topic, subId, stopTimeInMs, 500);
    }

    /**
     * Shared construction path for the three stop-time-only overloads; credentials and
     * endpoint/project are read from configuration.
     */
    private static DatahubSourcesFunction createStopTimeSource(String topic, String subId,
                                                               long stopTimeInMs, int batchSize) {
        DatahubSourcesFunction datahubSource =
                new DatahubSourcesFunction(
                        ConfigLoader.get("dataHub.source.endpoint"),
                        ConfigLoader.get("dataHub.source.projectName"),
                        topic,
                        subId,
                        ConfigLoader.get("dataHub.source.accessId"),
                        ConfigLoader.get("dataHub.source.accessKey"),
                        stopTimeInMs);
        return configureSource(datahubSource, DEFAULT_REQUEST_TIMEOUT_MS, batchSize);
    }

    /** Applies the common tuning knobs every factory method sets on a new source. */
    private static DatahubSourcesFunction configureSource(DatahubSourcesFunction source,
                                                          int requestTimeoutMs, int batchSize) {
        source.setRequestTimeout(requestTimeoutMs);
        source.setMaxFetchSize(batchSize);
        source.setMaxBufferSize(batchSize);
        source.enableExitAfterReadFinished();
        return source;
    }

    /**
     * Creates a DataHub admin client from the configured endpoint and credentials.
     *
     * <p>Binary transport is enabled (supported since server 2.12); on dedicated-cloud
     * deployments that fail with it, set the flag to {@code false}. LZ4 compression is
     * recommended for read/write traffic.
     *
     * @return a newly built {@link DatahubClient}
     */
    private static DatahubClient getDataHubClient() {
        return DatahubClientBuilder.newBuilder()
                .setDatahubConfig(
                        new DatahubConfig(ConfigLoader.get("dataHub.source.endpoint"),
                                new AliyunAccount(ConfigLoader.get("dataHub.source.accessId"),
                                        ConfigLoader.get("dataHub.source.accessKey")),
                                true))
                // HttpConfig is optional; defaults apply when unset
                .setHttpConfig(new HttpConfig()
                        .setCompressType(HttpConfig.CompressType.LZ4)
                        .setConnTimeout(10000))
                .build();
    }

    /**
     * Resolves the consume-start timestamp for a subscription.
     *
     * <p>Reads the committed offset of every shard and takes the earliest timestamp so
     * no shard's data is skipped. If no shard has a usable offset (timestamp &le; 0),
     * falls back to "now minus 2 days"; otherwise the offset is rewound by 5 minutes
     * to guarantee at-least-once processing.
     *
     * @param topic DataHub topic name
     * @param subId subscription id whose offsets are inspected
     * @return start timestamp in epoch millis
     */
    public static long getOffsetsTimestamp(String topic, String subId) {
        DatahubClient dataHubClient = getDataHubClient();
        GetTopicResult topicResult = dataHubClient.getTopic(ConfigLoader.get("dataHub.source.projectName"), topic);
        int shardCount = topicResult.getShardCount();
        // Shard ids are the stringified indices 0..shardCount-1
        List<String> shardIds = new ArrayList<>(shardCount);
        for (int i = 0; i < shardCount; i++) {
            shardIds.add(Integer.toString(i));
        }
        Map<String, SubscriptionOffset> offsets = dataHubClient
                .getSubscriptionOffset(ConfigLoader.get("dataHub.source.projectName"), topic, subId, shardIds)
                .getOffsets();
        // Earliest committed timestamp across all shards; 0 means "no usable offset found"
        long timestamp = 0L;
        for (SubscriptionOffset offset : offsets.values()) {
            long shardTimestamp = offset.getTimestamp();
            if (timestamp == 0L || shardTimestamp < timestamp) {
                timestamp = shardTimestamp;
            }
        }
        // Informational startup message (was mistakenly logged at ERROR level)
        logger.info("启动获取消费位点，topic:{},subId:{},timestamp:{}", topic, subId, timestamp);
        if (timestamp <= 0) {
            timestamp = System.currentTimeMillis() - DEFAULT_LOOKBACK_MS;
        } else {
            // Rewind 5 minutes to guarantee at-least-once consumption
            timestamp = timestamp - OFFSET_REWIND_MS;
        }
        return timestamp;
    }

}
