package com.atguigu.wuliu.utils;

import com.atguigu.wuliu.common.WuliuConfig;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static com.atguigu.wuliu.common.WuliuConfig.BROKERS;

/**
 * kafka工具类
 */
/**
 * Kafka utility class: builds Flink Kafka sources/sinks and Flink SQL
 * connector DDL fragments for reading the {@code topic_db} change stream.
 */
public class KafkaUtil {

    /** Utility class — static methods only; prevent instantiation. */
    private KafkaUtil() {
    }

    /**
     * Builds a {@link KafkaSource} that reads String values from the given topic.
     *
     * @param topic   Kafka topic to consume
     * @param groupId consumer group id used for offset tracking
     * @return a configured KafkaSource of String values
     */
    public static KafkaSource<String> getKafkaSource(String topic, String groupId) {
        return KafkaSource.<String>builder()
                .setBootstrapServers(BROKERS)
                .setTopics(topic)
                .setGroupId(groupId)
                // Resume from the consumer group's committed offsets; if no
                // committed offset exists, fall back to the EARLIEST offset.
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST))
                // SimpleStringSchema cannot handle tombstone (null-value) records,
                // so use a custom deserializer that tolerates null payloads.
                .setValueOnlyDeserializer(new DeserializationSchema<String>() {
                    @Override
                    public String deserialize(byte[] bytes) throws IOException {
                        // Decode with an explicit charset: new String(bytes)
                        // would use the platform default encoding (pre-Java 18).
                        return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
                    }

                    @Override
                    public boolean isEndOfStream(String s) {
                        // Unbounded stream: never signals end-of-stream.
                        return false;
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return TypeInformation.of(String.class);
                    }
                })
                .build();
    }

    /**
     * Builds a {@link KafkaSink} that writes String values to the given topic
     * with at-least-once delivery (the default guarantee).
     *
     * <p>NOTE(review): for exactly-once delivery, additionally configure
     * {@code DeliveryGuarantee.EXACTLY_ONCE}, a transactional-id prefix, and a
     * {@code transaction.timeout.ms} below the broker's allowed maximum.
     *
     * @param topic Kafka topic to write to
     * @return a configured KafkaSink of String values
     */
    public static KafkaSink<String> getKafkaSink(String topic) {
        return KafkaSink.<String>builder()
                .setBootstrapServers(BROKERS)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(topic)
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build()
                )
                .build();
    }

    /**
     * Returns the Flink SQL DDL for a table over the {@code topic_db} topic
     * (Maxwell-style change records: database/table/type/ts plus data/old maps),
     * including a processing-time column {@code pt}.
     *
     * @param groupId   consumer group id for the Kafka connector
     * @param tableName name of the Flink SQL table to create
     * @return a CREATE TABLE statement string
     */
    public static String getKafkaSourceTopicDb(String groupId, String tableName) {
        return " CREATE TABLE " + tableName + " (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `ts` string,\n" +
                "  `data` MAP<string, string>,\n" +
                "  `old` MAP<string, string>,\n" +
                "  `pt` as proctime()\n" +
                ") " + getKafkaConfig("topic_db", groupId);
    }

    /**
     * Returns the WITH-clause of a Flink SQL Kafka connector definition
     * (JSON format, starting from the latest offset).
     *
     * <p>NOTE(review): topic and group are concatenated unescaped — callers
     * must pass trusted values.
     *
     * @param topic Kafka topic name
     * @param group consumer group id
     * @return the connector WITH-clause string
     */
    public static String getKafkaConfig(String topic, String group) {
        return "WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + BROKERS + "',\n" +
                "  'properties.group.id' = '" + group + "',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
    }
}
