package com.atguigu.util;

import com.atguigu.common.GmallConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Utility methods for building Flink DataStream Kafka sources/sinks and
 * Flink SQL Kafka connector DDL fragments. All broker addresses come from
 * {@link GmallConfig#BOOTSTRAP_SERVER}.
 */
public class KafkaUtil {

    /** Non-instantiable utility class. */
    private KafkaUtil() {
    }

    /**
     * Builds a Flink Kafka consumer that reads the given topic as UTF-8 strings.
     * A custom deserialization schema is used instead of {@link SimpleStringSchema}
     * so that tombstone records (null values) are mapped to an empty string
     * rather than causing a NullPointerException.
     *
     * @param topicName Kafka topic to subscribe to
     * @param groupId   consumer group id
     * @return a configured {@link FlinkKafkaConsumer}
     */
    public static FlinkKafkaConsumer<String> getFlinkKafkaConsumer(String topicName, String groupId) {
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, GmallConfig.BOOTSTRAP_SERVER);
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return new FlinkKafkaConsumer<>(topicName, new KafkaDeserializationSchema<String>() {
            // Unbounded stream: never signal end-of-stream.
            @Override
            public boolean isEndOfStream(String s) {
                return false;
            }

            // Decode the record value as UTF-8; map null records / null values
            // (Kafka tombstones) to "" instead of throwing.
            @Override
            public String deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
                if (consumerRecord == null || consumerRecord.value() == null) {
                    return "";
                }
                return new String(consumerRecord.value(), StandardCharsets.UTF_8);
            }

            // Element type information for Flink's type system.
            @Override
            public TypeInformation<String> getProducedType() {
                return BasicTypeInfo.STRING_TYPE_INFO;
            }
        }, properties);
    }

    /**
     * Builds a Flink Kafka producer writing UTF-8 strings to the given topic.
     *
     * <p>NOTE(review): this 3-arg constructor provides at-least-once semantics
     * only; use the {@code Semantic.EXACTLY_ONCE} overload if exactly-once
     * delivery is required — confirm against job requirements.
     *
     * @param topicName target Kafka topic
     * @return a configured {@link FlinkKafkaProducer}
     */
    public static FlinkKafkaProducer<String> getFlinkKafkaProducer(String topicName) {
        Properties properties = new Properties();
        // Fixed: use ProducerConfig, not ConsumerConfig, for a producer.
        // (The constant values are identical, but the previous code was misleading.)
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, GmallConfig.BOOTSTRAP_SERVER);
        return new FlinkKafkaProducer<>(topicName, new SimpleStringSchema(), properties);
    }

    /**
     * Returns a Flink SQL {@code WITH(...)} clause for a JSON Kafka source table
     * that starts reading from committed group offsets.
     *
     * @param topicName source Kafka topic
     * @param groupId   consumer group id
     * @return the connector options clause (to be appended to a CREATE TABLE)
     */
    public static String getKafkaDDL(String topicName, String groupId) {
        return "WITH(" +
                "'connector'='kafka'," +
                "'topic'='" + topicName + "'," +
                "'properties.bootstrap.servers'='" + GmallConfig.BOOTSTRAP_SERVER + "'," +
                "'properties.group.id'='" + groupId + "'," +
                "'scan.startup.mode'='group-offsets'," +
                "'format'='json')";
    }

    /**
     * Returns a Flink SQL {@code WITH (...)} clause for a JSON Kafka sink table
     * (append-only writes).
     *
     * @param topicName target Kafka topic
     * @return the connector options clause (to be appended to a CREATE TABLE)
     */
    public static String getKafkaSinkDDL(String topicName) {
        return "WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topicName + "',\n" +
                "  'properties.bootstrap.servers' = '" + GmallConfig.BOOTSTRAP_SERVER + "',\n" +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * Returns a Flink SQL {@code WITH (...)} clause for an upsert-kafka sink
     * table (supports retract/update streams; the table must declare a
     * PRIMARY KEY).
     *
     * @param topicName target Kafka topic
     * @return the connector options clause (to be appended to a CREATE TABLE)
     */
    public static String getUpsertKafkaSinkDDL(String topicName) {

        return " WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = '" + topicName + "',\n" +
                "  'properties.bootstrap.servers' = '" + GmallConfig.BOOTSTRAP_SERVER + "',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json' " +
                ")";
    }

    /**
     * Returns the column definitions for a {@code topic_db} table holding CDC
     * change records (presumably a Maxwell-style envelope: database/table/type/
     * ts/data/old — verify against the upstream producer), plus a processing-time
     * attribute {@code pt}.
     *
     * <p>No connector clause is included; combine with
     * {@link #getKafkaDDL(String, String)}.
     *
     * @return the CREATE TABLE statement without a WITH clause
     */
    public static String getCreateTopicDB() {
        return "create table topic_db (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `ts` bigint,\n" +
                "  `data` MAP<STRING,STRING>,\n" +
                "  `old` MAP<STRING,STRING>,\n" +
                "  `pt` as proctime()\n" +
                ")";
    }
}
