package com.atguigu.utils;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.common.GmallConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Properties;

/*
Kafka connection utilities.
Centralizes Kafka access so that every layer of the pipeline can consume
from and produce to Kafka through the same helpers.
 */
public class KafkaUtil {

    /**
     * Producer transaction timeout in milliseconds (15 minutes).
     * FIX: Flink's FlinkKafkaProducer defaults transaction.timeout.ms to 1 hour,
     * which exceeds the broker default transaction.max.timeout.ms (15 minutes);
     * without this override, EXACTLY_ONCE producers fail at initialization.
     */
    private static final String TRANSACTION_TIMEOUT_MS = String.valueOf(15 * 60 * 1000);

    /** Utility class — not meant to be instantiated. */
    private KafkaUtil() {
    }

    /**
     * Creates a Flink Kafka consumer for the given topic and consumer group.
     * <p>
     * A custom deserialization schema is used instead of {@link SimpleStringSchema},
     * because SimpleStringSchema throws a NullPointerException when a record's value
     * is {@code null} (e.g. tombstone messages); here such records map to {@code null}.
     *
     * @param topic   Kafka topic to subscribe to
     * @param groupId Kafka consumer group id
     * @return a never-ending consumer emitting one String per record value
     */
    public static FlinkKafkaConsumer<String> getFlinkKafkaConsumer(String topic, String groupId) {
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, GmallConfig.KAFKA_SERVER);

        return new FlinkKafkaConsumer<>(
                topic,
                new KafkaDeserializationSchema<String>() {

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return BasicTypeInfo.STRING_TYPE_INFO;
                    }

                    // Never treat any element as end-of-stream: we keep consuming indefinitely.
                    @Override
                    public boolean isEndOfStream(String nextElement) {
                        return false;
                    }

                    @Override
                    public String deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
                        // A record (or its value) may be null — return null instead of NPE-ing.
                        if (record == null || record.value() == null) {
                            return null;
                        }
                        // FIX: decode with an explicit charset. new String(byte[]) uses the
                        // platform default, which is not guaranteed to be UTF-8.
                        return new String(record.value(), StandardCharsets.UTF_8);
                    }
                },
                properties);
    }

    /**
     * Creates a producer that writes plain strings to the given topic
     * (default at-least-once semantics of this constructor).
     * Null values are filtered by {@link SimpleStringSchema} upstream,
     * so no NullPointerException occurs here.
     *
     * @param topic destination Kafka topic
     */
    public static FlinkKafkaProducer<String> getFlinkKafkaProducer(String topic) {
        return new FlinkKafkaProducer<>(GmallConfig.KAFKA_SERVER, topic, new SimpleStringSchema());
    }

    /*
     * Used by BaseDBApp: routes tables from the topic_db topic that need no extra
     * processing to their per-table DWD topics. The filtered main-stream value is a
     * Maxwell-format JSONObject, e.g.:
     *
     * {"database":"gmall","table":"cart_info","type":"update","ts":1592270938,"xid":13090,"xoffset":1573,
     *  "data":{ ...latest column values... },
     *  "old":{ ...previous values of changed columns... },
     *  "sink_table":"xxx"}   // field added while processing the connected stream
     */

    /**
     * Creates an EXACTLY_ONCE producer that routes each Maxwell-format
     * {@code JSONObject} to the topic named by its {@code sink_table} field,
     * writing the {@code data} payload as the record value.
     * <p>
     * NOTE(review): EXACTLY_ONCE only takes effect when the Flink job has
     * checkpointing enabled; otherwise transactions never commit.
     *
     * @return producer whose destination topic is decided per record
     */
    public static FlinkKafkaProducer<JSONObject> getFlinkKafkaProducer() {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, GmallConfig.KAFKA_SERVER);
        // FIX: keep the transaction timeout within the broker's default maximum,
        // otherwise EXACTLY_ONCE initialization fails (see TRANSACTION_TIMEOUT_MS).
        properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, TRANSACTION_TIMEOUT_MS);

        // "" is only the default topic; the real topic comes from serialize() below.
        return new FlinkKafkaProducer<>("", new KafkaSerializationSchema<JSONObject>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(JSONObject value, @Nullable Long timestamp) {
                // Write the "data" payload to the topic named by "sink_table".
                // FIX: encode with explicit UTF-8 instead of the platform default charset.
                return new ProducerRecord<>(
                        value.getString("sink_table"),
                        value.getString("data").getBytes(StandardCharsets.UTF_8));
            }
        }, properties, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
    }

    /**
     * Generalization of the method above: writes records of any type {@code T};
     * the caller-supplied serialization schema chooses both the payload bytes and
     * the destination topic for each record.
     *
     * @param kafkaSerializationSchema per-record serializer and topic router
     * @param <T>                      element type of the stream being written
     */
    public static <T> FlinkKafkaProducer<T> getFlinkKafkaProducer(KafkaSerializationSchema<T> kafkaSerializationSchema) {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, GmallConfig.KAFKA_SERVER);
        // FIX: same transaction-timeout cap as the JSONObject producer above.
        properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, TRANSACTION_TIMEOUT_MS);

        return new FlinkKafkaProducer<>("", kafkaSerializationSchema, properties, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
    }

    /**
     * Kafka-source WITH clause for Flink SQL (consume from Kafka).
     *
     * @param topic   source topic
     * @param groupId consumer group id
     * @return the WITH (...) fragment to append to a CREATE TABLE statement
     */
    public static String getKafkaDDL(String topic, String groupId) {
        return "WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                // FIX: use the configured broker list instead of a hard-coded
                // "hadoop102:9092" — consistent with every other method here.
                "  'properties.bootstrap.servers' = '" + GmallConfig.KAFKA_SERVER + "',\n" +
                "  'properties.group.id' = '" + groupId + "',\n" +
                "  'scan.startup.mode' = 'group-offsets',\n" +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * Kafka-sink WITH clause for Flink SQL (append-only JSON sink).
     *
     * @param topic destination topic
     * @return the WITH (...) fragment to append to a CREATE TABLE statement
     */
    public static String getKafkaSinkDDL(String topic) {
        return "WITH ( " +
                "  'connector' = 'kafka', " +
                "  'topic' = '" + topic + "', " +
                "  'properties.bootstrap.servers' = '" + GmallConfig.KAFKA_SERVER + "', " +
                "  'format' = 'json' " +
                ")";
    }

    /**
     * Upsert-Kafka sink WITH clause for Flink SQL (requires a PRIMARY KEY
     * on the table it is attached to).
     *
     * @param topic destination topic
     * @return the WITH (...) fragment to append to a CREATE TABLE statement
     */
    public static String getKafkaUpsertSinkDDL(String topic) {
        return "WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + GmallConfig.KAFKA_SERVER + "',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")";
    }

    /**
     * DDL for the dynamic table over the Kafka topic_db topic (Maxwell CDC feed).
     *
     * @param groupId consumer group id for this job
     */
    public static String getTopicDbDDL(String groupId) {
        return "create table topic_db( " +
                "`database` string, " +
                "`table` string, " +
                "`type` string, " +
                "`data` map<string,string>, " +
                "`old` map<string,string>, " +
                // Processing time is required for lookup joins against the
                // base_dic dimension table in MySQL.
                "`pt` as proctime()," +
                "`ts` string," +
                // Event time must be TIMESTAMP(3); interval joins require it.
                "`rt` as to_timestamp_ltz(cast(ts as bigint),3)," +
                " watermark for rt as rt" +
                ")" + getKafkaDDL("topic_db", groupId);
    }

    /**
     * DDL for the dynamic table over the Kafka dwd_trade_order_detail topic.
     *
     * @param groupId consumer group id for this job
     */
    public static String getDwdTradeOrderDetailDDL(String groupId) {
        return "create table dwd_order_detail(" +
                "id string,\n" +
                "order_id string,\n" +
                "sku_id string,\n" +
                "sku_name string,\n" +
                "order_price string,\n" +
                "sku_num string,\n" +
                "create_time string,\n" +
                "source_type string,\n" +
                "source_id string,\n" +
                "split_total_amount string,\n" +
                "split_activity_amount string,\n" +
                "split_coupon_amount string,\n" +
                "consignee string,\n" +
                "consignee_tel string,\n" +
                "total_amount string,\n" +
                "order_status string,\n" +
                "user_id string,\n" +
                "delivery_address string,\n" +
                "order_comment string,\n" +
                "out_trade_no string,\n" +
                "trade_body string,\n" +
                "process_status string,\n" +
                "tracking_no string,\n" +
                "parent_order_id string,\n" +
                "province_id string,\n" +
                "activity_reduce_amount string,\n" +
                "coupon_reduce_amount string,\n" +
                "original_total_amount string,\n" +
                "feight_fee string,\n" +
                "feight_fee_reduce string,\n" +
                "refundable_time string,\n" +
                "activity_id string,\n" +
                "activity_rule_id string,\n" +
                "coupon_id string,\n" +
                "coupon_use_id string,\n" +
                "dic_name string,\n" +
                // FIX: `ts` must be declared as a physical column before the computed
                // column `rt` may reference it; the original DDL omitted it and would
                // fail Flink SQL validation. Assumes the topic's JSON carries a bigint
                // `ts` (epoch millis) — TODO confirm against the producer job.
                "ts bigint,\n" +
                // The primary key is only declared later, on the upsert-Kafka table.
                // Event time must be TIMESTAMP(3); interval joins need it on both sides.
                "`rt` as to_timestamp_ltz(ts,3)," +
                "watermark for rt as rt)" + getKafkaDDL("dwd_trade_order_detail", groupId);
    }
}
