package com.zyh.flink.day02.source;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.util.Properties;

/**
 * Example job: Flink acts as a Kafka consumer, reading JSON messages from a
 * topic through a custom deserialization schema and printing each record as a
 * {@code Tuple4<name, age, partition, offset>}.
 */
public class FromKafkaTest3 {
    public static void main(String[] args) throws Exception {
        // Bootstrap the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration: cluster entry point and consumer group name.
        final Properties kafkaProps = new Properties();
        kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka24:9092");
        kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");

        // Build the Flink Kafka consumer from its three ingredients:
        // topic name, custom value deserializer, and connection properties.
        FlinkKafkaConsumer<Tuple4<String, Integer, Integer, Long>> kafkaSource =
                new FlinkKafkaConsumer<>(
                        "topic-flink-source",
                        new MyDeserializationSchema(),
                        kafkaProps);

        // Turn the consumer into a data stream and print every element.
        DataStreamSource<Tuple4<String, Integer, Integer, Long>> stream = env.addSource(kafkaSource);
        stream.print();

        // Submit the job.
        env.execute("a");
    }
}

/**
 * Custom deserialization schema: parses each Kafka record's JSON value and
 * emits a {@code Tuple4<name, age, partition, offset>}.
 */
class MyDeserializationSchema implements KafkaDeserializationSchema<Tuple4<String,Integer,Integer,Long>>{
    // JSON parser. Declared transient because this schema object is serialized
    // by Flink when it ships the job to the task managers, and ObjectMapper is
    // not Serializable; it is created lazily on first use on the worker side.
    // (Bug fix: the original declared this field but never initialized it, so
    // every deserialize() call threw a NullPointerException.)
    private transient ObjectMapper objectMapper;

    @Override
    // Kafka is an unbounded source: never signal end-of-stream.
    public boolean isEndOfStream(Tuple4<String, Integer, Integer, Long> stringIntegerIntegerLongTuple4) {
        return false;
    }

    /*
    * consumerRecord: one Kafka message (key/value as raw byte arrays)
    * returns the Tuple4 built from the JSON value plus record metadata,
    * or null for records without a value (e.g. tombstones), which the
    * default collector-based deserialize() skips.
    * throws Exception if the value is not parseable JSON.
    * */
    @Override
    public Tuple4<String, Integer, Integer, Long> deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
        // Lazy per-task initialization of the (non-serializable) JSON parser.
        if (objectMapper == null) {
            objectMapper = new ObjectMapper();
        }
        // The value part of the message; may be null for tombstone records.
        byte[] valueBytes = consumerRecord.value();
        if (valueBytes == null) {
            // NOTE(review): Flink's default deserialize(record, collector)
            // drops null results instead of emitting them.
            return null;
        }
        // Parse the raw bytes into a JSON tree.
        JsonNode jsonNode = consumerRecord == null ? null : objectMapper.readTree(valueBytes);
        // Extract the expected fields from the payload.
        String name = jsonNode.get("name").asText();
        int age = jsonNode.get("age").asInt();
        // Record metadata: partition the message came from and its offset.
        int partition = consumerRecord.partition();
        long offset = consumerRecord.offset();

        return Tuple4.of(name, age, partition, offset);
    }

    @Override
    public TypeInformation<Tuple4<String, Integer, Integer, Long>> getProducedType() {
        return Types.TUPLE(Types.STRING, Types.INT, Types.INT, Types.LONG);
    }
}