package com.gsm.projects.jobDw;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming job that consumes CDC-style JSON records from the
 * {@code job_ods} Kafka topic via the new {@code KafkaSource} connector
 * and maps each record's {@code after} image into a {@code Job} POJO.
 *
 * @author gsm
 * @version 1.0
 */
public class JobKafkaSource {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // parallelism 1 keeps console output readable for the demo

        // Read from Kafka using the new Source architecture.
        // builder() is a generic method, so the element type must be supplied
        // explicitly as KafkaSource.<String>builder(); otherwise the compiler
        // cannot infer the value type for new SimpleStringSchema().
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("hadoop102:9092,hadoop103:9092,hadoop104:9092") // Kafka broker host:port list
                .setGroupId("gsm")  // consumer group id
                .setTopics("job_ods")   // topic to consume
                .setValueOnlyDeserializer(new SimpleStringSchema()) // deserialize the value only; the Kafka key is unused here
                .setStartingOffsets(OffsetsInitializer.earliest())  // offset strategy: start from the earliest available offsets
                .build();

        DataStreamSource<String> stringDataStreamSource =
                env.fromSource(kafkaSource, WatermarkStrategy.forMonotonousTimestamps(), "kafkasource");

        stringDataStreamSource.print();

        // Map each raw JSON record to a Job POJO.
        SingleOutputStreamOperator<Job> jobDS = stringDataStreamSource.map(new MapFunction<String, Job>() {
            @Override
            public Job map(String value) throws Exception {

                JSONObject jsonObject = JSON.parseObject(value);
                // CDC payloads carry the new row image under "after"; for delete
                // events "after" is typically null, so guard against it instead of
                // letting an NPE crash the whole streaming job.
                JSONObject after = jsonObject.getJSONObject("after");

                Job job = new Job();
                if (after != null) {
                    // getInteger performs a safe numeric conversion; a raw
                    // (Integer) cast would throw ClassCastException if fastjson
                    // parsed the value as a Long.
                    job.setId(after.getInteger("id"));
                }

                return job;
            }
        });

        jobDS.print("jobDS~~~~~~~~~~~");

        env.execute();
    }
}