package com.atguigu.flink.chapter05.source;

import com.atguigu.flink.bean.Person;
import org.apache.commons.math3.geometry.partitioning.BSPTree;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.KafkaSourceBuilder;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.common.protocol.types.Field;

import java.nio.charset.StandardCharsets;

/*
    Read data from Kafka (从 Kafka 读取数据)
 */
import java.io.IOException;

/**
 * Demo: consume "id,name" CSV records from a Kafka topic as {@link Person}
 * objects using Flink's {@code KafkaSource}, and print them to stdout.
 */
public class KafkaSourceDemo2 {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Pin the local web UI / REST port so repeated runs are reachable at the same address.
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(2);  // usually kept equal to the Kafka topic's partition count

        KafkaSource<Person> source = KafkaSource.<Person>builder()
                .setBootstrapServers("hadoop102:9092,hadoop103:9092,hadoop104:9092")
                .setGroupId("atguigu")
                .setTopics("s1")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new DeserializationSchema<Person>() {
                    /**
                     * Deserializes a raw record value of the form "id,name" into a Person.
                     * Decodes with an explicit UTF-8 charset so the result does not
                     * depend on the JVM's platform-default encoding (the no-charset
                     * String(byte[]) constructor is platform-dependent before Java 18).
                     */
                    @Override
                    public Person deserialize(byte[] message) throws IOException {
                        String[] data = new String(message, StandardCharsets.UTF_8).split(",");
                        return new Person(Integer.parseInt(data[0]), data[1]);
                    }

                    // Whether this element ends the stream; a Kafka stream is unbounded.
                    @Override
                    public boolean isEndOfStream(Person person) {
                        return false;
                    }

                    @Override
                    public TypeInformation<Person> getProducedType() {
                        //return Types.STRING;                     // common built-in types
                        //return TypeInformation.of(Person.class); // custom types without generics
                        return TypeInformation.of(new TypeHint<Person>() {
                        }); // works for all types, including generic ones
                    }
                }).build();
        DataStreamSource<Person> stream = env.fromSource(source,
                WatermarkStrategy.noWatermarks(),
                "kafka-source");
        stream.print();


        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

//public class KafkaSourceDemo2 {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",20000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(2);   //一般和kafka 的分区数保持一致
//
//        KafkaSource<Person> source = KafkaSource.<Person>builder()
//                .setBootstrapServers("hadoop102:9092,hadoop103:9092,hadoop104:9092")
//                .setGroupId("atgugui")
//                .setTopics("s1")
//                .setStartingOffsets(OffsetsInitializer.latest())
//                .setValueOnlyDeserializer(new DeserializationSchema<Person>() {
//                    //反序列化
//                    @Override
//                    public Person deserialize(byte[] message) throws IOException {
//                        String[] data = new String(message).split(",");
//                        return new Person(Integer.parseInt(data[0]), data[1]);
//                    }
//
//                    //是否结束流
//                    @Override
//                    public boolean isEndOfStream(Person person) {
//                        return false;
//                    }
//
//                    @Override
//                    public TypeInformation<Person> getProducedType() {
//                        return TypeInformation.of(new TypeHint<Person>() {
//                        });
//                    }
//                })
//                .build();
//        DataStreamSource<Person> stream = env.fromSource(
//                source,
//                WatermarkStrategy.noWatermarks(),
//                "kafka-source"
//        );
//
//        stream.print();
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}

//public class KafkaSourceDemo2 {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",20000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(2);
//
//        KafkaSource<Person> source = KafkaSource.<Person>builder()
//                .setBootstrapServers("haoop102:9092,hadoop103:9092")
//                .setTopics("s1")
//                .setStartingOffsets(OffsetsInitializer.latest())
//                .setValueOnlyDeserializer(new DeserializationSchema<Person>() {
//                    //反序列化
//                    @Override
//                    public Person deserialize(byte[] message) throws IOException {
//                        String[] data = new String(message).split(",");
//                        return new Person(Integer.parseInt(data[0]), data[1]);
//                    }
//
//                    //是否结束流
//                    @Override
//                    public boolean isEndOfStream(Person person) {
//                        return false;
//                    }
//
//                    @Override
//                    public TypeInformation<Person> getProducedType() {
//                        //return Types.STRING;
//                        //return TypeInformation.of(Person.class);
//                        return TypeInformation.of(new TypeHint<Person>() {});
//                    }
//                })
//                .build();
//        DataStreamSource<Person> stream = env.fromSource(
//                source,
//                WatermarkStrategy.forMonotonousTimestamps(),
//                "kafka-source"
//        );
//        stream.print();
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}