package day02;

import day01.RichHdfsSourceFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

public class ConnectOprationKeyByPoJoTest {
    public static void main(String[] args) throws Exception {
        // Local mini-cluster with the Flink web UI for easy inspection.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // --- HDFS stream: each line is expected to be "<key>\t<value>". ---
        DataStreamSource<String> ds = env.addSource(new RichHdfsSourceFunction("/user/yeniu/data/country_data1"));
        SingleOutputStreamOperator<Tuple2<Student, String>> ds1 = ds.map(new MapFunction<String, Tuple2<Student, String>>() {
            @Override
            public Tuple2<Student, String> map(String s) throws Exception {
                String[] arr = s.split("\t");
                return Tuple2.of(new Student(arr[0]), arr[1]);
            }
        });

        // --- Kafka stream: each record value is one key string. ---
        Properties properties = new Properties();
        // NOTE(review): the first broker was unreachable, so only s1.hadoop is
        // configured. (The original multi-broker line was commented out and
        // also contained a full-width comma "，", which would have broken the
        // bootstrap-server list anyway.)
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "s1.hadoop:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "group_33"); // consumer group
        properties.setProperty("flink.partition-discovery.interval-millis", "5000");
        FlinkKafkaConsumer010<Student> kafkaSource = new FlinkKafkaConsumer010<Student>("topic_33", new KafkaDeserializationSchema<Student>() {
            @Override
            public boolean isEndOfStream(Student student) {
                return false; // unbounded stream
            }

            @Override
            public Student deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
                // Only the record value is used; the record key is ignored.
                return new Student(new String(consumerRecord.value()));
            }

            @Override
            public TypeInformation<Student> getProducedType() {
                return TypeInformation.of(Student.class);
            }
        }, properties);
        kafkaSource.setStartFromLatest();
        DataStreamSource<Student> ds2 = env.addSource(kafkaSource);

        // Key both streams by the Student POJO so that equal keys are routed to
        // the same parallel subtask; otherwise a lookup built from one stream
        // could miss records of the other stream running in a different slot.
        // keyBy only declares the partitioning; it performs no transformation.
        KeyedStream<Student, Student> keyBykafka = ds2.keyBy(new KeySelector<Student, Student>() {
            @Override
            public Student getKey(Student student) throws Exception {
                return student;
            }
        });

        KeyedStream<Tuple2<Student, String>, Student> keyByhdfs = ds1.keyBy(new KeySelector<Tuple2<Student, String>, Student>() {
            @Override
            public Student getKey(Tuple2<Student, String> studentStringTuple2) throws Exception {
                // Group by the first (key) column of the HDFS line.
                return studentStringTuple2.f0;
            }
        });

        // Stream-to-stream join: remember the HDFS value per key, then emit it
        // for every matching Kafka record.
        // KeyedCoProcessFunction<KEY, IN1 (hdfs), IN2 (kafka), OUT>; unlike
        // CoProcessFunction it additionally exposes ctx.getCurrentKey().
        keyByhdfs.connect(keyBykafka).process(new KeyedCoProcessFunction<Student, Tuple2<Student, String>, Student, String>() {
            // Keyed, checkpointable state instead of the original plain
            // HashMap field: a HashMap lives per operator instance only, is
            // not scoped to the current key, and is lost on failure/restart.
            private transient ValueState<String> hdfsValue;

            @Override
            public void open(Configuration parameters) throws Exception {
                hdfsValue = getRuntimeContext().getState(new ValueStateDescriptor<>("hdfsValue", String.class));
            }

            @Override
            public void processElement1(Tuple2<Student, String> value, Context ctx, Collector<String> out) throws Exception {
                System.out.println("hdfs--->" + value.f0.getLine());
                // State access is implicitly scoped to ctx.getCurrentKey().
                hdfsValue.update(value.f1);
            }

            @Override
            public void processElement2(Student value, Context ctx, Collector<String> out) throws Exception {
                System.out.println("kafka--->" + value.getLine());
                String s = hdfsValue.value();
                out.collect(s == null ? "unknown" : s); // fixed typo: was "unknow"
            }
        }).print();

        env.execute();
    }

    /**
     * Simple POJO wrapping a single text line, used both as payload and as the
     * keyBy key for the two streams above.
     *
     * <p>The public no-arg constructor and the getter/setter pair are required
     * by Flink's POJO type handling; {@code equals}/{@code hashCode} must agree
     * because instances are used as keys.
     *
     * <p>NOTE(review): the class is mutable; mutating {@code line} after an
     * instance has been used as a hash key would break lookups — avoid that.
     */
    public static class Student {
        private String line;

        /** Required by Flink's POJO serializer; leaves {@code line} null. */
        public Student() {

        }

        public Student(String line) {
            this.line = line;
        }

        public String getLine() {
            return line;
        }

        public void setLine(String line) {
            this.line = line;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof Student)) return false;
            Student student = (Student) o;
            // Objects.equals is null-safe: the original getLine().equals(...)
            // threw NPE for instances created via the no-arg constructor.
            return Objects.equals(line, student.line);
        }

        @Override
        public int hashCode() {
            // Consistent with equals; Objects.hash tolerates a null line.
            return Objects.hash(line);
        }

        @Override
        public String toString() {
            return "Student{" +
                    "line='" + line + '\'' +
                    '}';
        }
    }
}
