package day02;

import day01.RichHdfsSourceFunction;
import day02.ConnectOprationKeyByPoJoTest.Student;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * Side-output (OutputTag) demo.
 *
 * <p>Joins an HDFS country dataset with a Kafka stream of {@link Student}
 * records. To make matching records meet in the same subtask, the HDFS side is
 * expanded once per salt bucket while the Kafka side picks a random salt; both
 * sides are then custom-partitioned by the salted key. Joined results are
 * routed into "china" / "others" side outputs based on content.
 */
public class OutputTagTest {

    // Number of salt buckets: the HDFS side replicates each key SALT_COUNT
    // times and the Kafka side draws a salt in [0, SALT_COUNT), so equal keys
    // end up in the same custom partition.
    private static final int SALT_COUNT = 6;

    // OutputTag is Serializable; create each tag once so the process function
    // (writer) and the getSideOutput() calls below (readers) share the same tag.
    private static final OutputTag<String> tag_china = new OutputTag<String>("china", BasicTypeInfo.STRING_TYPE_INFO);
    private static final OutputTag<String> tag_others = new OutputTag<String>("others", BasicTypeInfo.STRING_TYPE_INFO);

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // HDFS source stream: one "key\tvalue" line per country record.
        DataStreamSource<String> ds = env.addSource(new RichHdfsSourceFunction("/user/yeniu/data/country_data1"));

        // Kafka source stream.
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "s1.hadoop:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "group_33"); // consumer group
        properties.setProperty("flink.partition-discovery.interval-millis", "5000");
        FlinkKafkaConsumer010<Student> kafkaSource = new FlinkKafkaConsumer010<Student>("topic_33", new KafkaDeserializationSchema<Student>() {

            @Override
            public TypeInformation<Student> getProducedType() {
                return TypeInformation.of(Student.class);
            }

            @Override
            public boolean isEndOfStream(Student student) {
                return false; // unbounded stream; never ends on a data condition
            }

            @Override
            public Student deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
                // NOTE(review): new String(byte[]) uses the platform default
                // charset — pass StandardCharsets.UTF_8 explicitly if the
                // producer writes UTF-8.
                return new Student(new String(consumerRecord.value()));
            }
        }, properties);
        kafkaSource.setStartFromLatest();
        DataStreamSource<Student> ds2 = env.addSource(kafkaSource);

        // Join strategy: 1. replicate the HDFS side across all salt buckets;
        // 2. add a random salt on the Kafka side; 3. custom-partition both
        // sides by the salted key so matching records land together.

        // HDFS side: emit one salted copy of each record per bucket.
        DataStream<Tuple2<Student, String>> partDs1 = ds.flatMap(new FlatMapFunction<String, Tuple2<Student, String>>() {
            @Override
            public void flatMap(String value, Collector<Tuple2<Student, String>> out) throws Exception {
                String[] arr = value.split("\t");
                for (int i = 0; i < SALT_COUNT; i++) {
                    String key = i + "_" + arr[0];
                    out.collect(Tuple2.of(new Student(key), arr[1]));
                }
            }
        }).partitionCustom(new Partitioner<Student>() {

            @Override
            public int partition(Student student, int numPartitions) {
                // BUGFIX: hashCode() may be negative and Java's % keeps the
                // sign, which would yield an illegal negative partition index.
                // Math.floorMod always returns a value in [0, SALT_COUNT).
                return Math.floorMod(student.hashCode(), SALT_COUNT);
            }
        }, new KeySelector<Tuple2<Student, String>, Student>() { // key = salted Student
            @Override
            public Student getKey(Tuple2<Student, String> value) throws Exception {
                return value.f0;
            }
        });

        // Kafka side: prefix each record with a random salt bucket.
        DataStream<Student> partDs2 = ds2.map(new MapFunction<Student, Student>() {
            Random r = new Random();

            @Override
            public Student map(Student student) throws Exception {
                return new Student(r.nextInt(SALT_COUNT) + "_" + student.getLine());
            }
        }).partitionCustom(new Partitioner<Student>() {
            @Override
            public int partition(Student student, int i) {
                // Same fix as above: keep the partition index non-negative.
                return Math.floorMod(student.hashCode(), SALT_COUNT);
            }
        }, new KeySelector<Student, Student>() {
            @Override
            public Student getKey(Student value) throws Exception {
                return value;
            }
        });

        // Join the two partitioned streams; matched results also go to side outputs.
        SingleOutputStreamOperator<String> fds = partDs1.connect(partDs2).process(new CoProcessFunction<Tuple2<Student, String>, Student, String>() {
            // Per-subtask lookup table built from the HDFS side.
            // NOTE(review): lookup relies on Student implementing
            // equals/hashCode over its line content — verify in
            // ConnectOprationKeyByPoJoTest.Student. The map is unbounded and
            // not checkpointed; acceptable for a demo only.
            Map<Student, String> map = new HashMap<>();

            @Override
            public void processElement1(Tuple2<Student, String> value, Context ctx, Collector<String> out) throws Exception {
                System.out.println(getRuntimeContext().getIndexOfThisSubtask() + "--->hdfs" + value.f0.getLine() + "," + value.f1);
                map.put(value.f0, value.f1);
            }

            @Override
            public void processElement2(Student value, Context ctx, Collector<String> out) throws Exception {
                String s = map.get(value);
                // Strip the "<salt>_" prefix before printing the key.
                // BUGFIX: message typo "unknow" -> "unknown".
                String s1 = s == null ? "unknown" : value.getLine().split("_")[1] + "--->" + s;
                out.collect(getRuntimeContext().getIndexOfThisSubtask() + "--->" + s1);

                // Route by content: records mentioning "中国" (China) go to the
                // china tag, everything else to others.
                if (s1.contains("中国")) {
                    ctx.output(tag_china, getRuntimeContext().getIndexOfThisSubtask() + "--->" + s1);
                } else {
                    ctx.output(tag_others, getRuntimeContext().getIndexOfThisSubtask() + "--->" + s1);
                }
            }
        });

        fds.print("normal");
        fds.getSideOutput(tag_china).print("china");
        fds.getSideOutput(tag_others).print("others");

        env.execute();
    }

}

/**
 * Earlier reference version of the CoProcessFunction, before the Student POJO
 * key and side outputs were introduced; kept for comparison:
 *
 * new CoProcessFunction<Tuple2<String, String>, String, String>() {
 *             Map<String, String> map = new HashMap<>();
 *             @Override
 *             public void processElement1(Tuple2<String, String> value, Context ctx, Collector<String> out) throws Exception {
 *                 System.out.println(getRuntimeContext().getIndexOfThisSubtask()+"--->hdfs"+value.f0+","+value.f1);
 *                 map.put(value.f0,value.f1);
 *             }
 *
 *             @Override
 *             public void processElement2(String value, Context ctx, Collector<String> out) throws Exception {
 *                 String s = map.get(value);
 *                 String s1 = s == null ? "unknow" :value.split("_")[1] +"--->"+ s;
 *                 out.collect(s1);
 *
 *             }
 *         }
 */