package day01.transformation;

import day01.RichHdfsSourceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SplitStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;


public class TransformationOperatorTest {

    /**
     * Demonstrates merging two heterogeneous sources (an HDFS file and a Kafka
     * topic) into a single stream with {@code union}, tagging each record with
     * its origin before printing, then executing the job on a local Flink
     * environment with the web UI enabled.
     *
     * <p>Note: the deprecated {@code split}/{@code select} API that used to be
     * shown here (commented out) has been removed; when a stream must be fanned
     * out into multiple named sub-streams, prefer side outputs
     * ({@code OutputTag} + {@code ProcessFunction.Context#output}).
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Local environment with the Flink web UI for inspecting the running job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // HDFS source: custom RichSourceFunction reading the given file path.
        DataStreamSource<String> hdfs = env.addSource(new RichHdfsSourceFunction("/user/data/yeniu/country_data1"));

        // Kafka source configuration.
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "s1.hadoop:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "g_33");
        // Re-scan the topic for newly added partitions every 5 seconds.
        properties.setProperty("flink.partition-discovery.interval-millis", "5000");
        FlinkKafkaConsumer010<String> kafkaConsumer =
                new FlinkKafkaConsumer010<>("topic_33", new SimpleStringSchema(), properties);
        // Always start from the latest offset, ignoring committed group offsets.
        kafkaConsumer.setStartFromLatest();
        DataStreamSource<String> kafka = env.addSource(kafkaConsumer);

        // Tag each record with its origin BEFORE the union. The previous code
        // union-ed first and then guessed the source by checking for a tab
        // character, which mislabels any Kafka message that happens to contain
        // a tab (and any HDFS line that doesn't).
        DataStream<String> taggedHdfs = hdfs.map(value -> "hdfs-->" + value);
        DataStream<String> taggedKafka = kafka.map(value -> "kafka-->" + value);

        // union merges streams of the SAME element type into one stream.
        taggedHdfs.union(taggedKafka).print();

        env.execute();
    }
}
