package com.yc.bigdata.flink.demo;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * Demo: consume string messages from a Kafka topic with Flink using
 * event-time semantics and checkpoint-based offset committing.
 *
 * @author YuanChilde
 * @version 1.0
 * @since 2020-02-11
 *
 * Modification History:
 * Date                Author         Version     Description
 * -----------------------------------------------------------------
 * 2020-02-11 10:44    YuanChilde     1.0         Initial creation
 */
public class KafkaConsumer {

    /**
     * Builds and runs a minimal Flink streaming job: reads strings from the
     * {@code test-demo} Kafka topic, assigns event-time timestamps/watermarks,
     * and prints each record to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5 seconds so Kafka offsets are committed as part of
        // Flink's fault-tolerance snapshots.
        env.enableCheckpointing(5000);

        // Time semantics available in Flink:
        //   ProcessingTime - uses the operator machine's system clock.
        //   IngestionTime  - timestamp assigned when the record enters the Flink data flow.
        //   EventTime      - timestamp carried by the record itself; the application must
        //                    supply an extractor (see assignTimestampsAndWatermarks below).
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "dev.dongbaosoft.com:9092");
        properties.setProperty("group.id", "flink-group");

        // Parameterize with <String> (the original used the raw type) so that
        // downstream operators are type-safe.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("test-demo", new SimpleStringSchema(), properties);
        // Extract event-time timestamps and emit watermarks from the Kafka records.
        // NOTE(review): MessageWaterEmitter is a project-local class; its exact
        // watermark strategy is defined elsewhere in this project.
        consumer.assignTimestampsAndWatermarks(new MessageWaterEmitter());
        // Uncomment to replay the topic from the beginning instead of the committed offset:
        // consumer.setStartFromEarliest();

        // The consumer must be attached to the environment as a source; without this,
        // env.execute() fails with "No operators defined in streaming topology".
        // print() gives the demo a trivial sink so the job graph is complete.
        env.addSource(consumer).print();

        env.execute("Kafka Test");
    }
}
