package cn.doitedu.statistic.entry;

import cn.doitedu.eagle.beans.LogBean;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.time.Duration;
import java.util.Properties;

/***
 * @author hunter.d
 * @qq 657270652
 * @wx haitao-duan
 * @date 2021/3/11
 **/
/**
 * Flink streaming job: consumes app log events from Kafka, parses them into
 * {@link LogBean}s, and counts occurrences per (deviceId, eventId) in
 * 1-minute event-time tumbling windows. Results are currently printed;
 * an HBase sink is planned (see TODO at the bottom).
 */
public class Main {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092");
        props.setProperty("auto.offset.reset", "earliest");
        FlinkKafkaConsumer<String> kafkaSource
                = new FlinkKafkaConsumer<String>("app_log", new SimpleStringSchema(), props);

        // Add the Kafka source: one raw JSON string per log event.
        DataStreamSource<String> logStream = env.addSource(kafkaSource);

        // Parse JSON into LogBean. A single malformed record must not kill the
        // job, so parse failures map to null and are filtered out below.
        SingleOutputStreamOperator<LogBean> beanStream = logStream
                .map(s -> {
                    try {
                        return JSON.parseObject(s, LogBean.class);
                    } catch (Exception e) {
                        // Tolerate corrupt/malformed records; drop them.
                        return null;
                    }
                })
                .returns(LogBean.class)
                .filter(bean -> bean != null);

        // Watermark strategy: event time from LogBean.timeStamp, allowing
        // up to 5 seconds of out-of-orderness.
        WatermarkStrategy<LogBean> logBeanWatermarkStrategy
                = WatermarkStrategy.<LogBean>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                .withTimestampAssigner(new SerializableTimestampAssigner<LogBean>() {
                    @Override
                    public long extractTimestamp(LogBean logBean, long timestamp) {
                        return logBean.getTimeStamp();
                    }
                });
        // Attach the watermark strategy to the parsed stream.
        SingleOutputStreamOperator<LogBean> wtBeanStream
                = beanStream.assignTimestampsAndWatermarks(logBeanWatermarkStrategy);

        // Count occurrences per (deviceId, eventId) per 1-minute window.
        // Each bean becomes (deviceId, eventId, 1) and is keyed by the pair.
        KeyedStream<Tuple3<String, String, Integer>, Tuple2<String, String>> keyed
                = wtBeanStream
                .map(new MapFunction<LogBean, Tuple3<String, String, Integer>>() {
                    @Override
                    public Tuple3<String, String, Integer> map(LogBean value) throws Exception {
                        return Tuple3.of(value.getDeviceId(), value.getEventId(), 1);
                    }
                })
                .keyBy(new KeySelector<Tuple3<String, String, Integer>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple3<String, String, Integer> value) throws Exception {
                        return Tuple2.of(value.f0, value.f1);
                    }
                });

        // Sum the per-record 1s within each 1-minute event-time window.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> res
                = keyed.window(TumblingEventTimeWindows.of(Time.minutes(1)))
                .reduce(new ReduceFunction<Tuple3<String, String, Integer>>() {
                    @Override
                    public Tuple3<String, String, Integer> reduce
                            (Tuple3<String, String, Integer> value1,
                             Tuple3<String, String, Integer> value2
                            ) throws Exception {
                        return Tuple3.of(value1.f0, value1.f1, value1.f2 + value2.f2);
                    }
                });

        // TODO: replace with an HBase sink; for now print results to stdout.
        res.print();

        env.execute();
    }
}
