package log;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer08;
import org.apache.flink.streaming.connectors.wikiedits.WikipediaEditEvent;
import org.apache.flink.streaming.connectors.wikiedits.WikipediaEditsSource;

/**
 * Monitors the Wikipedia edit log.
 * <p>
 * Pipeline:
 * <ol>
 *   <li>Obtain the stream execution environment.</li>
 *   <li>Attach the {@link WikipediaEditsSource} to the environment.</li>
 *   <li>Key the stream by the editing user's name.</li>
 *   <li>Apply a tumbling window of 5 seconds.</li>
 *   <li>Aggregate the events of each user within the window into a
 *       {@code Tuple2<user, accumulated byteDiff>}.</li>
 *   <li>Map each tuple to its String representation.</li>
 *   <li>Sink the strings to Kafka, topic {@code wiki-result}.</li>
 *   <li>Execute the job.</li>
 * </ol>
 * {@code keyBy(...)} partitions the stream so that all events with the same
 * key are processed by the same task.
 *
 * @date: 2020-03-20 15:20
 * @author: Allen
 * @version: 0.0.4-snapshot
 * @Email: allenZyhang@163.com
 * @since: JDK 1.8
 **/
public class WikipediaAnalysis {
    
    public static void main(String[] args) throws Exception {
        
        // 1. Create the stream execution environment (local or cluster,
        //    depending on how the job is launched).
        StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
        
        // 2. Source: attach the WikipediaEditsSource, which emits one
        //    WikipediaEditEvent per edit on the Wikipedia IRC feed.
        DataStream<WikipediaEditEvent> edits = see.addSource(new WikipediaEditsSource());
        
        // 3. Partition the stream by the editing user's name, so all edits
        //    by the same user go to the same task.
        KeyedStream<WikipediaEditEvent, String> keyedEdits = edits.keyBy(new KeySelector<WikipediaEditEvent, String>() {
            @Override
            public String getKey(final WikipediaEditEvent event) throws Exception {
                return event.getUser();
            }
        });
        
        DataStream<Tuple2<String, Long>> result = keyedEdits
            // 4. Tumbling window of 5 seconds.
            .timeWindow(Time.seconds(5))
            // 5. Aggregate the window's events per user into
            //    Tuple2<user, accumulated byteDiff>.
            .aggregate(new AggregateFunction<WikipediaEditEvent, Tuple2<String, Long>, Tuple2<String, Long>>() {
                @Override
                public Tuple2<String, Long> createAccumulator() {
                    // Empty user name marks an accumulator that has not seen
                    // any event yet; it is filled in by the first add().
                    return new Tuple2<>("", 0L);
                }
                
                @Override
                public Tuple2<String, Long> add(final WikipediaEditEvent event, final Tuple2<String, Long> accumulator) {
                    // The stream is keyed by user, so every event in this
                    // accumulator carries the same user name.
                    return new Tuple2<>(event.getUser(), event.getByteDiff() + accumulator.f1);
                }
                
                @Override
                public Tuple2<String, Long> getResult(final Tuple2<String, Long> accumulator) {
                    return accumulator;
                }
                
                @Override
                public Tuple2<String, Long> merge(final Tuple2<String, Long> a, final Tuple2<String, Long> b) {
                    // BUG FIX: the original returned new Tuple2<>(a.f0 + b.f0, ...),
                    // concatenating the user name with itself (e.g. "AliceAlice")
                    // whenever Flink merged two accumulators of the same key.
                    // Keep one (non-empty) user name and sum only the byte diffs.
                    return new Tuple2<>(a.f0.isEmpty() ? b.f0 : a.f0, a.f1 + b.f1);
                }
            });
        //6. Map each Tuple2 to its String representation.
        result.map(new MapFunction<Tuple2<String, Long>, String>() {
            @Override
            public String map(final Tuple2<String, Long> stringLongTuple2) {
                return stringLongTuple2.toString();
            }
            //7. Sink the data to Kafka, topic "wiki-result".
        }).addSink(new FlinkKafkaProducer08<String>("localhost:9092", "wiki-result", new SimpleStringSchema()));
        result.print();
        
        //8. Execute the job (sources start producing only after this call).
        see.execute();
    }
}
