package com.demo.flink;

import com.plat.paas.kafka.Message;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * @author : pengjie
 * @PackageName : com.demo.flink
 * @Description : Flink streaming demo that consumes messages from Kafka,
 *                maps them to UserBehavior events, and logs the top-3
 *                hottest items per sliding event-time window.
 * @email : 627799251@qq.com
 * @Date : 2019/1/30 16:41
 */
public class KafkaConsumer {

    // Must be static (and final) so it is usable from the static main method;
    // the original instance field could never be referenced there.
    private static final Logger logger = LoggerFactory.getLogger(KafkaConsumer.class);

    /**
     * Entry point. Consumes messages from the {@code flink-test2} Kafka topic,
     * maps each message to a synthetic {@link UserBehavior} event, computes the
     * top-3 most-viewed items over a sliding 60-minute/5-second event-time
     * window, and logs each result line via SLF4J.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Set the time characteristic BEFORE building the topology so every
        // operator below is created under event-time semantics (the original
        // code set this only after the source and map were already built).
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.1.231:9092");
        properties.setProperty("group.id", "flink");
        DataStream<Message> dataSource = env.addSource(
                new FlinkKafkaConsumer<>("flink-test2", new KafkaMessageSchema(), properties));

        // Map each Kafka message to a UserBehavior event.
        // NOTE(review): the message payload is ignored and all fields are filled
        // with random values — presumably placeholder/test logic; confirm intent.
        DataStream<UserBehavior> ubSource = dataSource.map(new MapFunction<Message, UserBehavior>() {
            @Override
            public UserBehavior map(Message message) throws Exception {
                UserBehavior behavior = new UserBehavior();
                behavior.userId = (long) (Math.random() * 1000000);
                behavior.itemId = (long) (Math.random() * 100);
                behavior.categoryId = (int) (Math.random() * 100);
                behavior.behavior = "pv";
                // Already in milliseconds — no seconds-to-millis conversion needed.
                behavior.timestamp = System.currentTimeMillis();
                return behavior;
            }
        });

        DataStream<UserBehavior> timedData = ubSource.assignTimestampsAndWatermarks(new AscendingTimestampExtractor<UserBehavior>() {
            @Override
            public long extractAscendingTimestamp(UserBehavior userBehavior) {
                // Timestamps come from System.currentTimeMillis(), so they are
                // already in milliseconds and ascending per subtask.
                return userBehavior.timestamp;
            }
        });

        // Keep only page-view ("pv") events. Constant-first equals avoids an
        // NPE if the behavior field is ever null.
        DataStream<UserBehavior> pvData = timedData.filter(new FilterFunction<UserBehavior>() {
            @Override
            public boolean filter(UserBehavior userBehavior) throws Exception {
                return "pv".equals(userBehavior.behavior);
            }
        });

        // keyBy("itemId")                        : group events per item
        // timeWindow(60 min, 5 s)                : sliding window per item
        // aggregate(CountAgg, WindowResultFn)    : incremental pre-aggregation keeps
        //   window state small, unlike apply(WindowFunction) which buffers every element.
        DataStream<HotItems.ItemViewCount> windowedData = pvData
                .keyBy("itemId")
                .timeWindow(Time.minutes(60), Time.seconds(5))
                .aggregate(new HotItems.CountAgg(), new HotItems.WindowResultFunction());

        // Per window-end, rank items by view count and emit the top 3.
        DataStream<String> topItems = windowedData
                .keyBy("windowEnd")
                .process(new HotItems.TopNHotItems(3));

        topItems.addSink(new SinkFunction<String>() {

            private final Logger logger = LoggerFactory.getLogger(this.getClass());

            @Override
            public void invoke(String value, Context context) throws Exception {
                // Parameterized logging: avoids string concatenation when the
                // log level is disabled.
                logger.info("topItems result = {}", value);
            }
        });

        env.execute("KafkaConsumerDemo");
    }
}
