package com.atguigu.chapter08;

import com.atguigu.Bean.ApacheLog;
import com.atguigu.Bean.PageCount;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.TreeSet;

/**
 * @ClassName: Flink01_PV_Project
 * @Description:
 * @Author: kele
 * @Date: 2021/4/9 18:52
 *
 *
 * 需求：
 * 每隔5秒，输出最近10分钟内访问量最多的前N个URL。
 *
 * 1、获取数据，封装为ApacheLog类型
 * 2、根据url进行keyby
 * 3、开滑动窗口（窗口长度10min，滑动步长5s）   --开窗只跟key相关
 * 4、统计每个url的点击量
 *                --注意：每个url一个窗口，无法对同一窗口内的不同url进行汇总，
 *                --所以之后要将同一窗口（相同windowEnd）的统计结果再进行keyby
 *                --统计的时候需要agg函数，并在窗口函数中附带窗口的结束时间
 * 5、再按照每个url所在窗口的end时间进行keyby
 * 6、用TreeSet状态 + 事件时间定时器，输出点击量排前N的url
 *
 *
 **/
/**
 * Hot-page Top-N job: every 5 seconds, emit the N most-visited URLs of the
 * last 10 minutes (sliding event-time window), read from an Apache access log.
 *
 * Pipeline: parse log lines -> assign watermarks -> keyBy(url) -> sliding window
 * -> incremental count -> keyBy(windowEnd) -> keyed Top-N via TreeSet state + timer.
 */
public class Flink04_HotPage_Project {

    /** How many top URLs to emit per window. */
    private static final int TOP_N = 5;

    public static void main(String[] args) {

        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);

        env.setParallelism(2);

        env
                .readTextFile("input/apache.log")
                .map(line -> {
                    String[] data = line.split(" ");
                    // SimpleDateFormat is NOT thread-safe; creating one per record is the
                    // simple safe choice here (parsing cost is negligible for a demo job).
                    SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy:HH:mm:ss");
                    return new ApacheLog(data[0],
                            df.parse(data[3]).getTime(),
                            data[5],
                            data[6]);
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<ApacheLog>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withTimestampAssigner(new SerializableTimestampAssigner<ApacheLog>() {
                            @Override
                            public long extractTimestamp(ApacheLog element, long recordTimestamp) {
                                return element.getEventTime();
                            }
                        })
                )
                .keyBy(ApacheLog::getUrl)
                .window(SlidingEventTimeWindows.of(Time.minutes(10), Time.seconds(5)))
                // Incremental count per (url, window); the window function attaches the
                // window end so downstream can regroup all urls of the same window.
                .aggregate(new AggregateFunction<ApacheLog, Long, Long>() {
                               @Override
                               public Long createAccumulator() {
                                   return 0L;
                               }

                               @Override
                               public Long add(ApacheLog value, Long accumulator) {
                                   return accumulator + 1;
                               }

                               @Override
                               public Long getResult(Long accumulator) {
                                   return accumulator;
                               }

                               @Override
                               public Long merge(Long a, Long b) {
                                   return a + b;
                               }

                           },
                        new ProcessWindowFunction<Long, PageCount, String, TimeWindow>() {
                            @Override
                            public void process(String key,
                                                Context context,
                                                Iterable<Long> elements,
                                                Collector<PageCount> out) throws Exception {

                                // elements holds exactly one value: the pre-aggregated count.
                                out.collect(new PageCount(key, elements.iterator().next(), context.window().getEnd()));
                            }
                        }

                )
                .keyBy(PageCount::getWindowEnd)
                .process(new KeyedProcessFunction<Long, PageCount, String>() {

                    // Per-windowEnd Top-N buffer, ordered by count descending.
                    private ValueState<TreeSet> treeState;

                    @Override
                    public void open(Configuration parameters) throws Exception {

                        treeState = getRuntimeContext().getState(
                                new ValueStateDescriptor<TreeSet>("TreeState", TreeSet.class)
                        );
                    }

                    @Override
                    @SuppressWarnings("unchecked")
                    public void processElement(PageCount value, Context ctx, Collector<String> out) throws Exception {

                        TreeSet<PageCount> top = treeState.value();
                        if (top == null) {
                            // First element for this windowEnd: create the ordered buffer and
                            // register a timer 1 ms after the window end, i.e. it fires once the
                            // watermark guarantees all results of this window have arrived.
                            top = new TreeSet<>((o1, o2) -> {
                                // NOTE(review): assumes PageCount exposes getCount()/getUrl()
                                // bean getters — confirm against com.atguigu.Bean.PageCount.
                                int cmp = Long.compare(o2.getCount(), o1.getCount());
                                // Never return 0 for distinct elements, otherwise TreeSet
                                // silently drops urls whose counts happen to be equal.
                                return cmp == 0 ? 1 : cmp;
                            });
                            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 1L);
                        }

                        top.add(value);
                        // Keep state bounded: only the current Top-N survive.
                        if (top.size() > TOP_N) {
                            top.pollLast();
                        }
                        treeState.update(top);
                    }

                    @Override
                    @SuppressWarnings("unchecked")
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {

                        StringBuilder sb = new StringBuilder();
                        sb.append("========== window end: ").append(timestamp - 1L).append(" ==========\n");

                        int rank = 1;
                        for (Object o : treeState.value()) {
                            PageCount pc = (PageCount) o;
                            sb.append("Top ").append(rank++)
                                    .append("  url=").append(pc.getUrl())
                                    .append("  count=").append(pc.getCount())
                                    .append('\n');
                        }

                        // This window's result is final; release the state.
                        treeState.clear();
                        out.collect(sb.toString());
                    }
                })
                .print();

        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
