package que.app.dwd.log;

import com.alibaba.fastjson.JSONObject;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import que.app.AppV1;
import que.common.Word;
import que.util.AtguiguUtil;
import que.util.FlinkSinkUtil;

import java.time.Duration;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Unique-visitor (UV) transaction fact table: keeps only the first page-view
 * record per user per day.
 *
 * <p>Reads page-view logs from Kafka (topic {@code Word.TOPIC_DWD_TRAFFIC_PAGE}),
 * keys the stream by {@code common.uid}, windows it with 5-second event-time
 * tumbling windows, and for each user emits exactly one record per day — the
 * record with the smallest event timestamp in the user's first window of that
 * day — to {@code Word.TOPIC_DWD_TRAFFIC_UV_DETAIL}. Daily dedup is tracked
 * via keyed {@link ValueState}.
 *
 * @author Naruto
 * @Date 2022/08/17 18:40
 */
public class DwdTrafficUniqueVisitorDetail extends AppV1 {
    public static void main(String[] args) {
        // Bootstrap the job: port 2003, parallelism 2, job/consumer-group id,
        // and the Kafka source topic holding the page-view logs.
        new DwdTrafficUniqueVisitorDetail().init(
                2003,
                2,
                "DwdTrafficUniqueVisitorDetail",
                Word.TOPIC_DWD_TRAFFIC_PAGE
        );
    }

    /**
     * Builds the stream topology: parse each line as JSON, assign event-time
     * watermarks (3 s bounded out-of-orderness, timestamp from the {@code ts}
     * field), key by {@code common.uid}, apply a 5 s tumbling event-time
     * window, and keep only each user's first record of the day.
     *
     * @param env    the Flink execution environment (provided by {@code AppV1})
     * @param stream raw JSON page-view log lines consumed from Kafka
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        stream
                .map(JSONObject::parseObject)
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner((json, ts) -> json.getLong("ts"))
                )
                // NOTE(review): uid may be null for anonymous users; Flink rejects
                // null keys at runtime — confirm upstream guarantees a non-null uid.
                .keyBy(json -> json.getJSONObject("common").getString("uid"))
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .process(new ProcessWindowFunction<JSONObject, String, String, TimeWindow>() {

                    // Last date (as produced by AtguiguUtil.toDate) for which this
                    // uid already emitted a record; null until the first window fires.
                    private ValueState<String> viewDateState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        viewDateState = getRuntimeContext().getState(
                                new ValueStateDescriptor<String>("viewDateState", String.class));
                    }

                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<JSONObject> it,
                                        Collector<String> out) throws Exception {
                        // Date of the window start serves as "today" for the dedup check.
                        String today = AtguiguUtil.toDate(ctx.window().getStart());
                        String lastEmittedDate = viewDateState.value();

                        // Dates differ => this is the user's first window of the day:
                        // emit the earliest record in it, then mark the day as done.
                        if (!today.equals(lastEmittedDate)) {
                            List<JSONObject> list = AtguiguUtil.toList(it);
                            // Ascending ts comparator + Collections.min => the record
                            // with the smallest event timestamp in the window.
                            JSONObject earliest = Collections.min(list,
                                    (o1, o2) -> Long.compare(o1.getLong("ts"), o2.getLong("ts")));

                            out.collect(earliest.toJSONString());

                            viewDateState.update(today); // remember the day was emitted
                        }
                    }
                })
                .addSink(FlinkSinkUtil.getKafkaSink(Word.TOPIC_DWD_TRAFFIC_UV_DETAIL));
    }
}
/*
DAU

dwd去重

写出每个用户的当天的第一条明细数据

数据源:
    启动日志
        可以, 但是, 数据量可能偏小

        只有app有

    页面
        只要找到第一个页面记录


如何找到第一个访问记录?
    使用状态

    如果考虑乱序, 应该找到第一个窗口, 窗口内的时间戳最小的那个
 */