package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.Func.MyWindowFunction;

import com.atguigu.bean.UserLoginBean;
import com.atguigu.util.ClickHouseUtil_hjy;
import com.atguigu.util.DateFormatUtil_hjy;
import com.atguigu.util.MyKafkaUtil_hjy;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.TimestampAssignerSupplier;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;


import java.time.Duration;

/**
 * @author hjy
 * @create 2023/3/24 13:55
 */
//数据流：app --> file --> Flume --> Kafka --> FlinkApp --> Kafka --> FlinkApp --> ClickHouse
//程序：mock --> file --> f1.sh --> Kafka(ZK) --> BaseLogApp --> Kafka(ZK) --> Dws_UserUserLoginWindow --> ClickHouse(ZK)
/**
 * DWS job: counts unique login users (uuCt) and returning users (backCt, absent
 * for more than 7 days) per 10-second event-time tumbling window, then writes
 * the aggregates to ClickHouse.
 */
public class Dws_UserUserLoginWindow {
    public static void main(String[] args) throws Exception {
        //todo 1 Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://47.102.112.46:8020/gmall-flink/check/baseLogApp");
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        env.setStateBackend(new HashMapStateBackend());
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        //todo 2 Read from the page-log topic (dwd_traffic_page_log)
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_user_user_login_window";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil_hjy.getKafkaConsumer(topic, groupId));

        //todo 3 Parse to JSONObject and assign event-time watermarks (5s out-of-orderness)
        SingleOutputStreamOperator<JSONObject> jsonObjWithWM = kafkaDS.map(JSONObject::parseObject)
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withTimestampAssigner(TimestampAssignerSupplier.of(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // "ts" is assumed to be epoch milliseconds — TODO confirm with upstream producer
                                return element.getLong("ts");
                            }
                        })));

        //todo 4 Key by uid so per-user state tracks each user's last login date
        KeyedStream<JSONObject, String> keyedStream = jsonObjWithWM.keyBy(value -> value.getJSONObject("common").getString("uid"));

        //todo 5 Convert to UserLoginBean, emitting at most one record per user per day
        SingleOutputStreamOperator<UserLoginBean> userLoginBeanDS = keyedStream.flatMap(new RichFlatMapFunction<JSONObject, UserLoginBean>() {
            // Last login date ("yyyy-MM-dd") of the current key; null for first-ever visit.
            private ValueState<String> lastDtState;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastDtState = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_dt", String.class));
            }

            @Override
            public void flatMap(JSONObject value, Collector<UserLoginBean> out) throws Exception {
                String lastDt = lastDtState.value();
                Long ts = value.getLong("ts");
                long uv = 0L;
                long back = 0L;
                String curtDt = DateFormatUtil_hjy.toDate(ts);
                if (lastDt == null) {
                    // First visit ever for this uid: count as unique user.
                    uv = 1L;
                    lastDtState.update(curtDt);
                } else if (!lastDt.equals(curtDt)) {
                    // First visit today: count as unique user, and check for a
                    // returning user (gap of more than 7 days since last login).
                    uv = 1L;
                    // NOTE(review): assumes toTs(..., false) returns epoch milliseconds,
                    // hence the 24 * 60 * 60 * 1000L divisor — verify against DateFormatUtil_hjy.
                    if ((DateFormatUtil_hjy.toTs(curtDt, false) - DateFormatUtil_hjy.toTs(lastDt, false)) / (24 * 60 * 60 * 1000L) > 7) {
                        back = 1L;
                    }
                    lastDtState.update(curtDt);
                }
                // Only emit on a user's first visit of the day; repeat visits add nothing.
                if (uv == 1L) {
                    out.collect(new UserLoginBean("",
                            "",
                            back,
                            uv,
                            null));
                }
            }
        });

        //todo 6 Tumbling 10s event-time window, incremental sum + window-metadata decoration
        SingleOutputStreamOperator<UserLoginBean> reduceDS = userLoginBeanDS.windowAll(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<UserLoginBean>() {
                    @Override
                    public UserLoginBean reduce(UserLoginBean value1, UserLoginBean value2) throws Exception {
                        value1.setBackCt(value1.getBackCt() + value2.getBackCt());
                        value1.setUuCt(value1.getUuCt() + value2.getUuCt());
                        return value1;
                    }
                }, new AllWindowFunction<UserLoginBean, UserLoginBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<UserLoginBean> values, Collector<UserLoginBean> out) throws Exception {
                        // The reduce pre-aggregation guarantees exactly one element here.
                        UserLoginBean next = values.iterator().next();
                        out.collect(MyWindowFunction.getWindow(next, window));
                    }
                });

        //todo 7 Sink to ClickHouse
        reduceDS.print("reduceDS>>>>>>>>>>>>>>");
        reduceDS.addSink(ClickHouseUtil_hjy.getClickHouseSink("insert into dws_user_user_login_window values(?,?,?,?,?)"));

        //todo 8 Launch the job
        env.execute();

    }
}
