package cn.doitedu.olap_agg;

import cn.doitedu.beans.PageStaylong;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/5/8
 * @Desc: 学大数据，上多易教育
 * 访问时长分析，olap聚合表计算
 **/
public class Job02_PageStaylongOlapAggregate {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Source: read the DWD-layer topic (dimension-widened user action log) from Kafka.
        KafkaSource<String> source = KafkaSource
                .<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setTopics("dwd-user-action-log")
                .setGroupId("ggg")
                .setClientIdPrefix("ccc")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Event time is taken from the "action_time" field of the JSON payload.
        DataStreamSource<String> sourceStream
                = env.fromSource(source,
                WatermarkStrategy
                        .<String>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                            @Override
                            public long extractTimestamp(String element, long recordTimestamp) {
                                JSONObject jsonObject = JSON.parseObject(element);
                                Long actionTime = jsonObject.getLong("action_time");
                                // FIX: guard against a missing "action_time" field; the original
                                // unboxed a possibly-null Long, which would NPE and fail the whole
                                // job on a single malformed record. Fall back to the record's own
                                // timestamp in that case.
                                return actionTime == null ? recordTimestamp : actionTime;
                            }
                        }), "s1");

        // Parse each JSON record into the PageStaylong bean.
        SingleOutputStreamOperator<PageStaylong> beanStream
                = sourceStream.map(json -> JSON.parseObject(json, PageStaylong.class));

        // keyBy(user_id, session_id): one state entry per user session, holding the
        // currently-open page of that session.
        SingleOutputStreamOperator<PageStaylong> resultStream = beanStream
                .keyBy(
                        bean -> Tuple2.of(bean.getUser_id(), bean.getSession_id()),
                        TypeInformation.of(new TypeHint<Tuple2<Long, String>>() {}))
                .process(new KeyedProcessFunction<Tuple2<Long, String>, PageStaylong, PageStaylong>() {

                    // Bean describing the page currently open in this (user, session).
                    ValueState<PageStaylong> state;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<PageStaylong> desc = new ValueStateDescriptor<>("st1", PageStaylong.class);
                        // Abandoned sessions expire after 2h of inactivity; expired entries
                        // are never returned, so a stale session looks like a fresh one.
                        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(2))
                                .updateTtlOnReadAndWrite()
                                .neverReturnExpired()
                                .build();

                        desc.enableTimeToLive(ttlConfig);

                        state = getRuntimeContext().getState(desc);
                    }

                    @Override
                    public void processElement(PageStaylong bean, KeyedProcessFunction<Tuple2<Long, String>, PageStaylong, PageStaylong>.Context ctx, Collector<PageStaylong> out) throws Exception {

                        PageStaylong stateBean = state.value();
                        boolean freshState = (stateBean == null);

                        /*
                         * Abnormal sequence: an event arrived before any page_load for this
                         * session (or the state expired). Bootstrap the "current page" record
                         * from this very event.
                         */
                        if (freshState) {
                            bean.setUrl(bean.getProperties().get("url"));
                            bean.setPage_start_time(bean.getAction_time());

                            state.update(bean);
                        }

                        /*
                         * Regular handling, by event type.
                         */
                        String eventId = bean.getEvent_id();

                        if ("page_load".equals(eventId)) {

                            // FIX: only emit the previous page's virtual end event when a previous
                            // page actually exists. The original also emitted it when the state had
                            // just been bootstrapped from this same page_load event, producing a
                            // duplicate output record for the same page view.
                            if (!freshState) {
                                // Stamp the previous page with this load time and emit it as a
                                // virtual "page ended" event.
                                PageStaylong stateValue = state.value();
                                stateValue.setAction_time(bean.getAction_time());
                                out.collect(stateValue);
                            }

                            // Start tracking the newly loaded page.
                            bean.setPage_start_time(bean.getAction_time());
                            bean.setUrl(bean.getProperties().get("url"));

                            // Persist the new page as the current one, and emit it.
                            state.update(bean);

                            out.collect(bean);

                        } else if ("wake_up".equals(eventId)) {

                            PageStaylong stateValue = state.value();

                            // A wake_up restarts the stay-long clock of the current page:
                            // both page_start_time and action_time become the wake_up time.
                            stateValue.setPage_start_time(bean.getAction_time());
                            stateValue.setAction_time(bean.getAction_time());

                            // FIX: persist the mutation. The original relied on mutating the
                            // heap-state object in place, which is silently lost with a
                            // RocksDB state backend.
                            state.update(stateValue);

                            out.collect(stateValue);

                        } else {

                            PageStaylong stateValue = state.value();

                            // Any other event just extends the current page's last-seen time.
                            stateValue.setAction_time(bean.getAction_time());

                            // FIX: persist the mutation (same RocksDB concern as above).
                            state.update(stateValue);

                            out.collect(stateValue);

                        }

                        // app_close explicitly ends the session: drop its state.
                        if ("app_close".equals(eventId)) {
                            state.clear();
                        }
                    }
                })
                // Drop records that never got a url (should not happen after bootstrapping,
                // kept as a safety net).
                .filter(bean -> bean.getUrl() != null);

        // DataStream -> Table, with an event-time attribute derived from action_time.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);
        tenv.createTemporaryView("res", resultStream,
                Schema.newBuilder()
                        .column("user_id", DataTypes.BIGINT())
                        .column("session_id",DataTypes.STRING())
                        .column("event_id",DataTypes.STRING())
                        .column("action_time",DataTypes.BIGINT())
                        .column("province",DataTypes.STRING())
                        .column("city",DataTypes.STRING())
                        .column("region",DataTypes.STRING())
                        .column("url",DataTypes.STRING())
                        .column("page_start_time",DataTypes.BIGINT())
                        .columnByExpression("rt","to_timestamp_ltz(action_time,3)")
                        .watermark("rt","rt - interval '0' second ")
                        .build()
        );

        // Sink table mapped onto the Doris aggregate table dws.page_staylong_olap_agg.
        tenv.executeSql(
                        " create table doris_sink(                                  "+
                        "        dt date,                                           "+
                        "        user_id bigint,                                    "+
                        "        session_id string,                                 "+
                        "        province string,                                   "+
                        "        city string,                                       "+
                        "        region string,                                     "+
                        "        url string,                                        "+
                        "        page_start_time bigint,                            "+
                        "        timelong  bigint                                   "+
                        " ) WITH (                                                  "+
                        "    'connector' = 'doris',                                 "+
                        "    'fenodes' = 'doitedu:8030',                            "+
                        "    'table.identifier' = 'dws.page_staylong_olap_agg',     "+
                        "    'username' = 'root',                                   "+
                        "    'password' = 'root',                                   "+
                        "    'sink.label-prefix' = 'doris_label-47-02'              "+
                        " )                                                         "
        );

        // Per (page visit) stay-long: latest action_time minus the page's start time,
        // pre-aggregated in 5-minute tumbling windows before writing to Doris.
        tenv.executeSql(
                        " insert into doris_sink select                                  "+
                        "    to_date(from_unixtime(page_start_time/1000,'yyyy-MM-dd')) as dt, "+
                        "     user_id,                                             "+
                        "     session_id,                                          "+
                        "     province,                                            "+
                        "     city,                                                "+
                        "     region,                                              "+
                        "     url,                                                 "+
                        "     page_start_time,                                     "+
                        "     max(action_time) - page_start_time as timelong       "+
                        " from table(                                              "+
                        "    tumble(table res,descriptor(rt),interval '5' minute)  "+
                        " )                                                        "+
                        " group by                                                 "+
                        "     window_start,                                        "+
                        "     window_end,                                          "+
                        "     user_id,                                             "+
                        "     session_id,                                          "+
                        "     province,                                            "+
                        "     city,                                                "+
                        "     region,                                              "+
                        "     url,                                                 "+
                        "     page_start_time                                      "
        ).print();

        // NOTE(review): the executeSql insert above already submits the streaming job
        // (including the DataStream pipeline the view is built on). This extra execute()
        // looks like it may submit a second, redundant topology reading the same Kafka
        // group — confirm whether it is actually needed.
        env.execute("Job02_PageStaylongOlapAggregate");
    }
}
