package cn.doitedu.etl;

import cn.doitedu.pojo.UserEvent;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import javax.jdo.annotations.Embedded;

public class Job03_PageAccessTime_Olap_Agg {

    /**
     * Page-access-time aggregation job.
     *
     * <p>Reads DWD-layer user events from Kafka topic {@code dwd-events}, splits each
     * session (keyed by {@code session_id}) into "page heartbeat" records that carry the
     * current page's url and its {@code page_start_time}, then aggregates them with a
     * 1-minute tumbling window in the Table API and writes one row per page visit
     * (dimensions, page, start/end time) into a Doris table.
     *
     * <p>Session-splitting rules (state = the event anchoring the page being viewed):
     * <ul>
     *   <li>first event of a session — becomes the page anchor and is emitted as-is;</li>
     *   <li>{@code page_load} — emit a virtual "page end" event for the previous page
     *       (previous page's fields, current action_time), then anchor the new page;</li>
     *   <li>{@code wakeup} — app returned to foreground: re-anchor the current page's
     *       start time at the wakeup time;</li>
     *   <li>any other event — emit the anchored page event with the current
     *       action_time (a heartbeat that extends the page's end time);</li>
     *   <li>{@code app_close} — session is over, clear the state.</li>
     * </ul>
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // NOTE(review): local Windows path — dev-only setting, replace with a shared
        // filesystem/HDFS path before deploying.
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        //env.setStateBackend(new HashMapStateBackend());
        // RocksDB with incremental checkpoints: session state can be large and long-lived.
        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));

        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Kafka source reading the DWD-layer topic: dwd-events
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setTopics("dwd-events")
                .setGroupId("doit44-g03")
                .setClientIdPrefix("doit44-c01")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "dwd-events");

        // Parse each JSON line into a UserEvent bean.
        SingleOutputStreamOperator<UserEvent> beanStream = stream.map(new MapFunction<String, UserEvent>() {
            @Override
            public UserEvent map(String eventJson) throws Exception {
                return JSON.parseObject(eventJson, UserEvent.class);
            }
        });

        // Key by session and split each session into page-heartbeat records.
        SingleOutputStreamOperator<UserEvent> resultStream =
                beanStream.keyBy(new KeySelector<UserEvent, String>() {
                            @Override
                            public String getKey(UserEvent userEvent) throws Exception {
                                return userEvent.getSession_id();
                            }
                        })
                        .process(new KeyedProcessFunction<String, UserEvent, UserEvent>() {

                            // The "page anchor": last event whose url / page_start_time
                            // describe the page currently being viewed in this session.
                            ValueState<UserEvent> eventState;

                            @Override
                            public void open(Configuration parameters) throws Exception {

                                ValueStateDescriptor<UserEvent> desc = new ValueStateDescriptor<>("s", UserEvent.class);

                                // A session silent for 2 hours is considered dead; the TTL
                                // clock is refreshed on every read/write, so an active
                                // session never expires.
                                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(2))
                                        .updateTtlOnReadAndWrite()
                                        .build();

                                desc.enableTimeToLive(ttlConfig);

                                eventState = getRuntimeContext().getState(desc);
                            }

                            @Override
                            public void processElement(UserEvent userEvent, KeyedProcessFunction<String, UserEvent, UserEvent>.Context ctx, Collector<UserEvent> out) throws Exception {

                                // Constant-first equals everywhere below: event_id comes from
                                // upstream JSON and may be null; the original order would NPE.
                                String currentEventId = userEvent.getEvent_id();
                                long currentActionTime = userEvent.getAction_time();

                                // First event of the session: it anchors the current page.
                                if (eventState.value() == null) {
                                    userEvent.setPage_start_time(currentActionTime);
                                    eventState.update(userEvent);
                                    out.collect(userEvent);
                                    // FIX: if the very first event is already app_close, don't
                                    // keep dead-session state around until the TTL fires.
                                    if ("app_close".equals(currentEventId)) {
                                        eventState.clear();
                                    }
                                    return;
                                }

                                if ("page_load".equals(currentEventId)) {
                                    // 1. Close the previous page: emit a virtual event that keeps
                                    //    the previous page's fields but carries the new action_time.
                                    UserEvent stateUserEvent = eventState.value();
                                    stateUserEvent.setAction_time(currentActionTime);
                                    out.collect(stateUserEvent);

                                    // 2. Anchor the new page: this page_load (with page_start_time
                                    //    filled in) replaces the state and is emitted itself.
                                    userEvent.setPage_start_time(currentActionTime);
                                    eventState.update(userEvent);
                                    out.collect(userEvent);

                                } else if ("wakeup".equals(currentEventId)) {
                                    // App came back to the foreground: restart the page's clock.
                                    userEvent.setPage_start_time(currentActionTime);
                                    eventState.update(userEvent);
                                    out.collect(userEvent);

                                } else {
                                    // Any other event: emit the anchored page event with the
                                    // current action_time (extends the page's end time).
                                    UserEvent stateEvent = eventState.value();
                                    stateEvent.setAction_time(currentActionTime);
                                    // FIX: write the mutation back explicitly — the heap backend
                                    // persists object mutations by reference while RocksDB works
                                    // on a deserialized copy and would silently discard it; the
                                    // update makes both backends hold the same state.
                                    eventState.update(stateEvent);
                                    out.collect(stateEvent);
                                }

                                // app_close ends the session: it was already emitted by the
                                // "other event" branch above, so just drop the state here.
                                if ("app_close".equals(currentEventId)) {
                                    eventState.clear();
                                }
                            }
                        })
                        // Drop records without a page url (e.g. non-page anchor events).
                        .filter(bean -> bean.getUrl() != null);

        // Stream -> table, with an event-time attribute derived from action_time.
        tenv.createTemporaryView("tmp", resultStream,
                Schema.newBuilder()
                        .column("user_id", DataTypes.BIGINT())
                        .column("session_id", DataTypes.STRING())
                        .column("url", DataTypes.STRING())
                        .column("event_id", DataTypes.STRING())
                        .column("action_time", DataTypes.BIGINT())
                        .column("member_level_id", DataTypes.INT())
                        .column("job", DataTypes.STRING())
                        .column("province", DataTypes.STRING())
                        .column("city", DataTypes.STRING())
                        .column("region", DataTypes.STRING())
                        .column("device_type", DataTypes.STRING())
                        .column("release_channel", DataTypes.STRING())
                        .column("page_type", DataTypes.STRING())
                        .column("page_service", DataTypes.STRING())
                        .column("page_start_time", DataTypes.BIGINT())
                        .columnByExpression("rt","to_timestamp_ltz(action_time,3)")
                        .watermark("rt","rt")
                        .build());

        // Connector table mapped onto the physical Doris table.
        // NOTE(review): a fixed 'sink.label-prefix' can collide with labels from a
        // previous run after a restart — consider making it unique per launch.
        tenv.executeSql("create table doris_sink (\n" +
                "    user_id              bigint\n" +
                "    ,session_id          string\n" +
                "    ,url                 string\n" +
                "    ,member_level_id     int\n" +
                "    ,job                 string\n" +
                "    ,province            string\n" +
                "    ,city                string\n" +
                "    ,region              string\n" +
                "    ,device_type         string\n" +
                "    ,release_channel     string\n" +
                "    ,page_type           string\n" +
                "    ,page_service        string\n" +
                "    ,page_start_time     bigint\n" +
                "    ,page_end_time       bigint\n" +
                ") WITH (                                               \n" +
                "   'connector' = 'doris',                              \n" +
                "   'fenodes' = 'doitedu:8030',                         \n" +
                "   'table.identifier' = 'dws.olap_page_access_time_agg',      \n" +
                "   'username' = 'root',                                \n" +
                "   'password' = 'root',                                \n" +
                "   'sink.label-prefix' = 'doris_label-006'             \n" +
                ")                                                      \n");

        // Aggregate one page visit into one row: dimensions, page, and the max
        // heartbeat time within each 1-minute tumbling window as page_end_time.
        tenv.executeSql(
                "insert into doris_sink  \n" +
                "select  \n" +
                "  user_id,\n" +
                "  session_id,\n" +
                "  url,\n" +
                "  member_level_id,\n" +
                "  job,\n" +
                "  province,\n" +
                "  city,\n" +
                "  region,\n" +
                "  device_type,\n" +
                "  release_channel,\n" +
                "  page_type,\n" +
                "  page_service,\n" +
                "  page_start_time,\n" +
                "  max(action_time) as page_end_time\n" +
                "from table(\n" +
                "    tumble(table tmp,descriptor(rt),interval '1' minute)\n" +
                ")\n" +
                "group by \n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    user_id,\n" +
                "    session_id,\n" +
                "    url,\n" +
                "    page_start_time,\n" +
                "    member_level_id,\n" +
                "    job,\n" +
                "    province,\n" +
                "    city,\n" +
                "    region,\n" +
                "    device_type,\n" +
                "    release_channel,\n" +
                "    page_type,\n" +
                "    page_service");

        env.execute();
    }

}
