package com.sinozo.data.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.sinozo.data.bean.UserStartBean;
import com.sinozo.data.common.ConfigConstant;
import com.sinozo.data.common.DataConfig;
import com.sinozo.data.utils.*;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.TimeUnit;

/**
 * Flink streaming job: reads unique-start events from Kafka, maps each JSON line to a
 * {@link UserStartBean}, and sinks the beans into the ClickHouse table
 * {@code dws_user_start_window}.
 *
 * <p>Checkpointing is exactly-once with RocksDB state and HDFS checkpoint storage;
 * externalized checkpoints are retained on cancellation so the job can be resumed.
 */
public class DwsUserUserStartWindow {

    private static final Logger LOG = LoggerFactory.getLogger(DwsUserUserStartWindow.class);

    public static void main(String[] args) {
        try {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.setParallelism(DataConfig.KAFKA_PARTITION_NUM);

            configureCheckpointingAndRestart(env);

            // Kafka source: unique-start topic, dedicated consumer group for this job.
            String topic = ConfigConstant.KAFKA_UNIQUE_START_TOPIC;
            String groupId = ConfigConstant.KAFKA_USER_START_GROUP_ID;
            DataStreamSource<String> kafkaDS =
                    env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

            // Map each raw JSON line to a UserStartBean.
            // NOTE(review): a malformed JSON line makes the map task fail and triggers a
            // restart; if bad records are expected, switch to flatMap and drop/side-output
            // them instead — confirm with the data owner.
            SingleOutputStreamOperator<UserStartBean> userStartDS =
                    kafkaDS.map(DwsUserUserStartWindow::toUserStartBean);

            // Sink to ClickHouse; the 12 placeholders must match UserStartBean's field order.
            userStartDS.addSink(MyClickHouseUtil.getSinkFunction(
                    "insert into dws_user_start_window values(?,?,?,?,?,?,?,?,?,?,?,?)"));

            env.execute("DwsUserUserStartWindow");
        } catch (Exception e) {
            // Log with full stack trace, then rethrow so the process exits non-zero and
            // the launcher (YARN/K8s/scripts) sees the job as failed instead of succeeded.
            LOG.error("Exception", e);
            throw new RuntimeException("DwsUserUserStartWindow job failed", e);
        }
    }

    /**
     * Configures exactly-once checkpointing, RocksDB state backend with HDFS checkpoint
     * storage, externalized-checkpoint retention, and the failure-rate restart strategy.
     *
     * @param env the stream execution environment to configure
     */
    private static void configureCheckpointingAndRestart(StreamExecutionEnvironment env) {
        env.enableCheckpointing(DataConfig.FLINK_CHECKPOINT_INTERVAL, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(DataConfig.FLINK_CHECKPOINT_TIMEOUT);
        // Must be 1: Flink rejects a min-pause together with more than one concurrent
        // checkpoint (the original value of 2 fails at job submission).
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // Keep checkpoints on cancel so the job can be restarted from the last one.
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        // Allow up to 10 failures within a 3-day window, with a 1-minute delay between restarts.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                10,
                org.apache.flink.api.common.time.Time.of(3L, TimeUnit.DAYS),
                org.apache.flink.api.common.time.Time.of(1L, TimeUnit.MINUTES)));

        env.getCheckpointConfig().setCheckpointStorage(
                new FileSystemCheckpointStorage(
                        PathUtil.getHdfsPath(DataConfig.HDFS_SERVER, "dwsUserUserStartWindow")));
        env.setStateBackend(new EmbeddedRocksDBStateBackend());
    }

    /**
     * Parses one raw JSON event line into a {@link UserStartBean}.
     *
     * <p>Reads the {@code common} object for dimension fields and the top-level {@code ts}
     * for the event time; {@code ts}/{@code createTime} on the bean record processing time.
     *
     * @param line one JSON event from Kafka; must contain a {@code common} object and {@code ts}
     * @return the populated bean
     * @throws com.alibaba.fastjson.JSONException if {@code line} is not valid JSON
     */
    private static UserStartBean toUserStartBean(String line) {
        JSONObject jsonObject = JSON.parseObject(line);
        JSONObject common = jsonObject.getJSONObject("common");
        Long ts = jsonObject.getLong("ts");
        long now = System.currentTimeMillis();

        return UserStartBean.builder()
                .appVer(common.getString("app_ver"))
                .os(common.getString("os"))
                .qid(common.getString("qid"))
                // group_qid may be absent — normalize empties via the shared helper.
                .groupQid(ValueEmptyUtils.emptyString(common.getString("group_qid")))
                .ascQid(common.getString("asc_qid"))
                .code(common.getString("code"))
                .labCode(common.getString("lab_code"))
                .labGroupCode(common.getString("lab_group_code"))
                .deviceId(common.getString("device_id"))
                .startTime(DateFormatUtil.toYmdHms(ts))
                .ts(now)
                .createTime(DateFormatUtil.toYmdHms(now))
                .build();
    }

}
