package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udfs.IsNewUserFunction;
import cn._51doit.live.udfs.IsNewUserFunctionV3;
import cn._51doit.live.udfs.JsonToBeanFunction;
import cn._51doit.live.udfs.JsonToBeanFunctionV2;
import cn._51doit.live.utils.EventType;
import cn._51doit.live.utils.FlinkUtils;
import cn._51doit.live.utils.MyKafkaDeserializationSchema;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Real-time count of new vs. returning users (from midnight today until now).
 *
 * <p>The incoming events no longer carry an {@code isNew} flag, so it is derived
 * per device ID using keyed state.
 *
 * <p>Pipeline: Kafka source -&gt; JSON-to-bean -&gt; filter APP_LAUNCH events
 * -&gt; keyBy(deviceId) -&gt; isNew tagging -&gt; date/hour enrichment -&gt; ClickHouse sink.
 */
public class NewUserCountV4 {

    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException
        // when the config file path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: NewUserCountV4 <config-properties-file>");
        }

        // Read job parameters from the given properties file.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Keying by deviceId produces a very large key space, so keyed state grows big.
        // Use the RocksDB state backend with incremental checkpointing enabled to keep
        // checkpoint times manageable. (The original comments promised this, but the
        // backend was never actually installed on the environment.)
        FlinkUtils.env.setStateBackend(new EmbeddedRocksDBStateBackend(true));

        DataStream<Tuple2<String, String>> lines =
                FlinkUtils.createKafkaStreamWithId(parameterTool, new MyKafkaDeserializationSchema());

        // Convert the raw JSON strings into DataBean instances.
        SingleOutputStreamOperator<DataBean> beanStream = lines.process(new JsonToBeanFunctionV2());

        // Keep only app-launch events; only those determine whether a user is new.
        SingleOutputStreamOperator<DataBean> filtered =
                beanStream.filter(bean -> EventType.APP_LAUNCH.equals(bean.getEventId()));

        // keyBy deviceId guarantees all events of the same device reach the same partition,
        // so the per-device "seen before" state is consistent.
        KeyedStream<DataBean, String> keyedStream = filtered.keyBy(DataBean::getDeviceId);

        // Tag each bean with the derived isNew flag using keyed state.
        SingleOutputStreamOperator<DataBean> beanStreamWithIsNew =
                keyedStream.process(new IsNewUserFunctionV3());

        // Derive the date (yyyyMMdd) and hour (HH) partition columns from the event
        // timestamp, so a given record always lands in the same partition no matter
        // when it is processed.
        beanStreamWithIsNew.map(new MapFunction<DataBean, DataBean>() {
            // SimpleDateFormat is not thread-safe, but each parallel subtask works on
            // its own deserialized copy of this function, so per-instance use is safe.
            private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd-HH");

            @Override
            public DataBean map(DataBean bean) throws Exception {
                String[] fields = dateFormat.format(new Date(bean.getTimestamp())).split("-");
                bean.setDate(fields[0]);
                bean.setHour(fields[1]);
                return bean;
            }
        }).addSink(JdbcSink.sink(
                "insert into tb_user_event(id, deviceId, eventId, isNew, os, province, channel, deviceType, eventTime, date, hour) values (?,?,?,?,?,?,?,?,?,?,?)",
                (ps, bean) -> {
                    ps.setString(1, bean.getId());
                    ps.setString(2, bean.getDeviceId());
                    ps.setString(3, bean.getEventId());
                    ps.setInt(4, bean.getIsN());
                    ps.setString(5, bean.getOsName());
                    ps.setString(6, bean.getProvince());
                    ps.setString(7, bean.getReleaseChannel());
                    ps.setString(8, bean.getDeviceType());
                    ps.setLong(9, bean.getTimestamp());
                    ps.setString(10, bean.getDate());
                    ps.setString(11, bean.getHour());
                },
                JdbcExecutionOptions.builder()
                        .withBatchSize(100)          // flush every 100 rows ...
                        .withBatchIntervalMs(2000)   // ... or every 2 seconds, whichever comes first
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:clickhouse://node-3.51doit.cn:8123/doit27?characterEncoding=utf-8")
                        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                        .build()));

        FlinkUtils.env.execute();

    }
}
