package cn._51doit.live.jobs;

import cn._51doit.live.deserializer.MyKafkaDeserializationSchema;
import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.IsNewUserFunctionV3;
import cn._51doit.live.udf.JsonToBeanFunction;
import cn._51doit.live.udf.JsonToBeanFunctionV2;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.Date;


/**
 * Multi-dimensional new-user counting with the DataStream API (V4).
 *
 * The incoming events carry no isNew field, so it must be computed in real time.
 * Records are keyed by deviceId, guaranteeing that every event from the same
 * device — no matter when it arrives — lands in the same partition. That keeps
 * the per-device state accurate and avoids data skew.
 *
 * Known issue: the KeyedState holds one entry per device id; keeping all of it
 * on the heap consumes far too much memory.
 *
 * Mitigation (use RocksDBStateBackend):
 *   - can hold very large state
 *   - supports incremental checkpoints
 */
public class NewUserCountV4 {


    public static void main(String[] args) throws Exception {

        // Job configuration (Kafka topic, group id, checkpoint settings, ...) is
        // read from the properties file passed as the first program argument.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Pairs of (record id, raw JSON line) from Kafka.
        DataStream<Tuple2<String, String>> lineStreamWithId = FlinkUtils.createKafkaStreamWithId(parameterTool, MyKafkaDeserializationSchema.class);

        // Parse each raw JSON line into a DataBean.
        SingleOutputStreamOperator<DataBean> beanStream = lineStreamWithId.process(new JsonToBeanFunctionV2());

        // Route appLaunch events to a side output so their isNew flag can be
        // computed separately from the rest of the traffic.
        OutputTag<DataBean> appLaunchTag = new OutputTag<DataBean>("app-launch") {};

        SingleOutputStreamOperator<DataBean> mainStream = beanStream.process(new ProcessFunction<DataBean, DataBean>() {
            @Override
            public void processElement(DataBean bean, Context ctx, Collector<DataBean> out) throws Exception {

                String eventId = bean.getEventId();
                if (Constants.APP_LAUNCH.equals(eventId)) {
                    ctx.output(appLaunchTag, bean); // tag app-launch events for the side output
                } else {
                    out.collect(bean);
                }
            }
        });

        // Compute isNew for the tagged appLaunch events. Keying by deviceId keeps
        // the per-device "seen before" keyed state consistent.
        DataStream<DataBean> appLaunchStream = mainStream.getSideOutput(appLaunchTag);
        SingleOutputStreamOperator<DataBean> appLaunchStreamWithIsNew = appLaunchStream.keyBy(DataBean::getDeviceId).process(new IsNewUserFunctionV3());

        // Re-unite the main stream with the enriched appLaunch stream.
        DataStream<DataBean> allStream = mainStream.union(appLaunchStreamWithIsNew);
        // Sink everything to ClickHouse. The event timestamp is split into a
        // yyyyMMdd date column and an HH hour column so a given record always
        // targets the same partition no matter when it is written.
        allStream.map(new MapFunction<DataBean, DataBean>() {
            // One instance per parallel subtask; map() is invoked serially per
            // subtask, so this non-thread-safe formatter is safe here.
            private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd-HH");
            @Override
            public DataBean map(DataBean bean) throws Exception {
                Long timestamp = bean.getTimestamp();
                String format = dateFormat.format(new Date(timestamp));
                String[] fields = format.split("-"); // [0] = yyyyMMdd, [1] = HH
                bean.setDate(fields[0]);
                bean.setHour(fields[1]);
                return bean;
            }
        }).addSink(JdbcSink.sink(
                "insert into tb_user_event(id, deviceId, eventId, isNew, os, province, channel, deviceType, eventTime, date, hour, `properties`) values (?,?,?,?,?,?,?,?,?,?,?, ?)",
                (ps, bean) -> {
                    ps.setString(1, bean.getId());
                    ps.setString(2, bean.getDeviceId());
                    ps.setString(3, bean.getEventId());
                    ps.setInt(4, bean.getIsN());
                    ps.setString(5, bean.getOsName());
                    ps.setString(6, bean.getProvince());
                    ps.setString(7, bean.getReleaseChannel());
                    ps.setString(8, bean.getDeviceType());
                    ps.setLong(9, bean.getTimestamp());
                    ps.setString(10, bean.getDate());
                    ps.setString(11, bean.getHour());
                    // BUGFIX: the statement declares 12 placeholders but only 11
                    // parameters were bound, so every batch flush would fail with
                    // a missing-parameter SQLException. Bind `properties` as its
                    // JSON representation (the fastjson import was already present
                    // for this). NOTE(review): assumes DataBean exposes
                    // getProperties() — confirm the getter name against the POJO.
                    ps.setString(12, JSON.toJSONString(bean.getProperties()));
                },
                JdbcExecutionOptions.builder()
                        .withBatchSize(100)       // flush every 100 rows...
                        .withBatchIntervalMs(2000) // ...or every 2 seconds
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:clickhouse://node-2.51doit.cn:8123/doit28?characterEncoding=utf-8")
                        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                        .build()));

        FlinkUtils.env.execute();


    }
}
