package cn._51doit.live.jobs;

import cn._51doit.live.deserializer.MyKafkaDeserializationSchema;
import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.IsNewUserFunctionV3;
import cn._51doit.live.udf.JsonToBeanFunctionV2;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.Date;


/**
 * Reads pre-processed user events from Kafka, routes {@code appLaunch} events
 * through keyed state to compute an "is new user" flag, unions them back with
 * the main stream, and runs a multi-dimensional (CUBE) new-user count in
 * Flink SQL, upserting the result into a MySQL table.
 *
 * <p>Usage: {@code NewUserCountV5 <path-to-properties-file>}
 */
public class NewUserCountV5 {

    public static void main(String[] args) throws Exception {

        // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            System.err.println("Usage: NewUserCountV5 <config.properties>");
            System.exit(1);
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Kafka source: Tuple2 of (record id, raw JSON line).
        DataStream<Tuple2<String, String>> lineStreamWithId =
                FlinkUtils.createKafkaStreamWithId(parameterTool, MyKafkaDeserializationSchema.class);

        StreamTableEnvironment tEnv = FlinkUtils.getStreamTableEnv();

        // Parse raw JSON lines into DataBean records.
        SingleOutputStreamOperator<DataBean> beanStream = lineStreamWithId.process(new JsonToBeanFunctionV2());

        // Side-output tag: appLaunch events are handled separately so an
        // isNew flag can be computed for them.
        OutputTag<DataBean> appLaunchTag = new OutputTag<DataBean>("app-launch") {};

        SingleOutputStreamOperator<DataBean> mainStream = beanStream.process(new ProcessFunction<DataBean, DataBean>() {
            @Override
            public void processElement(DataBean bean, Context ctx, Collector<DataBean> out) throws Exception {
                if (Constants.APP_LAUNCH.equals(bean.getEventId())) {
                    ctx.output(appLaunchTag, bean); // route appLaunch events to the side output
                } else {
                    out.collect(bean);
                }
            }
        });

        // Enrich the tagged appLaunch events: keyed by deviceId so
        // IsNewUserFunctionV3 can decide per device whether the user is new.
        DataStream<DataBean> appLaunchStream = mainStream.getSideOutput(appLaunchTag);
        SingleOutputStreamOperator<DataBean> appLaunchStreamWithIsNew =
                appLaunchStream.keyBy(DataBean::getDeviceId).process(new IsNewUserFunctionV3());

        // Merge the enriched launch events back with the main stream.
        DataStream<DataBean> allStream = mainStream.union(appLaunchStreamWithIsNew);

        tEnv.createTemporaryView("tb_user_events", allStream);

        // JDBC settings are read from the properties file; the defaults keep the
        // previously hard-coded values for backward compatibility.
        // SECURITY NOTE: credentials should live in the config file / a secret
        // store, not in source control — the defaults below only exist so that
        // existing deployments keep working until the keys are supplied.
        String jdbcUrl = parameterTool.get("jdbc.url",
                "jdbc:mysql://node-1.51doit.cn:3306/doit28?characterEncoding=utf-8");
        String jdbcTable = parameterTool.get("jdbc.table", "tb_new_user");
        String jdbcUser = parameterTool.get("jdbc.username", "root");
        String jdbcPassword = parameterTool.get("jdbc.password", "123456");

        // Upsert sink: PRIMARY KEY (dt, channel, os) NOT ENFORCED lets the JDBC
        // connector turn the changelog of the CUBE aggregation into MySQL upserts.
        tEnv.executeSql(
                "CREATE TABLE tb_mysql_sink (\n" +
                        "  dt STRING,  \n" +
                        "  channel STRING,  \n" +
                        "  os STRING,  \n" +
                        "  counts BIGINT, \n" +
                        "  PRIMARY KEY (dt, channel, os) NOT ENFORCED\n" +
                        ") WITH (\n" +
                        "   'connector' = 'jdbc',\n" +
                        "   'url' = '" + jdbcUrl + "',\n" +
                        "   'table-name' = '" + jdbcTable + "',\n" +
                        "   'username' = '" + jdbcUser + "',\n" +
                        "   'password' = '" + jdbcPassword + "'\n" +
                        ")"
        );

        // Daily new-user counts with CUBE over (releaseChannel, osName): one query
        // produces the grand total, per-channel, per-os, and channel×os subtotals.
        tEnv.executeSql("insert into tb_mysql_sink select FROM_UNIXTIME(`timestamp` / 1000, 'yyyy-MM-dd') dt, ifnull(releaseChannel, 'null') channel, ifnull(osName,'null') os, count(*) counts from tb_user_events where eventId = 'appLaunch' and isN = 1 group by FROM_UNIXTIME(`timestamp` / 1000, 'yyyy-MM-dd'), cube (releaseChannel, osName)");

        FlinkUtils.env.execute();
    }
}
