package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udfs.JsonToBeanFunction;
import cn._51doit.live.utils.EventType;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

/**
 * Real-time count of new vs. returning users, accumulated from midnight today up to now.
 */
public class NewUserCount {

    public static void main(String[] args) throws Exception {

        // Load job configuration from the properties file given as the first CLI argument.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Raw JSON event lines consumed from Kafka.
        DataStream<String> lines = FlinkUtils.createKafkaStream(parameterTool, SimpleStringSchema.class);

        // Parse each JSON string into a DataBean (malformed records are handled inside the function).
        SingleOutputStreamOperator<DataBean> beanStream = lines.process(new JsonToBeanFunction());

        // Keep only app-launch events; new/returning-user stats are derived from launches.
        SingleOutputStreamOperator<DataBean> filtered =
                beanStream.filter(bean -> EventType.APP_LAUNCH.equals(bean.getEventId()));

        // Tuple5<isNew, release channel, carrier, OS name, count (=1)>
        SingleOutputStreamOperator<Tuple5<Integer, String, String, String, Long>> tpStream =
                filtered.map(new MapFunction<DataBean, Tuple5<Integer, String, String, String, Long>>() {
                    @Override
                    public Tuple5<Integer, String, String, String, Long> map(DataBean bean) throws Exception {
                        return Tuple5.of(
                                bean.getIsNew(),
                                bean.getReleaseChannel(),
                                bean.getCarrier(),
                                bean.getOsName(),
                                1L);
                    }
                });

        // New vs. returning users overall (keyed by isNew only).
        SingleOutputStreamOperator<Tuple5<Integer, String, String, String, Long>> res1 =
                tpStream.keyBy(t -> t.f0).sum(4);

        // New vs. returning users per release channel.
        // NOTE: the key parts are joined with an explicit delimiter — bare concatenation
        // (t.f0 + t.f1) would let different (isNew, channel) pairs collapse into the same
        // string key and silently merge their counts.
        SingleOutputStreamOperator<Tuple5<Integer, String, String, String, Long>> res2 =
                tpStream.keyBy(t -> t.f0 + "|" + t.f1).sum(4);

        // New vs. returning users per carrier (same delimiter rationale as above).
        SingleOutputStreamOperator<Tuple5<Integer, String, String, String, Long>> res3 =
                tpStream.keyBy(t -> t.f0 + "|" + t.f2).sum(4);

        // Attach sinks so the aggregates are actually emitted; previously the three result
        // streams were computed but never observed anywhere.
        res1.print();
        res2.print();
        res3.print();

        // Observation: multi-dimensional stats in Flink require one keyBy per dimension
        // combination, and too many keyBy/shuffle stages degrades throughput. A better design
        // is to pre-aggregate in Flink and write the results into a real-time OLAP store
        // (ClickHouse, Doris) that serves ad-hoc multi-dimensional queries.

        FlinkUtils.env.execute();
    }
}
