package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.JsonToBeanFunction;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;


/**
 * 使用DataStream API多维度统计新用户
 */
/**
 * Flink streaming job that counts new users ({@code isNew == 1}) along several
 * dimensions (release channel, carrier, device type) using the DataStream API.
 *
 * <p>Usage: {@code NewUserCount <path-to-properties-file>} — the properties file
 * configures the Kafka source consumed via {@code FlinkUtils.createKafkaStream}.
 *
 * <p>NOTE(review): the aggregated streams are not yet attached to any sink
 * (the intended MySQL sink is still a TODO below), so this job currently
 * computes results without emitting them anywhere.
 */
public class NewUserCount {

    public static void main(String[] args) throws Exception {

        // Fail fast with a clear usage message instead of an
        // ArrayIndexOutOfBoundsException when the config path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException(
                    "Usage: NewUserCount <path-to-properties-file>");
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Raw JSON lines from Kafka.
        DataStream<String> lines = FlinkUtils.createKafkaStream(parameterTool, SimpleStringSchema.class);

        // Parse each JSON line into a DataBean.
        SingleOutputStreamOperator<DataBean> beanStream = lines.process(new JsonToBeanFunction());

        // Keep only app-launch events from new users.
        SingleOutputStreamOperator<DataBean> filtered = beanStream.filter(
                bean -> Constants.APP_LAUNCH.equals(bean.getEventId()) && bean.getIsNew() == 1);

        // Reshape into (releaseChannel, carrier, deviceType, 1) so the trailing
        // count field can be summed after keying on any dimension.
        SingleOutputStreamOperator<Tuple4<String, String, String, Integer>> tpStream = filtered.map(
                new MapFunction<DataBean, Tuple4<String, String, String, Integer>>() {
                    @Override
                    public Tuple4<String, String, String, Integer> map(DataBean bean) throws Exception {
                        String releaseChannel = bean.getReleaseChannel();
                        String carrier = bean.getCarrier();
                        String deviceType = bean.getDeviceType();
                        return Tuple4.of(releaseChannel, carrier, deviceType, 1);
                    }
                });

        // Single-dimension aggregations: running new-user count per release
        // channel, per carrier, and per device type (field 3 is the count).
        SingleOutputStreamOperator<Tuple4<String, String, String, Integer>> res1 = tpStream.keyBy(t -> t.f0).sum(3);
        SingleOutputStreamOperator<Tuple4<String, String, String, Integer>> res2 = tpStream.keyBy(t -> t.f1).sum(3);
        SingleOutputStreamOperator<Tuple4<String, String, String, Integer>> res3 = tpStream.keyBy(t -> t.f2).sum(3);

        // Two-dimension keying: (releaseChannel, carrier).
        // NOTE(review): no aggregation (e.g. .sum(3)) is applied to these keyed
        // streams yet, so res4/res5 produce no results — presumably unfinished.
        KeyedStream<Tuple4<String, String, String, Integer>, Tuple2<String, String>> res4 = tpStream.keyBy(
                new KeySelector<Tuple4<String, String, String, Integer>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple4<String, String, String, Integer> value) throws Exception {
                        return Tuple2.of(value.f0, value.f1);
                    }
                });

        // Two-dimension keying: (releaseChannel, deviceType).
        KeyedStream<Tuple4<String, String, String, Integer>, Tuple2<String, String>> res5 = tpStream.keyBy(
                new KeySelector<Tuple4<String, String, String, Integer>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple4<String, String, String, Integer> value) throws Exception {
                        return Tuple2.of(value.f0, value.f2);
                    }
                });

        // TODO: write the aggregated results to MySQL (sink not implemented yet).

        FlinkUtils.env.execute();
    }
}
