package cn.azzhu.day04;

import cn.azzhu.utils.FlinkUtilsV1;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

import java.util.concurrent.TimeUnit;

/**
 * Aggregates activity events by (aid, eventType) and by (aid, eventType, province)
 * after enriching each event with geo info via async I/O, and writes both
 * aggregation results to MySQL.
 * @author azzhu
 * @create 2020-09-19 17:06:26
 */
public class ActivityCount {

    /** Max time an async geo lookup may take before Flink treats it as failed. */
    private static final long ASYNC_TIMEOUT_MILLIS = 5000L;

    /** Max number of in-flight async requests at any one time. */
    private static final int ASYNC_CAPACITY = 10;

    public static void main(String[] args) throws Exception {
        // Source: raw event strings from Kafka (topic/config taken from args).
        final DataStream<String> lines = FlinkUtilsV1.createKafkaStream(args, new SimpleStringSchema());

        // Transformation: enrich each event with its province via async I/O.
        // BUG FIX: the original passed a timeout of 0 MICROSECONDS, which makes
        // every async request time out immediately (Flink's default timeout
        // handling then fails the job). Use a sane millisecond timeout instead.
        final SingleOutputStreamOperator<ActivityBean> beans = AsyncDataStream.unorderedWait(lines,
                new AsnycGeoToActivityBeanFunction(), ASYNC_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS, ASYNC_CAPACITY);

        // Transformation: running counts along the two dimension sets.
        final SingleOutputStreamOperator<ActivityBean> summed1 = beans.keyBy("aid","eventType").sum("count");
        final SingleOutputStreamOperator<ActivityBean> summed2 = beans.keyBy("aid","eventType","province").sum("count");

        // Sink: write both aggregations to MySQL.
        // BUG FIX: summed2 was computed but never attached to a sink, so the
        // per-province counts were silently dropped.
        summed1.addSink(new MySqlSink());
        summed2.addSink(new MySqlSink());

        FlinkUtilsV1.getEnv().execute("ActivityCount");
    }
}
