package com.bawei.tk4;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.time.Duration;
import java.util.Properties;

/**
 * ADS-layer Flink streaming job.
 *
 * <p>Reads start-log events in real time from the Kafka topic {@code dwd_start_log},
 * keys them by operating system ({@code common.os}) and maintains a running total of
 * ad display time ({@code start.open_ad_ms}) per OS.
 *
 * <p>Results are currently printed to stdout; persisting them to the MySQL table
 * {@code ads_click_start_report} is still a TODO (see the commented sink below).
 */
public class FlinkTM9 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        DataStream<String> stream = env.addSource(
                new FlinkKafkaConsumer<>("dwd_start_log", new SimpleStringSchema(), properties));

        // Parse each JSON record into (os, open_ad_ms). Malformed records are mapped to
        // ("unknown", 0L) instead of throwing: the original inline parsing dereferenced
        // common/start without null checks, so one bad message would NPE and kill the job.
        SingleOutputStreamOperator<Tuple2<String, Long>> mapDS =
                stream.map(new MapFunction<String, Tuple2<String, Long>>() {
                    @Override
                    public Tuple2<String, Long> map(String s) {
                        return parseOsAdMs(s);
                    }
                });

        // Running sum of ad display time, grouped by operating system.
        SingleOutputStreamOperator<Tuple2<String, Long>> sumDS = mapDS.keyBy(x -> x.f0).sum(1);
        sumDS.print();
        //TODO sumDS.addSink(mysql) -> MySQL table ads_click_start_report

        env.execute();
    }

    /**
     * Extracts {@code (common.os, start.open_ad_ms)} from one start-log JSON record.
     *
     * @param s raw JSON string from Kafka
     * @return the (os, milliseconds) pair; {@code ("unknown", 0L)} when the record
     *         is malformed or either field is absent, so downstream {@code sum}
     *         never sees a null value
     */
    private static Tuple2<String, Long> parseOsAdMs(String s) {
        try {
            JSONObject jo = JSON.parseObject(s);
            JSONObject common = jo.getJSONObject("common");
            JSONObject start = jo.getJSONObject("start");
            String os = (common != null && common.getString("os") != null)
                    ? common.getString("os")
                    : "unknown";
            Long ms = (start != null) ? start.getLong("open_ad_ms") : null;
            return new Tuple2<>(os, ms != null ? ms : 0L);
        } catch (Exception e) {
            // Best-effort degradation: keep the streaming job alive on bad input.
            // NOTE(review): consider routing these to a side output for monitoring.
            return new Tuple2<>("unknown", 0L);
        }
    }
}
