package demo;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer09;
import pojo.Test;
import scala.Int;
import sink.MySQLSink;

import java.util.Properties;

/**
 * Entry point for a Flink streaming job that reads "key,count" records from a
 * Kafka topic, sums the counts per key inside tumbling processing-time windows,
 * and writes the aggregated results to MySQL.
 */
public class KafkaStreamTest {

    /**
     * Builds and executes the streaming pipeline:
     * Kafka source -> parse "key,count" lines into {@code Tuple2<String, Integer>}
     * -> key by the string field -> 5-second tumbling processing-time windows
     * -> sum counts per key -> map to {@link Test} POJOs -> MySQL sink.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the job graph cannot be built or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Obtain the Flink streaming execution context.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5 seconds so Kafka offsets are committed consistently.
        env.enableCheckpointing(5000);

        Properties properties = new Properties();
        // Kafka broker IP/hostname(s); comma-separated when there is more than one.
        properties.setProperty("bootstrap.servers", "**");
        // ZooKeeper IP/hostname(s); only the 0.8 consumer requires this — the 0.9
        // consumer works from bootstrap.servers alone. Kept for compatibility.
        properties.setProperty("zookeeper.connect", "**");
        // Consumer group id used by this Flink job.
        properties.setProperty("group.id", "flink-consumer");

        FlinkKafkaConsumer09<String> myConsumer =
                new FlinkKafkaConsumer09<>("test", new SimpleStringSchema(), properties);

        DataStream<String> stream = env.addSource(myConsumer);

        /*
         * Parse each record of the form "<key>,<integer>", key by the string part,
         * and aggregate the integer part within 5-second tumbling
         * processing-time windows.
         */
        DataStream<Tuple2<String, Integer>> cleanedData = stream
                .map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String s) throws Exception {
                        // Expected input format: "<key>,<integer>".
                        String[] split = s.split(",");
                        return Tuple2.of(split[0], Integer.valueOf(split[1]));
                    }
                })
                .keyBy(0)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                // Explicit reduce equivalent to .sum(1): adds the counts, keeps the key.
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> v1,
                                                          Tuple2<String, Integer> v2) throws Exception {
                        return new Tuple2<>(v1.f0, v1.f1 + v2.f1);
                    }
                });
        cleanedData.print();

        /*
         * Convert the aggregated tuples into Test POJOs for the MySQL sink.
         */
        DataStream<Test> testData = cleanedData.map(test -> new Test(test.f0, test.f1));
        testData.print();

        testData.addSink(new MySQLSink());

        env.execute("Flink kafka");
    }
}
