package com.sjc.lesson01;

import com.sjc.lesson01.domain.WordAndCount;
import com.sjc.lesson01.function.SplitWord;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

/**
 * Streaming word count driven by a Kafka 0.11 source: consumes lines from the
 * {@code testSlot} topic, splits each line into per-word records via
 * {@link SplitWord}, keeps a keyed running sum per word, and prints the results.
 */
public class KafkaSourceWordCount {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        String topic = "testSlot";
        // Minimal consumer config: broker address and consumer group only.
        // (Variable name fixed: was misspelled "comsumerProperties".)
        Properties consumerProperties = new Properties();
        consumerProperties.setProperty("bootstrap.servers","192.168.15.102:9092");
        consumerProperties.setProperty("group.id","testSlot_consumer");

        FlinkKafkaConsumer011<String> myConsumer =
                new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), consumerProperties);

        // Source runs with 3 parallel subtasks.
        DataStreamSource<String> data = env.addSource(myConsumer).setParallelism(3);

        // Tokenize each input line into WordAndCount records.
        SingleOutputStreamOperator<WordAndCount> wordOneStream =
                data.flatMap(new SplitWord()).setParallelism(2);

        // Positional keyBy/sum: assumes WordAndCount exposes the word at field
        // index 0 and the count at index 1 — NOTE(review): confirm in the domain
        // class; positional access requires a Tuple-style type in Flink.
        SingleOutputStreamOperator<WordAndCount> result =
                wordOneStream.keyBy(0).sum(1).setParallelism(2);

        // Method reference replaces the trivial `tuple -> tuple.toString()` lambda.
        // Print sink kept at parallelism 1 so output is not interleaved.
        result.map(Object::toString).setParallelism(2)
                .print().setParallelism(1);

        env.execute("wordCount2");
    }
}
