package com.atguigu.flink.wordcount;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2023/1/12
 *
 *
 *
 */
public class Demo9_WEBUI
{
    /**
     * Demo: socket word count that also exposes the Flink Web UI from the
     * local (IDE) JVM on an explicitly chosen REST port, so the running job
     * can be monitored in a browser.
     */
    public static void main(String[] args) throws Exception {

        // Configuration for the embedded mini-cluster; pin the Web UI /
        // REST endpoint to port 3333 (instead of a random free port).
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",3333);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Unbounded text source: one record per line read from hadoop103:8888.
        DataStreamSource<String> lines = env.socketTextStream("hadoop103",8888);

        // Tokenize each line and emit a (word, 1) pair per word.
        // rebalance() spreads records round-robin over the flatMap's 10
        // parallel subtasks; returns(...) restores the output type that
        // is erased from the lambda signature.
        SingleOutputStreamOperator<WordCount> wordAndOne =
            lines
                .rebalance()
                .flatMap((String line, Collector<WordCount> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(new WordCount(word,1));
                    }
                })
                .returns(WordCount.class)
                .setParallelism(10);

        // Key by the word and maintain a running sum of the "count" field,
        // printing each updated total to stdout.
        wordAndOne
            .keyBy(WordCount::getWord)
            .sum("count")
            .print();

        // Submit the job graph; blocks until the streaming job terminates.
        env.execute();
    }
}
