package com.atguigu.flink.datastreamapi.dirstribute;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Demonstrates submitting a streaming word-count job to a remote Flink
 * session cluster via {@code createRemoteEnvironment}.
 *
 * Created by Smexy on 2023/4/3
 */
public class Demo7_RemoteEnv
{
    /**
     * Entry point: builds a socket word-count pipeline and submits it to a
     * remote Flink session cluster together with the job jar.
     *
     * @param args unused
     * @throws Exception if the job cannot be submitted or fails during
     *                   execution; propagated so the JVM exits non-zero
     *                   instead of silently printing a stack trace
     */
    public static void main(String[] args) throws Exception {

        /*
            No need to manually upload the jar to the cluster: the remote
            environment ships the given jar when the job is submitted.
            This only works against a *session* cluster.

            JobManager Web Interface: http://hadoop103:35136
            JobManager Web Interface: http://hadoop104:36266
         */
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createRemoteEnvironment("hadoop104", 36266, "files/myjob.jar");

        // Unbounded source: one String record per line read from the socket.
        DataStreamSource<String> ds = env.socketTextStream("hadoop102", 8888);

        // Split each line on single spaces and emit a (word, 1) pair per word.
        // An anonymous class (not a lambda) is used so Flink can extract the
        // output type without an explicit returns() hint.
        SingleOutputStreamOperator<WordCount> ds2 = ds
            .flatMap(new FlatMapFunction<String, WordCount>()
            {
                @Override
                public void flatMap(String line, Collector<WordCount> out) throws Exception {
                    for (String word : line.split(" ")) {
                        out.collect(new WordCount(word, 1));
                    }
                }
            });

        ds2
            .keyBy(new KeySelector<WordCount, String>()
            {
                @Override
                public String getKey(WordCount value) throws Exception {
                    return value.getWord();
                }
            })
            // The stream element type is a POJO, so sum() aggregates by field name.
            .sum("count")
            .print();

        // Propagate failures instead of swallowing them in a catch block:
        // a demo job that cannot connect or execute should terminate visibly.
        env.execute();
    }
}
