package com.chief.transform;


import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming demo: for each job category, keep a rolling maximum-age
 * {@code People} record using {@code KeyedStream.reduce}.
 *
 * <p>Reads whitespace-separated lines ("name age job") from HDFS, keys the
 * stream by job, and emits the oldest person seen so far per key.
 */
public class TransformRollingReduceTest {

    public static void main(String[] args) throws Exception {

        // Environment: connect to a remote cluster directly instead of
        // packaging and uploading the job jar manually.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.createRemoteEnvironment(
                "hadoop52", 8081, "E:\\bigdata-start\\flink\\flinkCode\\target\\flink-1.0-SNAPSHOT.jar");

        // Source: one person per line, fields separated by a single space.
        DataStreamSource<String> sourceMap = environment.readTextFile("viewfs://mycluster/data1/rolling.txt");

        // Transform: parse each line into a People(name, age, job) POJO.
        // parseInt avoids the needless Integer boxing of Integer.valueOf.
        SingleOutputStreamOperator<People> map = sourceMap.map(line -> {
            String[] split = line.split(" ");
            return new People(split[0], Integer.parseInt(split[1]), split[2]);
        });
        KeyedStream<People, String> keyedStream = map.keyBy(People::getJob);

        // Rolling reduce: emit the oldest person per job. On equal ages the
        // previously-accumulated element wins (>=), matching the original logic.
        SingleOutputStreamOperator<People> reduce =
                keyedStream.reduce((acc, next) -> acc.getAge() >= next.getAge() ? acc : next);

        // Sink: print rolling results to stdout on the task managers.
        reduce.print();

        environment.execute("transform rolling test");

    }
}
