package com.bingxu.flink.chapter02.transformation;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * @author :BingXu
 * @description :Flink example of the low-level {@code process} transformation:
 *               prints each element's timestamp (null without an assigner) and
 *               emits even values unchanged, odd values halved.
 * @date :2021/8/9 11:34
 * @modifier :
 */

public class Trans_Process {

    /**
     * Entry point demonstrating {@link ProcessFunction} on an integer stream.
     *
     * <p>For every record the element timestamp is printed (this prints {@code null}
     * here because no timestamp assigner / event-time characteristic is configured),
     * then even values are emitted unchanged while odd values are emitted halved
     * using integer division.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps the printed output in a single deterministic stream.
        env.setParallelism(1);

        DataStreamSource<Integer> intDS = env.fromElements(1, 2, 3, 5, 6, 2, 21, 8, 91, 45);

        SingleOutputStreamOperator<Integer> processDS = intDS.process(new ProcessFunction<Integer, Integer>() {
            @Override
            public void processElement(Integer value, Context context, Collector<Integer> collector) throws Exception {
                // No timestamp assigner is set on this pipeline, so this prints "null".
                System.out.println(context.timestamp());
                // Even values pass through unchanged; odd values are halved (integer division).
                collector.collect(value % 2 == 0 ? value : value / 2);
            }
        });
        processDS.print();

        env.execute();
    }
}
