package org.zjt.flink.stream;

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Demo Flink streaming job: a custom source emits one random int (0–9999)
 * per second for up to 1000 seconds; a 3-second tumbling processing-time
 * window over the whole (non-keyed) stream reduces each window by summation
 * and prints the result.
 *
 * @author juntao.zhang
 * Date: 2018-10-09 20:40
 */
@Slf4j
public class WindowsStreamJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Random random = new Random();
        env.addSource(new SourceFunction<Integer>() {
            // Cancellation flag required by the SourceFunction contract:
            // run() must return promptly after cancel() is called.
            // volatile because cancel() is invoked from a different thread.
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<Integer> ctx) throws Exception {
                // Emit at most 1000 elements, one per second, stopping early
                // if the job is cancelled.
                for (int i = 0; running && i < 1000; i++) {
                    TimeUnit.SECONDS.sleep(1);
                    ctx.collect(random.nextInt(10000));
                }
            }

            @Override
            public void cancel() {
                running = false;
            }
        }).timeWindowAll(Time.seconds(3)).reduce((ReduceFunction<Integer>) (value1, value2) -> {
            int sum = value1 + value2;
            // Parameterized SLF4J logging; avoids eager String conversion.
            log.info("{}", sum);
            return sum;
        }).print();
        env.execute();
    }
}
