package streamAPI.window;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.testng.annotations.Test;

public class DemoWithCountWindow {

	/**
	 * Count-window demo without keying: the whole stream is one group.
	 *
	 * <p>Reads integers (one per line) from a socket source and emits the sum of
	 * every 5 elements received. Start a feeder first, e.g. {@code nc -lk 4444}
	 * on host {@code caoyuesh1}, then type one integer per line.
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails (e.g. the socket is unreachable,
	 *                   or a line is not a parseable integer)
	 */
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStreamSource<String> lines = env.socketTextStream("caoyuesh1", 4444);
		SingleOutputStreamOperator<Integer> nums = lines.map(new MapFunction<String, Integer>() {
			@Override
			public Integer map(String s) throws Exception {
				// NOTE(review): throws NumberFormatException on non-numeric input,
				// which fails the job — acceptable for a demo.
				return Integer.parseInt(s);
			}
		});
		// No keyBy: the entire stream is treated as one group, so the window
		// fires once 5 elements in total have arrived.
		AllWindowedStream<Integer, GlobalWindow> window = nums.countWindowAll(5);
		// Position 0 is the only valid field position for an atomic (non-tuple) type.
		SingleOutputStreamOperator<Integer> summed = window.sum(0);
		summed.print();
		env.execute();
	}

	/**
	 * Keyed count-window demo: after {@code keyBy}, each key's window fires
	 * independently once that key has accumulated 3 elements.
	 *
	 * <p>Input lines are {@code word,count} pairs, e.g. {@code flink,1}.
	 *
	 * @throws Exception if the Flink job fails
	 */
	@Test(description = "分组后再调用countWindow 每一个组达到一定条数才会触发执行")
	public void t1() throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		// Example input: flink,1 flink,1 hadoop,2
		DataStreamSource<String> socketTextStream = env.socketTextStream("caoyuesh1", 4444);
		SingleOutputStreamOperator<Tuple2<String, Long>> wordCountStream = socketTextStream.map(new MapFunction<String, Tuple2<String, Long>>() {
			@Override
			public Tuple2<String, Long> map(String line) throws Exception {
				String[] split = line.split(",");
				return Tuple2.of(split[0], Long.parseLong(split[1]));
			}
		});
		// Key by the word (field f0). A KeySelector lambda replaces the
		// deprecated position-based keyBy(0) and keeps the stream strongly
		// typed as Tuple2<String, Long> instead of a raw Tuple.
		wordCountStream.keyBy(tuple -> tuple.f0).countWindow(3).sum(1).print();
		env.execute();
	}


}
