package streamAPI.operators;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.testng.annotations.Test;

/**
 * Demo for observing how Flink splits a job into tasks: a task boundary appears
 * when a shuffle such as {@code keyBy} occurs, when the parallelism changes
 * between operators, or when operator chaining is broken explicitly
 * ({@code disableChaining()}/{@code startNewChain()}).
 *
 * Putting operators into a dedicated slot-sharing group (or breaking the chain)
 * makes them run in their own slot so they do not compete for resources with
 * the other operators.
 *
 * @author yue.cao
 * @since 10-16-2020
 */
public class DemoWithTaskSubTaskOperatorChain {

	@Test(description = "混合例子 " +
			"此例子在测试多个slotSharingGroup 在本地模式会有问题 需要修改env.为集群模式 并且 观察最终消耗的并行度")
	public void t1() throws Exception {
		// Local environment with the web UI so the resulting task graph can be inspected.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
		// Uncomment to forbid chaining globally — every operator then becomes its own task.
//		env.disableOperatorChaining();

		// Unbounded source: one record per line read from a socket
		// (start e.g. `nc -lk 4444` on host "caoyuesh1" first).
		DataStreamSource<String> source = env.socketTextStream("caoyuesh1", 4444);

		// Split each incoming line into whitespace-separated words.
		DataStream<String> words = source.flatMap(new FlatMapFunction<String, String>() {
			@Override
			public void flatMap(String line, Collector<String> out) throws Exception {
				for (String word : line.split(" ")) {
					out.collect(word);
				}
			}
		}).setParallelism(2);

		// Keep only the words starting with "f".
		DataStream<String> goodWord = words.filter(new FilterFunction<String>() {
			@Override
			public boolean filter(String value) throws Exception {
				return value.startsWith("f");
			}
		})
		// disableChaining() cuts the chain on BOTH sides of this operator;
		// startNewChain() only cuts the chain in FRONT of it.
//		}).disableChaining();
//		}).startNewChain();
		// From this operator on, everything downstream runs in the "cyGroup" slot-sharing group.
		.slotSharingGroup("cyGroup").setParallelism(2);

		// Pair every word with an initial count of 1.
		DataStream<Tuple2<String, Long>> map = goodWord.map(new MapFunction<String, Tuple2<String, Long>>() {
			@Override
			public Tuple2<String, Long> map(String value) throws Exception {
				return Tuple2.of(value, 1L);
			}
		}).setParallelism(2);

		// Key by the word itself. The KeySelector lambda replaces the deprecated
		// (and in recent Flink versions removed) index-based keyBy(0); the shuffle it
		// introduces is one of the points where the operator chain is always broken.
		map.keyBy(value -> value.f0).sum(1).setParallelism(2).print().setParallelism(2);

		// Blocks until the (unbounded) job is cancelled.
		env.execute();
	}


}
