/**
 * Example usage of a custom Flink stream partitioner.
 *
 * @version 1.0.1
 * @author yangdechao
 * @date 2021-11-15 15:37:02
 */
package cn.com.guage.flink.partitioner;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates Flink's custom-partitioner API: each input line is keyed by its
 * first comma-separated field and routed to downstream subtasks by
 * {@code MyPartition} (per the original comments, partitioning by the parity of
 * a numeric key — confirm against {@code MyPartition}).
 *
 * @version 1.0.1
 * @author yangdechao
 * @date 2021-11-15 15:37:02
 */
public class PartitionTest {

	/**
	 * Reads {@code data/map-data.txt}, maps each line to a
	 * {@code (firstField, wholeLine)} tuple, partitions the stream with the
	 * custom {@code MyPartition} on the key field, then prints the processing
	 * thread id for each record so the partitioning effect is visible.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		DataStreamSource<String> textStream = env.readTextFile("data/map-data.txt");

		// Key each line by its first comma-separated token; keep the full line as payload.
		SingleOutputStreamOperator<Tuple2<String, String>> mapStream = textStream
				.map(new MapFunction<String, Tuple2<String, String>>() {

					private static final long serialVersionUID = -3306946323113715801L;

					@Override
					public Tuple2<String, String> map(String value) throws Exception {
						String[] splits = value.split(",");
						// splits[0] is the partition key; if the line contains no
						// comma, the whole line becomes the key.
						return Tuple2.of(splits[0], value);
					}
				});

		// Route records with the custom partitioner, keyed on tuple field 0.
		// NOTE(review): partitionCustom(Partitioner, int) is deprecated in newer
		// Flink releases in favor of the KeySelector overload — migrate once
		// MyPartition's key type is confirmed.
		DataStream<Tuple2<String, String>> dataStream = mapStream.partitionCustom(new MyPartition(), 0);

		dataStream.map(new MapFunction<Tuple2<String, String>, String>() {

			private static final long serialVersionUID = -2993557454516978680L;

			@Override
			public String map(Tuple2<String, String> value) throws Exception {
				// Log the processing thread id so the per-partition routing is observable.
				System.out.println("获取当前线程id" + Thread.currentThread().getId() + ",value" + value);
				return value.f0;
			}
		}).print();

		// Job name typo fixed: "partiotn" -> "partition".
		env.execute("own definite partition");
	}
}
