package streamAPI.checkpoint;

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.testng.annotations.Test;
import util.FlinkConstant._Kafka;
import util.FlinkUtils;

/**
 * Learning demo for the Flink Kafka consumer.
 * Reference: https://ci.apache.org/projects/flink/flink-docs-release-1.11/dev/connectors/kafka.html
 *
 * @author yue.cao
 * @since 10-28-2020
 */
@Slf4j
public class DemoWithLearnKafkaConsumer {

	/**
	 * Standalone entry point: consumes the CY4 topic with the plain string schema
	 * and prints every record to stdout. Blocks until the Flink job terminates.
	 */
	public static void main(String[] args) throws Exception {
		consumeAndPrint();
	}

	@Test(description = "根据自己封装的flinkUtil改写")
	public void t1() throws Exception {
		// Identical to main(); both delegate to the shared helper to avoid duplication.
		consumeAndPrint();
	}

	@Test(description = "尝试反序列化为null or 异常的场景")
	public void t2() throws Exception {
		// ErrorStringSchema deliberately produces null / throwing deserialization results
		// so we can observe how the downstream keyBy reacts to them.
		DataStream<String> kafkaStream = FlinkUtils.createKafkaStream(null, _Kafka._Topic.CY4, _Kafka._Group_id.COMMON, ErrorStringSchema.class);
		kafkaStream.keyBy(new KeySelector<String, String>() {
			@Override
			public String getKey(String value) throws Exception {
				// NOTE(review): ERROR level for an informational trace — presumably chosen
				// only for console visibility in this demo; consider log.info in real code.
				log.error("keyBy {}",value);
				return value;
			}
		}).print();
		FlinkUtils.execute();
	}

	/**
	 * Builds a Kafka-backed stream over topic CY4 (common group id, simple string
	 * deserialization), prints each record, and runs the job to completion.
	 *
	 * @throws Exception if stream creation or job execution fails
	 */
	private static void consumeAndPrint() throws Exception {
		DataStream<String> kafkaStream = FlinkUtils.createKafkaStream(null, _Kafka._Topic.CY4, _Kafka._Group_id.COMMON, SimpleStringSchema.class);
		kafkaStream.print();
		FlinkUtils.execute();
	}
}
