package streamAPI.sink;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.testng.annotations.Test;
import util.FlinkConstant;
import util.FlinkUtils;

/**
 * @author yue.cao
 * @since 10-29-2020
 */
public class DemoWithRedisSink {

	@Test(description = "自定义redisSink")
	public void t1() throws Exception {
		// Source: raw strings consumed from the CY4 Kafka topic, configured
		// from /config.properties on the classpath.
		DataStream<String> source = FlinkUtils.createKafkaStream(
				ParameterTool.fromPropertiesFile(FlinkUtils.class.getResourceAsStream("/config.properties")),
				FlinkConstant._Kafka._Topic.CY4,
				FlinkConstant._Kafka._Group_id.COMMON,
				SimpleStringSchema.class
		);

		// Word count: pair each record with an initial count of 1, key by the
		// word (tuple field 0) and keep a running sum of field 1.
		// NOTE: anonymous classes (not lambdas) are used deliberately — they
		// retain the Tuple generic types through erasure so Flink can derive
		// the output TypeInformation without an explicit .returns(...) hint.
		DataStream<Tuple2<String, Long>> counts = source
				.map(new MapFunction<String, Tuple2<String, Long>>() {
					@Override
					public Tuple2<String, Long> map(String word) throws Exception {
						return Tuple2.of(word, 1L);
					}
				})
				.keyBy(0)
				.sum(1);

		// Sink: reshape each running count to (hash name, field, value) and
		// hand it to the custom Redis hash sink.
		counts
				.map(new MapFunction<Tuple2<String, Long>, Tuple3<String, String, String>>() {
					@Override
					public Tuple3<String, String, String> map(Tuple2<String, Long> wordCount) throws Exception {
						return Tuple3.of("cyWordCount", wordCount.f0, wordCount.f1.toString());
					}
				})
				.addSink(new MyRedisHashSink());

		// Submit the assembled job graph for execution.
		FlinkUtils.execute();
	}
}
