package cn.jly.flink.redis;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisConfigBase;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.util.Collector;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

/**
 * Flink streaming job: consumes product events (JSON) from the Kafka topic
 * {@code product}, extracts (id, price) pairs, and writes them into a Redis
 * hash via the Flink Redis connector.
 */
public class SinkToRedisApp {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Redis connection settings for the sink.
        FlinkJedisPoolConfig jedisConfig =
                new FlinkJedisPoolConfig.Builder().setHost("192.168.88.101").setPort(6379).build();

        // Kafka consumer settings.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "hadoop101:9092");
        kafkaProps.setProperty("group.id", "metric-group");
        // Key deserializer (note: FlinkKafkaConsumer011 actually deserializes via
        // the DeserializationSchema below; these entries are passed through as-is).
        kafkaProps.setProperty("key.deserializer", StringDeserializer.class.getName());
        // Value deserializer
        kafkaProps.setProperty("value.deserializer", StringDeserializer.class.getName());
        kafkaProps.setProperty("auto.offset.reset", "latest");

        SingleOutputStreamOperator<Tuple2<String, String>> idPriceStream =
                env.addSource(new FlinkKafkaConsumer011<>("product", new SimpleStringSchema(), kafkaProps))
                        // Parse each JSON record into a ProductEvent.
                        .map(json -> JSON.parseObject(json, ProductEvent.class))
                        // Anonymous class (not a lambda) so Flink can extract the
                        // Tuple2 output type despite generic erasure.
                        .flatMap(new FlatMapFunction<ProductEvent, Tuple2<String, String>>() {
                            @Override
                            public void flatMap(ProductEvent event, Collector<Tuple2<String, String>> out) throws Exception {
                                // Emit the product id and price as strings.
                                out.collect(new Tuple2<>(event.getId().toString(), event.getPrice().toString()));
                            }
                        });

        // Wire the stream into the Redis sink.
        idPriceStream.addSink(new RedisSink<>(jedisConfig, new MyRedisMapper()));

        env.execute("flink redis connector");
    }
}

/**
 * RedisMapper that stores each (id, price) tuple with the command
 * {@code HSET} — i.e. into the Redis HASH named {@code lanyangji},
 * using the tuple's first field as the hash field and the second as its value.
 */
class MyRedisMapper implements RedisMapper<Tuple2<String, String>> {

    /** Use HSET against the fixed hash key {@code lanyangji}. */
    @Override
    public RedisCommandDescription getCommandDescription() {
        return new RedisCommandDescription(RedisCommand.HSET, "lanyangji");
    }

    /** Hash field: the product id (tuple field 0). */
    @Override
    public String getKeyFromData(Tuple2<String, String> tuple) {
        return tuple.f0;
    }

    /** Hash value: the product price (tuple field 1). */
    @Override
    public String getValueFromData(Tuple2<String, String> tuple) {
        return tuple.f1;
    }
}
