package cn.jly.flink.source2sink.redis;

import cn.jly.flink.entity.Person;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisClusterConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

import java.net.InetSocketAddress;
import java.util.HashSet;

/**
 * Demo job: reads {@code Person} records from one Redis hash and writes
 * renamed copies into another Redis hash ("person_copy") through the
 * Flink Redis connector's HSET command.
 *
 * @PackageName cn.jly.flink.source2sink.redis
 * @ClassName FlinkRedisSinkDemo
 * @Author 姬岚洋
 * @Date 2021/1/15 4:57 PM
 */
public class FlinkRedisSinkDemo {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Redis cluster: six nodes on the same host, ports 7001..7006.
        final String redisHost = "192.168.172.101";
        final HashSet<InetSocketAddress> clusterNodes = new HashSet<>();
        for (int port = 7001; port <= 7006; port++) {
            clusterNodes.add(new InetSocketAddress(redisHost, port));
        }

        final FlinkJedisClusterConfig clusterConfig = new FlinkJedisClusterConfig.Builder()
                .setNodes(clusterNodes)
                .build();

        // Read from one Redis hash, append "_copy" to each last name,
        // then write the result into the "person_copy" hash.
        env.addSource(new FlinkRedisSourceDemo.RedisSource(Person.class.getSimpleName()))
                .map(new MapFunction<Person, Person>() {
                    @Override
                    public Person map(Person person) throws Exception {
                        System.out.println(person);
                        return person.setLastName(person.getLastName() + "_copy");
                    }
                })
                .addSink(new RedisSink<>(clusterConfig, new RedisSinkMapper("person_copy")));

        env.execute("FlinkRedisSinkDemo");
    }

    /**
     * {@link RedisMapper} implementation backing the sink.
     *
     * <p>The connector calls the three methods below to decide, for every
     * incoming {@link Person}: which Redis command to run (HSET against the
     * configured hash), which field inside the hash to write (the person's
     * id), and what value to store (the person serialized as JSON).
     */
    public static class RedisSinkMapper implements RedisMapper<Person> {

        /** Name of the target Redis hash (the HSET "additional key"). */
        private final String redisHashKey;

        public RedisSinkMapper(String redisHashKey) {
            this.redisHashKey = redisHashKey;
        }

        /**
         * Declares the Redis command as HSET with {@code redisHashKey} as
         * the hash this sink writes into.
         *
         * @return the command description used by the connector
         */
        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.HSET, redisHashKey);
        }

        /** Hash field: the person's id rendered as a string. */
        @Override
        public String getKeyFromData(Person person) {
            return String.valueOf(person.getId());
        }

        /** Hash value: the person serialized to a JSON string. */
        @Override
        public String getValueFromData(Person person) {
            return JSON.toJSONString(person);
        }
    }
}
