package com.myflink.day03;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

/**
 * @author Shelly An
 * @create 2020/9/18 11:24
 */
public class Sink_Redis {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: read CSV sensor records from a local text file.
        DataStreamSource<String> sensorLines = env.readTextFile("input/sensor-data.log");

        // Jedis connection settings for the Redis instance on hadoop102.
        // NOTE(review): timeout is 60 ms here — confirm this is intentional
        // (the Jedis default is 2000 ms).
        FlinkJedisPoolConfig jedisConfig = new FlinkJedisPoolConfig.Builder()
                .setHost("hadoop102")
                .setPort(6379)
                .setTimeout(60)
                .build();

        // Sink: RedisSink manages the Jedis connection lifecycle internally,
        // so no manual close is required.
        sensorLines.addSink(new RedisSink<>(jedisConfig, new SensorRedisMapper()));

        env.execute();
    }

    /**
     * Maps one CSV sensor record into a Redis HSET entry stored under the
     * outer hash key {@code "sensor"}: the hash field is column 1 of the
     * record and the hash value is column 2.
     */
    private static class SensorRedisMapper implements RedisMapper<String> {

        /**
         * Declares the Redis command plus the additional (outer) key.
         * For HSET the additional key ("sensor") names the hash; the
         * key/value returned by {@link #getKeyFromData} and
         * {@link #getValueFromData} become the inner field/value pair:
         * {@code hset(additionalKey, key, value)}.
         * For commands such as SADD there is no additional key and
         * {@code getKeyFromData} supplies the outermost key directly:
         * {@code sadd(key, value)}.
         */
        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.HSET, "sensor");
        }

        // Hash field: second CSV column of the record.
        @Override
        public String getKeyFromData(String data) {
            return data.split(",")[1];
        }

        // Hash value: third CSV column of the record.
        @Override
        public String getValueFromData(String data) {
            return data.split(",")[2];
        }
    }
}
