package com.lsx143.wordcount.day3.sink;

import com.lsx143.wordcount.bean.WaterSensor;
import com.lsx143.wordcount.day2.RandomWaterSensor;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

/**
 * Flink streaming job that sinks randomly generated {@link WaterSensor}
 * records into Redis.
 *
 * <p>Each record is written with {@code HSET ws <sensorId> <record.toString()>},
 * i.e. all sensors live in a single Redis hash named {@code "ws"} keyed by
 * sensor id, so later readings for the same sensor overwrite earlier ones.
 */
public class MyRedisSink {

    /**
     * Entry point: builds and runs the pipeline.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute; propagated so the
     *                   process exits non-zero instead of silently printing
     *                   the stack trace and returning success
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Connection settings for the target Redis instance.
        // NOTE(review): host/port are hard-coded for the dev cluster
        // ("hadoop103"); externalize via args or config for other environments.
        FlinkJedisPoolConfig redisConf = new FlinkJedisPoolConfig
                .Builder()
                .setHost("hadoop103")
                .setPort(6379)
                .setDatabase(0)
                .build();

        env
                .addSource(new RandomWaterSensor(RandomWaterSensor.RUN_MODE.RANDOM))
                .addSink(new RedisSink<>(redisConf, new WaterSensorRedisMapper()));

        // Propagate failures: swallowing the exception here (the previous
        // printStackTrace pattern) hid job failures from the caller/scheduler.
        env.execute("water-sensor-to-redis");
    }

    /**
     * Maps a {@link WaterSensor} onto the Redis command
     * {@code HSET ws <sensorId> <record.toString()>}.
     */
    private static class WaterSensorRedisMapper implements RedisMapper<WaterSensor> {

        /** Use HSET with the fixed hash name "ws" as the additional key. */
        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.HSET, "ws");
        }

        /** Hash field: the sensor id (later readings overwrite earlier ones). */
        @Override
        public String getKeyFromData(WaterSensor waterSensor) {
            return waterSensor.getId();
        }

        /** Hash value: the record's string form. */
        @Override
        public String getValueFromData(WaterSensor waterSensor) {
            return waterSensor.toString();
        }
    }
}