package cn.dfun.sample.flink.apitest

import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.{FlinkJedisClusterConfig, FlinkJedisPoolConfig}
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}

/**
  * Redis sink example: writes sensor readings into a Redis hash.
  * Useful redis-cli commands for inspecting the result:
  *   keys *
  *   hget sensor_temp sensor_1
  *   hgetall sensor_temp
  */
object RedisSinkTest {
  /**
    * Entry point: reads CSV sensor lines (`id,timestamp,temperature`) from a
    * local file, parses them into [[SensorReading]], and sinks them to Redis
    * with HSET into the hash `sensor_temp` (see [[MyRedisMapper]]).
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism keeps the example output deterministic and easy to inspect.
    env.setParallelism(1)
    // NOTE(review): hard-coded Windows path — consider taking it from args for portability.
    val inputPath = "C:\\wor\\flink-sample\\src\\main\\resources\\sensor"
    val inputStream = env.readTextFile(inputPath)

    // Parse each CSV line into the SensorReading case class.
    // Fields are trimmed so lines like "sensor_1, 1547718199, 35.8" also parse.
    val dataStream = inputStream
      .map(line => {
        val fields = line.split(",").map(_.trim)
        SensorReading(fields(0), fields(1).toLong, fields(2).toDouble)
      })

    // Jedis connection config for the target Redis node.
    val conf = new FlinkJedisPoolConfig.Builder()
      .setHost("node-01")
      .setPort(6379)
      .build()

    dataStream.addSink(new RedisSink[SensorReading](conf, new MyRedisMapper))
    env.execute("redis sink test")
  }
}

/**
  * Maps a [[SensorReading]] onto a Redis HSET command targeting the
  * `sensor_temp` hash: field = sensor id, value = temperature.
  * Readings with the same id overwrite the previous hash entry.
  */
class MyRedisMapper extends RedisMapper[SensorReading] {

  /** Redis command to issue: HSET sensor_temp &lt;key&gt; &lt;value&gt;. */
  override def getCommandDescription: RedisCommandDescription =
    new RedisCommandDescription(RedisCommand.HSET, "sensor_temp")

  /** The hash field: the sensor's id. */
  override def getKeyFromData(reading: SensorReading): String = reading.id

  /** The hash value: the temperature rendered as a string. */
  override def getValueFromData(reading: SensorReading): String =
    reading.temperature.toString
}
