package sink


import bean.SensorReading
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}

/**
  * Flink streaming job that reads sensor readings from a text file and
  * writes each reading into a Redis hash via the Flink Redis connector.
  *
  * @author xiao kun tai (QQ 1667847363)
  * @since 2021/11/13 23:32
  */
/**
  * Streaming job: parse sensor CSV lines from a file source and sink each
  * reading into a Redis hash using the Flink Redis connector.
  *
  * NOTE(review): this object shares its simple name with the imported
  * `org.apache.flink.streaming.connectors.redis.RedisSink` class — it still
  * resolves (object vs. class namespace), but the shadowing is easy to misread.
  */
object RedisSink {

  def main(args: Array[String]): Unit = {

    // Single-parallelism environment so the printed output stays ordered.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // File source: one CSV record per line — assumed format is
    // "id,timestamp,temperature" (TODO confirm against sensor.txt).
    val inputPath: String = "src/main/resources/sensor.txt"
    val lines: DataStream[String] = env.readTextFile(inputPath)

    // Parse each CSV line into a SensorReading; malformed lines will fail the
    // job (no guard on field count or numeric parsing, same as the original).
    val readings: DataStream[SensorReading] = lines.map { line =>
      val fields = line.split(",")
      SensorReading(fields(0), fields(1).toLong, fields(2).toDouble)
    }
    readings.print()

    // Jedis pool configuration pointing at the target Redis instance.
    val jedisConfig = new FlinkJedisPoolConfig.Builder()
      .setHost("192.168.88.106")
      .setPort(6379)
      .build()

    // Sink: every reading becomes one field of the "sensor_scala" Redis hash.
    readings.addSink(new RedisSink[SensorReading](jedisConfig, new MyRedisMapper))

    env.execute("redis sink")
  }

  /**
    * Maps a [[SensorReading]] onto the Redis command
    * `HSET sensor_scala <id> <temperature>`.
    */
  class MyRedisMapper extends RedisMapper[SensorReading] {

    // Command: HSET into the hash named "sensor_scala".
    override def getCommandDescription: RedisCommandDescription =
      new RedisCommandDescription(RedisCommand.HSET, "sensor_scala")

    // Hash field: the sensor id.
    override def getKeyFromData(t: SensorReading): String = t.id

    // Hash value: the temperature rendered as a string.
    override def getValueFromData(t: SensorReading): String = t.temperature.toString
  }

}
