package com.fudian.spark_platform.DBConnector

import com.redislabs.provider.redis._
import org.apache.spark.rdd.RDD
import com.redislabs.provider.redis.rdd.RedisKeysRDD

class RedisConnector extends DBBase {

    // Redis endpoint settings; defaults target a local Redis instance.
    // NOTE(review): public mutable state — callers can reconfigure at any time.
    var RedisHost: String = "127.0.0.1"
    var RedisPort: Integer = 6379

    /**
      * Configures this connector with the given Redis endpoint and pushes the
      * settings into the underlying Spark configuration via `setConf`.
      *
      * NOTE(review): despite its name this is a regular method, NOT an auxiliary
      * constructor (Scala auxiliary constructors are written `def this(...)`).
      * It must be called explicitly after instantiation, otherwise the defaults
      * declared on the fields are never written to the Spark configuration.
      *
      * @param redisHost Redis host name (default "127.0.0.1")
      * @param redisPort Redis port (default 6379)
      */
    def RedisConnector(redisHost: String = "127.0.0.1", redisPort: Integer = 6379): Unit = {
        this.RedisHost = redisHost
        this.RedisPort = redisPort
        this.setConf("redis.host", this.RedisHost)
        this.setConf("redis.port", this.RedisPort.toString)
    }

    /**
      * Fetches key-value pairs whose keys match the given pattern; glob-style
      * wildcards such as `*` are supported, so multiple entries may be returned.
      *
      * @param keys key or key pattern to look up
      * @param numPartition number of partitions for the resulting RDD (default 3)
      * @return RDD of (key, value) string pairs
      */
    def getKey(keys: String, numPartition: scala.Int = 3): RDD[(String, String)] = {
        this.sc.fromRedisKV(keys, numPartition)
    }

    /**
      * Fetches the data for multiple explicit keys.
      *
      * @param keys the keys to look up
      * @param numPartition number of partitions for the resulting RDD (default 3)
      * @return a [[RedisKeysRDD]] over the requested keys
      */
    def getkeys(keys: Array[String], numPartition: scala.Int = 3): RedisKeysRDD = {
        this.sc.fromRedisKeys(keys, numPartition)
    }

    /**
      * Consistently-named alias for `getkeys` (matches the camelCase of the
      * sibling methods). Prefer this spelling in new code; `getkeys` is kept
      * for backward compatibility with existing callers.
      *
      * @param keys the keys to look up
      * @param numPartition number of partitions for the resulting RDD (default 3)
      * @return a [[RedisKeysRDD]] over the requested keys
      */
    def getKeys(keys: Array[String], numPartition: scala.Int = 3): RedisKeysRDD =
        getkeys(keys, numPartition)

    /**
      * Fetches a Redis list as an RDD of its elements.
      *
      * @param keys key of the Redis list
      * @param numPartition number of partitions for the resulting RDD (default 3)
      * @return RDD of the list's string elements
      */
    def getList(keys: String, numPartition: scala.Int = 3): RDD[String] = {
        this.sc.fromRedisList(keys, numPartition)
    }

}
