package com.fudian.spark_platform.DBConnector

import com.fudian.spark_platform.Configure.DataSourceInputConfig
import com.redislabs.provider.redis._
import com.redislabs.provider.redis.rdd.RedisKeysRDD
import org.apache.spark.rdd.RDD


class RedisConnector(s: DataSourceInputConfig) extends DBBase {

    // Redis connection coordinates; defaults are overwritten from the input
    // config by the constructor call below.
    var RedisHost: String = "127.0.0.1"
    var RedisPort: String = "6379"

    // BUG FIX: `def RedisConnector(...)` below is NOT a Scala auxiliary
    // constructor (those are spelled `def this(...)`) — it was an ordinary
    // method that nothing ever invoked, so the constructor argument `s` was
    // ignored and every instance silently connected to 127.0.0.1:6379.
    // Run the configuration explicitly as part of primary construction.
    RedisConnector(s)

    /**
      * Applies the given data-source config to this connector: records the
      * Redis host/port and pushes them into the underlying Spark configuration
      * via `setConf` (inherited from DBBase).
      *
      * Kept public with its original name for backward compatibility with any
      * caller that invoked it manually after construction.
      *
      * @param s input config; must contain "DataSourceHost" and "DataSourcePort"
      */
    def RedisConnector(s: DataSourceInputConfig) = {
        this.RedisHost = s.inputConfig("DataSourceHost").toString
        this.RedisPort = s.inputConfig("DataSourcePort").toString
        this.setConf("redis.host", this.RedisHost)
        // RedisPort is already a String — redundant .toString removed.
        this.setConf("redis.port", this.RedisPort)
    }

    /**
      * Reads key/value pairs from Redis. The key pattern may contain the
      * wildcard `*` to match multiple keys.
      *
      * @param keys         key or key pattern to read
      * @param numPartition number of partitions for the resulting RDD
      * @return RDD of (key, value) pairs
      */
    def getKey(keys: String, numPartition: Int = 3): RDD[(String, String)] = {
        this.sc.sparkContext.fromRedisKV(keys, numPartition)
    }

    /**
      * Fetches data for multiple explicit keys.
      *
      * NOTE(review): name should be `getKeys` per camelCase convention, but it
      * is kept as-is to avoid breaking existing callers.
      *
      * @param keys         the keys to read
      * @param numPartition number of partitions for the resulting RDD
      * @return RedisKeysRDD over the requested keys
      */
    def getkeys(keys: Array[String], numPartition: Int = 3): RedisKeysRDD = {
        this.sc.sparkContext.fromRedisKeys(keys, numPartition)
    }

    /**
      * Reads a Redis list as an RDD of its elements.
      *
      * @param keys         key (or key pattern) of the list(s) to read
      * @param numPartition number of partitions for the resulting RDD
      * @return RDD of list elements
      */
    def getList(keys: String, numPartition: Int = 3): RDD[String] = {
        this.sc.sparkContext.fromRedisList(keys, numPartition)
    }

}
