package com.fanli.bigdata.mytest

import com.fanli.bigdata.db.{StateRedisConnections}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkContext, SparkConf}
import redis.clients.jedis._

object redisDemo {
  // Silence Spark's verbose INFO logging for this demo.
  Logger.getLogger("org").setLevel(Level.ERROR)

  // NOTE(review): built eagerly at object initialization. Acceptable for a demo,
  // but a SparkContext is heavyweight — consider constructing it inside main.
  val conf = new SparkConf().setAppName("MySpakDemo1").setMaster("local[*]")
  val sc = new SparkContext(conf)

  /**
   * Demo of basic Redis operations through the project's
   * [[StateRedisConnections]] wrapper: plain SET/GET round-trip,
   * HyperLogLog PFADD/PFCOUNT for approximate distinct counts,
   * and an INCRBY page-view counter, each with a 2-day TTL.
   */
  def main(args: Array[String]): Unit = {
    // NOTE(review): host/port are hard-coded credentials in source —
    // move to configuration before real use.
    val redisClient = new StateRedisConnections("115.159.45.213", 6379)
    val redis = redisClient.redis
    val REDIS_EXPIR: Int = 24 * 60 * 60 * 2 // TTL in seconds: 2 days

    try {
      // Plain string round-trip sanity check.
      redis.set("hz", "bar")
      val str = redis.get("hz")
      println(str)

      // HyperLogLog: approximate distinct-element count under one key.
      // NOTE(review): 'c' is a Char literal — if the client expects a String
      // element this relies on implicit conversion/serialization; confirm.
      val prefix_key = "d11_"
      redis.pfadd(prefix_key, 'c')
      redis.expire(prefix_key, REDIS_EXPIR)

      // Exact PV counter, incremented three times.
      val prefix_detail_key_pv = "d11_pv"
      redis.incrby(prefix_detail_key_pv, 1)
      redis.incrby(prefix_detail_key_pv, 1)
      redis.incrby(prefix_detail_key_pv, 1)
      redis.expire(prefix_detail_key_pv, REDIS_EXPIR)

      // Read back approximate UV (HLL cardinality) and exact PV counter.
      val duvCount = redis.pfcount(prefix_key)
      val sumPvCount = redis.get(prefix_detail_key_pv).getOrElse(0)
      println(duvCount + "\t" + sumPvCount)
    } finally {
      // Guarantee Spark shuts down even when a Redis call throws.
      sc.stop()
    }
  }
}
