package org.example

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author yangzhen14
 * @create 2021/4/22 9:30
 */
object test {

  /**
   * Demo driver: runs pair-RDD aggregations (`reduceByKey`, `countByKey`)
   * over a small in-memory dataset and persists the `countByKey` result
   * into Redis logical database 1 via the project's `RedisClient` pool.
   *
   * Fixes vs. previous version:
   *  - `jedis.close()` was called inside the foreach loop, returning the
   *    pooled connection after the FIRST element; every later `jedis.set`
   *    then ran on a closed/returned resource. The close now happens once,
   *    in a `finally`, after the whole loop.
   *  - connection cleanup is exception-safe (`try`/`finally`).
   *  - the SparkContext is stopped before exit.
   */
  def main(args: Array[String]): Unit = {
//    System.setProperty("hadoop.home.dir", "C:\\\\Users\\\\yangzhen14\\\\hadoop-2.7.1")
    val conf = new SparkConf().setAppName("HelloWorld").setMaster("local[*]")
    val jedis = RedisClient.pool.getResource
    jedis.select(1) // write into Redis logical DB 1
    val sc = new SparkContext(conf)

    val words = Array(("one", 10), ("two", -1), ("two", 8), ("three", 0), ("three", -6), ("three", -4))
    words.foreach(println)

    val wordPairsRDD = sc.parallelize(words)
    wordPairsRDD.foreach(println)

    // Sum the values per key on the executors.
    val wordCountsWithReduce = wordPairsRDD.reduceByKey(_ + _)
    println(wordCountsWithReduce.count())
    wordCountsWithReduce.foreach(println)

    // countByKey() collects a local Map[String, Long] to the driver, so the
    // foreach below runs driver-side and may safely use the `jedis` handle.
    val wordCountsWithGroup = wordPairsRDD.countByKey()
    try {
      wordCountsWithGroup.foreach { case (key, count) =>
        println((key, count))
        jedis.set(key, String.valueOf(count))
      }
    } finally {
      // BUG FIX: previously closed inside the loop, after the first element.
      jedis.close()
    }

    // Read one key back to verify the writes landed.
    val jedis2 = RedisClient.pool.getResource
    try {
      val s = jedis2.get("three")
      println(s)
    } finally {
      jedis2.close()
    }
    println(RedisClient.pool.getNumActive)
    println("--------------------------------------------------")

    val word = Array(Array("one", "10"), Array("two", "1"), Array("two", "8"), Array("three", "0"), Array("three", "-6"), Array("three", "-4"))
    val wordPairsR = sc.parallelize(word)
    wordPairsR.flatMap(x => x).collect().foreach(println)

    sc.stop()
  }
}
