package com.central.test

import org.apache.spark.sql.SparkSession
import org.slf4j.LoggerFactory

/**
 * Small Spark word-count playground: builds a local SparkSession, counts
 * occurrences over two in-memory lists, and hosts (mostly commented-out)
 * experiments with RDD joins and sorting.
 *
 * @author huonan
 * @since 2020/6/10
 * @version V1.0
 **/
object TestSS {

  // TODO-lxp
  // Single SLF4J logger for this object. The original code performed three
  // separate lookups of the same logger name; LoggerFactory caches by name,
  // so all three vals were always the same instance. `logger` and `log2`
  // are kept as aliases for backward compatibility with any external users.
  val log = LoggerFactory.getLogger("TestSS")
  val logger = log
  val log2 = log

  /**
   * Entry point: creates a local SparkSession, builds word-count RDDs from
   * two small in-memory lists, and logs progress. The join/sort experiments
   * below are intentionally left commented out.
   *
   * @param args command-line arguments (only logged, never parsed)
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("Test")
      //          .master("spark://10.68.2.240:7077")
      .master("local[2]")
      .getOrCreate()
    try {
      val list = List("A", "B", "B", "C", "C", "C", "D", "D", "D", "D")
      val list2 = List("C", "C", "C", "D")

      val listRdd = spark.sparkContext.parallelize(list)
      val list2Rdd = spark.sparkContext.parallelize(list2)

      // The original chained four consecutive `.map(x => (x._1, x._2 + 1))`
      // calls after the initial (x, 1) pairing — equivalent to seeding each
      // occurrence with 5 before summing, so the chain is collapsed here.
      // (Renamed from `value` to `listCount`, the name the commented-out
      // experiments below already reference.)
      val listCount = listRdd
        .map(x => (x, 5))
        .reduceByKey(_ + _)
      val list2Count = list2Rdd
        .map(x => (x, 1))
        .reduceByKey(_ + _)
      log.info("========")
      // BUG FIX: the original line read `log.("ClassName: ...")` — a syntax
      // error (missing method name). Restored the intended parameterized
      // `info` call; args is joined so SLF4J prints readable content rather
      // than the array's default toString.
      log.info("ClassName: TestSS$.Method:main, args={}", args.mkString(","))
      //    listCount.foreach(println)
      //    list2Count.foreach(println)
      //    logger.info("========")
      //    val leftJoinRDD = listCount.leftOuterJoin(list2Count)
      //    logger.info("====leftJoinRDD====")
      //    leftJoinRDD.filter(_._2._2.isDefined).map(x => (x._1, (x._2._1, x._2._2.getOrElse("")))) foreach (println)
      //    val rightJoinRDD = listCount.rightOuterJoin(list2Count)
      //    logger.info("====rightJoinRDD====")
      //    rightJoinRDD.map(x => (x._1, x._2._2)).foreach(println)
      //    logger.info("====JoinRDD====")
      //    val joinRdd = listCount.join(list2Count)
      //    joinRdd.foreach(println)
      //    logger.info("list: " + list.toString());
      //    val dataRDD = spark.sparkContext.parallelize(list)
      //    val result = dataRDD.map(x => (x, 1))
      //      .reduceByKey(_ + _)
      //      .sortByKey()
      //      .collect()
      //    val reverseRDD = result.reverse
      //    val sc = spark.sparkContext
      //    val z = sc.parallelize(List("a", "b", "c", "d", "e", "f"), 2)
      //    z.map(x => x + 1).filter(x => x != 3)
      //    val y = sc.parallelize(List("1", "2", "3"))
      //    val zy = z.cartesian(y);
      //    val x = sc.parallelize(List(1, 2, 3))
      //    val zx = z.cartesian(x)
      //    zy.foreach(println)
      //    zx.foreach(print)
      //result.reverse
    } finally {
      // RESOURCE FIX: the original left the session open (`//spark.close()`
      // was commented out). Always release Spark resources on exit.
      spark.stop()
    }
  }
}
