import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext, sql}
import org.junit.Test

class RDD_SQL {

  /**
    * Word count over a local text file using the low-level RDD API.
    *
    * NOTE(review): the input path is machine-specific (Windows drive letter);
    * consider moving it to a test resource.
    */
  @Test
  def rddTest(): Unit = {
    val conf = new SparkConf().setMaster("local[6]").setAppName("RDD")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:/IDEA/IntelliJ IDEA 2019.1/sp/datas/WordCount")
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        .collect()
        .foreach(println)
    } finally {
      // Stop the context so another test in the same JVM can create its own;
      // only one active SparkContext is allowed per JVM.
      sc.stop()
    }
  }

  /**
    * Filters a small in-memory Dataset of Person rows via Spark SQL
    * (temp view + SQL string) and prints the matching names.
    */
  @Test
  def sqlTest(): Unit = {
    // Idiomatic accessor instead of `new SparkSession.Builder()`.
    val spark = SparkSession.builder()
      .appName("SQL")
      .master("local[6]")
      .getOrCreate()
    try {
      import spark.implicits._
      val source = spark.sparkContext.parallelize(Seq(Person("张三", 10), Person("李四", 19)))
      val person = source.toDS()

      // Equivalent typed Dataset DSL:
      //   person.where('age > 10).where('age < 20).select('name).as[String].show()
      person.createOrReplaceTempView("person")
      spark.sql("select name from person where age > 10 and age < 20").show()
    } finally {
      // Release the session/context so other tests can start their own.
      spark.stop()
    }
  }
}
