package com.imooc.spark

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}

/**
  * Created by zghgchao 2017/12/23 10:47
  * DataFrame和RDD的互操作
  */
object DataFrameRDDApp {

  def main(args: Array[String]): Unit = {
    SetLogger()
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("DataFrameRDDApp")
      .getOrCreate()

    // (1) Reflection-based conversion
    // inferReflection(spark)
    // (2) Programmatic conversion
    program(spark)

    spark.stop()
  }

  /**
    * (2) Programmatic conversion: build the schema at runtime with a
    * [[StructType]]. Use this when the reflection approach cannot be used
    * because the columns are not known in advance.
    *
    * @param spark the active SparkSession
    */
  def program(spark: SparkSession): Unit = {
    // RDD ==> DataFrame
    val peopleRDD = spark.sparkContext.textFile("src/data/people.txt")

    // Each input line is "id,name,age"; map it to a generic Row.
    // NOTE(review): fields are not trimmed — a value like " 29" would make
    // toInt throw NumberFormatException. Confirm the input file has no
    // whitespace around the commas.
    val rowRDD = peopleRDD.map(_.split(",")).map(attrs =>
      Row(attrs(0).toInt, attrs(1), attrs(2).toInt)
    )

    // Schema describing the Row fields above; order must match positionally.
    val structType = StructType(
      Array(
        StructField("id", IntegerType, nullable = true),
        StructField("name", StringType, nullable = true),
        StructField("age", IntegerType, nullable = true)
      ))

    val peopleDF = spark.createDataFrame(rowRDD, structType)

    peopleDF.printSchema()
    peopleDF.show()
  }

  /**
    * (1) Reflection-based conversion: infer the metadata of an RDD holding a
    * specific data type (a case class) via reflection, then query it with
    * either the DataFrame API or SQL.
    *
    * Precondition: the field names and field types must be known in advance.
    *
    * @param spark the active SparkSession
    */
  def inferReflection(spark: SparkSession): Unit = {
    // RDD ==> DataFrame
    val peopleRDD = spark.sparkContext.textFile("src/data/people.txt")

    // The implicit conversions are required for RDD.toDF ==> DataFrame.
    import spark.implicits._
    val peopleDF = peopleRDD.map(_.split(","))
      .map(attrs =>
        People(attrs(0).toInt, attrs(1), attrs(2).toInt)
      ).toDF()

    peopleDF.printSchema()
    peopleDF.show()

    // Query with the DataFrame API...
    peopleDF.filter(peopleDF.col("age") > 30).show()

    // ...and with SQL, which first requires registering a temporary view.
    peopleDF.createOrReplaceTempView("people")
    spark.sql("select * from people where age > 30").show()
  }

  /** Silences Spark/Hadoop console logging so only program output is shown. */
  def SetLogger(): Unit = {
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("com").setLevel(Level.OFF)
    System.setProperty("spark.ui.showConsoleProgress", "false")
    Logger.getRootLogger().setLevel(Level.OFF)
  }

  /** Record type used by the reflection-based conversion (one line of people.txt). */
  case class People(id: Int, name: String, age: Int)
}
