package com.study.spark.scala.dataframe

import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}

/**
  * Demonstrates the two ways to convert an RDD into a DataFrame:
  * reflection-based schema inference (via a case class) and
  * programmatic schema construction (via [[StructType]]).
  *
  * Expects the input file to contain CSV lines of the form "id,name,age".
  *
  * @author stephen
  * @create 2019-02-26 09:06
  * @since 1.0.0
  */
object DataFrameRDDDemo {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("DataFrameRDDDemo")
      .master("local[2]")
      .getOrCreate()
    // NOTE(review): hard-coded local path; consider taking it from args.
    val path = "/Users/stephen/Documents/03code/java-demo/bigdata-study/study-spark/src/main/resource/data/infos.txt"

    // Approach 1: reflection-based schema inference
    //reflection(spark, path)

    // Approach 2: programmatic schema definition
    program(spark, path)

    spark.stop()
  }

  /**
    * Reflection-based conversion: Spark infers the schema from the
    * fields of the [[Info]] case class.
    *
    * @param spark active SparkSession
    * @param path  path to a text file of "id,name,age" CSV lines
    */
  def reflection(spark: SparkSession, path: String): Unit = {
    val rdd = spark.sparkContext.textFile(path)
    // Implicit conversions enabling rdd.toDF()
    import spark.implicits._
    // Map each CSV line to an Info, then convert the RDD to a DataFrame.
    // NOTE(review): toInt throws NumberFormatException on malformed lines.
    val infoDF = rdd.map(_.split(",")).map(line => Info(line(0).toInt, line(1), line(2).toInt)).toDF()
    infoDF.printSchema()
    infoDF.show()
    // DataFrame API style: select * from table where age > 30
    infoDF.filter(infoDF.col("age") > 30).show()
    // SQL style: select * from table where age > 30
    infoDF.createOrReplaceTempView("info")
    spark.sql("select * from info where age>30").show()
  }

  /** Record mirroring one line of the input file: "id,name,age". */
  final case class Info(id: Int, name: String, age: Int)

  /**
    * Programmatic conversion: the schema is built explicitly with
    * [[StructType]]/[[StructField]] and applied to an RDD of [[Row]]s.
    *
    * @param spark active SparkSession
    * @param path  path to a text file of "id,name,age" CSV lines
    */
  def program(spark: SparkSession, path: String): Unit = {
    val rdd = spark.sparkContext.textFile(path)

    // NOTE(review): toInt throws NumberFormatException on malformed lines.
    val infoRDD = rdd.map(_.split(",")).map(line => Row(line(0).toInt, line(1), line(2).toInt))

    // Explicit schema matching the Row structure above (all fields nullable).
    val structType = StructType(Array(
      StructField("id", IntegerType, true),
      StructField("name", StringType, true),
      StructField("age", IntegerType, true)))

    val infoDF = spark.createDataFrame(infoRDD, structType)
    infoDF.printSchema()
    infoDF.show()

    // DataFrame API style: select * from table where age > 30
    infoDF.filter(infoDF.col("age") > 30).show()
    // SQL style: select * from table where age > 30
    infoDF.createOrReplaceTempView("info")
    spark.sql("select * from info where age>30").show()

  }

}
