package com.wu.spark
import org.apache.spark.sql.types.{StringType, IntegerType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}

/**
  * Demonstrates the two ways of converting an RDD into a DataFrame:
  * reflection-based schema inference and programmatic schema construction.
  */
object DataFrameRdd {

  /** Entry point: runs the programmatic-schema demo on a local 2-core master. */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("DataFrameRdd")
      .master("local[2]")
      .getOrCreate()
    try {
      // inferReflection(spark)
      projrom(spark)
    } finally {
      // Always release the session, even if the job above fails.
      spark.stop()
    }
  }

  /**
    * Programmatic approach: builds a DataFrame from an RDD of [[Row]]s plus an
    * explicitly constructed [[StructType]] schema. Use this when the schema is
    * only known at runtime.
    *
    * @param spark active SparkSession (reused, not re-created)
    * @param path  input file with comma-separated lines of the form "id,name,age"
    */
  def projrom(spark: SparkSession,
              path: String = "C://Users//wudl//Documents//ideaWorkSpaces2018.4.13//SparkSQL//src//ceshi2.txt"): Unit = {
    val rdd = spark.sparkContext.textFile(path)

    // Map each "id,name,age" line onto a Row matching the schema below.
    // NOTE(review): toInt throws on malformed lines — assumes clean input.
    val infoRDD = rdd.map(_.split(",")).map(line => Row(line(0).toInt, line(1), line(2).toInt))
    val structType = StructType(Array(
      StructField("id", IntegerType, nullable = true),
      StructField("name", StringType, nullable = true),
      StructField("age", IntegerType, nullable = true)
    ))

    val infoDF = spark.createDataFrame(infoRDD, structType)
    infoDF.printSchema()
    infoDF.show()

    // Same filter expressed via the DataFrame API ...
    infoDF.filter(infoDF.col("age") > 30).show()

    // ... and via SQL over a temporary view.
    infoDF.createOrReplaceTempView("student")
    spark.sql("select * from student").show()
  }

  /**
    * Reflection approach: infers the schema from the `info` case class through
    * the implicit Encoder provided by `spark.implicits._`.
    *
    * @param spark active SparkSession (reused, not re-created)
    * @param path  input file with comma-separated lines of the form "id,name,age"
    */
  def inferReflection(spark: SparkSession,
                      path: String = "C://Users//wudl//Documents//ideaWorkSpaces2018.4.13//SparkSQL//src//t.json"): Unit = {
    // BUG FIX: the original built a second SparkSession here, shadowing the
    // `spark` parameter and leaking a session; reuse the one passed in.
    val rdd = spark.sparkContext.textFile(path)

    // rdd.toDF() requires the session's implicit conversions in scope.
    import spark.implicits._
    val infoDF = rdd.map(_.split(",")).map(line => info(line(0).toInt, line(1), line(2).toInt)).toDF()
    infoDF.show()

    // Same filter expressed via the DataFrame API ...
    infoDF.filter(infoDF.col("age") > 30).show()

    // ... and via SQL over a temporary view.
    infoDF.createOrReplaceTempView("user")
    spark.sql("select * from user").show()
  }

  /** Schema carrier for the reflection-based conversion: (id, name, age). */
  case class info(id: Int, name: String, age: Int)

}
