package core_sql.day06_sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by root on 2019/3/13.
  * Three ways to convert a plain RDD into a DataFrame:
  * 1. Map the RDD to case-class instances.
  * 2. Build Row objects, then attach a schema (the approach used below).
  * 3. Map to tuples, then call toDF passing the column names.
  *
  */
/**
  * Demo of the pre-Spark-2.x SQL entry point (`SQLContext`): build a
  * DataFrame from an `RDD[Row]` plus an explicit schema, register it as a
  * temporary table, and query it with SQL.
  */
object SqlDemo2_oldsql {
  def main(args: Array[String]): Unit = {
    // Local-mode Spark configuration for this demo.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("sql1")
    val sparkContext = new SparkContext(sparkConf)
    // Legacy (Spark 1.x) SQL entry point built on top of the SparkContext.
    val sqlContext = new SQLContext(sparkContext)

    // Sample records, one per line: "<name> <fv> <age>".
    val lines = sparkContext.parallelize(
      List("laoduan 99 30", "laozhao 9999 28", "laoyang 99 28", "laoxue 98 26")
    )

    // Parse every line into a Row(name: String, fv: Double, age: Int).
    val boyRDD: RDD[Row] = lines.map { line =>
      val parts = line.split(" ")
      Row(parts(0), parts(1).toDouble, parts(2).toInt)
    }

    // Column names and types that give the raw rows their structure.
    val schema: StructType = StructType(
      Seq(
        StructField("name", StringType),
        StructField("fv", DoubleType),
        StructField("age", IntegerType)
      )
    )

    // Combine the RDD of rows with the schema to obtain a DataFrame.
    val boys: DataFrame = sqlContext.createDataFrame(boyRDD, schema)

    // Expose the DataFrame to SQL under a temporary table name.
    boys.registerTempTable("t_boy")

    val highFv: DataFrame = sqlContext.sql("select * from t_boy where fv > 99")

    // show() is an action: it triggers the job and prints the result.
    highFv.show()
    sparkContext.stop()
  }
}