package com.scala.learn.sparksql2

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types._

/**
  * @Copyright: Shanghai Definesys Company.All rights reserved.
  * @Description:
  * @author: chuhaitao
  * @since: 2019/3/9 20:16
  * @history:
  *          1.2019/3/9 created by chuhaitao
  */
/**
  * Minimal Spark SQL demo: builds a DataFrame from an in-memory RDD of CSV-like
  * strings via an explicit schema, registers it as a temp view, and queries it.
  */
object SparkDemo {

  def main(args: Array[String]): Unit = {
    // Build (or reuse) a local SparkSession.
    val sparkSession: SparkSession = SparkSession.builder()
      .appName("app")
      .master("local")
      .getOrCreate()

    try {
      // Create an RDD of raw "id,name,age,fv" records from the driver.
      val lines = sparkSession.sparkContext
        .parallelize(List("2,laozhuang,20,99", "3,xiaoming,18,199", "4,xiaohuang,20,200", "5,xiaoniu,18,99"))

      // Parse each record into a Row matching the schema below.
      // NOTE: toLong/toInt will throw on malformed input; acceptable here
      // because the data is hard-coded above.
      val rows: RDD[Row] = lines.map { line =>
        val fields = line.split(",")
        val id = fields(0).toLong
        val name = fields(1)
        val age = fields(2).toInt
        val fv = fields(3).toInt
        Row(id, name, age, fv)
      }

      // Explicit schema describing the Row layout (all columns nullable).
      val schema = StructType(
        List(
          StructField("id", LongType, true),
          StructField("name", StringType, true),
          StructField("age", IntegerType, true),
          StructField("fv", IntegerType, true)
        )
      )

      // Combine rows and schema into a DataFrame.
      val pdf: DataFrame = sparkSession.createDataFrame(rows, schema)

      // Register a session-scoped view so it can be queried with SQL.
      pdf.createTempView("user")

      // Run the query and print the result to stdout.
      val user: DataFrame = sparkSession.sql("select * from user")
      user.show()
    } finally {
      // Release cluster/local resources even if the job above fails.
      sparkSession.stop()
    }
  }
}
