package com.bw.sparksql1.job1

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
  * Demonstrates loading data with the generic `DataFrameReader.load` API in
  * three formats — Parquet (the default), JSON, and CSV — registering each
  * result as a temporary view and querying it with Spark SQL.
  */
object Job8 {
    /**
      * Entry point: reads `users.parquet`, `people.json`, and `people.csv`
      * from the working directory, registers each as a temp view, and prints
      * the result of a simple query over each one.
      *
      * @param args unused command-line arguments
      */
    def main(args: Array[String]): Unit = {
      // Silence Spark's verbose INFO/WARN output so only errors reach the console.
      Logger.getLogger("org").setLevel(Level.ERROR)

      val spark = SparkSession
        .builder()
        .master("local")
        .appName("Spark SQL basic example")
        .getOrCreate()

      try {
        // Format 1: Parquet. load() without an explicit format uses the
        // default data source, which is Parquet.
        val df: DataFrame = spark.read.load("users.parquet")
        df.createOrReplaceTempView("users")
        spark.sql("select name from users").show()

        // Format 2: JSON.
        val df1 = spark.read.format("json").load("people.json")
        df1.createOrReplaceTempView("people1")
        spark.sql("select * from people1").show()

        // Format 3: CSV with a header row and ';' as the field delimiter.
        val df2 = spark.read
          .format("csv")
          .option("header", "true")
          .option("delimiter", ";")
          .load("people.csv")
        df2.createOrReplaceTempView("people2")
        spark.sql("select * from people2").show()
      } finally {
        // Always release the underlying SparkContext and its resources,
        // even if one of the reads or queries above throws.
        spark.stop()
      }
    }
}