package com.doit.sparksql.day01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

/**
 * @DATE 2022/1/13/10:07
 * @Author MDK
 * @Version 2021.2.2
 * */
object SQL02_CSV02 {
  // Silence Spark's verbose INFO logging so only errors reach the console.
  // Runs once when the object is first accessed (before main's body).
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Demonstrates loading a header-less CSV file two ways:
   *   1. with Spark's default schema (columns auto-named _c0, _c1, ... as strings),
   *   2. with an explicit [[StructType]] schema giving real column names and types.
   * Each DataFrame is registered as a temp view and queried with Spark SQL.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SQLUtil.getSession

    // Load CSV data that has no header row.
    // Without a schema, Spark names columns _c0, _c1, ... and types them all as string.
    val df: DataFrame = spark
      .read
      .csv("sql_data/csv/a.csv")
    df.printSchema()
    // createOrReplaceTempView is safe to call repeatedly in the same session,
    // unlike createTempView which throws if the view name already exists.
    df.createOrReplaceTempView("user")

    // Rename the auto-generated columns via SQL aliases.
    spark.sql(
      """
        |select
        | _c0 as id,
        | _c1 as name
        |from
        |user
        |""".stripMargin).show()
    spark.sql(
      """
        |select *
        |from
        |user
        |""".stripMargin).show()
    println("------------------指定加载数据的结构-----------")

    // Explicit schema: real column names and types instead of _c0/_c1 strings.
    val schema = StructType(
      Seq(
        StructField("id", DataTypes.IntegerType),
        StructField("name", DataTypes.StringType),
        StructField("age", DataTypes.IntegerType),
        StructField("gender", DataTypes.StringType),
        StructField("city", DataTypes.StringType)
      )
    )
    // Reload the same file, this time applying the schema at read time.
    val df2 = spark.read.schema(schema).csv("sql_data/csv/a.csv")
    df2.printSchema()
    df2.createOrReplaceTempView("user2")
    spark.sql(
      """
        |select
        |*
        |from user2
        |""".stripMargin).show()

    // Typed columns make aggregations possible: average age per city.
    spark.sql(
      """
        |select
        |city,
        |avg(age) as avg_age
        |from
        |user2
        |group by
        |city
        |""".stripMargin).show()

    // Release the SparkSession's resources.
    spark.close()
  }
}
