package com.doit.sparksql.day01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.DataFrame

/**
 * @DATE 2022/1/13/11:02
 * @Author MDK
 * @Version 2021.2.2
 *
 *    Parse the well-formed records in a JSON file.
 *    Filter out dirty records (two approaches: `filter` and `where` on a column).
 *    Malformed rows are captured in the `_corrupt_record` column.
 *    Implicit conversions:  import spark.implicits._
 * */
object SQL_Json01 {
  // Silence Spark/Hadoop's verbose logging; only errors are printed.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Entry point: reads a JSON file into a DataFrame, prints its inferred
   * schema, registers it as a temporary view, and shows a full SELECT.
   *
   * Fix over the original: the SparkSession is now closed in a `finally`
   * block, so it is released even when reading the file or running the
   * query fails (previously a thrown exception leaked the session).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SQLUtil.getSession
    try {
      // Load JSON data; per the header notes, malformed rows end up in
      // the _corrupt_record column of the inferred schema.
      val df: DataFrame = spark.read.json("sql_data/json/a.json")
      df.printSchema()
      df.createTempView("tb_json")
      spark.sql("select * from tb_json").show()
    } finally {
      // Guarantee session shutdown on both success and failure paths.
      spark.close()
    }
  }
}
