package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}

object Test16_house {
  /** Reads a house CSV file, casts the `sale_data` column to string, and appends
   * the result to a Hive table.
   *
   * @param args optional overrides: args(0) = input CSV path (default "input/house.csv"),
   *             args(1) = target Hive table (default "db_hive1.house1").
   */
  def main(args: Array[String]): Unit = {
    // Impersonate root so writes to the HDFS-backed Hive warehouse are permitted.
    System.setProperty("HADOOP_USER_NAME", "root")
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    // Allow path/table to be overridden from the command line; the defaults
    // reproduce the original hard-coded behavior exactly.
    val inputPath   = if (args.length > 0) args(0) else "input/house.csv"
    val targetTable = if (args.length > 1) args(1) else "db_hive1.house1"

    val spark = SparkSession.builder()
      .appName("toHive")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      import spark.implicits._

      // Read the CSV with a header row and schema inference.
      val frame = spark.read
        .option("inferSchema", "true")
        .option("header", "true")
        .csv(inputPath)
      frame.printSchema()

      // Cast sale_data to string so the Hive column type is stable across runs
      // (inferSchema may otherwise pick date/timestamp depending on the data).
      // NOTE(review): column name "sale_data" looks like a typo for "sale_date" —
      // confirm against the CSV header before renaming anywhere.
      val frame1 = frame.withColumn("sale_data", $"sale_data".cast("string"))
      frame1.printSchema()

      // Append into the Hive table (Spark creates it on the first run).
      frame1.write
        .mode(SaveMode.Append)
        .saveAsTable(targetTable)
    } finally {
      // Original code never stopped the session; release local Spark resources
      // even if the job fails.
      spark.stop()
    }
  }
}
