package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{quarter, to_date}

object Test17_anaHouse {
  /**
   * Reads the `db_hive1.house1` Hive table and prints two quarterly
   * sales summaries: the number of houses sold per quarter and the
   * total selling price per quarter, each sorted in descending order.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "root")
    // Keep Spark's own logging quiet so the show() output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName("toHive")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    try {
      // Read the source table from Hive.
      val frame = spark
        .read.table("db_hive1.house1")

      // Parse the raw yyyyMMdd string into a proper date column.
      // NOTE(review): the source column is named "sale_data" — looks like a
      // typo for "sale_date"; confirm against the Hive schema before renaming.
      val frame1 = frame.withColumn("date",
        to_date($"sale_data", "yyyyMMdd"))

      // Derive the quarter number once; both aggregations group by it.
      val byQuarter = frame1.withColumn("date", quarter($"date"))

      // Houses sold per quarter, busiest quarter first.
      byQuarter
        .groupBy("date")
        .count()
        .sort($"count".desc)
        .show()

      // Total sales amount per quarter, highest first; cast the sum to
      // long so it is not printed in scientific notation.
      byQuarter
        .groupBy("date")
        .sum("selling_price")
        .withColumn("sum(selling_price)", $"sum(selling_price)".cast("long"))
        .sort($"sum(selling_price)".desc)
        .show()

      // TODO: write the data back to Hive as a table partitioned by year.
    } finally {
      // Always release the local Spark context and its threads.
      spark.stop()
    }
  }
}
