package com.neuedu

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._

/**
 * Batch ETL job: loads four vehicle-domain CSV datasets from HDFS,
 * casts their columns to proper types, and overwrites the corresponding
 * Hive tables in the `vehicledb` database.
 */
object VehicleHiveSql {

  def main(args: Array[String]): Unit = {
    // Impersonate root for HDFS/Hive access (local/dev convenience —
    // NOTE(review): confirm this is acceptable outside a dev environment).
    System.setProperty("HADOOP_USER_NAME", "root")

    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    val sparkSession = SparkSession.builder()
      .config(sparkConf)
      .enableHiveSupport()
      .appName("VehicleHiveSql")
      .getOrCreate()

    // NOTE(review): paths C and D end in ".tmp" — these look like in-flight
    // (e.g. Flume) files; confirm they are the intended final inputs.
    val pathA = "/vehicle/data/a/a_vehicle.1734578180872"
    val pathB = "/vehicle/data/b/b_vehicle.1734578502030"
    val pathC = "/vehicle/data/c/c_vehicle.1734595019409.tmp"
    val pathD = "/vehicle/data/d/d_vehicle.1734595083171.tmp"

    // Ensure the session is released even when a read/cast/write fails.
    try {
      val dfA = readCsv(sparkSession, pathA)
      val dfB = readCsv(sparkSession, pathB)
      val dfC = readCsv(sparkSession, pathC)
      val dfD = readCsv(sparkSession, pathD)

      // Dataset A: per-model China automobile sales figures.
      val dfASelected = dfA.select(
        col("model"),
        col("units_sold").cast(IntegerType),
        col("make"),
        col("low_price").cast(DecimalType(10, 2)),
        col("high_price").cast(DecimalType(10, 2)),
        col("times").cast(DateType),
        col("is_ev"),
        col("body_type"),
        col("brand"),
        col("brand_country")
      )
      overwriteHiveTable(dfASelected, "vehicledb.a_china_automobile_sales")

      // Dataset B: global EV indicators (region/category/powertrain metrics).
      val dfBSelected = dfB.select(
        col("region"),
        col("category"),
        col("parameter"),
        col("mode"),
        col("powertrain"),
        col("year").cast(IntegerType),
        col("unit"),
        col("valueb").cast(DecimalType(15, 8))
      )
      overwriteHiveTable(dfBSelected, "vehicledb.b_global_ev")

      // Dataset C: NEV/BEV/PHEV production and sales time series.
      val dfCSelected = dfC.select(
        col("times").cast(DateType),
        col("nev_production").cast(IntegerType),
        col("nev_sales").cast(IntegerType),
        col("bev_production").cast(IntegerType),
        col("bev_sales").cast(IntegerType),
        col("phev_production").cast(IntegerType),
        col("phev_sales").cast(IntegerType)
      )
      overwriteHiveTable(dfCSelected, "vehicledb.c_vehicle_production_sales")

      // Dataset D: charging-pile counts per province plus national DC/AC totals.
      val dfDSelected = dfD.select(
        col("times").cast(DateType),
        col("chongqing").cast(IntegerType),
        col("beijing").cast(IntegerType),
        col("guangdong").cast(IntegerType),
        col("shanghai").cast(IntegerType),
        col("zhejiang").cast(IntegerType),
        col("anhui").cast(IntegerType),
        col("tianjin").cast(IntegerType),
        col("sichuan").cast(IntegerType),
        col("shandong").cast(IntegerType),
        col("jiangsu").cast(IntegerType),
        col("dc_china").cast(IntegerType),
        col("ac_china").cast(IntegerType)
      )
      overwriteHiveTable(dfDSelected, "vehicledb.d_china_charging_pile")
    } finally {
      // close() is just an alias of stop(); one call is sufficient.
      sparkSession.stop()
    }
  }

  /** Reads a headered CSV file from HDFS; all columns arrive as strings. */
  private def readCsv(spark: SparkSession, path: String): org.apache.spark.sql.DataFrame =
    spark.read.option("header", "true").csv(path)

  /** Overwrites (creating if absent) the given Hive table with `df`'s rows. */
  private def overwriteHiveTable(df: org.apache.spark.sql.DataFrame, table: String): Unit =
    df.write
      .mode(SaveMode.Overwrite)
      .saveAsTable(table)
}

