package com.qiche.dws

import com.qiche.common.SparkBase
import org.apache.spark.sql.SparkSession

object DwsVehicleOrder extends SparkBase {

  /** Populates the `dws_vehicle_order` wide table.
    *
    * Joins the ODS car-buyer list (driving table `a`) with order evaluations
    * (`b`), intended users (`c`), distributors (`d`) and vehicle models (`e`)
    * via LEFT JOINs, so every buyer row is retained even when a dimension is
    * missing, and appends the result into `dws_vehicle_order`.
    *
    * NOTE(review): columns shared across sources (age, gender, full_name) are
    * selected from several tables without aliases; the INSERT is positional so
    * this works, but verify the target schema's column order matches this
    * SELECT list.
    *
    * @param spark active session — presumably created with Hive support by
    *              `SparkBase.getContextLocal`; confirm against that trait.
    */
  def execute(spark: SparkSession): Unit = {
    // Removed unused `import spark.implicits._` — no DataFrame/Dataset
    // encoder conversions are performed in this method.
    //
    // Plain triple-quoted string: the former `s` interpolator was unnecessary
    // (the query contains no `${...}` substitutions).
    spark.sql(
      """INSERT INTO table dws_vehicle_order SELECT a.user_id, a.full_name, a.age, a.gender, a.car_model , a.car_purchase_time,
         |a.car_purchasing_dealer, a.license_plate_number, a.vin_no, a.creation_time AS a_creation_time , b.order_id,
         |b.user_name, b.age, b.gender, b.purchased_vehicles , b.evaluation_grade, b.creation_time AS b_creation_time,
         |c.full_name, c.age, c.degree_of_purchase_intention , c.intention_model, c.creation_time AS c_creation_time,
         |d.dealer_name, d.dealer_location, d.creation_time AS d_creation_time , e.carid, e.displacement, e.price,
         |e.color, e.level , e.energy_type, e.transmission_case FROM ods_list_of_car_buyers a LEFT JOIN
         |ods_order_evaluation_form b ON a.user_id = b.user_id LEFT JOIN ods_table_of_intended_users c ON a.user_id = c.user_id
         |LEFT JOIN ods_distributor d ON a.car_purchasing_dealer = d.dealer_id LEFT JOIN ods_model e
         |ON a.car_model = e.cartype""".stripMargin)

  }

  /** Entry point: configures Hadoop/Hive user identities, builds the local
    * Spark context and runs the load.
    */
  def main(args: Array[String]): Unit = {
    // Identity used when talking to HDFS/Hive from a dev machine.
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    // `getClass.getName` for a Scala object ends in '$' (e.g. "DwsVehicleOrder$");
    // strip it so the Spark application name is clean.
    val appName = this.getClass.getName.split('.').last.stripSuffix("$")
    val spark = getContextLocal(appName)
    try execute(spark)
    finally spark.stop() // always release the session, even if the SQL fails
  }
}




