package com.itcj.dmp.etl

import com.itcj.dmp.utils.KuduHelper
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Column, DataFrame, SparkSession}

object ETLRuner {

  // Daily ODS table name, e.g. "ODS_20240101"; evaluated once at object init.
  val tablename: String = "ODS_" + KuduHelper.today()

  /**
   * ETL entry point: reads the raw `dataset/pmt.json` dataset, enriches it via
   * `Ip2Region.transProcess`, projects the ODS column set, then creates the
   * daily Kudu table (keyed by `uuid`) and writes the result into it.
   */
  def main(args: Array[String]): Unit = {
    import com.itcj.dmp.utils.SparkConfigHelper._
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("Ip2Region")
      .loadConfig()
      .getOrCreate()

    // Read the source dataset pmt.json
    val pmtSourceDf: DataFrame = sparkSession.read.json("dataset/pmt.json")

    import com.itcj.dmp.etl.Ip2Region._
    val newDf: DataFrame = transProcess(pmtSourceDf)

    // Columns that must land in the Kudu table.
    import sparkSession.implicits._
    val selectRows: Seq[Column] = Seq(
      'sessionid, 'advertisersid, 'adorderid, 'adcreativeid, 'adplatformproviderid,
      'sdkversion, 'adplatformkey, 'putinmodeltype, 'requestmode, 'adprice, 'adppprice,
      'requestdate, 'ip, 'appid, 'appname, 'uuid, 'device, 'client, 'osversion, 'density,
      'pw, 'ph, 'longitude, 'latitude, 'region, 'city, 'ispid, 'ispname, 'networkmannerid,
      'networkmannername, 'iseffective, 'isbilling, 'adspacetype, 'adspacetypename,
      'devicetype, 'processnode, 'apptype, 'district, 'paymode, 'isbid, 'bidprice, 'winprice,
      'iswin, 'cur, 'rate, 'cnywinprice, 'imei, 'mac, 'idfa, 'openudid, 'androidid,
      'rtbprovince, 'rtbcity, 'rtbdistrict, 'rtbstreet, 'storeurl, 'realip, 'isqualityapp,
      'bidfloor, 'aw, 'ah, 'imeimd5, 'macmd5, 'idfamd5, 'openudidmd5, 'androididmd5,
      'imeisha1, 'macsha1, 'idfasha1, 'openudidsha1, 'androididsha1, 'uuidunknow, 'userid,
      'reqdate, 'reqhour, 'iptype, 'initbidprice, 'adpayment, 'agentrate, 'lomarkrate,
      'adxrate, 'title, 'keywords, 'tagid, 'callbackdate, 'channelid, 'mediatype, 'email,
      'tel, 'age, 'sex, 'geoHash
    )

    // Persist into Kudu.
    import com.itcj.dmp.utils.KuduHelper._

    // Kudu primary-key columns; Kudu requires them to be non-nullable.
    val keys = List("uuid")
    val resultdf: DataFrame = newDf.select(selectRows: _*)

    // Mark the key column(s) non-nullable BY NAME and build a fresh immutable
    // schema. The previous code did `schema.fields.update(15, ...)`, patching
    // the backing array at a hard-coded index — that silently flags the wrong
    // column whenever `selectRows` is reordered, and mutates shared state.
    val schema: StructType = StructType(resultdf.schema.fields.map { field =>
      if (keys.contains(field.name)) field.copy(nullable = false) else field
    })
    schema.printTreeString()

    sparkSession.createKuduTable(tablename, schema, keys)
    resultdf.saveKuduTable(tablename)

    // Release Spark resources once the job is done.
    sparkSession.stop()
  }
}
