package com.shujia.dw

import com.shujia.spark.SparkTool
import org.apache.spark.sql.{SQLContext, SaveMode}

object DwMergeLocationApp extends SparkTool {

  /**
    * Business logic: read the four raw location feeds (DPI, DDR, OIDD, WCDR)
    * for the given day partition, merge them into a single fused per-subscriber
    * location table, and write it out as parquet.
    *
    * Each input line is expected to be tab-separated with 10 fields:
    *   mdn         - phone number
    *   start_time  - business start time
    *   county_id   - county code
    *   longi       - longitude
    *   lati        - latitude
    *   bsid        - base-station id
    *   grid_id     - grid number
    *   biz_type    - business type
    *   event_type  - event type
    *   data_source - data source
    */
  override def run(): Unit = {

    // Expected number of tab-separated fields in every source record.
    val fieldCount = 10

    val sQLContext = new SQLContext(sc)
    import sQLContext.implicits._

    // One input path per source feed, all partitioned by day_id.
    val dpiPath = Constants.DPI_PATH + Constants.PARTITION_NAME + day_id
    val ddrPath = Constants.DDR_PATH + Constants.PARTITION_NAME + day_id
    val oiddPath = Constants.OIDD_PATH + Constants.PARTITION_NAME + day_id
    val wcdrPath = Constants.WCDR_PATH + Constants.PARTITION_NAME + day_id

    val mergelocationPath = Constants.MERGELOCATION_PATH + Constants.PARTITION_NAME + day_id

    // Log every input path with the same message the job always emitted.
    Seq(dpiPath, ddrPath, oiddPath, wcdrPath).foreach { path =>
      logger.info(s"数据数据路径：$path")
    }
    logger.info(s"输出路径：$mergelocationPath")

    // Union the four feeds into a single RDD of raw text lines.
    val merge = sc.textFile(ddrPath)
      .union(sc.textFile(wcdrPath))
      .union(sc.textFile(dpiPath))
      .union(sc.textFile(oiddPath))

    merge
      // split with limit -1: keep trailing empty fields, otherwise a record
      // whose last column (data_source) is empty yields < 10 elements and
      // split(9) throws ArrayIndexOutOfBoundsException, failing the job.
      .map(_.split("\t", -1))
      // Skip malformed records (too few fields) instead of crashing the job.
      .filter(_.length >= fieldCount)
      .map { f =>
        // (mdn, start_time, county_id, longi, lati,
        //  bsid, grid_id, biz_type, event_type, data_source)
        (f(0), f(1), f(2), f(3), f(4), f(5), f(6), f(7), f(8), f(9))
      }
      .toDF("mdn", "start_time", "county_id", "longi", "lati", "bsid", "grid_id", "biz_type", "event_type", "data_source")
      // Reduce the number of output files before writing.
      .coalesce(10)
      .write
      .mode(SaveMode.Overwrite)
      .parquet(mergelocationPath)
  }

  /**
    * Initialization hook: set Spark runtime parameters before the context
    * is created. Currently nothing is configured (master is supplied by the
    * cluster / spark-submit).
    */
  override def init(): Unit = {
    //conf.setMaster("local[8]")
  }
}
