package com.shujia.tour

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object CityTouristWideApp extends Logging {

  /**
    * Builds the city-tourist wide table for one day partition.
    *
    * Joins the city tourist fact table (`dal_tour.dal_tour_city_tourist_msk_d`)
    * with the admin-code configuration table (`ods.ods_admincode`, joined twice:
    * once for the source county, once for the destination city) and the monthly
    * user-profile dimension table (`dim.dim_usertag_msk_m`), buckets distance
    * and stay time into labelled ranges, then writes the result as a
    * tab-separated CSV day partition and registers that partition in Hive.
    *
    * @param args args(0) is the day partition id in yyyyMMdd form; its first
    *             six characters (yyyyMM) select the user-profile month partition.
    */
  def main(args: Array[String]): Unit = {

    if (args.isEmpty) {
      log.error("输出参数为空")
      return
    }

    val day_id = args(0)

    // Guard the substring below: a day_id shorter than yyyyMM would throw
    // StringIndexOutOfBoundsException instead of failing with a clear message.
    if (day_id.length < 6) {
      log.error(s"day_id 参数格式错误：$day_id")
      return
    }

    log.info(s"当前天时间分区为：$day_id")

    // Month partition id is the yyyyMM prefix of the day partition id.
    val month_id: String = day_id.substring(0, 6)

    log.info(s"当前月时间分区为：$month_id")

    val spark: SparkSession = SparkSession.builder()
      .appName("CityTouristWideApp")
      .config("spark.sql.shuffle.partitions", "20")
      .enableHiveSupport()
      .getOrCreate()

    try {
      /**
        * Join the fact table with the dimension tables to build the wide table:
        * city tourist records + user profile + administrative-region config.
        *
        * Output columns:
        *   mdn                 phone number (masked)
        *   d_city_name         destination city name
        *   o_city_name         origin city name
        *   o_province_name     origin province name
        *   number_attr         phone-number home location
        *   d_distance_section  travel distance bucket (km)
        *   d_stay_time         stay-time bucket (hours)
        *   gender, trmnl_brand, pckg_price, conpot, age   user-profile fields
        *
        * NOTE(review): the distance CASE has no branch for values <= 10, and the
        * stay-time CASE skips the exact boundaries 5/8/12/20 — such rows fall
        * into the "120~" / "20~" buckets. Presumably upstream guarantees these
        * ranges; confirm before tightening.
        */
      val wideDF: DataFrame = spark.sql(
        s"""
           |SELECT
           | e.mdn,
           | e.d_city_name,
           | e.o_city_name,
           | e.o_province_name,
           | f.number_attr,
           |case
           |when  e.d_max_distance > 10 and  e.d_max_distance<=50 then "10-50"
           |when  e.d_max_distance > 50 and  e.d_max_distance<=80 then "50-80"
           |when  e.d_max_distance > 80 and  e.d_max_distance<=120 then "80-120"
           |else "120~" end as d_distance_section,
           |case
           |when e.d_stay_time>3 and e.d_stay_time<5 then "3-5"
           |when e.d_stay_time>5 and e.d_stay_time<8 then "5-8"
           |when e.d_stay_time>8 and e.d_stay_time<12 then "8-12"
           |when e.d_stay_time>12 and e.d_stay_time<20 then "12-20"
           |else "20~" end as d_stay_time,
           | f.gender,
           | f.trmnl_brand,
           | f.packg AS pckg_price,
           | f.conpot,
           | f.age
           |FROM
           | (
           |  SELECT
           |  /*+ BROADCAST(d) */
           |   c.mdn,
           |   c.d_stay_time,
           |   c.d_max_distance,
           |   c.city_name AS o_city_name,
           |   c.prov_name AS o_province_name,
           |   d.city_name AS d_city_name
           |  FROM
           |   (
           |    SELECT
           |    /*+ BROADCAST(b) */
           |     a.mdn,
           |     a.d_city_id,
           |     a.d_stay_time,
           |     a.d_max_distance,
           |     b.city_name,
           |     b.prov_name
           |    FROM
           |     (
           |      SELECT
           |       *
           |      FROM
           |       dal_tour.dal_tour_city_tourist_msk_d
           |      WHERE
           |       day_id = '$day_id'
           |     ) AS a
           |    JOIN ods.ods_admincode AS b ON a.source_county_id = b.county_id
           |   ) AS c
           |  JOIN (
           |   SELECT DISTINCT
           |    city_id,
           |    city_name
           |   FROM
           |    ods.ods_admincode
           |  ) AS d ON c.d_city_id = d.city_id
           | ) AS e
           |JOIN (
           | SELECT
           |  *
           | FROM
           |  dim.dim_usertag_msk_m
           | WHERE
           |  month_id = '$month_id'
           |) AS f ON e.mdn = f.mdn
         """.stripMargin)
      // ^ day_id/month_id are now quoted, consistent with the ALTER TABLE
      //   statement below; the partition columns appear to be string-typed,
      //   and an unquoted literal would force an implicit cast that can
      //   defeat partition pruning.

      // Save the result as a tab-separated CSV day partition.
      // NOTE(review): this assumes dal_tour.dal_tour_city_wide_msk_d is an
      // external table whose location/format match this path — confirm.
      wideDF.write
        .format("csv")
        .option("sep", "\t")
        .mode(SaveMode.Overwrite)
        .save(s"/daas/motl/dal_tour/dal_tour_city_wide_msk_d/day_id=$day_id")

      // Register the freshly written partition in the Hive metastore.
      spark.sql(s"alter table dal_tour.dal_tour_city_wide_msk_d add if not exists partition(day_id='$day_id')")
    } finally {
      // Release the application's cluster resources even if the job fails.
      spark.stop()
    }
  }
}
