package Example

import org.apache.spark.sql.SparkSession

object user_portrait {

  /**
   * Builds customer-portrait label DataFrames from preprocessed Hive tables
   * in the `portrait` database, unions them into a single label table, and
   * writes the result as one CSV part file.
   *
   * Every intermediate DataFrame deliberately shares the same 3-column schema
   * — (phone_no, label, parent_label) — which is what makes the positional
   * `union` at the end valid.
   *
   * Fixes vs. previous revision:
   *  - `order_index` was unioned twice, duplicating every product-name row.
   *  - the brand filter used `not like ... OR not like ...`, which is always
   *    true (De Morgan) and excluded nothing; changed to AND.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("Data Explore")
      // Hive connection settings: remote metastore + warehouse location on HDFS.
      .config("hive.metastore.uris", "thrift://master:9083")
      .config("spark.sql.warehouse.dir", "hdfs://master:8020/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // --- Consumption content ---
    // Map billing fee codes onto human-readable content categories.
    val billevents1 = spark.sql("select distinct phone_no,case when fee_code='0J' or fee_code='0B' or fee_code='0Y' then '直播' " +
      "when fee_code='0X' then '应用' when fee_code='0T' then '付费频道' " +
      "when fee_code='0W' or fee_code='0L' or fee_code='0Z' or fee_code='0K' then '宽带' " +
      "when fee_code='0D' then '点播' when fee_code='0H' then '回看' when fee_code='0U' then '有线电视收视费'  end as label," +
      "'消费内容' as parent_label from portrait.mmconsume_billevents_process")
    // Optional persistence of the content labels (append mode):
    //    billevents1.write.mode("append").saveAsTable("portrait.consume_content")

    // --- TV consumption level ---
    // nvl(x, 0): treat missing fees as zero. real_pay = should_pay - favour_fee,
    // averaged over 3 months, then bucketed into consumption-level bands.
    val billevents2 = spark.sql("select t2.phone_no,case when fee_per_month>-26.5 and fee_per_month<26.5 then '电视超低消费' " +
      "when fee_per_month>=26.5 and fee_per_month<46.5 then '电视低消费' " +
      "when fee_per_month>=46.5 and fee_per_month<66.5 then '电视中等消费' " +
      "when 66.5<=fee_per_month then '电视高消费'  end as label," +
      "'电视消费水平' as parent_label " +
      "from (select t1.phone_no,sum(real_pay)/3 as fee_per_month  " +
      "from (select phone_no,nvl(should_pay,0)-nvl(favour_fee,0) as real_pay from portrait.mmconsume_billevents_process " +
      "where sm_name like '%电视%') t1 group by t1.phone_no) t2")
    //    billevents2.write.mode("append").saveAsTable("portrait.consume_level")

    // --- Product (offer) name ---
    // Two branches unioned: non-珠江宽频 offers need extra validity filters
    // (mode_time/offertype/prodstatus); 珠江宽频 offers only need the common ones.
    // Both restrict to offers currently in effect (effdate <= now <= expdate).
    val order_index = spark.sql("select phone_no,offername as label,'销售品名称' as parent_label " +
      "from portrait.order_index_process " +
      "where cost>0 and offername not like '%空包%' and sm_name !='珠江宽频' and mode_time = 'Y' and offertype = 0 and prodstatus='YY' " +
      "and effdate<=from_unixtime(unix_timestamp(),'yyyy-MM-dd HH:mm:ss') " +
      "and from_unixtime(unix_timestamp(),'yyyy-MM-dd HH:mm:ss')>=expdate union all select phone_no,offername as label," +
      "'销售品名称' as parent_label from portrait.order_index_process " +
      "where cost>0 and offername not like '%空包%' and sm_name='珠江宽频' " +
      "and effdate<=from_unixtime(unix_timestamp(),'yyyy-MM-dd HH:mm:ss') " +
      "and from_unixtime(unix_timestamp(),'yyyy-MM-dd HH:mm:ss')>=expdate")
    //    order_index.write.mode("append").saveAsTable("portrait.product_name")

    // --- Business brand ---
    // FIX: the exclusion filter must be AND, not OR. With OR, every row where
    // sm_name misses at least one of the two patterns (i.e. all rows) passed the
    // filter, so nothing was excluded. AND keeps only rows matching NEITHER
    // pattern, which is the intended "exclude both brands" semantics.
    val usermsg = spark.sql("select phone_no,case when sm_name='互动电视' then '互动电视' " +
      "when sm_name='数字电视' then '数字电视' when sm_name='甜果电视' then '甜果电视' " +
      "when sm_name='珠江宽频' then '珠江宽频'  end as label,'业务品牌' as parent_label " +
      "from portrait.mediamatch_usermsg_process where sm_name not like '%模拟有线电视%' and sm_name not like '%番通%'")
    //    usermsg.write.mode("append").saveAsTable("portrait.brand")

    // --- TV tenure (years since sign-up) ---
    // T = years since the earliest open_time (datediff in days / 365),
    // bucketed into old / medium / new user bands.
    val usermsg1 = spark.sql("select t1.phone_no,case when T>6 then '老用户' when T>3 and T<=6 then '中等用户' " +
      "when T<=3 then '新用户' end as label,'电视入网程度' as parent_label " +
      "from (select phone_no,max(datediff(current_date(),open_time)/365) as T " +
      "from portrait.mediamatch_usermsg_process " +
      "where sm_name like '%电视%' and open_time is not NULL group by phone_no) t1")
    //    usermsg1.write.mode("append").saveAsTable("portrait.tv_open_years")

    // --- Average daily viewing time ---
    // duration is in milliseconds; 90*1000*60*60 converts the 90-day total
    // to average hours per day, then buckets into viewing-time bands.
    val media_index = spark.sql("select phone_no,case when t>0 and t<2 then '日均观看时长短' " +
      "when t>=2 and t<5 then '日均观看时长中' when t>=5 and t<10 then '日均观看时长长' " +
      "when t>=10 then '日均观看时长超长' end as label,'日均观看时长' as parent_label " +
      "from (select phone_no,sum(nvl(duration,0))/(90*1000*60*60) as t from portrait.media_index_process group by phone_no) t1")
    //    media_index.write.mode("append").saveAsTable("portrait.customer_label")

    // FIX: union each label set exactly once (order_index was previously
    // unioned twice, duplicating every product-name row in the output).
    val all_index = billevents1
      .union(billevents2)
      .union(order_index)
      .union(usermsg)
      .union(usermsg1)
      .union(media_index)
    // Coalesce to a single partition so the CSV comes out as one part file.
    all_index.repartition(1).write.mode("append").csv("ostatic/portrait.csv")
  }
}
