package com.lvmama.rhino.analyze.processing

import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.analyze.client.WirelessStat._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert
import org.apache.spark.storage.StorageLevel

/**
 * 地域流量统计，按照省份统计
 */
/**
 * 地域流量统计，按照省份统计
 * Regional traffic (PV) statistics aggregated per province, written to MySQL.
 */
object WirelessRegion {

  /**
   * Counts yesterday's page views per (channel, platform, page, province, category)
   * and inserts the result into the MySQL table `flow_statistics_region`.
   *
   * Native-app traffic is de-duplicated (consecutive identical page views and
   * "back" navigations are dropped) before counting; H5 traffic is counted as-is.
   *
   * @param sc          active SparkContext
   * @param pageForward raw page-forward events; must contain the columns listed
   *                    in `requiredColumns` below
   */
  def process(sc : SparkContext, pageForward : DataFrame) : Unit = {
    import pageForward.sparkSession.implicits._

    // Columns consumed from the input. Only referenced on the driver (inside
    // `select`), so the former `sc.broadcast` was unnecessary overhead — and it
    // was never destroyed. A plain local Seq is sufficient.
    val requiredColumns = Seq("deviceToken", "sessionId", "channelCode", "platformCode",
      "pageTypeCode", "province", "timestamp", "pageParam", "buttonCode")

    // Extract categoryId from the pageParam map; "-1" is the "no category" sentinel.
    val getCategoryId = pageForward.select(requiredColumns.map(col): _*)
      .withColumn("category_id", when(col("pageParam").getItem("categoryId").isNull, "-1")
        .otherwise(col("pageParam").getItem("categoryId").cast("string")))

    // Native (non-H5, platformCode != "2g53") traffic: drop consecutive duplicate
    // page views and "back" navigations so repeated renders count only once.
    // `=!=` is the non-deprecated form of the old `!==` column operator
    // (the original mixed both; unified here).
    val native = getCategoryId.filter(col("platformCode") =!= "2g53").coalesce(5)
      .withColumn("product_id", when(col("pageParam").getItem("productId").isNull, "-1").otherwise(col("pageParam").getItem("productId")))
      // Composite key identifying a concrete page view, compared against its lag.
      .withColumn("pageTotalCode", concat(col("pageTypeCode"), col("category_id"), col("product_id"), col("buttonCode")))
      .withColumn("pageTotalCodeLag", dataLag(col("pageTotalCode")))
      .withColumn("buttonLag", dataLag(col("buttonCode")))
      // Keep a row only when it differs from the previous one in its window.
      .filter(col("pageTotalCodeLag").isNull.or(col("pageTotalCode") =!= col("pageTotalCodeLag"))).coalesce(5)
      .filter(col("buttonCode") =!= "back")
      .filter(col("buttonLag").isNull.or(col("buttonLag") =!= "back"))
      // Drop the helper columns so the schema matches `h5` for the union below.
      .drop("pageTotalCode", "pageTotalCodeLag", "buttonLag", "product_id")
      .persist(StorageLevel.MEMORY_AND_DISK_SER)

    // H5 traffic (platformCode == "2g53") is kept without de-duplication.
    val h5 = getCategoryId.filter(col("platformCode") === "2g53").coalesce(1)

    val pagePV = native.union(h5)

    val yesterday = Utils.getYesterday()

    try {
      // Normalize the "-1" sentinel back to NULL before aggregating, then count
      // PVs per (channel, platform, page, region, category).
      val regionCount = pagePV.select(col("channelCode").as("channel_code"), col("platformCode").as("platform_code"),
          col("pageTypeCode").as("page_code"), col("province").as("region_code"),
          when(col("category_id") === "-1", null).otherwise(col("category_id")).as("category_id"))
        .coalesce(5)
        .groupByKey(g => (g.getAs[String]("channel_code"), g.getAs[String]("platform_code"), g.getAs[String]("page_code"),
          g.getAs[String]("region_code"), g.getAs[String]("category_id")))
        .count()
        .map(m => (m._1._1, m._1._2, m._1._3, m._1._4, m._1._5, m._2, yesterday))
        .toDF("channel_code", "platform_code", "page_code", "region_code", "category_id", "Amount", "oper_date")
        // Guard against malformed province values overflowing the DB column.
        // NOTE(review): length(NULL) is NULL, so rows with a NULL region_code are
        // also dropped by this filter — confirm that is intended.
        .filter(length(col("region_code")) <= 20)

      regionCount.insertDF2Mysql("flow_statistics_region")
    } finally {
      // The original persisted `native` but never released it; free the cached
      // partitions even if the MySQL insert fails.
      native.unpersist()
    }
  }

}