package com.lvmama.rhino.analyze.client

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.Column
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import org.apache.spark.sql.hive.HiveContext
import com.lvmama.rhino.analyze.processing._
import com.lvmama.rhino.common.entity.JobConfig
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.methodset.CalculateFunction._
import com.lvmama.rhino.common.utils.spark.SparkApplication
import org.apache.spark.storage.StorageLevel

/**
 * Created by yuanxiaofeng on 2016/10/18.
 */
class WirelessStat(config: JobConfig, path: String) extends SparkApplication with LazyLogging {
  override var appName: String = "WirelessStatJob"
  override var sparkConfig: Map[String, String] = config.spark

  /**
   * Runs the full wireless statistics pipeline over the page-forward parquet
   * data at `path`: reads the source once, projects it to the columns the
   * downstream processors use, caches the projection, and hands the cached
   * frame to each processor in turn.
   */
  def execute() = {
    sparkConfig += ("spark.app.name" -> appName)
    sparkConfig += ("spark.broadcast.blockSize" -> "1m")
    sparkConfig += ("spark.sql.shuffle.partitions" -> "300")
    sparkConfig += ("spark.buffer.pageSize" -> "4m")
//    sparkConfig += ("spark.sql.tungsten.enabled" -> "false")
//    sparkConfig += ("spark.memory.useLegacyMode" -> "true")
//    sparkConfig += ("spark.master" -> "local[3]")
    withSparkContext { sc =>
      val hiveCtx = new HiveContext(sc)

      val columns = Seq("pageTypeCode", "sessionId", "platformCode",
        "timestamp", "channelCode", "pageParam", "deviceToken", "buttonCode", "pageCode", "province")

      // BUG FIX: previously the result of select(...).persist(...) was discarded,
      // so the cache never took effect and every processor below re-read the raw
      // parquet source. Keep a reference to the projected, cached frame instead.
      // NOTE(review): companion helpers reference "ip"/"page_url" columns that are
      // not in this projection — confirm those helpers only run on derived frames.
      val pageForward = hiveCtx.read.parquet(path)
        .select(columns.map(col): _*)
        .persist(StorageLevel.MEMORY_AND_DISK_SER)

      // Landing-page traffic
      logger.info(s"WirelessStat begin process WirelessLandingFlow")
      WirelessLandingFlow.process(sc, pageForward)

      // Traffic conversion
      logger.info(s"WirelessStat begin process WirelessFlowTransform")
      WirelessFlowTransform.process(sc, pageForward)

      // Upstream/downstream page-traffic tracing
      logger.info(s"WirelessStat begin process WirelessTrace")
      WirelessTrace.process(sc, pageForward)

      // Page network operator
      logger.info(s"WirelessStat begin process WirelessNetworkOperator")
      WirelessNetworkOperator.process(sc, pageForward)

      // Regional traffic
      logger.info(s"WirelessStat begin process WirelessRegion")
      WirelessRegion.process(sc, pageForward)

      // Order details
      logger.info(s"WirelessStat begin process WirelessPitOrder")
      WirelessPitOrder.process(sc, pageForward)

      logger.info(s"WirelessStat begin process WirelessPitClick")
      WirelessPitClick.process(sc, pageForward)

      // Release the cached blocks before tearing down the context.
      pageForward.unpersist()
    }
  }
}

object WirelessStat {
  /**
   * Convenience entry point: builds the default [[JobConfig]], resolves the base
   * input directory from the "wireless.process.path" parameter, appends `path`
   * and immediately runs the job.
   *
   * @throws NoSuchElementException if "wireless.process.path" is not configured
   *                                (same exception as before, but with the
   *                                missing key named in the message instead of
   *                                an opaque Option.get failure)
   */
  def apply(path: String) = {
    val config = JobConfig()
    // Map.apply fails fast with "key not found: wireless.process.path" rather
    // than the previous config.param.get(...).get, which raised an uninformative
    // NoSuchElementException("None.get").
    val basePath = config.param("wireless.process.path")
    new WirelessStat(config, basePath + path).execute()
  }

  // Group by a single broadcast column name and count rows per group.
  val groupByColumn = (columnName: Broadcast[String]) => (df: DataFrame) =>
    df.groupBy(columnName.value).agg(count("*") as "counts")

  // Group by several broadcast column names and count rows per group.
  val groupByColumns = (columnNames: Broadcast[Seq[String]]) => (df: DataFrame) =>
    df.groupBy(columnNames.value.map(col): _*).agg(count("*") as "counts")

  // Group by several broadcast column names and count distinct devices per group.
  val groupByColumnsWithCountDistinct = (columnNames: Broadcast[Seq[String]]) => (df: DataFrame) =>
    df.groupBy(columnNames.value.map(col): _*).agg(countDistinct("deviceToken") as "counts")

  // Group by several broadcast column names and compute PV (row count),
  // UV (distinct sessions), IP count, and the set of page URLs joined by ",".
  // NOTE(review): requires "ip" and "page_url" columns on the input frame —
  // confirm the frames passed here actually carry them.
  val groupByColumnsPvUvIp = (columnNames: Broadcast[Seq[String]]) => (df: DataFrame) =>
    df.groupBy(columnNames.value.map(col): _*).agg(count("*") as "counts",
                                                   countDistinct("sessionId") as "uvCounts",
                                                   countDistinct("ip") as "ipCounts",
                                                   concat_ws(",", collect_set("page_url")).as("page_url")
                                                   )

  // Next row's timestamp within the same session, ordered by timestamp.
  val wireless_lead = lead(col("timestamp"), 1).over(windowSpec(col("sessionId"), col("timestamp")))

  /**
   * Shifts the given column one row later within the same session
   * (window partitioned by sessionId, ordered by timestamp).
   */
  val dataLag = (param: Column) => lag(param, 1).over(windowSpec(col("sessionId"), col("timestamp")))

  /**
   * Adds a column `colName` bucketing the visit duration between `col1` and
   * `col2` into 30-unit ranges. (Was the deprecated lowercase literal `30l`,
   * easily misread as 301; now `30L`.)
   */
  val duration = (col1: Column, col2: Column, colName: String) => (df: DataFrame) =>
    df.withColumn(colName, value_range(column_sum(column_delta(col1, col2), lit(30L)), lit(30L)))
}