package com.lvmama.rhino.analyze.client

import com.lvmama.rhino.analyze.processing.WirelessCustomizeFunnel
import org.apache.spark.sql.functions._
import org.apache.spark.sql.hive.HiveContext
import com.lvmama.rhino.common.entity.JobConfig
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.spark.SparkApplication
import org.apache.spark.storage.StorageLevel

/**
  * Created by wxxuyuan on 2017/6/7.
  */

/**
  * Spark job that reads wireless page-forward parquet data, projects it down to the
  * columns needed for funnel analysis, and feeds it to [[WirelessCustomizeFunnel]].
  *
  * @param config job configuration (supplies the base Spark config map)
  * @param path   full path of the parquet input to read
  * @param date   partition date string passed through to the funnel processing
  */
class WirelessOperPathStat(config: JobConfig, path: String, date: String) extends SparkApplication with LazyLogging {
  override var appName: String = "WirelessStatJob"
  override var sparkConfig: Map[String, String] = config.spark

  def execute() = {
    sparkConfig += ("spark.app.name" -> appName)
    sparkConfig += ("spark.broadcast.blockSize" -> "1m")
    sparkConfig += ("spark.sql.shuffle.partitions" -> "300")
    sparkConfig += ("spark.buffer.pageSize" -> "4m")
    //    sparkConfig += ("spark.sql.tungsten.enabled" -> "false")
    //    sparkConfig += ("spark.memory.useLegacyMode" -> "true")
//        sparkConfig += ("spark.master" -> "local[3]")
    withSparkContext { sc =>
      val hiveCtx = new HiveContext(sc)

      val pageForward = hiveCtx.read.parquet(path)

      // Only these columns are needed downstream; projecting early shrinks the cached footprint.
      val columns = Seq("pageTypeCode", "sessionId", "platformCode",
        "timestamp", "channelCode", "pageParam", "deviceToken", "buttonCode", "pageCode", "province")

      // BUG FIX: the original code discarded the result of select(...).persist(...)
      // (DataFrames are immutable) and passed the raw, unpersisted pageForward to the
      // funnel processing — so neither the projection nor the caching ever took effect.
      val projected = pageForward
        .select(columns.map(col(_)): _*)
        .persist(StorageLevel.MEMORY_AND_DISK_SER)

      // Custom funnel: cleanse the user operation paths.
      logger.info(s"WirelessStat begin process WirelessCustomizeFunnel")
      WirelessCustomizeFunnel.process(sc, projected, date)
    }
  }
}

/**
  * Companion entry point: builds the job from default [[JobConfig]] and runs it.
  *
  * @param path relative path suffix; appended to the configured
  *             "wireless.process.path" base to form the parquet input location.
  *             The same value is passed through as the job's date argument.
  */
object WirelessOperPathStat {
  def apply(path: String) = {
    val config = JobConfig()
    // NOTE(review): .get on the Option fails fast with NoSuchElementException
    // when the "wireless.process.path" key is absent from the job parameters.
    val basePath = config.param.get("wireless.process.path").get
    val inputPath = basePath + path
    new WirelessOperPathStat(config, inputPath, path).execute()
  }
}