package com.lvmama.rhino.analyze.client

import com.lvmama.rhino.analyze.processing.{WirelessV2ChannelRelate, WirelessV2SearchJump, WirelessV2VisitorBatch}
import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.spark.StructuredStreamingApplication
import org.apache.hadoop.fs.{FileSystem, Path}

/**
  * Batch (non-streaming) analysis over wireless client monitor logs, version 2.
  *
  * If the given input path exists on HDFS, its parquet data is read as
  * [[WirelessClientMonitorLog]] records and fed to the search-jump and
  * visitor-batch analyses. Independently, yesterday's pre-processed data
  * (under the "wireless.preprocess.path" parameter) is read as
  * [[StatLogType]] and fed to the channel-relate analysis.
  *
  * Created by yuanxiaofeng on 2017/4/28.
  *
  * @param config job configuration supplying Spark settings and path parameters
  * @param path   HDFS path of the parquet-formatted client monitor logs
  */
class WirelessStatVer2Batch(config: JobConfig, path: String) extends StructuredStreamingApplication with Serializable {
  self =>

  override var sparkConfig: Map[String, String] = config.spark
  override var appName: String = "WirelessStatVer2Batch"

  // This is a pure batch job; the streaming-only members of
  // StructuredStreamingApplication are intentionally unimplemented and
  // throw NotImplementedError if ever called.
  override def streamingBatchDuration: Long = ???

  override def streamingCheckpointDir: String = ???

  /**
    * Runs the batch job: configures Spark, then executes each analysis whose
    * input path exists on HDFS. Analyses with missing input are skipped.
    */
  def execute() = {
    sparkConfig += ("spark.app.name" -> appName)
    // Raise the schema truncation limit for the wide monitor-log schema.
    sparkConfig += ("spark.debug.maxToStringFields" -> "1000")

    withStructuredApplication { spark =>
      import spark.implicits._
      val hdfs = FileSystem.get(spark.sparkContext.hadoopConfiguration)

      if (hdfs.exists(new Path(path))) {
        val batch = spark
          .read
          .parquet(path)
          .as[WirelessClientMonitorLog]

        // Search-jump and search-result analysis.
        new WirelessV2SearchJump().process(batch)

        // Visitor order analysis.
        new WirelessV2VisitorBatch().process(batch)
      }

      // Map.apply throws NoSuchElementException naming the missing key,
      // unlike the former Option#get, which only reported "None.get".
      val orderPath = config.param("wireless.preprocess.path") + Utils.getYesterday("yyyy/MM/dd")
      if (hdfs.exists(new Path(orderPath))) {
        val orderData = spark
          .read
          .parquet(orderPath)
          .as[StatLogType]

        // Channel relation analysis.
        new WirelessV2ChannelRelate().process(orderData)
      }
    }
  }
}

/**
  * Companion entry point: builds the job configuration, resolves the input
  * location under the "wireless.process.path" parameter, and runs the job.
  */
object WirelessStatVer2Batch {

  /**
    * Runs the batch job for the given relative path.
    *
    * @param path path suffix appended to the configured processing base path
    */
  def apply(path: String) = {
    val config = JobConfig()
    // Map.apply throws NoSuchElementException naming the missing key,
    // unlike the former Option#get, which only reported "None.get".
    val inputPath = config.param("wireless.process.path") + path
    new WirelessStatVer2Batch(config, inputPath).execute()
  }
}
