package com.lvmama.rhino.analyze.client

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitLoad
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaDStreamSource, KafkaPayloadStringCodec}
import com.lvmama.rhino.common.utils.methodset.InterfaceInvoke
import com.lvmama.rhino.common.utils.spark.StreamingApplication
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.{StreamingContext, Time}

/**
  * Created by yuanxiaofeng on 2017/5/26.
  */
/**
  * Streaming job that pre-processes wireless (app) click-stream events from Kafka.
  *
  * Pipeline: Kafka topic "wireless_v2" -> decode + parse into stat records ->
  * keep only PENDING_PAYMENT / REGISTER page events with a sane province value ->
  * enrich each record with the user's account age (years) and a resolved
  * second-level channel (via device-channel lookup for iOS/Android) ->
  * append the batch as Parquet under a daily-partitioned output path.
  *
  * @param config   job configuration (Spark conf, Kafka source, checkpoint dir, params)
  * @param duration streaming batch interval passed to the Spark StreamingContext
  */
class PreProcessWirelessStreaming(config: JobConfig, duration: Long) extends StreamingApplication
  with Serializable {

  override var sparkConfig: Map[String, String] = config.spark
  override var appName: String = "PreStatStreaming"

  override def streamingBatchDuration: Long = duration

  // Map-style lookup with a default HDFS path when the key is absent.
  override def streamingCheckpointDir: String = config.streamingCheckpointDir
    .getOrElse("preStatStreamingCP", "/checkpoint/pre_stat")

  val kafkaConf = config.sourceKafka

  private val kafkaSourceTopic = KafkaTopic("wireless_v2")

  /**
    * Builds (or recovers from checkpoint) the StreamingContext, starts it and
    * blocks until termination. Side effects only; never returns normally.
    */
  def execute(): Unit = {

    sparkConfig += ("spark.sql.autoBroadcastJoinThreshold" -> "10485760")
    //    sparkConfig += ("spark.master" -> "local[2]")
    sparkConfig += ("spark.app.name" -> appName)

    def func = (sc: SparkContext, ssc: StreamingContext) => {

      val streaming = KafkaDStreamSource(config.sourceKafka).createSource(ssc, kafkaSourceTopic.name)
//      val stringCodec = sc.broadcast(KafkaPayloadStringCodec())
      val lines = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(l => Utils.initStat(l))
        .filter(l => l.logType != "" && (l.pageTypeCode == PENDING_PAYMENT.code || l.pageTypeCode == REGISTER.code)
         && l.province.length <= 20)

      // NOTE(review): captured once when the stream graph is built, so account-age
      // calculations drift for a long-running job — confirm whether this should be
      // re-evaluated per batch/record.
      val currentDate = new Date(System.currentTimeMillis())

      // Load the D2-level device-code dimension once and broadcast it, so the
      // platform lookup does not hit MySQL per record.
      val predicates = Array("CodeTypeLevel = 'D2'")
      val deviceCode: Map[String, String] = SparkSession.builder().getOrCreate().sqlContext
        .loadFromMysql("statistic_continuity_code", predicates)
        .select("code", "en_name")
        .collect()
        // getString gives a properly typed Map[String, String] up front, removing
        // the need for an unchecked asInstanceOf cast at the point of use.
        .map(r => r.getString(0) -> r.getString(1))
        .toMap

      val deviceCode_bc = sc.broadcast(deviceCode)

      val client = lines
        .repartition(4)
        .transform(tf =>
          tf.mapPartitions(iter => {
            val deviceCodeMap = deviceCode_bc.value
            // SimpleDateFormat is not thread-safe, but within a partition records
            // are processed sequentially, so one instance per partition is safe
            // and avoids allocating a formatter for every record.
            val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.CHINA)
            iter.map { r =>
              if (r.pageTypeCode == PENDING_PAYMENT.code || r.pageTypeCode == REGISTER.code) {
                // Account age in whole years; -1 when the creation date is unknown.
                val createDate = InterfaceInvoke.getUserCreatedDate(r.userId)
                val uYears = if (createDate != "") {
                  Utils.getDiffYears(format.parse(createDate), currentDate)
                } else {
                  -1
                }
                // Resolve the second-level channel from the device id for mobile
                // platforms; otherwise keep the channel already on the record.
                val platform = deviceCodeMap.getOrElse(r.platformCode, "")
                val sChannel = if (platform.startsWith("ios")) {
                  InterfaceInvoke.getDeviceChannel(r.idfa, "IOS", r.secondChannel)
                } else if (platform.startsWith("android")) {
                  InterfaceInvoke.getDeviceChannel(r.deviceToken, "ANDROID", r.secondChannel)
                } else {
                  r.secondChannel
                }
                r.copy(secondChannel = sChannel, useYears = uYears)
              } else {
                r
              }
            }
          })
        )

      client
        .repartition(2)
        .foreachRDD { rdd =>
          val sqlContext = SparkSession.builder().getOrCreate().sqlContext
          import sqlContext.implicits._
          val wordsDataFrame = rdd.toDF()
          // fullPath is a def, so the daily partition rolls over at midnight.
          wordsDataFrame.write.mode(SaveMode.Append).parquet(PreProcessWirelessStreaming.fullPath)
        }
    }

    val ssc = StreamingContext.getOrCreate(streamingCheckpointDir, () => withSparkStreamingContext(func))
    ssc.start()
    ssc.awaitTermination()
  }
}

object PreProcessWirelessStreaming {
  /**
    * Convenience entry point: builds the default JobConfig and runs the
    * streaming job with the given batch interval. Blocks until termination.
    *
    * @param duration streaming batch interval in the unit expected by StreamingApplication
    */
  def apply(duration: Long): Unit = {
    val config = JobConfig()
    new PreProcessWirelessStreaming(config, duration).execute()
  }

  /**
    * Daily-partitioned Parquet output path: configured base + "yyyy/MM/dd".
    * Deliberately a def so each evaluation picks up the current date.
    *
    * @throws NoSuchElementException if "wireless.preprocess.path" is not configured
    *                                (same exception type as the former bare Option.get,
    *                                but with an actionable message)
    */
  def fullPath: String = {
    val base = JobConfig().param.get("wireless.preprocess.path").getOrElse(
      throw new NoSuchElementException("Missing required config key: wireless.preprocess.path"))
    base + Utils.getToday("yyyy/MM/dd")
  }
}
