package com.lvmama.rhino.analyze.client

import java.text.SimpleDateFormat

import com.lvmama.rhino.common.entity.{JobConfig, _}
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitLoad
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaPayloadStringCodec, _}
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.methodset.InterfaceInvoke
import com.lvmama.rhino.common.utils.spark.SparkApplication
import net.liftweb.json.DefaultFormats
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.spark.TaskContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/**
  * Batch job that replays a day's raw wireless click-stream log, enriches each
  * record via the device/page code dictionaries, and publishes the resulting
  * VIP marketing events to the "vip_marketing" Kafka topic.
  *
  * Created by yuanxiaofeng on 2016/10/18.
  *
  * @param config job configuration (Spark settings, Kafka endpoints, params)
  * @param path   path of the raw wireless log input for a single day
  */
class VIPTemp(config: JobConfig, path: String) extends SparkApplication with LazyLogging {
  override var appName: String = "VIPTempJob"
  override var sparkConfig: Map[String, String] = config.spark

  val kafkaConf = config.sourceKafka
  val kafkaSink = config.sinkKafka

  private val kafkaSourceTopic = KafkaTopic("wireless_v2")
  private val _kafkaSinkTopic = KafkaTopic("vip_marketing")

  def execute() = {
    sparkConfig += ("spark.app.name" -> appName)
    sparkConfig += ("spark.broadcast.blockSize" -> "1m")
    sparkConfig += ("spark.sql.shuffle.partitions" -> "300")
    sparkConfig += ("spark.buffer.pageSize" -> "4m")
    withSparkContext { sc =>
      val sqlContext = SparkSession.builder().getOrCreate().sqlContext

      // Loads one slice of the code -> en_name dictionary from MySQL.
      // Typed extraction (getString) keeps the result Map[String, String];
      // the original r(0)/r(1) produced Map[Any, Any] and then needed an
      // unsafe asInstanceOf cast inside the executor closure.
      def loadCodeMap(predicate: String): Map[String, String] =
        sqlContext
          .loadFromMysql("statistic_continuity_code", Array(predicate))
          .select("code", "en_name")
          .collect()
          .map(r => (r.getString(0), r.getString(1)))
          .toMap

      // Device-type codes (level D2) and page-type codes (levels P2/P3),
      // broadcast once to every executor.
      val deviceCode_bc = sc.broadcast(loadCodeMap("CodeTypeLevel = 'D2'"))
      val pageCode_bc = sc.broadcast(loadCodeMap("CodeTypeLevel = 'P2' or CodeTypeLevel = 'P3'"))

      val log = sc.textFile(path)
        .map(l => Utils.initStat(l))
        .map { r =>
          val deviceCodes = deviceCode_bc.value
          val pageCodes = pageCode_bc.value
          val platform = deviceCodes.getOrElse(r.platformCode, "")
          // iOS traffic is always reported as "iphone"; Android is resolved
          // through the device-channel lookup service; anything else keeps
          // the channel that came with the record.
          val sChannel =
            if (platform.startsWith("ios")) "iphone"
            else if (platform.startsWith("android")) InterfaceInvoke.getDeviceChannel(r.deviceToken, "ANDROID", r.secondChannel)
            else r.secondChannel
          // NOTE: the original code also called r.copy(secondChannel = sChannel)
          // and discarded the result (case-class copy is pure); sChannel is
          // passed to VIP directly, so the dead statement was removed.
          VIP(r.userId, r.timestamp, if (r.pageTypeCode == SEARCH_PRE.code) "search" else "browse", r.province,
            r.sessionId, r.deviceToken, r.productId, sChannel, pageCodes.getOrElse(r.pageTypeCode, ""),
            if (platform.startsWith("ios") || platform.startsWith("android")) "app" else platform,
            r.searchKey)
        }

      // Serialize each VIP event to JSON, then encode it as a Kafka payload.
      val output = log.map { r =>
        import net.liftweb.json.Serialization
        implicit val formats = DefaultFormats
        Serialization.write(r)
      }.map(new KafkaPayloadStringCodec().encodeValue(_))

      output.foreachPartition { records =>
        // One producer per executor JVM via the factory cache; deliberately
        // not closed here so it can be reused across partitions/tasks.
        val producer = KafkaProducerFactory.getOrCreateProducer(config.sinkKafka)

        val callback = new KafkaDStreamSinkExceptionHandler

        // toList forces the iterator so every record is actually dispatched;
        // then block on each send future so the task does not complete before
        // the broker has acknowledged all records.
        val futures = records.map { record =>
          callback.throwExceptionIfAny()
          producer.send(new ProducerRecord(_kafkaSinkTopic.name, record.key.orNull, record.value), callback)
        }.toList

        futures.foreach(_.get())

        callback.throwExceptionIfAny()
      }
    }
  }

}

object VIPTemp {
  /**
    * Runs the job once for every day in the inclusive range encoded in `path`
    * as "&lt;start&gt;-&lt;end&gt;" with both dates formatted "yyyy/MM/dd",
    * e.g. "2016/10/01-2016/10/05".
    *
    * @param path start and end date separated by a single '-'
    * @throws IllegalArgumentException if `path` is not a two-part range
    */
  def apply(path: String): Unit = {
    val times = path.split("-")
    require(times.length == 2, s"expected '<start>-<end>' dates in yyyy/MM/dd format, got: $path")
    // SimpleDateFormat is not thread-safe, so a fresh local instance is used.
    val dateFormat = new SimpleDateFormat("yyyy/MM/dd")
    val startTime = dateFormat.parse(times(0))
    val endTime = dateFormat.parse(times(1))
    val days = Utils.getBetweenDate(startTime, endTime)
    for (day <- days) {
      val config = JobConfig()
      // Map.apply throws NoSuchElementException when the parameter is missing,
      // matching what the original Option.get did.
      new VIPTemp(config, config.param("wireless.vip.path") + day).execute()
    }
  }
}

