package com.lvmama.rhino.analyze.client

import net.liftweb.json._

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaDStreamSource, KafkaPayloadStringCodec}
import com.lvmama.rhino.common.utils.spark.StreamingApplication
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.StreamingContext
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitLoad
import com.lvmama.rhino.common.utils.methodset.InterfaceInvoke

/**
 * Flattened user-behavior event published to the `vip_marketing` Kafka topic.
 *
 * Built in [[VIPMarketingStreaming]] from a raw wireless log record and serialized
 * to JSON with lift-json before being sent downstream.
 *
 * @param userId       user identifier from the source log
 * @param timestamp    event time carried over from the source record
 * @param behaviorType "search" for search-page events, otherwise "browse"
 * @param province     user's province from the source log
 * @param sessionId    session identifier
 * @param deviceToken  device token (also used for the Android channel lookup upstream)
 * @param productId    product identifier, if any
 * @param deviceType   resolved channel ("iphone", an Android channel, or the raw second channel)
 * @param pageTypeCode English page name resolved from the P2/P3 code table ("" if unknown)
 * @param logSource    "app" for iOS/Android platforms, otherwise the raw platform name
 * @param searchKey    search keyword, if the event is a search
 */
final case class VIP(userId: String,
                     timestamp: Long,
                     behaviorType: String,
                     province: String,
                     sessionId: String,
                     deviceToken: String,
                     productId: String,
                     deviceType: String,
                     pageTypeCode: String,
                     logSource: String,
                     searchKey: String)

/**
 * Spark Streaming job that reads raw wireless logs from the `wireless_v2` Kafka topic,
 * enriches each record with platform/page-code lookups loaded from MySQL, and publishes
 * flattened [[VIP]] events as JSON to the `vip_marketing` Kafka topic.
 *
 * @param config   job configuration (Kafka endpoints, Spark config, checkpoint dirs)
 * @param duration streaming batch duration, forwarded to [[streamingBatchDuration]]
 */
class VIPMarketingStreaming(config: JobConfig, duration: Long) extends StreamingApplication
  with Serializable {

  override var appName: String = "VIPMarketingStreaming"

  override def streamingBatchDuration: Long = duration

  // Checkpoint dir from config, falling back to a fixed HDFS path.
  override def streamingCheckpointDir: String = config.streamingCheckpointDir
    .getOrElse("vipKafkaStreaming", "/checkpoint/vip_kafka_streaming")

  override var sparkConfig: Map[String, String] = config.spark

  private val kafkaSourceTopic = KafkaTopic("wireless_v2")
  private val kafkaOutputTopic = KafkaTopic("vip_marketing")

  /**
   * Loads a `code -> en_name` lookup table from the `statistic_continuity_code`
   * MySQL table, restricted by the given pushdown predicates.
   *
   * Both columns are read as strings (`getString`), which matches the
   * `Map[String, String]` cast the previous implementation applied after the fact.
   *
   * @param predicates JDBC partition predicates, e.g. `Array("CodeTypeLevel = 'D2'")`
   * @return an in-memory code-to-name map (empty string never used as a key here)
   */
  private def loadCodeMap(predicates: Array[String]): Map[String, String] =
    SparkSession.builder().getOrCreate().sqlContext
      .loadFromMysql("statistic_continuity_code", predicates)
      .select("code", "en_name")
      .collect()
      .map(r => (r.getString(0), r.getString(1)))
      .toMap

  /**
   * Builds (or restores from checkpoint) the streaming pipeline and blocks until
   * the streaming context terminates.
   *
   * Pipeline: Kafka source -> decode -> parse (`Utils.initStat`) -> enrich with
   * broadcast code maps -> map to [[VIP]] -> JSON-serialize -> Kafka sink.
   */
  def execute(): Unit = {
    sparkConfig += ("spark.app.name" -> appName)
    // NOTE(review): hard-coded local[2] master overrides any cluster setting in
    // config.spark — presumably intentional for this job; confirm before deploying.
    sparkConfig += ("spark.master" -> "local[2]")
    sparkConfig += ("spark.cleaner.ttl" -> "6000")

    def func = (sc: SparkContext, ssc: StreamingContext) => {
      val streaming = KafkaDStreamSource(config.sourceKafka).createSource(ssc, kafkaSourceTopic.name)

      val lines = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(l => Utils.initStat(l))

      // D2-level codes: platformCode -> platform name (e.g. "ios...", "android...").
      val deviceCode = loadCodeMap(Array("CodeTypeLevel = 'D2'"))
      // P2/P3-level codes: pageTypeCode -> English page name.
      val pageCode = loadCodeMap(Array("CodeTypeLevel = 'P2' or CodeTypeLevel = 'P3'"))

      // Broadcast the lookup tables so each executor gets one copy per job,
      // not one per task closure.
      val deviceCode_bc = sc.broadcast(deviceCode)
      val pageCode_bc = sc.broadcast(pageCode)

      val client = lines
        .repartition(4)
        .transform(tf =>
          tf.mapPartitions(iter => {
            // Resolve broadcast values once per partition. The maps are already
            // Map[String, String]; the previous asInstanceOf casts are no longer needed.
            val deviceCode_value = deviceCode_bc.value
            val pageCode_value = pageCode_bc.value
            iter.map { r =>
              val platform = deviceCode_value.getOrElse(r.platformCode, "")
              // iOS channel is fixed; Android requires a remote channel lookup;
              // anything else keeps the raw second channel.
              val sChannel = if (platform.startsWith("ios")) {
                "iphone"
              } else if (platform.startsWith("android")) {
                InterfaceInvoke.getDeviceChannel(r.deviceToken, "ANDROID", r.secondChannel)
              } else {
                r.secondChannel
              }
              // The old `r.copy(secondChannel = sChannel)` here was dead code: its
              // result was discarded, so it has been removed. A commented-out
              // page-type filter (PENDING_PAYMENT/REGISTER) and user-age lookup
              // were also removed; recover them from VCS history if needed.
              VIP(r.userId, r.timestamp,
                if (r.pageTypeCode == SEARCH_PRE.code) "search" else "browse",
                r.province, r.sessionId, r.deviceToken, r.productId, sChannel,
                pageCode_value.getOrElse(r.pageTypeCode, ""),
                if (platform.startsWith("ios") || platform.startsWith("android")) "app" else platform,
                r.searchKey)
            }
          })
        )

      import com.lvmama.rhino.common.utils.kafka.KafkaDStreamSink._
      client
        .transform(tf =>
          tf.mapPartitions(iter => {
            // Formats is immutable; hoist it out of the per-record map (it used to be
            // re-created for every single record).
            implicit val formats: Formats = DefaultFormats
            iter.map(r => Serialization.write(r))
          })
        )
        .map(new KafkaPayloadStringCodec().encodeValue(_))
        .sendToKafka(config.sinkKafka, kafkaOutputTopic.name)
    }

    // Restore from checkpoint if present; otherwise build a fresh context with func.
    val ssc = StreamingContext.getOrCreate(streamingCheckpointDir, () => withSparkStreamingContext(func))
    ssc.start()
    ssc.awaitTermination()
  }
}

/** Companion entry point for [[VIPMarketingStreaming]]. */
object VIPMarketingStreaming {

  /**
   * Builds the job with the default [[JobConfig]] and runs it with the given
   * streaming batch duration. Blocks until the streaming context terminates.
   *
   * @param duration streaming batch duration passed through to the job
   */
  def apply(duration: Long): Unit =
    new VIPMarketingStreaming(JobConfig(), duration).execute()
}
