package com.tech.consumer

import com.tech.common.{KafkaUtil, KuduUtil}
import com.tech.process.nodeProcess._
import com.tech.process.{DelayProcess, DynamicCustomerProcess, EventProcess, MsgProcess, NodeProcess}
import org.apache.spark.sql.ForeachWriter

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

/**
 * Structured Streaming sink that routes each Kafka record to the business
 * processor matching its topic name, then upserts the record's offset into
 * Kudu so consumption progress survives restarts.
 *
 * Record tuple layout: (message, topic, partition, offset).
 *
 * @param manager holder of the broadcast lookup tables; their values are
 *                snapshotted once per partition in [[open]].
 */
class MyForeachWriter(manager: LoadResourceManager) extends ForeachWriter[(String, String, Int, Long)] with Serializable {

  // Per-partition snapshots of the broadcast resources, populated in open()
  // so that process() does no broadcast resolution per record.
  private var activities: ArrayBuffer[String] = _
  private var behaviorMap: mutable.HashMap[String, String] = _

  private var startNodeMap: mutable.HashMap[String, String] = _
  private var nextNodeMap: mutable.HashMap[String, NextNode] = _
  private var groupMap: mutable.HashMap[String, mutable.Set[String]] = _
  private var behaviorNodeMap: mutable.HashMap[String, BehaviorNode] = _
  private var testNodeMap: mutable.HashMap[String, TestNode] = _
  private var waitNodeMap: mutable.HashMap[String, WaitNode] = _
  private var eventKeySet: mutable.HashSet[String] = _
  private var dimDataGroupDetailMap: mutable.HashMap[String, ArrayBuffer[String]] = _
  private var retaileventSchemaMap: mutable.HashMap[String, String] = _
  private var customerSchemaMap: mutable.HashMap[String, String] = _
  private var columnTypeMap: mutable.HashMap[String, String] = _

  // Topic-routing patterns. Hoisted out of process() so each regex is
  // compiled once per writer instance instead of once per record.
  // scala.util.matching.Regex is Serializable, so Spark task shipping is safe.
  private val retaileventPattern = "(retailevent.*)".r
  private val delayPattern = "(delay.*)".r
  private val dynamicCustomerPattern = "(dynamic_customer.*)".r
  private val customerNodePattern = "(customer_node.*)".r
  private val maInfoStatusPattern = "(ma_info_status.*)".r

  /**
   * Called once per partition/epoch. Resolves every broadcast variable to its
   * local value so the per-record path in [[process]] is lookup-free.
   *
   * @param partitionId id of the partition this writer instance handles
   * @param version     epoch/batch version supplied by the streaming engine
   * @return always true — every partition is accepted for processing
   */
  override def open(partitionId: Long, version: Long): Boolean = {
    println(s"开始--------------$partitionId-------------------$version")

    activities = manager.activityBroadcast.value
    behaviorMap = manager.behaviorMapBroadcast.value
    startNodeMap = manager.startNodeMapBroadcast.value
    nextNodeMap = manager.nextNodeMapBroadcast.value
    groupMap = manager.groupMapBroadcast.value
    behaviorNodeMap = manager.behaviorNodeMapBroadcast.value
    testNodeMap = manager.testNodeMapBroadcast.value
    waitNodeMap = manager.waitNodeMapBroadcast.value
    eventKeySet = manager.eventKeySetBroadcast.value
    dimDataGroupDetailMap = manager.dimDataGroupDetailMapBroadcast.value
    retaileventSchemaMap = manager.retaileventSchemaMapBroadcast.value
    customerSchemaMap = manager.customerSchemaMapBroadcast.value
    columnTypeMap = manager.columnTypeMapBroadcast.value

    true
  }

  /**
   * Dispatches one record to the processor for its topic, then records the
   * offset in Kudu. A record whose processing throws is forwarded to a
   * per-topic error topic instead of aborting the stream, and its offset is
   * still committed so it is not replayed on restart.
   *
   * @param value (message, topic, partition, offset)
   */
  override def process(value: (String, String, Int, Long)): Unit = {

    try {
      value._2 match {
        case retaileventPattern(_) =>
          println(value._2, value._1)
          EventProcess.eventProcess(value._1,value._2, activities, startNodeMap, nextNodeMap, groupMap, behaviorNodeMap, testNodeMap, waitNodeMap, eventKeySet, dimDataGroupDetailMap, retaileventSchemaMap, columnTypeMap)
        case delayPattern(_) =>
          println(value._2, value._1)
          DelayProcess.delayProcess(value._1, activities, nextNodeMap, behaviorNodeMap, testNodeMap, waitNodeMap, groupMap)
        case dynamicCustomerPattern(_) =>
          println(value._2, value._1)
          DynamicCustomerProcess.dynamicCustomerProcess(value._1, nextNodeMap, behaviorNodeMap, testNodeMap, waitNodeMap)
        case customerNodePattern(_) =>
          println(value._2, value._1)
          NodeProcess.nodeProcess(value._1)
        case maInfoStatusPattern(_) =>
          println(value._2, value._1)
          MsgProcess.msgProcess(value._1)
        case _ => println("其他消息:" + value._1)
      }
    } catch {
      // NonFatal lets OOM/interrupt propagate; a failing record is logged
      // (previously swallowed silently) and shipped to the error topic so
      // the stream keeps running.
      case NonFatal(e) =>
        println(s"处理失败 topic=${value._2}: ${e.getMessage}")
        KafkaUtil.write2Kafka(s"errorMsg_${value._2}", value._1)
    }

    // Offset is upserted even when processing failed: the record was routed
    // to the error topic above, so it must not be replayed after a restart.
    KuduUtil.upsertOffset(value._2, value._3, value._4)

  }

  /** Called when the partition's epoch ends (errorOrNull is null on success). */
  override def close(errorOrNull: Throwable): Unit = {
    println("结束---------------------------------")
  }
}