package com.tech.customer

import com.tech.common.{KafkaUtil, KuduUtil}
import com.tech.process.EventProcess
import org.apache.log4j.Logger
import org.apache.spark.sql.ForeachWriter

import scala.util.control.NonFatal

class MyForeachWriter(manager: LoadResourceManager) extends ForeachWriter[(String, String, Int, Long)] with Serializable {
  // @transient + lazy: Logger is not serializable; re-created on each executor after deserialization.
  @transient lazy val logger: Logger = Logger.getLogger(getClass)

  // Compiled once per writer instance instead of on every process() call.
  private val retaileventPattern = "(retailevent.*)".r
  private val customerPattern = "(customer.*)".r

  /** No per-partition resources to acquire; always accept the partition/epoch. */
  override def open(partitionId: Long, version: Long): Boolean = {
    true
  }

  /**
   * Processes a single streamed record.
   *
   * Tuple layout (inferred from usage below — confirm against the upstream query):
   *   _1 = message payload, _2 = topic name, _3 = partition, _4 = offset.
   *
   * Routes the record to the matching processor by topic prefix. Processing
   * failures are logged and the raw payload is forwarded to the "errorMsg"
   * Kafka topic so the streaming query keeps running. The offset is persisted
   * to Kudu afterwards in every case.
   */
  override def process(value: (String, String, Int, Long)): Unit = {
    value._2 match {
      case retaileventPattern(_) =>
        try {
          EventProcess.retaileventProcess(value._1, value._2, manager.retaileventSchemaMapBroadcast.value, manager.ruleMapBroadcast.value, manager.customerSchemaMapBroadcast.value)
        } catch {
          // NonFatal: let OutOfMemoryError, InterruptedException, etc. propagate and fail the query.
          case NonFatal(e) =>
            logger.error(s"retailevent data process failure, data: [${value._1}]", e)
            KafkaUtil.write2Kafka("errorMsg", value._1)
        }
      case customerPattern(_) =>
        try {
          EventProcess.customerProcess(value._1, manager.customerSchemaMapBroadcast.value)
        } catch {
          case NonFatal(e) =>
            logger.error(s"customer data process failure, data: [${value._1}]", e)
            KafkaUtil.write2Kafka("errorMsg", value._1)
        }
      case other =>
        // Previously an unmatched topic threw scala.MatchError, killing the query
        // and skipping the offset upsert below; now it is logged and skipped.
        logger.error(s"unknown topic [$other], record skipped, data: [${value._1}]")
    }
    // Persist the consumed offset to Kudu (topic, partition, offset).
    KuduUtil.upsertOffset(value._2, value._3, value._4)
  }

  /** Nothing to release per partition. */
  override def close(errorOrNull: Throwable): Unit = {
  }
}