package com.pw.study.realtime.app

import com.pw.study.common.constants.TopicConstant
import com.pw.study.realtime.bean.AlarmBean
import com.pw.study.realtime.handle.{KafkaHandler, RDDHandler}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}

import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import java.time.format.DateTimeFormatter
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks

object ActionApp extends BaseAPP {
  appName = "actionToEs"
  groupName = "actionToEs"
  val topics = Array(TopicConstant.ACTIONS_LOG)

  /**
   * Entry point: consumes action logs from Kafka, detects devices on which at
   * least two different accounts submitted a "trade_add_address" action within
   * a 5-minute window, and writes one alarm document per device to a daily
   * Elasticsearch index. Kafka offsets are committed only after a successful
   * write (at-least-once semantics).
   */
  def main(args: Array[String]): Unit = {
    conf.setAppName(appName)
    // Allow ES to auto-create the target index if it does not exist yet.
    conf.set("es.index.auto.create", "true")
    // Elasticsearch cluster nodes and REST port.
    conf.set("es.nodes", "hadoop112,hadoop113,hadoop114")
    conf.set("es.port", "9200")

    context = new StreamingContext(conf, Seconds(batchDuration))
    runApp({
      // Source stream from Kafka.
      val ds = KafkaHandler.getKafkaStream(topics, context, groupName)

      // Capture each batch's offset ranges on the driver so they can be
      // committed after the batch has been persisted to ES.
      var ranges: Array[OffsetRange] = null
      val ds2 = ds.transform(rdd => {
        ranges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd.map(record => record.value())
      })

      // Parse raw log lines into action beans.
      val ds3 = RDDHandler.rddToActionBean(ds2)

      // Group the last 5 minutes of events by (device id, account id).
      val ds4 = ds3.window(Minutes(5)).map(bean =>
        ((bean.common.mid, bean.common.uid), bean)).groupByKey()

      // Keep only (device, account) groups that contain at least one
      // "trade_add_address" action in the window.
      // (Was: a `var` flag driven by Breaks.break() inside nested foreach —
      // exception-based control flow; `exists` expresses the same check.)
      val ds5 = ds4.filter {
        case (_, beans) =>
          beans.exists(bean =>
            bean.actions.exists(_.action_id == "trade_add_address"))
      }

      // Re-key by device id; keep devices with >= 2 distinct accounts in the
      // window, then flatten the per-account groups into one bean collection.
      val ds6 = ds5.map({ case ((mid, _), beans) => (mid, beans) })
        .groupByKey().filter(_._2.size >= 2).mapValues(_.flatten)

      // Build one alarm record per suspicious device.
      val ds7 = ds6.map {
        case (mid, beans) =>
          val uids = new mutable.HashSet[String]()
          val items = new mutable.HashSet[String]()
          val events = new ListBuffer[String]()
          beans.foreach(bean => {
            uids.add(bean.common.uid)
            bean.actions.foreach(action => {
              events.append(action.action_id)
              if (action.action_id == "favor_add")
                items.add(action.item)
            })
          })
          val ts = System.currentTimeMillis()
          // Minute-granularity id suffix de-duplicates alarms: documents with
          // the same ES _id overwrite, so at most one alarm per device per
          // minute survives.
          val f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm")
          val timeSuffix = LocalDateTime
            .ofInstant(Instant.ofEpochMilli(ts), ZoneId.of("Asia/Shanghai"))
            .format(f)
          val id = mid + "_" + timeSuffix
          AlarmBean(id, uids, items, events, ts)
      }

      // Sink: write alarms to a daily ES index, then commit Kafka offsets.
      import org.elasticsearch.spark._

      ds7.foreachRDD(rdd => {
        // Cache because the RDD is evaluated twice (count + saveToEs).
        rdd.cache()
        println(s"即将写入数据条数：${rdd.count()}")
        rdd.saveToEs("gmall_action_" + LocalDate.now() + "/_doc", Map("es.mapping.id" -> "id"))
        // Commit offsets only after the write above has been triggered.
        ds.asInstanceOf[CanCommitOffsets].commitAsync(ranges)
        // Release the cached blocks — this stream runs indefinitely and the
        // original code never unpersisted, leaking cache across batches.
        rdd.unpersist()
      })
    })
  }

}
