package com.atguigu.realtime.apps

import java.time.LocalDate

import com.alibaba.fastjson.JSON
import com.atguigu.realtime.beans.{ActionLog, CouponAlertInfo}
import com.atguigu.realtime.constants.{DBNameConstant, TopicConstant}
import com.atguigu.realtime.utils.{DStreamUtil, DateTimeUtil, PropertiesUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks

/**
 * Created by Smexy on 2022/8/26
 *
 *    at-least-once delivery + idempotent writes to ES = effectively exactly-once
 *
 *    Note: tumbling windows (slide == window) are usually preferred to avoid
 *    missing or re-processing data. This job deliberately uses a SLIDING
 *    window (5 min window, 30 s slide) and relies on the per-minute ES
 *    document id to deduplicate the repeated alerts that sliding produces.
 */
object AlertDemo extends BaseApp {

  override var batchDuration: Int = 10
  override var appName: String = "AlertDemo"
  override var groupId: String = "220409realtime"
  override var topic: String = TopicConstant.ACTION_LOG

  def main(args: Array[String]): Unit = {

    // ES connection settings ride along in SparkConf (read by elasticsearch-spark).
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName(appName)
      .set("es.nodes", PropertiesUtil.getValue("es.nodes"))
      .set("es.port", PropertiesUtil.getValue("es.port"))
      // Auto-create the target index if it does not exist yet.
      .set("es.index.auto.create", "true")
      // WAN/cloud mode: connect only to the declared nodes, do not use
      // node discovery (needed when the cluster is behind NAT / not directly routable).
      .set("es.nodes.wan.only", "true")

    // Override the context declared in the parent class.
    context = new StreamingContext(sparkConf, Seconds(batchDuration))

    runSparkStreamingApp {

      val ds: InputDStream[ConsumerRecord[String, String]] = DStreamUtil.getDStream(context, groupId, topic)

      // Captured here so the offsets of each batch can be committed after the sink write.
      var offsetRanges: Array[OffsetRange] = null

      // Deserialize each Kafka record's JSON value into an ActionLog bean.
      val ds1: DStream[ActionLog] = ds.transform(rdd => {

        // transform() runs on the driver every batch, so this assignment is
        // visible to the foreachRDD below that commits the offsets.
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

        rdd.map(record => JSON.parseObject(record.value(), classOf[ActionLog]))

      })

      // 5-minute window sliding every 30 s; group the window's logs per (device, user).
      val ds2: DStream[((String, String), Iterable[ActionLog])] = ds1.window(Minutes(5), Seconds(30))
        .map(log => ((log.mid, log.uid), log))
        .groupByKey()

      // Keep only (device, user) pairs where the user added a shipping address.
      // exists() short-circuits on the first match, so no explicit break is needed.
      val ds3: DStream[((String, String), Iterable[ActionLog])] = ds2.filter {
        case ((mid, uid), logs) =>
          logs.exists(log => "trade_add_address".equals(log.action_id))
      }

      // Regroup by device:
      // (mid1, [ [trade_add_address, cart_add...], [trade_add_address, cart_add...] ])
      val ds4: DStream[(String, Iterable[Iterable[ActionLog]])] = ds3.map {
        case ((mid, uid), logs) => (mid, logs)
      }.groupByKey()

      // A device is suspicious when >= 2 distinct users added an address on it
      // within the window. Flatten each device's per-user log groups into one
      // Iterable of all ActionLogs produced on that device in the last 5 min.
      val ds5: DStream[(String, Iterable[ActionLog])] = ds4.filter(_._2.size >= 2)
        .mapValues(_.flatten)

      // Build one alert record per suspicious device.
      val ds6: DStream[CouponAlertInfo] = ds5.map {
        case (mid, logs) => {

          val uids: mutable.Set[String] = new mutable.HashSet[String]
          val itemIds: mutable.Set[String] = new mutable.HashSet[String]
          val events: ListBuffer[String] = new ListBuffer[String]

          logs.foreach(log => {

            uids.add(log.uid)
            events.append(log.action_id)

            // Only favor_add actions carry an item of interest.
            if ("favor_add".equals(log.action_id)) {
              itemIds.add(log.item)
            }

          })

          val ts: Long = System.currentTimeMillis()

          /*
              Embed the mid plus the timestamp truncated to the minute in the id,
              so that if one device raises several alerts within the same minute
              (the sliding window re-evaluates every 30 s), ES upserts keep only
              the latest alert document for that minute, e.g.:

              PUT  /index/_doc/mid_2022-08-27 11:11   :20  {alert 1}
              PUT  /index/_doc/mid_2022-08-27 11:11   :50  {alert 2}  // overwrites alert 1
           */
          val id = mid + DateTimeUtil.parseMillTsToDateTimeMinute(ts)

          CouponAlertInfo(id, uids, itemIds, events, ts)

        }
      }

      // Write the alerts to Elasticsearch.
      import org.elasticsearch.spark._

      ds6.foreachRDD(rdd => {

        // Cache: the RDD is consumed twice (count() for logging, then saveToEs).
        rdd.cache()

        println("要写入ES:" + rdd.count())
        /*
          resource: String — target index name (daily index).
          cfg: Map[String, String] — extra settings;
               es.mapping.id names the bean field used as the document id,
               which makes the write idempotent (upsert by id).
         */
        rdd.saveToEs(DBNameConstant.ALERTINDEX + LocalDate.now(), Map("es.mapping.id" -> "id"))

        // Commit the source offsets only after the sink write succeeded (at-least-once).
        ds.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

        // Release the cached blocks; otherwise every batch leaves a cached RDD behind.
        rdd.unpersist()

      })

    }

  }
}
