package com.atguigu.realtime.app

import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import java.time.format.DateTimeFormatter
import java.util

import com.alibaba.fastjson.JSON
import com.atguigu.gmall.Constansts
import com.atguigu.realtime.app.GMVApp.streamingContext
import com.atguigu.realtime.beans.{CouponAlertInfo, EventLog}
import com.atguigu.realtime.utils.{MyEsUtil, MyKafkaUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.Minutes
import org.apache.spark.streaming.dstream.{DStream, InputDStream}

import scala.util.control.Breaks._
/**
 * Created by Smexy on 2021/7/9
 *
 *      Java backend development (study notes):
 *
 *          1) Spring Boot (Lei Fengyang's course)
 *          2) Spring annotation-driven development
 *          3) MyBatis
 *              a) MyBatis core
 *              b) dynamic SQL
 *              c) code generator (reverse engineering)
 *              d) generic Mapper
 *
 *          4) Java Web
 *
 * NOTE(review): the notes above are unrelated to this file's logic
 * (coupon-alert streaming job); consider removing them.
 */
/**
 * Coupon-fraud alert job.
 *
 * Reads event logs from Kafka, windows the last 5 minutes, and raises an alert
 * for a device when at least 3 distinct users on that device claimed a coupon
 * without ever browsing an item inside the window. Alerts are bulk-written to
 * Elasticsearch with minute-grained, per-device document ids (idempotent upsert).
 */
object AlertApp extends BaseApp {
  override var appName: String = "AlertApp"
  override var duration: Int = 10

  def main(args: Array[String]): Unit = {

    run {

      // NOTE(review): streamingContext is imported from GMVApp — confirm BaseApp
      // does not provide its own context; reusing another app's context looks accidental.
      val sourceStream: InputDStream[ConsumerRecord[String, String]] =
        MyKafkaUtil.getKafkaStream(Constansts.GMALL_EVENT_LOG, streamingContext)

      // Parse each Kafka record into an EventLog bean and derive logDate / logHour
      // from the event timestamp (ts, epoch millis, Asia/Shanghai zone).
      val eventLogStream: DStream[EventLog] = sourceStream.map(record => {

        val eventLog: EventLog = JSON.parseObject(record.value(), classOf[EventLog])

        // DateTimeFormatter is immutable but not Serializable, so it is built
        // per record inside the executor-side closure rather than hoisted to the driver.
        val dateFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd")

        val eventTime: LocalDateTime =
          LocalDateTime.ofInstant(Instant.ofEpochMilli(eventLog.ts), ZoneId.of("Asia/Shanghai"))

        eventLog.logDate = eventTime.format(dateFormatter)
        eventLog.logHour = eventTime.getHour.toString

        eventLog
      })

      /*
          Window the last 5 minutes of events and group them per (device, user):
             K: (mid, uid)
             V: every EventLog that user produced on that device in the window.

          Alert rule: the user claimed a coupon AND never browsed an item in the
          window. A single item view within 5 minutes suppresses the alert.
       */
      val logsPerDeviceUser: DStream[((String, String), Iterable[EventLog])] =
        eventLogStream.window(Minutes(5))
          .map(log => ((log.mid, log.uid), log))
          .groupByKey()

      /*
          Keep only (device, logs) pairs for users whose behaviour needs an alert.

          Equivalent to the original breakable-loop flag: the flag ends up true
          exactly when a "coupon" event exists and no "clickItem" event exists
          (any "clickItem" forced the flag to false and broke out of the loop).
          `exists` short-circuits just like the break did. Non-alerting users are
          dropped here directly via Option, instead of emitting (null, null)
          sentinels and filtering them out in a second pass.
       */
      val alertCandidates: DStream[(String, Iterable[EventLog])] = logsPerDeviceUser.flatMap {
        case ((mid, _), logs) =>
          val browsedItem: Boolean = logs.exists(log => "clickItem".equals(log.evid))
          val claimedCoupon: Boolean = logs.exists(log => "coupon".equals(log.evid))

          if (claimedCoupon && !browsedItem) Some((mid, logs)) else None
      }

      /*
          Re-group by device id. After groupByKey the value holds one
          Iterable[EventLog] per alerting user, so `size` counts users:
          devices with fewer than 3 alerting users are dropped, then the
          per-user collections are flattened into one log collection per device.
       */
      val alertingDevices: DStream[(String, Iterable[EventLog])] = alertCandidates.groupByKey()
        .filter(_._2.size >= 3)
        .mapValues(_.flatten)

      // Build the alert record: distinct uids, distinct coupon item ids, and the
      // full event list (java collections, as CouponAlertInfo expects).
      val alertStream: DStream[CouponAlertInfo] = alertingDevices.map {
        case (mid, logs) =>

          val uids: util.HashSet[String] = new util.HashSet[String]()
          val itemIds: util.HashSet[String] = new util.HashSet[String]()
          val events: util.List[String] = new util.ArrayList[String]()

          logs.foreach(log => {
            uids.add(log.uid)
            events.add(log.evid)

            // Only coupon events contribute to the suspicious item-id set.
            if ("coupon".equals(log.evid)) {
              itemIds.add(log.itemid)
            }
          })

          CouponAlertInfo(mid, uids, itemIds, events, System.currentTimeMillis())
      }

      /*
          Key each alert by "yyyy-MM-dd HH:mm_<mid>" for use as the ES doc id:
          repeated alerts for the same device within the same minute overwrite
          each other (idempotent upsert); a new minute creates a new document.
       */
      val keyedAlerts: DStream[(String, CouponAlertInfo)] = alertStream.map(info => {

        val minuteFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm")

        val alertTime: LocalDateTime =
          LocalDateTime.ofInstant(Instant.ofEpochMilli(info.ts), ZoneId.of("Asia/Shanghai"))

        (alertTime.format(minuteFormatter) + "_" + info.mid, info)
      })

      // Write to ES one partition at a time to amortise connection setup.
      keyedAlerts.foreachRDD(rdd => {

        rdd.foreachPartition(partition => {

          // Materialise the partition as (docId, doc) pairs for the bulk API.
          val docs: List[(String, CouponAlertInfo)] = partition.toList

          // NOTE(review): index name concatenates the date with no separator
          // ("gmall_coupon_alert2021-07-09") — confirm this matches the ES
          // index-template naming convention (a "_" or "-" may be intended).
          MyEsUtil.insertBulk("gmall_coupon_alert" + LocalDate.now(), docs)
        })
      })

    }

  }
}
