package com.bigdata.spark.mallapp_realtime.test

import java.text.SimpleDateFormat
import java.util.Date

import com.alibaba.fastjson.{JSON, JSONObject}
import com.bigdata.spark.util.MyRedisUtil
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import scala.collection.mutable.ListBuffer

/**
 * Real-time DAU (daily active users) job.
 *
 * Pipeline: Kafka topic "atguiguNew" -> parse start-log JSON -> enrich with
 * date/hour fields -> deduplicate per day via a Redis set ("dau:<date>") ->
 * log each user's first login of the day, preserving arrival order.
 */
object mall_log_app_DA {
  def main(args: Array[String]): Unit = {

    // 1. Spark configuration and a StreamingContext with a 3-second batch interval.
    val sparkStreamingConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkStreaming_logger_DA")
    val ssc = new StreamingContext(sparkStreamingConf, Seconds(3))

    // Kafka consumer parameters. Auto-commit is disabled so offsets are not
    // advanced behind the streaming framework's back.
    val kafkaPara: Map[String, Object] = Map[String, Object](
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> "false",
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "hadoop102:9092,hadoop103:9092,hadoop104:9092",
      ConsumerConfig.GROUP_ID_CONFIG -> "atguigu",
      "key.deserializer" ->
        "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer" ->
        "org.apache.kafka.common.serialization.StringDeserializer"
    )

    // 2. Direct Kafka input stream on topic "atguiguNew".
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("atguiguNew"), kafkaPara))

    // 3. Parse each record's JSON payload and enrich it with "date" and "hr"
    //    fields derived from the start-event timestamp "ts".
    val jsonObjectStream: DStream[JSONObject] = kafkaDStream.map(
      record => {
        val js: JSONObject = JSON.parseObject(record.value()) // whole payload as JSON
        val startObj: JSONObject = js.getJSONObject("start")
        val logTs: Long = startObj.getLong("ts")

        // BUG FIX: the original pattern used "hh" (12-hour clock), which maps
        // e.g. 01:00 and 13:00 to the same hour bucket; "HH" is the 24-hour
        // field. SimpleDateFormat is not thread-safe, so a fresh instance per
        // record is deliberately kept here.
        val dateTimeStr: String = new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(logTs))

        val parts: Array[String] = dateTimeStr.split(" ")
        js.put("date", parts(0)) // "yyyy-MM-dd"
        js.put("hr", parts(1))   // "HH:mm"
        js
      }
    )

    // 4. Deduplicate per day: sadd into the Redis set "dau:<date>" returns 1
    //    only for the first occurrence of a user_id, so only each user's first
    //    login of the day passes downstream.
    val filteredDStream: DStream[JSONObject] = jsonObjectStream.mapPartitions(
      jsonObjItr => {
        // One Redis connection per partition (per batch), not per record.
        val jedis: Jedis = MyRedisUtil.getJedisClient()
        val firstLogins = new ListBuffer[JSONObject]() // keeps arrival order of first logins
        try {
          for (jsObj <- jsonObjItr) {
            val dateStr: String = jsObj.getString("date")
            val userId: String = jsObj.getJSONObject("start").getString("user_id")

            val dauKey = "dau:" + dateStr
            val isFirst = jedis.sadd(dauKey, userId) // 1 => first login today, 0 => already seen

            // ttl < 0 means no expiry has been set yet; give the daily set a 24h TTL once.
            if (jedis.ttl(dauKey) < 0) {
              jedis.expire(dauKey, 3600 * 24)
            }
            println(isFirst) // trace whether this record was a first login

            if (isFirst != 0) {
              firstLogins.append(jsObj)
              println(jsObj.toJSONString)
            }
          }
        } finally {
          // BUG FIX: close even when parsing or a Redis call throws, so failed
          // batches do not leak connections from the pool.
          jedis.close()
        }

        println(firstLogins)
        firstLogins.toIterator // mapPartitions must return an Iterator
      }
    )

    // 5. Sink stage: currently only logs each first-login record per batch
    //    (placeholder for a real save; .print() forces the job to run).
    filteredDStream.map(
      js => {
        println(js)
        if (js != null) {
          println("save", js)
        }
      }
    ).print()

    ssc.start()
    ssc.awaitTermination()

  }
}
