package com.atguigu.gmall.realtime.app

import java.{lang, util}
import java.text.SimpleDateFormat
import java.util.Date

import com.alibaba.fastjson.{JSON, JSONObject}
import com.atguigu.gmall.realtime.bean.DauInfo
import com.atguigu.gmall.realtime.util.{MyEsUtil, MyKafkaUtil, OffsetManager, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.ElasticsearchStatusException
import org.elasticsearch.action.search.{SearchRequest, SearchResponse}
import org.elasticsearch.client.{RequestOptions, RestHighLevelClient}
import org.elasticsearch.search.{SearchException, SearchHit}
import org.elasticsearch.search.builder.SearchSourceBuilder
import redis.clients.jedis.Jedis

import scala.collection.mutable
import scala.collection.mutable.ListBuffer


// 0  environment init
// 1  load kafka stream
// 2  extract offsets
// 3  transform structure
// 4  deduplicate
// 5  dimension join
// 6  write to ES
/**
 * DAU (Daily Active User) streaming job.
 *
 * Pipeline:
 *   0.   environment init
 *   0.5  restore today's dedup state from ES into Redis (crash recovery)
 *   1.   load the Kafka stream from the last committed offsets
 *   2.   capture the batch's offset ranges
 *   3.   parse records to JSON and stamp dt / hr fields
 *   4.   deduplicate by mid via a Redis set
 *   5.   enrich with user / province dimensions from Redis
 *   6.   bulk-write to Elasticsearch, then commit offsets (at-least-once
 *        delivery; the ES sink is expected to be idempotent on mid)
 */
object DwdDauApp {

  def main(args: Array[String]): Unit = {
    // 0  environment init
    val sparkConf: SparkConf = new SparkConf().setAppName("dwd_dau_app").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    val topic = "DWD_PAGE_LOG"
    val groupId = "dwd_dau_app"

    // 0.5  rebuild the Redis dedup set from what was already written to ES,
    // so a restart in the middle of the day does not double-count mids
    resetState()

    // 1  load kafka stream starting from the stored offsets
    val offsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)
    val inputDstream: InputDStream[ConsumerRecord[String, String]] =
      MyKafkaUtil.getKafkaStream(topic, ssc, offsetMap, groupId)

    // 2  capture the offset ranges of each batch (assigned on the driver
    // inside transform, read back when the batch output is committed)
    var offsetRanges: Array[OffsetRange] = null
    val inputWithOffsetDstream: DStream[ConsumerRecord[String, String]] = inputDstream.transform { rdd =>
      // per-partition offset ranges for this batch
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd
    }

    // 3  parse each record to a JSONObject and derive dt / hr from ts
    val jsonObjDStream: DStream[JSONObject] = inputWithOffsetDstream.map { record =>
      val jSONObject: JSONObject = JSON.parseObject(record.value())
      val ts: lang.Long = jSONObject.getLong("ts")
      // SimpleDateFormat is not thread-safe, so create one per record
      val simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH")
      val dateHourArr: Array[String] = simpleDateFormat.format(new Date(ts)).split(" ")
      jSONObject.put("dt", dateHourArr(0))
      jSONObject.put("hr", dateHourArr(1))
      jSONObject
    }
    // jsonObjDStream.print(1000)

    // 4  dedup: one Redis connection per partition; pre-filter on
    // last_page_id to save round-trips (only a session's first page,
    // i.e. no last_page_id, can possibly be a new daily-active mid)
    val filteredDstream: DStream[JSONObject] = jsonObjDStream.mapPartitions { jsonObjItr =>
      val jsonList: List[JSONObject] = jsonObjItr.toList
      println(s"过滤前:${jsonList.size}条")
      val jedis: Jedis = RedisUtil.getJedisClient
      val filteredJsonList = new ListBuffer[JSONObject]
      try {
        for (jsonObj <- jsonList) {
          // dedup list kept in Redis:
          //   type: set   key: DAU:[dt]   value: mid
          //   write/read: sadd            expiry: should be 24h
          val lastPageId: String = jsonObj.getString("last_page_id")
          if (lastPageId == null || lastPageId.isEmpty) {
            val dt: String = jsonObj.getString("dt")
            val dauKey = s"DAU:$dt"
            val mid: String = jsonObj.getString("mid")
            // sadd returns 1 only when mid was not already in the set
            val isNew: lang.Long = jedis.sadd(dauKey, mid)
            if (isNew == 1L) {
              filteredJsonList.append(jsonObj)
            }
          }
        }
      } finally {
        // always return the connection to the pool, even on failure
        jedis.close()
      }
      println(s"过滤后:${filteredJsonList.size}条")
      filteredJsonList.toIterator
    }
    // filteredDstream.print(1000)

    // 5  dimension join (user gender/age and province attributes)
    val dauInfoDstream: DStream[DauInfo] = filteredDstream.mapPartitions { jsonObjItr =>
      val jedis: Jedis = RedisUtil.getJedisClient
      val dauInfoList = new ListBuffer[DauInfo]
      try {
        for (jsonObj <- jsonObjItr) {
          val dauInfo: DauInfo = JSON.parseObject(jsonObj.toJSONString, classOf[DauInfo])

          // user dimension; skip enrichment (rather than NPE the whole
          // batch) when the dim record is missing in Redis
          val userInfoJson: String = jedis.get(s"DIM:USER_INFO:${dauInfo.user_id}")
          if (userInfoJson != null) {
            val userJsonObj: JSONObject = JSON.parseObject(userInfoJson)
            // gender: a null or unknown code falls through to "未知"
            dauInfo.user_gender = userJsonObj.getString("gender") match {
              case "M" => "男"
              case "F" => "女"
              case _   => "未知"
            }
            // age: derived from birthday; guarded because parse(null) throws
            val birthday: String = userJsonObj.getString("birthday")
            if (birthday != null) {
              val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
              val diffTs = new Date().getTime - dateFormat.parse(birthday).getTime
              dauInfo.user_age = (diffTs / 1000 / 3600 / 24 / 365).toString
            }
          }

          // province dimension; same missing-dim guard as above
          val provinceJson: String = jedis.get(s"DIM:BASE_PROVINCE:${dauInfo.province_id}")
          if (provinceJson != null) {
            val provinceJsonObj: JSONObject = JSON.parseObject(provinceJson)
            dauInfo.province_name = provinceJsonObj.getString("name")
            dauInfo.province_3166_2 = provinceJsonObj.getString("iso_3166_2")
            dauInfo.province_iso_code = provinceJsonObj.getString("iso_code")
            dauInfo.province_area_code = provinceJsonObj.getString("area_code")
          }

          dauInfoList.append(dauInfo)
        }
      } finally {
        jedis.close()
      }
      dauInfoList.toIterator
    }

    // cached because the stream is consumed twice: print + ES write
    dauInfoDstream.cache()
    dauInfoDstream.print(1000)

    // 6  write to ES, then commit offsets
    //    - index per day, bulk write per partition
    //    - idempotence relies on mid being used as the document id
    //    - offsets are saved on the driver only after the batch's output
    //      actions ran, giving at-least-once (exactly-once with the
    //      idempotent sink)
    dauInfoDstream.foreachRDD { rdd =>
      rdd.foreachPartition { dauInfoItr =>
        val dauInfoList: List[(String, DauInfo)] = dauInfoItr.toList.map(dauInfo => (dauInfo.mid, dauInfo))
        if (dauInfoList.nonEmpty) {
          // every record of a 5s batch carries the same dt, so the first
          // record determines the daily index name
          val dt: String = dauInfoList.head._2.dt
          val indexName = s"gmall0819_dau_info_$dt"
          MyEsUtil.bulkSave(dauInfoList, indexName)
        }
      }
      // driver side: commit offsets after the batch output
      OffsetManager.saveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * State restore for crash recovery:
   * reads the mids already persisted to today's ES index and rebuilds the
   * Redis `DAU:[today]` dedup set, so restarted batches do not re-emit
   * already-counted users. Returns early when today's index does not yet
   * exist (first run of the day).
   */
  def resetState(): Unit = {
    // 1  read today's mids from ES
    val client: RestHighLevelClient = MyEsUtil.getClientNew

    val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
    val today: String = dateFormat.format(new Date())
    val indexName = s"gmall0819_dau_info_$today"

    val searchSourceBuilder = new SearchSourceBuilder()
    searchSourceBuilder.size(100000)
    // only the mid field is needed
    searchSourceBuilder.fetchSource("mid", null)

    val searchRequest = new SearchRequest()
    searchRequest.indices(indexName)
    searchRequest.source(searchSourceBuilder)

    var searchResponse: SearchResponse = null
    try {
      searchResponse = client.search(searchRequest, RequestOptions.DEFAULT)
    } catch {
      case e: ElasticsearchStatusException =>
        // index not created yet: nothing to restore
        println(e.getIndex + "索引未找到！")
        return
    }

    // collect the mids from the hits
    val hits: Array[SearchHit] = searchResponse.getHits.getHits
    val midSet = new mutable.HashSet[String]()
    for (hit <- hits) {
      val sourceMap: util.Map[String, AnyRef] = hit.getSourceAsMap
      midSet.add(sourceMap.get("mid").toString)
    }

    // 2  rewrite the Redis set
    val jedis: Jedis = RedisUtil.getJedisClient
    try {
      val dauKey = s"DAU:$today"
      // 2.1 drop any stale members for today before re-adding
      jedis.del(dauKey)
      // 2.2 write the restored mids
      // BUGFIX: sadd with an empty varargs list throws JedisDataException
      // ("wrong number of arguments"), so guard against an empty result set
      val midArr: Array[String] = midSet.toArray
      if (midArr.nonEmpty) {
        jedis.sadd(dauKey, midArr: _*)
      }
    } finally {
      jedis.close()
    }
  }

}
