package com.zlm.realtime.dim

import com.alibaba.fastjson.JSON
import com.zlm.realtime.bean.UserInfo
import com.zlm.realtime.utils.{MyKafkaUtils, MyOffsetUtils, MyPropertiesUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.text.SimpleDateFormat

/**
 * Author: Harbour
 * Date: 2021-04-17 12:52
 * Desc: Reads user data from Kafka (ods_user_info) and stores it into HBase via Phoenix.
 */
object UserInfoApp {
    def main(args: Array[String]): Unit = {
        // Step 1. Build the streaming context with 5-second micro-batches.
        // Fixed app name: was "Order-Info", a copy-paste from the order job.
        val conf: SparkConf = new SparkConf().setAppName("User-Info").setMaster("local[*]")
        val ssc = new StreamingContext(conf, Seconds(5))

        // Step 2. Create the Kafka input stream, resuming from the offsets
        // previously persisted in Redis for this topic/group (empty map means
        // the consumer starts from its default position).
        val topic = "ods_user_info"
        val groupId = "ods_user_group"

        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromRedis(topic, groupId)
        val kafkaInputDStream: InputDStream[ConsumerRecord[String, String]] =
            MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId)

        // Step 3. Capture each batch's offset ranges on the driver so they can be
        // committed to Redis only AFTER the batch is written out (at-least-once).
        var offsetRanges = Array.empty[OffsetRange]
        kafkaInputDStream.transform(
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                offsetRanges = kafkaRDD.asInstanceOf[HasOffsetRanges].offsetRanges
                kafkaRDD
            }
        ).foreachRDD( // Step 4. Enrich each record and save it to HBase via Phoenix.
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                import org.apache.phoenix.spark._
                kafkaRDD.mapPartitions { records =>
                    // SimpleDateFormat is neither thread-safe nor cheap to build;
                    // create one instance per partition instead of one per record.
                    val formatter = new SimpleDateFormat("yyyy-MM-dd")
                    records.map { record =>
                        val userInfo: UserInfo = JSON.parseObject(record.value(), classOf[UserInfo])

                        // Derive an approximate age in whole years from the birthday
                        // (millisecond difference / 365 days — leap days ignored).
                        val birthMs: Long = formatter.parse(userInfo.birthday).getTime
                        val age: Long = (System.currentTimeMillis() - birthMs) / 1000L / 60L / 60L / 24L / 365L

                        // Bucket the age. Guards are evaluated in order, so each case
                        // only needs an upper bound. The trailing catch-all makes the
                        // match exhaustive; the original's first guard (e >= 0 && e <= 10)
                        // silently dropped negative ages (bad data) into the 10-15 bucket —
                        // they now land in the lowest bucket instead.
                        userInfo.age_group = age match {
                            case e if e <= 10 => "10 岁及以下"
                            case e if e <= 15 => "10 岁到 15 岁"
                            case e if e <= 20 => "16 岁到 20 岁"
                            case e if e <= 25 => "21 岁到 25 岁"
                            case e if e <= 30 => "26 岁到 30 岁"
                            case e if e <= 35 => "31 岁到 35 岁"
                            case e if e <= 40 => "36 岁到 40 岁"
                            case e if e <= 50 => "41 岁到 50 岁"
                            case e if e <= 60 => "51 岁到 60 岁"
                            case _            => "60岁以上"
                        }

                        // NOTE(review): any gender value other than "M" maps to 女 —
                        // confirm upstream only ever emits "M"/"F".
                        userInfo.gender_name = if (userInfo.gender == "M") "男" else "女"
                        userInfo
                    }
                }.saveToPhoenix(
                    tableName = "mall_user_info",
                    Seq("ID", "USER_LEVEL", "BIRTHDAY", "GENDER", "AGE_GROUP", "GENDER_NAME"),
                    new Configuration,
                    zkUrl = Some(MyPropertiesUtils.getProperty("zk.host"))
                )

                // Commit offsets only after the Phoenix write for this batch
                // succeeded, preserving at-least-once delivery semantics.
                MyOffsetUtils.saveOffsetToRedis(topic, groupId, offsetRanges)
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }
}
