package com.atguigu.gmall.realtime.apps

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import com.alibaba.fastjson.JSON
import com.atguigu.gmall.contants.GmallConstants
import com.atguigu.gmall.realtime.bean.OrderInfo
import com.atguigu.gmall.realtime.utils.MykafkaUtil
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.phoenix.spark._

/**
 * Date:2021/3/22
 * Author:csw
 * Description: Streams order-info JSON records from Kafka, derives
 * create_date/create_hour from create_time, masks the consignee phone
 * number, and persists each micro-batch to HBase via Phoenix.
 */
/**
 * GMV streaming job: consumes order-info records from the Kafka order topic,
 * enriches each order with date/hour fields, masks the customer phone number,
 * and writes every micro-batch into the Phoenix-backed HBase order table.
 */
object GMVApp extends BaseApp {
  override val appName: String = "gmv"
  // NOTE(review): "seound" looks like a typo for "seconds" (batch interval),
  // but it overrides a member declared in BaseApp, so it must be renamed there
  // first — left as-is to keep the override valid.
  override val seound: Int = 10

  // DateTimeFormatter is immutable and thread-safe, so build the two
  // formatters once per JVM instead of twice for every record inside the
  // map closure. Scala objects are re-initialized on each executor, so this
  // does not require the formatters themselves to be serializable.
  private val createTimeFormat: DateTimeFormatter =
    DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
  private val createDateFormat: DateTimeFormatter =
    DateTimeFormatter.ofPattern("yyyy-MM-dd")

  def main(args: Array[String]): Unit = {
    runApp {
      ssc.sparkContext.setLogLevel("warn")

      // Raw order-info stream from Kafka; each record value is a JSON string.
      val ds: InputDStream[ConsumerRecord[String, String]] =
        MykafkaUtil.getKafkaStream(GmallConstants.KAFKA_TOPIC_ORDER_INFO, ssc)

      // Parse each JSON record into an OrderInfo, derive create_date and
      // create_hour from create_time, and mask the phone number.
      val orders: DStream[OrderInfo] = ds.map { record =>
        val info: OrderInfo = JSON.parseObject(record.value(), classOf[OrderInfo])
        val createTime: LocalDateTime = LocalDateTime.parse(info.create_time, createTimeFormat)
        info.create_date = createTime.format(createDateFormat)
        info.create_hour = createTime.getHour.toString
        // Mask the middle four digits: 13812345678 -> 138****5678.
        info.consignee_tel = info.consignee_tel.replaceAll("(\\d{3})\\d{4}(\\d{4})", "$1****$2")
        info
      }

      // Persist every micro-batch to the Phoenix table over HBase.
      // (Phoenix identifiers are uppercase; write the literal directly
      // instead of computing "...".toUpperCase at runtime.)
      orders.foreachRDD { rdd =>
        rdd.saveToPhoenix(
          "GMALL2020_ORDER_INFO",
          Seq("ID", "PROVINCE_ID", "CONSIGNEE", "ORDER_COMMENT", "CONSIGNEE_TEL",
            "ORDER_STATUS", "PAYMENT_WAY", "USER_ID", "IMG_URL", "TOTAL_AMOUNT",
            "EXPIRE_TIME", "DELIVERY_ADDRESS", "CREATE_TIME", "OPERATE_TIME",
            "TRACKING_NO", "PARENT_ORDER_ID", "OUT_TRADE_NO", "TRADE_BODY",
            "CREATE_DATE", "CREATE_HOUR"),
          HBaseConfiguration.create(),
          Some("hadoop102:2181") // ZooKeeper quorum used by the Phoenix connector
        )
      }
    }
  }
}
