package com.atguigu.dw.gmall.realtime.app

import com.alibaba.fastjson.JSON
import com.atguigu.constan.ConstanVal
import com.atguigu.dw.gmall.realtime.bean.OrderInfo
import com.atguigu.dw.gmall.realtime.util.MyKafkaUtil
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream

/**
 * description : Order streaming app — consumes order data from Kafka and persists it to HBase.
 * author      : 剧情再美终是戏
 * mail        : 13286520398@163.com
 * date        : Created in 2020/2/14 15:30
 * modified By :
 * version     : 1.0
 */
object OrderApp extends App {

  /**
   * Consumes order records from the Kafka order topic, deserializes each JSON
   * payload into an [[OrderInfo]] bean, and persists every micro-batch to
   * HBase through the Phoenix-Spark connector.
   *
   * NOTE(review): `App` here is presumably a project-local base trait declaring
   * `handle` (it is overridden below) — confirm it is not `scala.App`.
   *
   * @param ssc the active Spark StreamingContext driving this job
   */
  override def handle(ssc: StreamingContext): Unit = {
    // Subscribe to the order topic on Kafka.
    val sourceStream = MyKafkaUtil.getKafkaStream(ssc, Set[String](ConstanVal.KAFKA_TOPIC_ORDER))

    // Deserialize each record's JSON value (the key is ignored) into an OrderInfo bean.
    val orderStream: DStream[OrderInfo] = sourceStream.map {
      case (_, json) => JSON.parseObject(json, classOf[OrderInfo])
    }

    // Write each RDD of the stream to the Phoenix-backed HBase order table.
    // The column list must match the Phoenix table schema exactly.
    import org.apache.phoenix.spark._
    orderStream.foreachRDD { rdd =>
      rdd.saveToPhoenix(
        ConstanVal.GMALL_HBASE_TABLE_NAME_ORDER,
        Seq("ID", "PROVINCE_ID", "CONSIGNEE", "ORDER_COMMENT", "CONSIGNEE_TEL",
          "ORDER_STATUS", "PAYMENT_WAY", "USER_ID", "IMG_URL", "TOTAL_AMOUNT",
          "EXPIRE_TIME", "DELIVERY_ADDRESS", "CREATE_TIME", "OPERATE_TIME",
          "TRACKING_NO", "PARENT_ORDER_ID", "OUT_TRADE_NO", "TRADE_BODY", "CREATE_DATE", "CREATE_HOUR"),
        zkUrl = Some(ConstanVal.PHOENIX_ZK_ADRESS)
      )
    }
  }

}
