package com.pw.study.realtime.app

import com.pw.study.common.constants.TopicConstant
import com.pw.study.realtime.handle.{KafkaHandler, MysqlHandler, RDDHandler}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Real-time GMV (gross merchandise volume) aggregation job.
 *
 * Consumes order records from the order-info Kafka topic, sums
 * `total_amount` per (create_date, create_hour) for each micro-batch,
 * and writes both the aggregates and the consumed Kafka offset ranges
 * to MySQL in a single call, so the result and the offset commit stay
 * consistent and consumption resumes where it left off after a restart.
 */
object GMVApp extends BaseAPP {
  appName = "GMVApp"
  groupName = "GMVApp"
  // Kafka topic carrying the order-info records.
  val topic: String = TopicConstant.GMALL_ORDER_INFO

  def main(args: Array[String]): Unit = {
    conf.setAppName(appName)
    context = new StreamingContext(conf, Seconds(batchDuration))
    runApp({
      // Resume point: the offsets this group last committed to MySQL.
      val offsetStart = MysqlHandler.startOffset(topic, groupName)
      // Kafka stream positioned at the MySQL-stored offsets.
      val ds: InputDStream[ConsumerRecord[String, String]] =
        KafkaHandler.getKafkaStreamByMysql(Array(topic), context, groupName, offsetStart)
      println(s"offsetStart: $offsetStart")

      ds.foreachRDD { rdd =>
        if (!rdd.isEmpty()) {
          // Offset ranges consumed by this batch; persisted together
          // with the aggregation result below.
          val ranges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
          // Parse the raw Kafka records into order beans.
          val rddOrder = RDDHandler.rddToOrderBean(rdd)
          // Drop unparsable records (null bean or missing create_date),
          // then sum total_amount per (date, hour).
          val result = rddOrder
            .filter(bean => bean != null && bean.create_date != null)
            .map(bean => ((bean.create_date, bean.create_hour), bean.total_amount))
            .reduceByKey(_ + _)
            .collect()

          // Write the aggregates to MySQL and commit the Kafka offsets
          // in the same operation.
          MysqlHandler.endOffset(result, ranges, topic, groupName)
        }
      }
    })
  }
}
