package com.bdc.Phoenix

import com.alibaba.fastjson.JSON
import com.bdc.Config.MyConfig
import com.bdc.Entity.DataEntity
import kafka.serializer.StringDecoder
import org.apache.hadoop.conf.Configuration
import org.apache.log4j.{Level, Logger}
import org.apache.phoenix.spark.DataFrameFunctions
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.kafka.{HasOffsetRanges, KafkaUtils, OffsetRange}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Streaming job: consumes JSON GPS records from the Kafka topic
 * SAVE_DATA_TO_PHOENIX and writes each 10-second batch into two Phoenix
 * tables (GPS_DATA_HISTORY and GPS_DATA_REALTIME).
 *
 * NOTE(review): offsets are read from the direct stream but not committed
 * anywhere; with "auto.offset.reset" -> "largest" a restart skips to the
 * latest offset. Confirm at-least-once delivery is not required.
 */
object SaveDataToPhoenix {

  // Connection settings, resolved once at object initialization from the
  // application config.
  val brokerList = MyConfig.getString("brokerList")
  val zkUrl = MyConfig.getString("zkUrl")
  val phoenixUrl = MyConfig.getString("phoenixUrl")

  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val sparkConf = new SparkConf().setAppName("SaveDataToPhoenix")
    // Reduce Spark's own log noise to warnings only.
    Logger.getLogger("org").setLevel(Level.WARN)
    // SQLContext requires a SparkContext.
    val sc = new SparkContext(sparkConf)
    sc.setLocalProperty("spark.scheduler.mode", "FAIR")
    val sqlContext = new SQLContext(sc)
    // 10-second micro-batches.
    val ssc = new StreamingContext(sc, Durations.seconds(10))

    // The direct (receiverless) stream talks to brokers directly, so only
    // metadata.broker.list is needed here — no zookeeper quorum.
    val kafkaParams: Map[String, String] = Map[String, String](
      "metadata.broker.list" -> brokerList,
      "auto.offset.reset" -> "largest",
      "group.id" -> "save_data_to_phoenix_group"
    )

    val topics = Set("SAVE_DATA_TO_PHOENIX")
    val stream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    stream.foreachRDD { rdd =>
      // Each RDD of a direct stream carries its Kafka offset ranges; log them
      // so a batch can be traced back to the source partitions.
      val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      println(s"batch offsets: ${offsetRanges.mkString(", ")}")

      if (!rdd.isEmpty()) {
        val rddR: RDD[DataEntity] = rdd
          // Drop null / trivially-short payloads before attempting to parse.
          .filter(tuple => tuple._2 != null && tuple._2.length > 10)
          .map { r =>
            // A malformed record must not kill the whole streaming job:
            // parse failures become null and are filtered out below.
            try JSON.parseObject[DataEntity](r._2, classOf[DataEntity])
            catch { case NonFatal(_) => null }
          }
          // Keep only entities with a usable date field.
          .filter(entity => entity != null && entity.date != null && !entity.date.trim.isEmpty)

        import sqlContext.implicits._
        // Cache: the DataFrame is consumed three times below (count + two
        // writes); without persist each pass re-reads Kafka and re-parses.
        val dataframe = rddR.toDF.persist()
        try {
          if (dataframe.count() > 0) {
            val configuration = new Configuration()
            configuration.set("url", phoenixUrl)
            // Append the batch to the historical table.
            new DataFrameFunctions(dataframe).saveToPhoenix("GPS_DATA_HISTORY", configuration, Some(zkUrl))
            println("=============save history================")
            // Upsert the same batch into the realtime table.
            new DataFrameFunctions(dataframe).saveToPhoenix("GPS_DATA_REALTIME", configuration, Some(zkUrl))
            println("=============save realtime===============")
          }
        } finally {
          dataframe.unpersist()
        }
      }
    }
    ssc.start()
    ssc.awaitTermination()
  }
}
