import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import scala.util.parsing.json.JSON


object kafkaConsumer {

  /**
   * Entry point: consumes JSON records from the Kafka topic "kf_test" in
   * 10-second micro-batches, parses each record into key/value string pairs,
   * and appends them under the Hive warehouse directory on HDFS.
   */
  def main(args: Array[String]): Unit = {

    // 1. Spark configuration shared by the StreamingContext and the SparkSession.
    val conf: SparkConf = new SparkConf().setAppName("kafkaConsumer")
    // 2. StreamingContext with a 10-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(10))

    // 3. Kafka consumer parameters. Use ConsumerConfig constants for every key
    //    (the original mixed constants with raw strings) so typos are caught
    //    at compile time.
    val kafkaPara: Map[String, Object] = Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG ->
        "dpcdh106:9092,dpcdh107:9092,dpcdh108:9092",
      ConsumerConfig.GROUP_ID_CONFIG -> "dapeng",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG ->
        "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG ->
        "org.apache.kafka.common.serialization.StringDeserializer"
    )

    // 4. Direct stream over the topic; PreferConsistent distributes Kafka
    //    partitions evenly across the available executors.
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("kf_test"), kafkaPara)
    )

    // 5. Write the received data into Hive's warehouse location.
    val sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
    import sparkSession.implicits._

    // Parse each record's value as JSON per partition. Records that fail to
    // parse, or that are not JSON objects, become an empty map and are dropped
    // instead of aborting the batch with a MatchError (the old regJson was
    // non-exhaustive). Values are stringified so each element is a
    // Map[String, String], for which Spark's implicits provide an Encoder;
    // the previous `data.get("")` yielded Option[Any], which has no Encoder
    // and therefore could not be written via toDF().
    kafkaDStream
      .map(record => record.value())
      .mapPartitions { partition =>
        partition
          .map(raw => regJson(JSON.parseFull(raw)))
          .filter(_.nonEmpty)
          .map(fields => fields.map { case (k, v) => k -> String.valueOf(v) })
      }
      .foreachRDD { rdd =>
        // coalesce(1): emit one output file per micro-batch to limit the
        // number of small files under the table directory.
        rdd.toDF()
          .coalesce(1)
          .write
          .mode(SaveMode.Append)
          .save("hdfs://dpcdh100:8020/user/hive/warehouse/tmp.db/matomo_log_action")
      }

    // 7. Start the streaming job and block until it is stopped or fails.
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Extracts the object fields from a `JSON.parseFull` result.
   *
   * @param json result of `JSON.parseFull`: `Some(Map(...))` for a JSON
   *             object, `Some` of another structure for arrays/scalars,
   *             or `None` when parsing failed.
   * @return the parsed key/value pairs, or an empty map when the input was
   *         not a JSON object. Deliberately total: the previous version
   *         threw a MatchError for anything other than `Some(map)`.
   */
  def regJson(json: Option[Any]): Map[String, Any] = json match {
    // Type parameters are erased at runtime, so match on Map[_, _] (checked)
    // rather than Map[String, Any] (unchecked), and re-key via String.valueOf.
    case Some(map: Map[_, _]) => map.map { case (k, v) => String.valueOf(k) -> v }
    case _                    => Map.empty[String, Any]
  }
}
