package com.arnold.demo.journal.main

import java.text.SimpleDateFormat
import java.util.{Date, UUID}

import com.alibaba.fastjson.{JSON, JSONObject}
import com.arnold.demo.journal.config.Config
import com.arnold.demo.journal.entity.Invoice
import com.arnold.demo.journal.service.ParseService
import kafka.serializer.StringDecoder
import org.apache.kudu.spark.kudu._
import org.apache.spark.SparkConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.util.LongAccumulator

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

/**
  * Spark Streaming job that consumes invoice journal (and, optionally, error)
  * messages from Kafka, parses each one into an [[Invoice]] and appends every
  * non-empty micro-batch to a Kudu table.
  *
  * Created by arnold.zhu on 2017/7/19.
  */
object Spark2Kudu {

  // Kudu target: master address and table name come from external config.
  private val kuduOptions = Map("kudu.master" -> Config.KUDU_MASTER, "kudu.table" -> Config.KUDU_TABLE)
  private val kuduContext = new KuduContext(Config.KUDU_MASTER)

  // Kafka direct-stream connection parameters.
  private val brokers = Config.KAFKA_BROKERS
  private val kafkaParams = Map[String, String](
    "metadata.broker.list" -> brokers,
    "group.id" -> Config.KAFKA_GROUP_ID)

  // Counts journal messages that failed to parse. A plain `var` incremented
  // inside an RDD closure is only mutated in the executor's deserialized copy
  // and never reaches the driver, so a Spark accumulator is used instead.
  // Registered in main() once a SparkContext exists; null before that.
  private var parseErrors: LongAccumulator = _

  // Error codes substituted into records when Config.MOCK_DATA is enabled.
  private val errorCodes = Array("01", "02", "03", "05", "41", "43")

  // Column layout of the target Kudu table; must stay in sync with toRow().
  private val schema = StructType(
    Seq(
      StructField("id", StringType, false),
      StructField("invoice_id", StringType, false),
      StructField("invoice_date", StringType, true),
      StructField("invoice_amt", DoubleType, true),
      StructField("type", StringType, true),
      StructField("status", StringType, true),
      StructField("error_code", StringType, true),
      StructField("error_type", StringType, true),
      StructField("error_desc", StringType, true),
      StructField("store_number", StringType, true)
    )
  )

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder.config(new SparkConf()).getOrCreate()

    // One micro-batch every 10 seconds.
    val sc = new StreamingContext(sparkSession.sparkContext, Seconds(10))

    // Register the parse-error accumulator before any stream is wired up.
    parseErrors = sparkSession.sparkContext.longAccumulator("journal-parse-errors")

    createJournalDirectStream(sc, sparkSession)

    //    createErrorDirectStream(sc, sparkSession)

    sc.start()
    // Blocks until the streaming context is stopped externally.
    sc.awaitTermination()

    println(parseErrors.value)
  }

  /**
    * Wires the journal topic into the streaming context: every raw Kafka
    * message is parsed into an [[Invoice]] and each non-empty micro-batch is
    * appended to the Kudu table configured in [[kuduOptions]].
    *
    * @param sc           streaming context the stream is registered on
    * @param sparkSession session used to build the DataFrame written to Kudu
    */
  private def createJournalDirectStream(sc: StreamingContext, sparkSession: SparkSession): Unit = {
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      sc, kafkaParams, Set(Config.KAFKA_JOURNAL_TOPIC))

    // Capture the accumulator in a local val so the closure serializes the
    // accumulator itself (the object field would be null on executors).
    val errorAcc = parseErrors

    // flatMap drops unparseable messages (None). The previous map returned
    // `Any` (Unit on empty/failed messages) and the downstream
    // asInstanceOf[Invoice] threw ClassCastException on every bad record.
    val invoiceStream = kafkaStream.flatMap(x => parseJournalMessage(x._2, errorAcc))

    invoiceStream.foreachRDD { rdd =>
      println("----------------")

      // Collect exactly once; the previous version ran collect() twice plus
      // count(), recomputing the RDD for each action.
      val invoices = rdd.collect()
      println(invoices.length + "-" + rdd.partitions.length)

      if (invoices.nonEmpty) {
        invoices.foreach(invoice => println(invoice.toString))

        // Build Rows matching `schema` and append the batch to Kudu.
        val rows = rdd.sparkContext.parallelize(invoices).map(toRow)
        val df = sparkSession.createDataFrame(rows, schema)
        df.write.options(kuduOptions).mode("append").kudu
      }
      println("----------------")
    }
  }

  /**
    * Parses one raw journal message — JSON carrying a receipt text under
    * "message" plus a "beat" block whose hostname is the store number — into
    * an [[Invoice]].
    *
    * @param raw      raw Kafka message value
    * @param errorAcc accumulator incremented whenever parsing fails (may be
    *                 null in tests; guarded)
    * @return Some(invoice) on success, None for empty or malformed messages
    */
  private def parseJournalMessage(raw: String, errorAcc: LongAccumulator): Option[Invoice] = {
    try {
      val jsonObj: JSONObject = JSON.parseObject(raw)

      // "message" holds the receipt text; guard against null as well as empty
      // (a null payload used to NPE outside the try and kill the batch).
      val message: String = jsonObj.getString("message")
      if (message == null || message.isEmpty) {
        None
      } else {
        val beat: JSONObject = JSON.parseObject(jsonObj.getString("beat"))

        // The receipt splits into a header part and a body part (the body
        // carries the line items) on a fixed 32-dash separator.
        val part: Array[String] = message.split("--------------------------------")
        val header: Array[String] = part(0).split("\\r\\n")
        val body: Array[String] = part(1).split("\\r\\n")

        // The store number travels as the beat hostname.
        val storeNumber: String = beat.getString("hostname")
        val invoiceDate: String = ParseService.getInvoiceDate(header)
        val (amount, invoiceType) = ParseService.getInvoiceValueAndType(body)

        Some(new Invoice(UUID.randomUUID().toString, invoiceDate, amount, invoiceType, storeNumber))
      }
    } catch {
      case e: Exception =>
        println("error:" + e.getMessage)
        if (errorAcc != null) errorAcc.add(1L)
        None
    }
  }

  /** Converts an [[Invoice]] into a Row matching the column order of [[schema]]. */
  private def toRow(x: Invoice): Row =
    Row(x.id_(), x.invoiceId_(), x.invoiceDate_(), x.invoiceValue_(), x.invoiceType_(), x.status_(), x.errorCode_(),
      x.error_type_(), x.error_desc_(), x.storeNumber_())

  /**
    * Wires the error topic into the streaming context. Each message becomes an
    * error [[Invoice]] (timestamped now) and every non-empty micro-batch is
    * inserted into Kudu. Currently not invoked from main() (call commented
    * out there).
    *
    * @param sc           streaming context the stream is registered on
    * @param sparkSession session used to build the DataFrame written to Kudu
    */
  private def createErrorDirectStream(sc: StreamingContext, sparkSession: SparkSession): Unit = {
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      sc, kafkaParams, Set(Config.KAFKA_ERROR_TOPIC))

    val invoiceStream = kafkaStream.map { x =>
      val jsonObj: JSONObject = JSON.parseObject(x._2)

      var errorCode = jsonObj.getString("errorCode")
      if (Config.MOCK_DATA) {
        // Substitute a random known error code when mocking data.
        errorCode = errorCodes(Random.nextInt(errorCodes.length))
      }

      // SimpleDateFormat is not thread-safe, so build a fresh one per record.
      val time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

      new Invoice(UUID.randomUUID().toString, time, jsonObj.getDouble("invoiceValue"),
        jsonObj.getString("invoiceType"), jsonObj.getString("storeNumber"), errorCode, jsonObj.getString("status"))
    }

    invoiceStream.foreachRDD { rdd =>
      // The DataFrame must be assembled on the driver: the previous version
      // called sparkContext.parallelize/createDataFrame inside
      // foreachPartition, but SparkContext and SparkSession only exist on the
      // driver and that fails on a real cluster.
      if (!rdd.isEmpty()) {
        val rows = rdd.map(toRow)
        val df = sparkSession.createDataFrame(rows, schema)
        kuduContext.insertRows(df, "kudu_invoices2")
      }
    }
  }

}
