package com.sxzjrj.mblybc

import com.sxzjrj.config.MBYLBCConfig
import com.sxzjrj.constant.Constant
import com.sxzjrj.utils.{JdbcUtils, OffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by ljj in 2019/3/25
  *
  */
object MBYLBCCountStreaming {

  /**
    * Entry point: consumes tab-separated medical-visit records from Kafka and,
    * every 2-second micro-batch, aggregates per (person code, visit type) the
    * visit count and three fee totals, printing each aggregated row.
    *
    * Record layout (0-based fields used): 1 = person code, 2 = visit type,
    * 4 = medical institution (currently unused), 41 = actual cost,
    * 42 = accounted amount, 43 = total chronic-disease fee.
    *
    * NOTE(review): starting offsets are read from MySQL
    * (OffsetUtils.getCurrentOffset4Mysql) but processed offsets are never
    * written back — see the commented-out calls at the bottom of foreachRDD.
    * Until one is re-enabled, a restart re-processes from the stale offsets.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.streaming.kafka.maxRatePerPartition", "10000")
      .set("spark.sql.parquet.compression.codec", "snappy")
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)

    val sc = new SparkContext(conf)

    val session = SparkSession.builder()
      .config(conf)
      .getOrCreate()

    // 2-second micro-batch interval.
    val ssc = new StreamingContext(sc, Seconds(2))

    val topic = Array(MBYLBCConfig.topic)

    val DStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](
        topic,
        MBYLBCConfig.kafkaParams,
        OffsetUtils.getCurrentOffset4Mysql()
      )
    )

    // Minimum number of tab-separated fields a record must have to be usable.
    // Hoisted out of the per-record filter so the toString/toInt conversion
    // runs once instead of once per record. (DATALENGTH's declared type is not
    // visible here, so the original conversion is preserved.)
    val minFieldCount = Constant.DATALENGTH.toString.toInt

    DStream.foreachRDD(rdd => {
      if (!rdd.isEmpty()) {

        val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

        import session.implicits._ // required only if the commented-out DataFrame path below is re-enabled
        val result = rdd.map(_.value())
          .map(_.split("\t", -1))
          // BUGFIX: check the field count FIRST. The original order evaluated
          // arr(1)/arr(2)/arr(4) before the length guard, so any record with
          // fewer than 5 fields threw ArrayIndexOutOfBoundsException and
          // failed the whole batch.
          .filter(arr => arr.length >= minFieldCount && arr(1).length > 0 && arr(2).length > 0 && arr(4).length > 0)
          .map(arr => {
            val F_GRBM = arr(1) // person code
            val F_JZLX = arr(2) // visit type
            //val F_YLJG = arr(4) // medical institution
            //val F_ID = if (arr(29).length > 0) arr(29) else "身份证信息缺失" // ID-card number (fallback string = "ID info missing")
            var F_SJ_JE = 0.0
            var F_HS_JE = 0.0
            var F_MB_YYF = 0.0
            var F_JZCS = 0 // visit count: 1 when any amount is non-zero, else 0
            try {
              F_SJ_JE = if (arr(41).length > 0) arr(41).toDouble else 0.0 // actual cost
              F_HS_JE = if (arr(42).length > 0) arr(42).toDouble else F_SJ_JE // accounted amount, defaults to actual cost
              F_MB_YYF = if (arr(43).length > 0) arr(43).toDouble else F_HS_JE // total chronic-disease fee, defaults to accounted amount
              F_JZCS = if (F_SJ_JE == 0.0 && F_HS_JE == 0.0 && F_MB_YYF == 0.0) 0 else 1
            } catch {
              // Malformed numeric field: log and keep the record with whatever
              // amounts were parsed before the failure (remaining stay 0.0).
              case e: Exception => e.printStackTrace()
            }

            ((F_GRBM, F_JZLX), (F_JZCS, F_SJ_JE, F_HS_JE, F_MB_YYF))
          })
          // Sum (visitCount, actualCost, accountedAmount, chronicFee) per key.
          .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2, a._3 + b._3, a._4 + b._4))

        result.foreachPartition(iter => {
          iter.foreach(println(_))
        })

        //JdbcUtils.saveDStreamData2Mysql(result)
        //          .toDF("F_GRBM", "F_JZLX", "F_JZCS", "F_SJ_JE", "F_HS_JE", "F_MB_YYF")
        //
        //        data.createTempView("t_mbylbc")
        //
        //        session.sql(
        //          """
        //            |select F_GRBM, F_JZLX, sum(F_JZCS) as F_JZCS, sum(F_SJ_JE) as F_SJ_JE,
        //            |sum(F_HS_JE) as F_HS_JE, sum(F_MB_YYF) as F_MB_YYF from t_mzlybc
        //            |group by F_GRBM, F_JZLX
        //          """.stripMargin)
        //          .repartition(2)
        //          .write.mode(SaveMode.Append).parquet(MBYLBCConfig.descPath)

        // TODO(review): offsetRanges is captured above but never persisted.
        // Re-enable one of the lines below so processed offsets survive a
        // restart (the original commented call also misspelled the variable
        // as `offsetRange`).
        //DStream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

        //OffsetUtils.saveOffset2Mysql(offsetRanges)

      }
    })


    ssc.start()
    ssc.awaitTermination()
    session.stop()

  }

}
