

import java.util.concurrent.TimeUnit

import ConnectMysql.options
import Sink.CustomerJDBCSink
import com.typesafe.scalalogging.Logger
import common.DBConstants
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SparkSession, types}

import scala.collection._

/**
  * Organization-structure streaming job: consumes org-change messages from Kafka,
  * joins them with a MySQL dimension table, and exposes the results as streaming views.
  *
  * @author Abyss
  * @date 2019/9/02
  * @description Organization (org-structure) stream
  */
object OrgStreaming {


  // NOTE(review): logger is declared but never used — all output below goes through
  // println; consider routing progress/diagnostic output through the logger instead.
  private val logger = Logger(this.getClass)

  /**
    * Entry point: builds the SparkSession, attaches a streaming-progress listener,
    * starts the Kafka-backed streaming queries, and blocks until any query terminates.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    println("hello world")
    val spark = getSparkSession
    spark.sparkContext.setLogLevel("ERROR")
    // Mutable so the listener callback below can publish the latest batch id.
    var batchId: Long = 0
    // Register a listener on all streaming queries to print per-batch progress metrics.
    spark.streams.addListener(new StreamingQueryListener() {
      override def onQueryStarted(event: StreamingQueryListener.QueryStartedEvent): Unit = {}

      override def onQueryProgress(event: StreamingQueryListener.QueryProgressEvent): Unit = {
        val progress: StreamingQueryProgress = event.progress
        batchId = progress.batchId
        val inputRowsPerSecond: Double = progress.inputRowsPerSecond
        val processRowsPerSecond: Double = progress.processedRowsPerSecond
        val numInputRows: Long = progress.numInputRows
        // NOTE(review): println is given two arguments here, so Scala auto-tuples them
        // and the output is printed wrapped in "(...)" — confirm this is intentional.
        // The per-second rates may also be NaN on the first batch.
        println("batchId=" + batchId, "  numInputRows=" + numInputRows + "  inputRowsPerSecond=" + inputRowsPerSecond +
          "  processRowsPerSecond=" + processRowsPerSecond)
      }

      override def onQueryTerminated(event: StreamingQueryListener.QueryTerminatedEvent): Unit = {}
    })
    // Read Kafka messages, register the temp views, and start the streaming queries.
    sparkReadKafka(spark)

    // Keep the driver alive until any running streaming query terminates.
    spark.streams.awaitAnyTermination()
  }

  /**
    * Creates (or reuses) a local-mode SparkSession for this job.
    *
    * @return the SparkSession
    */
  private def getSparkSession = {
    val spark = SparkSession.builder().appName(getClass.getName).master("local[*]").getOrCreate()
    spark
  }


  /**
    * Registers the Spark temp views and runs one SQL statement per view. Note that each
    * SQL statement starts its own writeStream query; the caller waits on all of them
    * with spark.streams.awaitAnyTermination().
    *
    * @param spark the active SparkSession
    */
  def sparkReadKafka(spark: SparkSession): Unit = {
    createOrReplaceTempView(spark, "gate-cj-org-topic", "tb_org")

    // Write each query's results to the console.
    // NOTE(review): the collected queries in `querys` are never read after this method
    // returns; the buffer exists only to accumulate the started queries.
    val sqls = Array("select * from tb_org", "select * from tb_org_msg", "select * from tb_org_join")
    val querys = mutable.ListBuffer[StreamingQuery]()
    for (sql <- sqls) {
      println(sql)
      querys += sqlWriteStream(spark, sql)
    }
    // Persist to the database (currently disabled).
    //    querys += sqlWriteStreamToDB(spark, sqls(1))
  }

  /**
    * Parses the Kafka JSON payload and exposes it as Spark temp views:
    * `sourceName` holds the raw parsed stream, "tb_org_join" the stream joined with the
    * MySQL resource table, and "tb_org_msg" the exploded per-customer records.
    *
    * @param spark      the active SparkSession
    * @param kafkaTopic Kafka topic to subscribe to
    * @param sourceName name of the temp view for the raw parsed stream
    */
  def createOrReplaceTempView(spark: SparkSession, kafkaTopic: String, sourceName: String): Unit = {
    //    val schema = SocSchemaCollection.getSchemaBySourceName(sourceName) // load the json schema from the database
    val df = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "192.168.0.185:9092")
      .option("subscribe", kafkaTopic)
      //      .option("startingOffsets", "latest")
      .option("startingOffsets", "earliest")
      .load()
    // Hard-coded schema of the org-change JSON message. Field names/order must match
    // the producer's payload exactly; all leaf fields are nullable strings except the
    // envelope's dataType/dataSource/dataTime.
    val schema = StructType(
      Seq(
        StructField("dataType", IntegerType, true),
        StructField("dataSource", IntegerType, true),
        StructField("dataTime", LongType, true),
        StructField("msg",
          StructType(Seq(
            StructField("changeTimes", StringType, true),
            StructField("createTimes", StringType, true),
            // Customer records
            StructField("customers", ArrayType(StructType(Seq(
              StructField("customerId", StringType, true),
              StructField("orgId", StringType, true),
              StructField("customerName", StringType, true),
              StructField("customerCatagory", StringType, true),
              StructField("idustryCategory", StringType, true),
              StructField("area", StringType, true),
              StructField("cableCategory", StringType, true),
              StructField("customerAddress", StringType, true),
              StructField("enable", StringType, true),
              StructField("changeTime", StringType, true),
              StructField("createTime", StringType, true),
              StructField("gender", StringType, true),
              StructField("department", StringType, true),
              StructField("duty", StringType, true),
              StructField("mobile", StringType, true),
              StructField("telephone", StringType, true),
              StructField("address", StringType, true),
              StructField("email", StringType, true),
              StructField("contact", StringType, true),

              StructField("capacity", StringType, true),
              StructField("channel", StringType, true)
            )))),
            // Device records
            StructField("devices", ArrayType(StructType(Seq(
              StructField("deviceId",StringType,true),
              StructField("unitId",StringType,true),
              StructField("deviceName",StringType,true),
              StructField("deviceType",StringType,true),
              StructField("model",StringType,true),

              StructField("namePlate",StringType,true),
              StructField("params",StringType,true),
              StructField("amount",StringType,true),
              StructField("alteration",StringType,true),
              StructField("createTime",StringType,true),
              StructField("changeTime",StringType,true),
              StructField("enable",StringType,true),
              StructField("channel",StringType,true)
            )))),
            // Site records
            StructField("sites",ArrayType(StructType(Seq(
              StructField("customerId",StringType,true),
              StructField("siteId",StringType,true),
              StructField("siteName",StringType,true),
              StructField("siteType",StringType,true),
              StructField("area",StringType,true),
              StructField("siteAddress",StringType,true),
              StructField("areas",StringType,true),
              StructField("longitude",StringType,true),
              StructField("dimension",StringType,true),

              StructField("createTime",StringType,true),
              StructField("changeTime",StringType,true),
              StructField("enable",StringType,true),
              StructField("channel",StringType,true)
            )))),
            // Unit records
            StructField("units", ArrayType(StructType(Seq(
              StructField("siteId",StringType,true),
              StructField("unitId",StringType,true),
              StructField("unitName",StringType,true),
              StructField("unitType",StringType,true),
              StructField("unitSubitem",StringType,true),
              StructField("unitParentId",StringType,true),
              StructField("changeTime",StringType,true),
              StructField("createTime",StringType,true),

              StructField("enable",StringType,true),
              StructField("channel",StringType,true)
            )))),
            // Institution (group/organization) records
            StructField("institutions", ArrayType(StructType(Seq(
              StructField("orgId",StringType,true),
              StructField("orgName",StringType,true),
              StructField("financialType",StringType,true),
              StructField("produceValTotalYear",StringType,true),
              StructField("enterpriseURL",StringType,true),
              StructField("introduction",StringType,true),
              StructField("registeredCapital",StringType,true),
              StructField("legal",StringType,true),
              StructField("businessScope",StringType,true),
              StructField("majorProduct",StringType,true),
              StructField("technology",StringType,true),
              StructField("capacity",StringType,true),
              StructField("majorMaterial",StringType,true),
              StructField("salesValue",StringType,true),
              StructField("saleArea",StringType,true),
              StructField("ratio",StringType,true),
              StructField("idustryType",StringType,true),
              StructField("enable",StringType,true),

              StructField("createTime",StringType,true),
              StructField("changeTime",StringType,true),
              StructField("channel",StringType,true)
            ))))
          ))
        )
      )
    )
    import spark.implicits._

    // NOTE(review): `schema` is a literal above and can never be null here; this check
    // is vestigial from the commented-out database-loaded schema path.
    if (schema != null) {
      // Parse the Kafka value bytes as JSON into the schema above.
      val jsonDf = df.select(from_json(col("value").cast("string"), schema).alias("result"))
      val orgDF = jsonDf.select("result.*").where("dataSource >=0")
      // Register the parsed organization stream as a temp view.
      orgDF.createOrReplaceTempView(sourceName)

      // Load the resource dimension table from MySQL (static batch read).
      val resource_df = spark.read.format("jdbc").options(options).load().toDF("code", "name")

      // Stream-static left join against the MySQL table on dataSource = code.
      val orgJoin = orgDF.join(resource_df, orgDF("dataSource") === resource_df("code"), "left").alias("data")
      orgJoin.createOrReplaceTempView("tb_org_join")

      // Explode the customers array so each customer becomes one row, keeping the
      // joined resource name alongside the customer columns.
      val customs = orgJoin.select($"data.name",explode($"data.msg.customers")).select("name","col.*")

      customs.createOrReplaceTempView("tb_org_msg")
      jsonDf.printSchema()

      // (disabled) sink customer rows to the database
      //            val sink = new CustomerJDBCSink("jdbc:mysql://192.168.0.204:3306/gather?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=UTC","root","CyUr3qEk<*Do")
      //            customs.writeStream.foreach(sink)
      //              .outputMode(OutputMode.Update())
      //              .start()

    } else {
      println("error,schema is null")
    }

  }

  /**
    * Starts a console-sink streaming query for the given SQL statement, running in
    * append mode with a 5-second processing-time trigger.
    *
    * @param spark the active SparkSession
    * @param sql   SQL to execute against the registered temp views
    * @return the started StreamingQuery
    */
  def sqlWriteStream(spark: SparkSession, sql: String): StreamingQuery = {
    val query = spark.sql(sql).writeStream
      .outputMode(OutputMode.Append())
      .format("console")
      .trigger(Trigger.ProcessingTime(5, TimeUnit.SECONDS))
      .start()
    query
  }

  //  def sqlWriteStreamToDB(spark: SparkSession, sql: String): StreamingQuery = {
  //    spark.sql(sql).writeStream()
  //  }
  // JDBC options for the MySQL dc_resource dimension table read above.
  // NOTE(review): credentials are hard-coded in source — move them to external
  // configuration or a secrets store before this leaves development.
  val options = Map(
    "url" -> "jdbc:mysql://192.168.0.204:3306/sgies_data_center?characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8",
    "user" -> "root",
    "password" -> "CyUr3qEk<*Do",
    "dbtable" -> "dc_resource"
  )

  // NOTE(review): appears unused in this file — presumably mirrors the dc_resource
  // (code, name) row shape loaded above; verify against callers before removing.
  case class Info(code: Int, name: String)

}
