package Spark

import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.types.{StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import java.util.Properties

import org.apache.spark.sql.types._
import org.apache.spark.sql.Row
import org.apache.spark.streaming.dstream.DStream
/**
  * Spark Streaming job: consumes student records from Kafka, aggregates counts
  * per (semester, classNo, gender) and appends the result to MySQL.
  *
  * @author Esther
  * @create 2022/12/5
  */
object sparkRdd {

  /** One aggregated row: a (semester, classNo, gender) key plus its occurrence count. */
  case class Student(semester: Int, classNo: String, gender: Int, num: Int)

  def main(args: Array[String]): Unit = {

    val group = "niit03"
    val topic = "studentDemo112"
    val conf  = new SparkConf().setMaster("local[*]").setAppName("sparkkafka")

    // 20-second micro-batches; checkpoint dir is required by the direct Kafka stream.
    val ssc = new StreamingContext(conf, Seconds(20))
    ssc.checkpoint("./checkpoint")
    ssc.sparkContext.setLogLevel("error")

    // NOTE(review): auto-commit is disabled but offsets are never committed manually,
    // so every restart re-reads from "earliest" — confirm this is intended.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers"  -> "niit01:9092",
      "key.deserializer"   -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id"           -> group,
      "auto.offset.reset"  -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topicName = Array(topic)
    val streamRDD = KafkaUtils.createDirectStream[String, String](
      ssc,              // streaming context
      PreferConsistent, // location strategy: spread partitions evenly over executors
      Subscribe[String, String](topicName, kafkaParams)
    )

    System.setProperty("https.protocols", "TLSv1.2")

    streamRDD.foreachRDD { kafkaRdd =>
      // isEmpty() triggers a Spark job; evaluate it once per batch, not twice.
      val hasData = !kafkaRdd.isEmpty()
      println(hasData)
      if (hasData) {

        val sqlCon = SparkSessionSingleton.getInstance(kafkaRdd.sparkContext.getConf)

        // Records are space-separated; keep fields 5, 4 and 2 as the grouping key
        // (presumably semester / classNo / gender, per downstream use — TODO confirm).
        val lines = kafkaRdd.map(_.value().split(" ")).map(a => (a(5), a(4), a(2)))

        // Word-count style aggregation: count occurrences of each key triple,
        // then flatten to (semester, classNo, gender, count).
        val lineAll2 = lines
          .map(x => (x, 1))
          .reduceByKey(_ + _)
          .map(t => (t._1._1.toString, t._1._2.toString, t._1._3.toString, t._2.toInt))

        // Debug output; prints on executors (visible here because master is local[*]).
        lineAll2.foreach(println)

        val stuSche = lineAll2.map(x => Student(x._1.toInt, x._2, x._3.toInt, x._4))

        import sqlCon.implicits._
        // Build the DataFrame once and reuse it for both show() and the JDBC write.
        val studentDF = stuSche.toDF
        studentDF.show()

        // JDBC sink configuration.
        // SECURITY(review): credentials are hard-coded; move to config/env vars.
        val prop = new Properties()
        prop.put("user", "root")
        prop.put("password", "123456")

        // Append this batch's aggregates to the MySQL table.
        studentDF.write
          .mode("append")
          .jdbc("jdbc:mysql://192.168.247.10:3306/STU?serverTimezone=UTC", "student", prop)

        println("添加成功！！！")
      }
    }

    // Start the streaming job and block until it is stopped externally.
    // (A Thread.sleep placed after awaitTermination() was unreachable and was removed.)
    ssc.start()
    ssc.awaitTermination()
  }
}
