package org.sparkSql

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent

import java.sql.DriverManager
import java.util.Properties

object CountGenderSum_2 {

  // Schema of one result row written to MySQL: (sex code, number of students).
  case class st(sex: String, num: Int)

  /**
   * Streaming job: every 10 seconds, read tab-separated student records from
   * the Kafka topic "stu3", count students per gender (column index 2), show
   * the counts on the console, and append them to the MySQL table "student2".
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("CountGenderSum")
    val ssc = new StreamingContext(conf, Seconds(10))
    val sqlCon = new SQLContext(ssc.sparkContext)
    ssc.sparkContext.setLogLevel("error")

    val topic = "stu3"
    val group = "countGenderSum"
    val kafkaparam = Map[String, Object](
      "bootstrap.servers" -> "niit01:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer], // key/value deserializers
      "group.id" -> group,
      // Where to start when no committed offset exists: earliest / latest / none
      "auto.offset.reset" -> "earliest",
      // Do not auto-commit offsets; checkpointing handles recovery
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    ssc.checkpoint("./checkpoint")
    // Pull records for the topic directly from Kafka
    val lineStream = KafkaUtils.createDirectStream(
      ssc,
      // PreferConsistent: distribute partitions evenly across available executors
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaparam)
    )
    // Keep only the record value: one tab-separated student row per record
    val line = lineStream.map(_.value())

    // Process each micro-batch RDD.
    // BUG FIX: in the original, the lambda was closed right after the map(),
    // which left the toDF()/JDBC code outside the foreachRDD body where `coun`
    // is not in scope (and left the braces unbalanced) — a compile error.
    line.foreachRDD(rdd => {

      // Count students per gender.
      // BUG FIX: the original `split(2).filter(x => x != 2)` compared every
      // Char of the string with the Int 2 (never equal — a no-op), and the
      // (sex, 1) pairs were never aggregated. reduceByKey sums them so each
      // sex code maps to its student count, matching the st(sex, num) schema.
      val coun = rdd
        .map(row => {
          val split = row.split("\t") // split one record into its columns
          (split(2), 1)               // (sex, 1) pairs, e.g. ("0", 1), ("1", 1)
        })
        .reduceByKey(_ + _)

      import sqlCon.implicits._
      // (sex, num) pairs -> DataFrame with named columns for the JDBC write
      val counDataFrame = coun.toDF("sex", "num")
      counDataFrame.show() // print this batch's counts to the console

      val prop = new Properties()
      prop.setProperty("user", "root")
      prop.setProperty("password", "123456")
      // MySQL 5.x driver; for MySQL 8.x use "com.mysql.cj.jdbc.Driver"
      prop.setProperty("driver", "com.mysql.jdbc.Driver")
      counDataFrame.write.mode("append").jdbc("jdbc:mysql://10.202.27.7:3306/studentinfo", "student2", prop)
    })

    // Start the streaming computation and block until it is terminated
    ssc.start()
    ssc.awaitTermination()
  }

}
