package gunglad.com.anli

import java.text.SimpleDateFormat
import java.util.Date

import gunglad.com.utils.{JDBCUtil, RedisUtil}

//import hut.utils.{JDBCUtil, RedisUtil}

//import com.hut.utils.{JDBCUtil, RedisUtil}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Spark Streaming job that consumes access-log lines from Kafka and maintains
 * per-college visit / correctness statistics in MySQL, flagging Redis on update.
 *
 * @author chenjie
 * @date 2020/12/12 14:10
 * NOTE: this job once failed to launch with
 *       "Error: Could not find or load main class gunglad.com.anli.Solving" —
 *       if it recurs, check the build output directory and runtime classpath.
 */
object Solving {

  // Expected log-line layout after splitting on "-", e.g.:
  //   2020/12/05 11:52:46:199-INFO -0:0:0:0:0:0:0:1-<session>-/studentLogin-
  //   (1234567890,测试,女,大二,经济与贸易学院,Fri Dec 04 22:06:04 CST 2020,null,null)-1-10
  // field(0) = "date time", field(4) = request path, field(5) = "(id,name,sex,grade,college,...)",
  // field(7) = status code ("6" marks lines we must skip).

  /**
   * A log line is processable when it splits into exactly 8 "-"-separated
   * fields and its status field (index 7) is not "6".
   * Shared by all four aggregation methods (previously duplicated in each).
   */
  private def isValidLog(line: String): Boolean = {
    val fields = line.split("-")
    fields.length == 8 && !fields(7).equals("6")
  }

  /**
   * Marks the Redis-backed dashboard cache dirty and records the update time.
   * Called once per non-empty JDBC partition instead of once per inserted row
   * (the flag is idempotent, so per-row writes only churned pool resources).
   * The Jedis resource is returned to the pool even if a `set` fails.
   */
  private def touchRedisUpdateFlag(): Unit = {
    val jedis = RedisUtil.getPool.getResource
    try {
      jedis.set("updateFlag", "true")
      // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
      jedis.set("lastUpdateDate", new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(new Date))
    } finally {
      jedis.close()
    }
  }

  /**
   * Entry point: wires Kafka -> DStream of raw log lines -> the four sinks.
   * Kafka messages arrive as "<header>##<logLine>"; only well-formed messages
   * (exactly one "##") are kept, and the log line (right-hand part) extracted.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
    val ssc = new StreamingContext(sparkConf, Seconds(1))

    val kafkaPara: Map[String, Object] = Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "hadoop1:9092,hadoop2:9092,hadoop3:9092",
      ConsumerConfig.GROUP_ID_CONFIG -> "solvingProblem",
      "key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer"
    )

    val kafkaDataDS: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream[String, String](
        ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](Set("solvingProblem"), kafkaPara)
      )

    // Keep only "<header>##<logLine>" messages and strip the header.
    val filterDataDS: DStream[String] = kafkaDataDS
      .filter(record => record.value().split("##").length == 2)
      .map(record => record.value().split("##")(1))

    // Per-batch debug dump of the cleaned lines.
    filterDataDS.print()

    stuVisitListen(filterDataDS)
    all_visit_countSave(filterDataDS)
    req3(filterDataDS)
    req4(filterDataDS)

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Counts visitors per (date, hour, college) and upserts into `visit_time`.
   * SQL is now fully parameterized: the original embedded the log-derived
   * date/hour directly into the statement text (string-built SQL), unlike
   * the other sinks in this file.
   */
  def stuVisitListen(fileRdd: DStream[String]): Unit = {
    val hourlyCounts = fileRdd
      .filter(isValidLog)
      .map { lines =>
        val fields = lines.split("-")
        val dateTime = fields(0).split(" ")
        val date = dateTime(0)
        val hour = dateTime(1).split(":")(0)          // hour component of "HH:mm:ss:SSS"
        val college = fields(5).split(",")(4)
        ((date, hour, college), 1)
      }
      .reduceByKey(_ + _)

    // Regroup so each (date, college) carries all its hourly sums together.
    val byDateCollege: DStream[((String, String), Iterable[(String, Int)])] = hourlyCounts
      .map { case ((date, hour, college), sum) => ((date, college), (hour, sum)) }
      .groupByKey()

    byDateCollege.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        val conn = JDBCUtil.getConnection
        try {
          var wrote = false // local mutability: cheapest way to detect a non-empty partition
          iter.foreach { case ((date, college), hourSums) =>
            for ((hour, sum) <- hourSums) {
              val sql = "insert into visit_time(collegeName,date,hour,count) values(?,?,?,?) " +
                "on duplicate key update count = count + ?"
              JDBCUtil.executeUpdate(conn, sql, Array(college, date, hour, sum, sum))
              wrote = true
            }
          }
          if (wrote) touchRedisUpdateFlag()
        } finally {
          conn.close() // previously leaked if an insert threw
        }
      }
    }
  }

  /**
   * Counts total visitors per (date, college), split by sex, and upserts into
   * `all_visit_count` ("男" -> man_visit column, anything else -> woman_visit).
   */
  def all_visit_countSave(fileRdd: DStream[String]): Unit = {
    val dailySexCounts = fileRdd
      .filter(isValidLog)
      .map { lines =>
        val fields = lines.split("-")
        val date = fields(0).split(" ")(0)
        val info = fields(5).split(",")
        val college = info(4)
        val sex = info(2)
        ((date, college, sex), 1)
      }
      .reduceByKey(_ + _)

    val byDateCollege = dailySexCounts
      .map { case ((date, college, sex), sum) => ((date, college), (sex, sum)) }
      .groupByKey()

    byDateCollege.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        val conn = JDBCUtil.getConnection
        try {
          var wrote = false
          iter.foreach { case ((date, college), sexSums) =>
            for ((sex, sum) <- sexSums) {
              // Only the target column differs between the sexes; build it once.
              val column = if (sex.equals("男")) "man_visit" else "woman_visit"
              val sql = "insert into all_visit_count(college_name,date," + column + ") values(?,?,?) " +
                "on duplicate key update " + column + " = " + column + " + ?"
              JDBCUtil.executeUpdate(conn, sql, Array(college, date, sum, sum))
              wrote = true
            }
          }
          if (wrote) touchRedisUpdateFlag()
        } finally {
          conn.close()
        }
      }
    }
  }

  /**
   * Accumulates correct-answer totals and submission counts per (college, sex)
   * from "/submitProblems" lines and upserts into `correct_count`.
   */
  def req3(fileRdd: DStream[String]): Unit = {
    val correctness = fileRdd
      .filter(lines => isValidLog(lines) && lines.split("-")(4).equals("/submitProblems"))
      .map { lines =>
        val info = lines.split("-")(5).split(",")
        val college = info(4)
        val sex = info(2)
        // Last tuple element carries a trailing ')' from the logged tuple dump.
        val correct: Int = info(5).replace(")", "").toInt
        ((college, sex), (correct, 1))
      }
      .reduceByKey { (t1, t2) => (t1._1 + t2._1, t1._2 + t2._2) }

    correctness.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        val conn = JDBCUtil.getConnection
        try {
          var wrote = false
          iter.foreach { case ((college, sex), (correct, total)) =>
            val (countCol, correctCol) =
              if (sex.equals("男")) ("man_count", "man_correct")
              else ("woman_count", "woman_correct")
            val sql = "insert into correct_count(college_name," + countCol + "," + correctCol + ") values(?,?,?) " +
              "on duplicate key update " + countCol + " = " + countCol + " + ?," +
              correctCol + " = " + correctCol + " + ?"
            JDBCUtil.executeUpdate(conn, sql, Array(college, total, correct, total, correct))
            wrote = true
          }
          if (wrote) touchRedisUpdateFlag()
        } finally {
          conn.close()
        }
      }
    }
  }

  /**
   * Counts visitors per (date, college, grade) and upserts into the grade
   * column of `all_grade_visit` ("大一".."大四" -> grade1..grade4, else grade5).
   */
  def req4(fileRdd: DStream[String]): Unit = {
    val gradeCounts: DStream[((String, String, String), Int)] = fileRdd
      .filter(isValidLog)
      .map { lines =>
        val fields = lines.split("-")
        val date = fields(0).split(" ")(0)
        val info = fields(5).split(",")
        val college = info(4)
        val grade = info(3)
        ((date, college, grade), 1)
      }
      .reduceByKey(_ + _)

    gradeCounts.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        val conn = JDBCUtil.getConnection
        try {
          var wrote = false
          iter.foreach { case ((date, college, grade), count) =>
            // Map the grade label to its dedicated counter column.
            val column = grade match {
              case "大一" => "grade1"
              case "大二" => "grade2"
              case "大三" => "grade3"
              case "大四" => "grade4"
              case _     => "grade5"
            }
            val sql = "INSERT INTO all_grade_visit(college_name,DATE," + column + ") VALUES(?,?,?) " +
              "ON DUPLICATE KEY UPDATE " + column + " = " + column + " + ?"
            JDBCUtil.executeUpdate(conn, sql, Array(college, date, count, count))
            wrote = true
          }
          if (wrote) touchRedisUpdateFlag()
        } finally {
          conn.close()
        }
      }
    }
  }
}
