package org.example

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

object PopularTeacherInDatabase {

  /** A teacher's name paired with how many times it appears in the access log. */
  case class Teacher(name: String, count: Int)

  /**
   * Reads `teacher.log` (one URL path per line, teacher name as the last
   * path segment), finds the most frequently accessed teacher, and appends
   * the result to the MySQL `teacher` table via JDBC.
   */
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val conf = new SparkConf().setAppName("PopularTeacherInDataBase").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // Count occurrences of each teacher (last "/"-separated segment of the line).
      val teacherCountRDD = sc.textFile("teacher.log")
        .filter(_.nonEmpty)
        .map(_.split("/").last)
        .map((_, 1))
        .reduceByKey(_ + _)

      // `reduce` on an empty RDD throws UnsupportedOperationException — guard first.
      if (teacherCountRDD.isEmpty()) {
        println("teacher.log contains no records")
      } else {
        val (name, count) = teacherCountRDD.reduce((t1, t2) => if (t1._2 > t2._2) t1 else t2)

        val sqlCon = new SQLContext(sc)
        import sqlCon.implicits._
        val df = sc.parallelize(Seq(Teacher(name, count))).toDF()

        // NOTE(review): credentials and the JDBC URL are hard-coded; they should
        // come from configuration or the environment in production code.
        val prop = new Properties()
        prop.setProperty("user", "niit")
        prop.setProperty("password", "123456")
        prop.setProperty("driver", "com.mysql.cj.jdbc.Driver")

        try {
          df.write.mode("append").jdbc("jdbc:mysql://123.56.187.176:1101/huel", "teacher", prop)
          println("成功")
        } catch {
          // Catch only non-fatal errors and surface the cause instead of
          // silently swallowing it (the original never printed `e`).
          case NonFatal(e) =>
            println("数据库连接失败")
            e.printStackTrace()
        }
      }
    } finally {
      // Always release Spark resources, even when the job fails part-way.
      sc.stop()
    }
  }
}
