package org.example

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

import scala.util.control.NonFatal

object TupleTest {

  /** One output row: a teacher's name and how many times it appears in the log. */
  case class Teacher(name: String, count: Int)

  /** Extracts the teacher name from one log line by taking the segment after
    * the last occurrence of the delimiter.
    *
    * NOTE(review): the original splits on the literal "n/" (so a line like
    * "http://bigdata.edu360.cn/laozhang" yields "laozhang"). Kept byte-identical
    * to preserve behavior — confirm whether "/" was actually intended for this
    * log format, since "n/" breaks for hosts/paths not ending in "n/".
    */
  def teacherName(line: String): String = line.split("n/").last

  /** Reads teacher.log, finds the single most frequent teacher, shows the
    * result as a DataFrame and appends it to the MySQL table `zyx`.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("PopularTeacherInDataBase").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // JDBC connection properties.
    // SECURITY(review): credentials and the DB URL are hard-coded; move them
    // to configuration / environment variables before this leaves a demo.
    val prop = new Properties()
    prop.setProperty("user", "niit")
    prop.setProperty("password", "123456")
    prop.setProperty("driver", "com.mysql.cj.jdbc.Driver")

    try {
      // Run the counting job ONCE. The original executed this identical
      // pipeline twice — once for the name, once for the count — reading and
      // shuffling the whole file a second time for no reason. headOption also
      // avoids the crash of `take(1).last` on an empty input file.
      val top: Option[(String, Int)] = sc.textFile("teacher.log")
        .map(teacherName)
        .map((_, 1))
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)
        .take(1)
        .headOption

      top match {
        case Some((name, count)) =>
          val sqlCon = new SQLContext(sc)
          import sqlCon.implicits._
          // Build the one-row DataFrame once and reuse it for both show()
          // and the JDBC write (the original parallelized the Seq twice).
          val df = sc.parallelize(Seq(Teacher(name, count))).toDF()
          df.show()
          df.write.mode("append").jdbc("jdbc:mysql://123.56.187.176:1101/huel", "zyx", prop)
          println("成功")
        case None =>
          println("teacher.log is empty — nothing to write")
      }
    } catch {
      // NonFatal so OOM / interrupts still propagate; surface the cause
      // instead of silently discarding it like the original did.
      case NonFatal(e) =>
        println("数据库连接失败")
        e.printStackTrace()
    } finally {
      // Always release Spark resources, even on failure.
      sc.stop()
    }
  }
}
