package com.spark.simple

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}

object EnglishWordCount {
    /**
     * Batch job: reads a text file, counts word occurrences, sorts by
     * descending count, and appends the (word, count) rows to the MySQL
     * table `kaoyan.sum2020_2021` via JDBC.
     *
     * @param args unused command-line arguments
     */
    def main(args: Array[String]): Unit = {
        // App name now matches the object (was a copy-paste leftover "MovieType").
        // "local" runs Spark with a single worker thread in-process.
        val spark: SparkSession = SparkSession.builder().appName("EnglishWordCount").master("local").getOrCreate()
        try {
            // Target schema of the output table: one row per distinct word.
            val fields: Array[StructField] = Array(
                StructField("word", StringType, nullable = true),
                StructField("count", IntegerType, nullable = true))
            val schema: StructType = StructType(fields)
            val lines: RDD[String] = spark.sparkContext.textFile("input/kaoyan/sum.txt")
            // NOTE(review): split(" ") yields empty tokens on consecutive spaces;
            // confirm the input is single-space delimited or switch to split("\\s+").
            val wordCounts: RDD[Row] = lines
              .flatMap(_.split(" ").map((_, 1)))
              .reduceByKey(_ + _)
              .sortBy(_._2, ascending = false)
              .map { case (word, count) => Row(word, count) }
            val wordCountDF: DataFrame = spark.createDataFrame(wordCounts, schema)
            val prop = new Properties()
            // NOTE(review): plaintext credentials in source — move to a config
            // file or environment variables before this ships.
            prop.put("user", "root")
            prop.put("password", "bigdata")
            // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J class;
            // com.mysql.cj.jdbc.Driver is current. Left as-is to match the
            // driver jar actually deployed — verify before changing.
            prop.put("driver", "com.mysql.jdbc.Driver")
            // Append (never overwrite) the results to kaoyan.sum2020_2021.
            wordCountDF.write.mode("append").jdbc("jdbc:mysql://192.168.21.104:3306/kaoyan", "kaoyan.sum2020_2021", prop)
        } finally {
            // Always release the local Spark resources, even if the job fails.
            spark.stop()
        }
    }
}