package com.spark.cust.movie

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @description:
 * @time: 2020/12/3 23:24
 * @author: lhy
 */
object Code03_MovieScore {
    /**
     * Reads the `movie_ratings` table from MySQL over JDBC and prints how many
     * movies fall into each rating band:
     *   bad:       ave_rating <  2
     *   good:      2 <= ave_rating < 4
     *   excellent: ave_rating >= 4
     */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession.builder()
            .appName("movieScore")
            .master("local")
            .getOrCreate()
        spark.sparkContext.setLogLevel("ERROR")
        try {
            // NOTE(review): connection details and credentials are hard-coded;
            // move them to configuration / environment in production.
            val jdbcDF: DataFrame = spark.read.format("jdbc")
                .option("url", "jdbc:mysql://192.168.21.104:3306/spark")
                .option("driver", "com.mysql.jdbc.Driver")
                .option("dbtable", "movie_ratings")
                .option("user", "root")
                .option("password", "bigdata")
                .load()
                // Cache: the three count() actions below would otherwise each
                // re-read the whole table over JDBC.
                .cache()

            val rating = jdbcDF("ave_rating")
            val bad: Long = jdbcDF.filter(rating < 2).count()
            val good: Long = jdbcDF.filter(rating >= 2 && rating < 4).count()
            val excellent: Long = jdbcDF.filter(rating >= 4).count()

            println(s"bad: $bad")
            println(s"good: $good")
            println(s"excellent: $excellent")
        } finally {
            // Always release local Spark resources, even if the JDBC read fails.
            spark.stop()
        }
    }
}
