package DataAnalysis_hzl

import org.apache.spark.sql.{SparkSession, functions}
import org.apache.spark.sql.functions._

import java.util.Properties

/**
 * Batch job: reads the pre-cleaned used-car table from MySQL, computes the
 * average sale price and listing count per city, and writes the result back
 * to MySQL (table `r5`), also printing it ordered by listing count.
 */
object t5 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据分析第一题")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for MySQL.
    // NOTE(review): credentials are hardcoded — move to config/env before production use.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
    // with Connector/J 8+ it should be "com.mysql.cj.jdbc.Driver" — confirm the jar version.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Read the already-cleaned data from the `clean` table.
    val data = spark.read
      .jdbc("jdbc:mysql://192.168.40.110:3306/hzl?useSSL=false", "clean", conn)

    // Average second-hand car price (rounded to 1 decimal) and listing count per city.
    // groupBy already yields one row per distinct city, so no extra distinct() is needed
    // (the original redundant .distinct() forced an additional shuffle).
    val r1 = data.groupBy("city")
      .agg(
        round(avg("price"), 1).as("avg_price"),
        functions.count("*").as("number")
      )

    // Show cities with the most listings first.
    r1.orderBy(desc("number"))
      .show()

    // Persist the aggregation, replacing any previous run's output.
    r1.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/hzl?useSSL=false", "r5", conn)

    // stop() is the canonical shutdown call; close() merely delegates to it.
    spark.stop()
  }

}
