package DataAnalysis_ljy

import org.apache.spark.sql.SparkSession

import java.util.Properties

object t1 {

  /**
   * Data-analysis task 1: count the number of concerts held in each city.
   *
   * Reads the pre-processed `dwd` table from MySQL, computes a per-city
   * concert count, and overwrites the `r1` result table in MySQL.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据分析第一题")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for MySQL.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class; Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm which
    // driver jar is on the classpath.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Load the already-cleaned data from MySQL and expose it as a temp view.
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/ljy?useSSL=false", "dwd", conn)
      .createOrReplaceTempView("data")

    // Count how many concerts were held in each city.
    // (distinct + a window count is equivalent here to a plain
    //  `select city, count(*) from data group by city`.)
    val r1 = spark.sql(
      """
        |select distinct
        |city,
        |count(*) over(partition by city) as number
        |from data
        |""".stripMargin)

    // BUGFIX: the original called .format("overwrite"), but "overwrite" is a
    // SaveMode, not a data-source format — Spark would fail with
    // "Failed to find data source: overwrite". Use .mode instead; .jdbc()
    // already selects the JDBC data source.
    r1.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/ljy?useSSL=false", "r1", conn)

    spark.close()
  }

}
