package DataAnalysis_qcl

import org.apache.spark.sql.SparkSession
import java.util.Properties

object DataAnalysis01 {
  def main(args: Array[String]): Unit = {

    /*
       DataAnalysis01: for each Beijing district, compute the average monthly
       rent (yuan/month), the average area, and the number of listings.
     */

    // Build the SparkSession. Local mode; Hive support is enabled so the
    // `use qcl` / `drop table` statements below address the Hive catalog.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据分析第一题")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for MySQL.
    // NOTE(review): credentials are hard-coded — consider moving them to
    // configuration or environment variables.
    val mysql_connect = new Properties()
    mysql_connect.setProperty("user", "root")
    mysql_connect.setProperty("password", "123456")
    mysql_connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Load the pre-processed data from MySQL and register it as a temp view.
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/qcl?useSSL=false", "data_processing", mysql_connect)
      .createOrReplaceTempView("data")

    spark.sql("use qcl")

    // Aggregate per district: rounded-up average rent, rounded-up average
    // area, and listing count, ordered by average rent descending.
    val result = spark.sql(
      """
        |select
        |downtown,
        |ceil(avg(money)) as money_avg,
        |ceil(avg(area)) as area_avg,
        |count(*) as house_number
        |from data
        |group by downtown
        |order by money_avg desc
        |""".stripMargin)

    // `result` feeds two separate write actions below; cache it so the JDBC
    // read and the aggregation are not recomputed for the second write.
    result.cache()

    // Drop any stale Hive copy of the result table before re-publishing.
    spark.sql("drop table if exists qcl.data_analysis01")

    // Persist the result back to MySQL (overwrite replaces the table).
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/qcl?useSSL=false", "data_analysis01", mysql_connect)

    // Also store the result on HDFS as a single CSV output (repartition(1)
    // collapses it to one part file).
    result.repartition(1).write.mode("overwrite")
      .csv("/qcl/data_analysis/data_analysis01.csv")

    // Release the cached data now that both writes are done.
    result.unpersist()

    // Shut down the Spark environment.
    spark.close()
  }

}
