package cn.doit.report

import cn.doit.common.DolphinAppComm
import cn.doit.config.DolphinConfig
import org.apache.spark.sql.{SQLContext, SaveMode}

/**
  * Province/city data analysis: counts request records per province, city and day.
  */

/**
  * Spark batch job: reads a parquet dataset of request logs and computes the
  * number of records per province / city / day, then appends the result to
  * the MySQL table `r_mty_dolphin`.
  *
  * Expected CLI arguments: <parquetPath> <outputPath>
  * NOTE(review): <outputPath> is currently unused because the JSON sink is
  * commented out; it is kept in the CLI contract so existing launch scripts
  * still work.
  */
object PRovinceCityAnalysis {
  def main(args: Array[String]): Unit = {
    // Guard against a wrong argument count. The original printed the usage
    // message but did NOT exit, so execution fell through to the destructuring
    // below and died with a scala.MatchError; exit explicitly instead.
    if (args.length != 2) {
      println(
        """
          |Usage: cn.doit.report.PRovinceCityAnalysis
          |参数 <parquetPath><outputPath>
        """.stripMargin)
      sys.exit(1)
    }
    val Array(parquetPath, outputPath) = args

    // Build the SparkContext via the shared project helper (name as spelled
    // in DolphinAppComm — presumably configures master/app name; confirm there).
    val sc = DolphinAppComm.creatSparkContext("cn.doit.report.PRovinceCityAnalysis")
    val sqlc = new SQLContext(sc)

    // Load the parquet input.
    val dataFrame = sqlc.read.parquet(parquetPath)
    // Register the DataFrame as a temp table so it can be queried with SQL
    // (registerTempTable is the pre-Spark-2.0 API; kept for compatibility).
    dataFrame.registerTempTable("dolphin_table")

    // Record count per (day, province, city); substring(requestdate, 0, 10)
    // truncates the timestamp to its date part.
    val result = sqlc.sql(
      """
         |select substring(requestdate,0,10) date ,provincename , cityname , count(1) cnt
         |from dolphin_table
         |group by provincename  , cityname , substring(requestdate, 0, 10)
      """.stripMargin)

    // JSON sink (disabled): would write the aggregate to outputPath.
    // result.coalesce(4).write.json(outputPath)

    // MySQL sink: append the aggregate to r_mty_dolphin using the shared
    // JDBC url/credentials from DolphinConfig.
    result.write.mode(SaveMode.Append).jdbc(DolphinConfig._url, "r_mty_dolphin", DolphinConfig.props)
  }
}
