package cn.dmp.charts

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}


object DataDistributionProvinceAndCity {

  /**
   * Spark batch job: counts log records per (province, city) pair and writes
   * the aggregation out as JSON.
   *
   * Usage: DataDistributionProvinceAndCity &lt;inputPath&gt; &lt;outputPath&gt;
   *
   * @param args args(0) = input path of the log data (default load format, i.e. parquet);
   *             args(1) = output directory for the JSON aggregation result.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear usage message instead of an
    // ArrayIndexOutOfBoundsException when arguments are missing.
    if (args.length < 2) {
      System.err.println("Usage: DataDistributionProvinceAndCity <inputPath> <outputPath>")
      sys.exit(1)
    }

    val conf: SparkConf = new SparkConf().setAppName("DataDistributionProvinceAndCity").setMaster("local[4]")
    val sc: SparkContext = new SparkContext(conf)
    // Wrap the SparkContext to gain SQL/DataFrame capabilities (Spark 1.x API).
    val sqlContext = new SQLContext(sc)

    try {
      // `read.load` without an explicit format reads parquet by default.
      val logDf: DataFrame = sqlContext.read.load(args(0))
      logDf.registerTempTable("t_log")

      // Count log records grouped by province and city.
      val rechargesAggByProvinceAndCity = sqlContext.sql("select provincename,cityname,count(*) ct from t_log group by provincename,cityname")
      rechargesAggByProvinceAndCity.show()

      // Persist to MySQL (disabled; kept for reference)
      //    val props = new Properties()
      //    props.put("user","root")
      //    props.put("password","123456")
      //    rechargesAggByProvinceAndCity.write.mode("append").jdbc("jdbc:mysql://localhost:3306/npm?useUnicode=true&characterEncoding=utf-8", "rechargesAggByProvinceAndCity", props)

      // Save as JSON. Overwrite so reruns of the batch job do not fail with
      // "path already exists" (the default save mode is ErrorIfExists).
      rechargesAggByProvinceAndCity.write.mode("overwrite").json(args(1))
    } finally {
      // Always release the SparkContext, even when the job fails partway.
      sc.stop()
    }
  }
}
