package report

import java.util.Properties

import Configer.Config
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

//各省市数据分析  sparksql
// Per-province/city data analysis using Spark SQL.
// Reads the ad-log parquet data set, counts records grouped by
// (provincename, cityname), then writes the result both as a single
// JSON file and into a JDBC table configured in Configer.Config.
object ProCitySQL {

  /**
   * Entry point.
   *
   * @param args optional; args(0) overrides the JSON output directory.
   *             Defaults to the original hard-coded path for backward
   *             compatibility.
   */
  def main(args: Array[String]): Unit = {
    // JSON output directory: take from args when provided, otherwise
    // fall back to the previous hard-coded location.
    val jsonOutputPath =
      if (args.nonEmpty) args(0)
      else "C:\\Users\\44323\\Desktop\\资料PDF\\json3"

    // Build the SparkContext / SQLContext.
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    conf.setMaster("local[*]")
    conf.set("spark.serializer", Config.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // try/finally guarantees the SparkContext is stopped even if a
    // read or write below throws (the original leaked it on failure).
    try {
      // Load the source data.
      val dataFrame = sQLContext.read.parquet(Config.parquetPath)

      // Aggregate record counts per (province, city) with Spark SQL.
      dataFrame.registerTempTable("logs")
      val result = sQLContext.sql(
        """
          |select
          |provincename,cityname,count(*) count
          |from logs
          |group by provincename,cityname
        """.stripMargin)

      // coalesce(1) so the JSON output lands in a single part file.
      result.coalesce(1).write.json(jsonOutputPath)

      // Persist the same result into the configured JDBC table.
      val props = new Properties()
      props.setProperty("driver", Config.driver)
      props.setProperty("user", Config.user)
      props.setProperty("password", Config.password)
      result.write.jdbc(Config.url, Config.table, props)
    } finally {
      sc.stop()
    }
  }
}
