package com.oracle.violet.first.report

import com.oracle.violet.first.config.ConfigHandler
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Batch report job: counts log records per (province, city) and writes the
 * aggregate as JSON.
 *
 * Reads the pre-processed logs from the Parquet path configured in
 * [[ConfigHandler.parquetFilePath]], runs a GROUP BY over province/city,
 * and writes the result to a local JSON directory (the commented-out
 * `.jdbc(...)` line shows the alternative relational sink).
 */
object ProCityAnaly {

  def main(args: Array[String]): Unit = {
    // NOTE(review): master is hard-coded to local[*] — fine for development,
    // but should come from spark-submit in a cluster deployment.
    val sparkConf = new SparkConf()
      .setAppName("省市数据分布统计")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    // Ensure the SparkContext is always released, even when the read/query/
    // write below throws — the original stopped it only on the happy path.
    try {
      // Load the pre-processed logs.
      val dataFrame = sqlContext.read.parquet(ConfigHandler.parquetFilePath)

      // Expose the DataFrame as a SQL table for the aggregation below.
      dataFrame.registerTempTable("alogs")

      // Count records per (province, city) and persist the report.
      sqlContext.sql(
        """
          |select count(1) ct,provincename,cityname from alogs group by provincename,cityname
        """.stripMargin)
        .coalesce(4) // cap output at 4 files; the aggregate is small
        .write
        // Without an explicit mode, a second run of the job aborts with
        // "path already exists"; overwrite makes the report re-runnable.
        .mode("overwrite")
        .json("H:\\甲骨文培训\\大数据\\Linux\\大纲\\项目二\\VIOLET\\report\\json")
        //.jdbc(ConfigHandler.url,"orc_report_province",ConfigHandler.props)
    } finally {
      sc.stop()
    }
  }

}
