package com.dmp.total

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object ProvinceCityReport {
  /**
   * Driver entry point for the region-distribution (province/city) report.
   *
   * Reads ad-log data in parquet format from `logInputPath` and registers it
   * as the temp table "log" for downstream SQL aggregation. The aggregation
   * and the writes to the province/city output paths are not implemented yet
   * in this chunk (the temp table is registered and nothing more).
   *
   * Expected arguments:
   *   args(0) logInputPath      — parquet input directory
   *   args(1) provinceDataPath  — output path for the province-level report
   *   args(2) cityDataPath      — output path for the city-level report
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging; keep warnings.
    Logger.getLogger("org").setLevel(Level.WARN)

    // Validate CLI arguments before creating any Spark resources.
    if (args.length < 3) {
      println(
        """
          |Usage: com.dmp.total.ProvinceCityReport <logInputPath> <provinceDataPath> <cityDataPath>
          |  <logInputPath>      parquet input directory
          |  <provinceDataPath>  province-level report output path
          |  <cityDataPath>      city-level report output path
        """.stripMargin)
      System.exit(1)
    }

    // Fixed typo: was `procinceDataPath`. The two output paths are currently
    // unused because the report logic below is still a stub.
    val Array(logInputPath, provinceDataPath, cityDataPath) = args

    val conf = new SparkConf()
    // NOTE(review): hard-coded local master overrides any --master flag passed
    // via spark-submit; remove for cluster deployments.
    conf.setMaster("local[*]")
    conf.setAppName("地域分布情况")
    // Kryo is faster and more compact than Java serialization.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    try {
      val dataFrame: DataFrame = sqlContext.read.parquet(logInputPath)
      // Spark 1.x API; on Spark 2+ this would be createOrReplaceTempView.
      dataFrame.registerTempTable("log")
      // TODO: run the province/city aggregation SQL and write results to
      // provinceDataPath / cityDataPath.
    } finally {
      // Always release Spark resources, even if the parquet read fails.
      sc.stop()
    }
  }
}
