package report

import java.util.Properties

import Configer.Configer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

// Area data-volume analysis SQL job: counts ad-log records per region.
object AreaDatasAnalysisSQL {

  /** Entry point: reads ad-log parquet data, aggregates record counts per
    * province/city with Spark SQL, and overwrites the result into the
    * "Area34" JDBC table configured via [[Configer]].
    */
  def main(args: Array[String]): Unit = {
    // Build the local Spark context; serializer comes from project config.
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName(this.getClass.getName)
    conf.set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    try {
      // Load the source parquet files.
      // NOTE(review): hard-coded Windows path — consider taking it from args.
      val dataFrame = sQLContext.read.parquet("E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet")

      // Register the data as a SQL table and count rows per province/city.
      dataFrame.registerTempTable("logs")
      val result: DataFrame = sQLContext.sql(
        """
          |select provincename,cityname,count(*) count
          |from logs
          |group by provincename,cityname
        """.stripMargin) // margins added so stripMargin actually trims the indent

      // Persist the aggregation, replacing any previous table contents.
      val props = new Properties()
      props.setProperty("driver", Configer.driver)
      props.setProperty("user", Configer.user)
      props.setProperty("password", Configer.password)
      result.write.mode(SaveMode.Overwrite).jdbc(Configer.url, "Area34", props)
    } finally {
      // Always release the SparkContext, even when the job fails midway.
      sc.stop()
    }
  }
}
