package com.itcast.report

import com.itcast.beans.ReportLogDataAnalysis
import com.itcast.utils.{ConfigHandler, FileHandler, MysqlHandler}
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

//统计日志文件中各省市的数据分布情况
//sql方式
// Aggregates the parsed ad-log Parquet data into a per-(province, city) record
// count, writing the result both as a single JSON file and into MySQL.
object LogDataAnalysisCore {
  def main(args: Array[String]): Unit = {
    // Spark configuration; Kryo gives faster, more compact shuffle serialization
    val sparkConf = new SparkConf()
      .setAppName("LogDataAnalysisCore")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sparkContext = new SparkContext(sparkConf)
    val sQLContext = new SQLContext(sparkContext)
    try {
      // Load the pre-processed log data stored as Parquet
      val rawDataFrame = sQLContext.read.parquet(ConfigHandler.parquetPath)
      // Map each row to ((province, city), 1) and sum the counts per key.
      // Go through .rdd explicitly so reduceByKey is available regardless of
      // whether DataFrame.map returns an RDD (Spark 1.x) or a Dataset (2.x+).
      val result = rawDataFrame.rdd.map { row =>
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), 1)
      }.reduceByKey(_ + _)
      import sQLContext.implicits._
      val resultDF = result
        .map { case ((province, city), cnt) => ReportLogDataAnalysis(province, city, cnt) }
        .toDF()
      // coalesce(1): the aggregate is tiny, so emit a single JSON part file.
      // Overwrite so re-runs don't fail when the output path already exists.
      resultDF.coalesce(1).write.mode(SaveMode.Overwrite).json(ConfigHandler.rptPath)
      // Persist the same aggregate to MySQL
      // (equivalent to: resultDF.write.mode(SaveMode.Overwrite)
      //   .jdbc(ConfigHandler.url, ConfigHandler.table, ConfigHandler.dbProper))
      MysqlHandler.save2db(resultDF, ConfigHandler.table)
    } finally {
      // Always release Spark resources, even when the job fails part-way
      sparkContext.stop()
    }
  }
}
