package report

import java.util.Properties

import Configer.Configer
import beans.AreaClass
import com.google.gson.Gson
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import scalikejdbc.{DB, SQL}

/**
 * Area data-volume analysis: counts records per (province, city) pair from a
 * parquet source and persists the counts into the MySQL table `Area34`.
 *
 * Usage: optionally pass the parquet input path as the first CLI argument;
 * otherwise the original hard-coded default path is used.
 */
object AreaDatasAnalysis {
  def main(args: Array[String]): Unit = {
    // Input parquet path: first CLI arg if supplied, else the original default.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"

    // Spark context (local mode; serializer comes from project-level Configer).
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(s"${this.getClass.getName}")
      .set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    try {
      val sQLContext = new SQLContext(sc)
      // Read the source data.
      val dataFrame = sQLContext.read.parquet(inputPath)

      // Aggregate: one count per (provincename, cityname) pair.
      val result: RDD[((String, String), Int)] = dataFrame.map(row => {
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), 1)
      }).reduceByKey(_ + _)

      // Storage method 1: flatten to a DataFrame (JSON write kept for reference).
      import sQLContext.implicits._
      val frame: DataFrame = result
        .map { case ((pname, cname), cnt) => (pname, cname, cnt) }
        .toDF("pname", "cname", "count")
//    frame.coalesce(1).write.mode(SaveMode.Overwrite).json("E:\\小牛项目\\DMP广告项目34期\\资料PDF\\json")
      // Storage method 2: serialize each record to JSON with Gson (kept for reference).
//    result.map(tp=>{
//      val gson = new Gson()
//      gson.toJson(AreaClass(tp._1._1,tp._1._2,tp._2))
//    }).coalesce(1).saveAsTextFile("E:\\小牛项目\\DMP广告项目34期\\资料PDF\\json2")

      // Write to MySQL, method 1: DataFrame JDBC writer (kept for reference).
//    val props = new Properties()
//    props.setProperty("driver",Configer.driver)
//    props.setProperty("user",Configer.user)
//    props.setProperty("password",Configer.password)
//    frame.write.mode(SaveMode.Overwrite).jdbc(Configer.url,"Area34",props)

      // Write to MySQL, method 2: one scalikejdbc transaction per partition,
      // so each partition's inserts either all commit or all roll back.
      // NOTE(review): assumes the scalikejdbc ConnectionPool is initialized
      // elsewhere (e.g. via Configer / DBs.setup) — verify before running.
      result.foreachPartition(partition => {
        DB.localTx { implicit session =>
          partition.foreach { case ((pname, cname), cnt) =>
            SQL("insert into Area34 values (?,?,?)")
              .bind(pname, cname, cnt)
              .update().apply()
          }
        }
      })
    } finally {
      // Always release the SparkContext, even if the job fails mid-way.
      sc.stop()
    }
  }
}
