package report

import java.util.Properties

import Configer.Configer
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import scalikejdbc.{DB, SQL}
import utils.KPITotal

// Regional-distribution (province/city) KPI aggregation job:
// reads ad-log parquet data, sums per-(province, city) KPI vectors,
// and overwrites the results into the MySQL table "Area34".
object AreaAnalysis {

  // Original hard-coded input location; used when no CLI argument is given
  // so existing invocations keep working unchanged.
  private val DefaultInputPath = "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"

  def main(args: Array[String]): Unit = {
    // Generalized: the parquet input path may now be passed as args(0);
    // falls back to the original default for backward compatibility.
    val inputPath = if (args.nonEmpty) args(0) else DefaultInputPath

    // Spark setup — local[*] master retained from the original job.
    val conf = new SparkConf().setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]").set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    try {
      // Load the source data.
      val dataFrame = sQLContext.read.parquet(inputPath)

      // Per-row: extract the (province, city) dimension key and the KPI vector
      // (KPITotal.KPI(row) returns a fixed-length List[Double]), then merge
      // vectors for the same key by element-wise addition.
      val result: RDD[((String, String), List[Double])] = dataFrame.map(row => {
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        val listALL = KPITotal.KPI(row)

        ((pname, cname), listALL)
      }).reduceByKey((list1, list2) => list1.zip(list2).map(li => li._1 + li._2))

      import sQLContext.implicits._
      // NOTE: KPI indices are deliberately reordered (0-4, then 7, 8, 5, 6) so
      // that adshow/adclick come before adpay/adwin in the output schema —
      // preserved exactly from the original mapping. Column names (including
      // the "adsucess" spelling) are kept as-is: the DB table depends on them.
      val frame = result.map(tp => (tp._1._1, tp._1._2, tp._2(0), tp._2(1), tp._2(2), tp._2(3), tp._2(4), tp._2(7), tp._2(8), tp._2(5), tp._2(6)))
        .toDF("pname", "cname", "allreq", "effreq", "adreq", "adjoin", "adsucess", "adshow", "adclick", "adpay", "adwin")

      // Persist via JDBC; Overwrite replaces the existing Area34 table contents.
      val props = new Properties()
      props.setProperty("user", Configer.user)
      props.setProperty("password", Configer.password)
      props.setProperty("driver", Configer.driver)
      frame.write.mode(SaveMode.Overwrite).jdbc(Configer.url, "Area34", props)

      // Alternative scalikejdbc-based sink, kept for reference (uses DB/SQL imports):
      //    result.foreachPartition(partition=>{
      //      DB.localTx{implicit session=>
      //        partition.foreach(arr=>{
      //          SQL("insert into Area34 values (?,?,?,?)").bind("1","2",arr._1._1,arr._1._2)
      //            .update().apply()
      //        })
      //      }
      //    })
    } finally {
      // Always release the SparkContext, even when the job fails —
      // the original stopped it only on the success path.
      sc.stop()
    }
  }
}
