package report

import Configer.Configer
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import scalikejdbc.{DB, SQL}
import utils.KPITotal

// ISP (carrier) report: aggregates KPI metrics per carrier and writes them to MySQL.
object IspnameAnaysis {

  /**
   * Entry point: reads ad-log parquet data, sums the per-row KPI vectors
   * (from `KPITotal.KPI`) grouped by carrier (`ispname`), and inserts one
   * row per carrier into MySQL via ScalikeJDBC.
   *
   * @param args optional; args(0) overrides the parquet input path
   *             (defaults to the original hard-coded path, so existing
   *             invocations are unchanged).
   */
  def main(args: Array[String]): Unit = {
    // Generalized: input path may be supplied on the command line.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"

    // Spark setup: local mode, serializer taken from project configuration.
    val conf = new SparkConf()
    conf.setAppName(this.getClass.getName)
    conf.setMaster("local[*]")
    conf.set("spark.serializer", Configer.serializer)

    val sc = new SparkContext(conf)
    // Ensure the SparkContext is released even if the job throws (the
    // original only stopped it on the success path).
    try {
      val sQLContext = new SQLContext(sc)
      // Read the source data.
      val dataFrame = sQLContext.read.parquet(inputPath)

      // (carrier -> KPI list) per row, then element-wise sum per carrier.
      val result: RDD[(String, List[Double])] = dataFrame.map(row => {
        val ispname = row.getAs[String]("ispname")
        val kpis: List[Double] = KPITotal.KPI(row) // fixed typo: was `lsit`
        (ispname, kpis)
      }).reduceByKey { (list1, list2) =>
        (list1 zip list2).map { case (a, b) => a + b }
      }

      // Persist: one JDBC transaction per partition to limit round trips.
      // NOTE(review): target table `Area34` looks copied from the area
      // report — confirm it should not be an ISP-specific table.
      result.foreachPartition(partition => {
        DB.localTx { implicit session =>
          partition.foreach(rp => {
            // Bind order (0-4, 7, 8, 5, 6) presumably matches the table's
            // column layout — TODO confirm against the schema.
            SQL("insert into Area34 values (?,?,?,?,?,?,?,?,?,?)")
              .bind(rp._1, rp._2(0), rp._2(1), rp._2(2), rp._2(3), rp._2(4),
                    rp._2(7), rp._2(8), rp._2(5), rp._2(6))
              .update().apply()
          })
        }
      })
    } finally {
      // Release cluster resources unconditionally.
      sc.stop()
    }
  }
}
