package report

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Wang Kang on 2018/6/29.
  */
/**
  * Batch job that aggregates per-(province, city) ad-flow metrics from a
  * parquet dataset and writes the result as CSV-ish text lines.
  *
  * Output line format: "province,city:c1,c2,c3,c4,c5,c6,c7,winPriceSum,adPaymentSum"
  *
  * Usage: [inputParquetPath] [outputDir] — both optional; defaults preserve
  * the original hard-coded Windows paths.
  */
object Regional_distribution_Core {

  /** Converts a per-row boolean condition into a 1/0 counter contribution. */
  private def asCount(flag: Boolean): Int = if (flag) 1 else 0

  def main(args: Array[String]): Unit = {
    // Paths are now overridable via args (backward compatible: with no args
    // the original hard-coded locations are used).
    val inputPath: String = args.lift(0).getOrElse("E:\\大数据资料\\project2\\parquet_1.6.3")
    val outputPath: String = args.lift(1).getOrElse("E:\\大数据资料\\project2\\Regional_distribution_Core")

    val conf: SparkConf = new SparkConf().setAppName("Regional_distribution_Core").setMaster("local[4]")
    val scc: SparkContext = new SparkContext(conf)
    try {
      val sqlcontext: SQLContext = new SQLContext(scc)
      val parquet: DataFrame = sqlcontext.read.parquet(inputPath)

      // One record per row: key = (province, city), value = 7 counters plus
      // two money sums. Column indices are positional in the parquet schema —
      // NOTE(review): assumed stable; verify against the dataset's schema.
      val filterdata: RDD[((String, String), (Int, Int, Int, Int, Int, Int, Int, Double, Double))] =
        parquet.map(row => {
          val provincename: String = row.getString(24)
          val cityname: String = row.getString(25)
          val requestmode: Int = row.getInt(8)
          val processnode: Int = row.getInt(35)
          val iseffective: Int = row.getInt(30)
          val isbilling: Int = row.getInt(31)
          val isbid: Int = row.getInt(39)
          val iswin: Int = row.getInt(42)
          val adorderid: Int = row.getInt(2)
          val winprice: Double = row.getDouble(41)
          val adpayment: Double = row.getDouble(75)

          // Multi-dimensional per-row predicates; each contributes 1 to its counter.
          val flag1: Boolean = requestmode == 1 && processnode >= 1
          val flag2: Boolean = requestmode == 1 && processnode >= 2
          val flag3: Boolean = requestmode == 1 && processnode == 3
          val flag4: Boolean = iseffective == 1 && isbilling == 1 && isbid == 1 && adorderid != 0
          val flag5: Boolean = iseffective == 1 && isbilling == 1 && iswin == 1
          val flag6: Boolean = requestmode == 2 && iseffective == 1
          val flag7: Boolean = requestmode == 3 && iseffective == 1

          // The two money sums only accumulate for rows satisfying flag5.
          ((provincename, cityname),
            (asCount(flag1), asCount(flag2), asCount(flag3), asCount(flag4),
              asCount(flag5), asCount(flag6), asCount(flag7),
              if (flag5) winprice else 0.0,
              if (flag5) adpayment else 0.0))
        })

      // Element-wise sum of the 9-tuple per (province, city) key.
      val result: RDD[((String, String), (Int, Int, Int, Int, Int, Int, Int, Double, Double))] =
        filterdata.reduceByKey((x, y) =>
          (x._1 + y._1, x._2 + y._2, x._3 + y._3, x._4 + y._4, x._5 + y._5,
            x._6 + y._6, x._7 + y._7, x._8 + y._8, x._9 + y._9))

      // Render each aggregate as a single text line and write out.
      val re: RDD[String] = result.map { case ((province, city), m) =>
        province + "," + city + ":" +
          m._1 + "," + m._2 + "," + m._3 + "," + m._4 + "," + m._5 + "," +
          m._6 + "," + m._7 + "," + m._8 + "," + m._9
      }
      re.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the job throws (the original
      // code leaked it on failure and never stopped it on success).
      scc.stop()
    }
  }
}
