package cn.doitedu.day02

import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object T21_GameCount {

  /**
   * Sums `money` per (game id, zone id) using two-stage "salted" aggregation.
   *
   * Input lines are CSV with at least 4 fields; fields 1-3 (0-based) are
   * gid, zid and money. A random salt in [0, 4) is appended to the key so a
   * hot (gid, zid) key is spread across up to 4 reduce tasks; a second
   * reduceByKey after dropping the salt produces the final totals.
   *
   * @param args optional: args(0) = input path (default "data/game.txt"),
   *             args(1) = output path (default "out/out11")
   */
  def main(args: Array[String]): Unit = {

    // Paths default to the original hard-coded locations but can be
    // overridden from the command line (backward-compatible).
    val inputPath  = if (args.length > 0) args(0) else "data/game.txt"
    val outputPath = if (args.length > 1) args(1) else "out/out11"

    // 1. Build the SparkConf; local[4] runs with 4 threads for local testing.
    //    App name fixed: it previously said "MapPartitionsWithIndexDemo",
    //    a copy-paste leftover from another demo.
    val conf = new SparkConf().setAppName("GameCount")
      .setMaster("local[4]")
    // 2. Create the SparkContext.
    val sc = new SparkContext(conf)

    try {
      val lines = sc.textFile(inputPath)

      // Stage 1: salt the key. Random is created once per partition (hence
      // mapPartitions) instead of once per record.
      val salted = lines.mapPartitions(it => {
        val random = new Random()
        it.map(line => {
          val fields = line.split(",")
          val gid    = fields(1)
          val zid    = fields(2)
          val money  = fields(3).toDouble
          val salt   = random.nextInt(4) // spreads one hot key over 4 buckets
          ((gid, zid, salt), money)
        })
      })

      salted
        .reduceByKey(_ + _)                                          // partial sums per salted key
        .map { case ((gid, zid, _), money) => ((gid, zid), money) }  // drop the salt
        .reduceByKey(_ + _)                                          // final sums per (gid, zid)
        .saveAsTextFile(outputPath)

      // Keep the driver (and the Spark web UI at http://localhost:4040)
      // alive for inspection; remove this for production runs.
      Thread.sleep(1000000)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
