package com.shujia.core

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.io.Source

/**
 * Map-side (broadcast) join demo: totals each student's scores, broadcasts the
 * small (id -> total) map to executors, and enriches the student records with
 * their total score — avoiding a shuffle join.
 */
object Code32StuMapJoin {
  def main(args: Array[String]): Unit = {
    // Local-mode driver. NOTE(review): app name "Mysql2Text" looks copy-pasted
    // from another job — consider renaming to match this object.
    val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("Mysql2Text"))

    // Build the small side of the join: total score per student id from the
    // (id, subject, score) CSV. collectAsMap() pulls it to the driver so it
    // can be broadcast.
    val scoreMap: collection.Map[String, Int] = sc
      .textFile("spark_code/data/score.txt", 4)
      .map { oneLine =>
        val fields: Array[String] = oneLine.split(",")
        (fields(0), (fields(1), fields(2).toInt)) // (studentId, (subject, score))
      }
      .groupBy(_._1)
      .map { case (id, records) =>
        (id, records.map(_._2._2).sum) // sum the score component per student
      }
      .collectAsMap()

    // Broadcast the lookup table once per executor instead of shipping it
    // inside every task closure.
    val scoreMapBroadcast: Broadcast[collection.Map[String, Int]] = sc.broadcast(scoreMap)

    sc
      // FIX: was "scala_code/data/students.txt" — inconsistent with the
      // "spark_code/data/..." prefix used for score.txt above.
      .textFile("spark_code/data/students.txt", 3)
      .map { oneLine =>
        val fields: Array[String] = oneLine.split(",")
        // Students with no score records get a total of 0 instead of failing.
        val totalScore: Int = scoreMapBroadcast.value.getOrElse(fields(0), 0)
        (fields(0), fields(1), fields(2), fields(3), fields(4), totalScore)
      }
      .foreach(println)

    // Keep the driver (and the Spark web UI at :4040) alive for inspection.
    // FIX: sleep instead of an empty busy-wait loop that pegs one CPU core.
    while (true) {
      Thread.sleep(1000)
    }
  }
}
