package Spark

import org.apache.spark.{SparkConf, SparkContext}
import org.junit
import org.junit.Test

/**
 * Spark batch job: combines per-subject score files with the student
 * roster, computes each student's total and average over the two
 * subjects, and writes one CSV line per student to `result_info`.
 *
 * NOTE(review): `main` is declared inside a `class`, so it is NOT a JVM
 * entry point — `spark-submit` / `scala` cannot launch it. It should
 * almost certainly be an `object`; left unchanged here to preserve the
 * declared type. The `org.junit` imports above are likewise unused.
 */
class createTest {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[6]").setAppName("test")
    val sc   = new SparkContext(conf)

    /**
     * Load the input data.
     * Each score line is tab-separated: id \t subject/name \t score.
     *
     * FIX: the two score paths were swapped relative to the variable
     * names (`bigdata` read result_math.txt and vice versa), which
     * mislabelled the per-subject columns in the joined output below.
     */
    val bigdata = sc.textFile("hdfs://192.168.64.129:9000/data/result_bigdata.txt").map {
      item =>
        val line = item.split("\t")
        (line(0), line(2).toInt)   // (student id, bigdata score)
    }
    val math = sc.textFile("hdfs://192.168.64.129:9000/data/result_math.txt").map {
      item =>
        val line = item.split("\t")
        (line(0), line(2).toInt)   // (student id, math score)
    }
    val students = sc.textFile("hdfs://192.168.64.129:9000/data/student.txt").map {
      item =>
        val line = item.split("\t")
        (line(0), line(1))         // (student id, student name)
    }

    // Total score per student: union both subjects, then sum scores by id.
    val total_score = math.union(bigdata).reduceByKey(_ + _)

    // Average over the two subjects; divide by 2.0 so the fraction is kept.
    val avg_score = total_score.map(item => (item._1, item._2 / 2.toDouble))

    /*
    // Alternative average via combineByKey (kept for reference):
    val prepare_avg = math.union(bigdata).combineByKey(
      count => (count,1),
      ( acc:(Int,Int),next:Int ) => (acc._1+next,acc._2+1),
      ( curr:(Int,Int),agg:(Int,Int) ) => (curr._1+agg._1,curr._2+agg._2)
    )
    val avg = prepare_avg.map(item => (item._1,item._2._1/item._2._2))
    */

    // Join everything on student id. The nested tuple after four joins is
    // (id, ((((name, math), bigdata), total), avg)); flatten it to a CSV
    // line: id,name,math,bigdata,total,avg.
    // NOTE(review): `join` is an inner join — students missing from any
    // score file are silently dropped; confirm that is intended.
    val info = students.join(math).join(bigdata).join(total_score).join(avg_score)
        .map(item =>
          Array(item._1, item._2._1._1._1._1, item._2._1._1._1._2, item._2._1._1._2, item._2._1._2, item._2._2)
          .mkString(",")
        )

    // Collapse to a single partition so one output file is produced.
    info.repartition(1).saveAsTextFile("result_info")

    sc.stop()

  }
}
