package com.inspur
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
object stu {
  /**
   * Spark job: prints the distinct ids of students who scored 100 in
   * math and/or bigdata.
   *
   * Input files are space-separated lines of the form
   * `id name score` (score must parse as an Int — malformed lines will
   * throw, matching the original behavior).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Test")
      .setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Parse one result file into (id, name, score) triples.
      // Extracted because both inputs share the exact same format.
      def loadScores(path: String) =
        sc.textFile(path).map { row =>
          val fields = row.split(" ")
          (fields(0), fields(1), fields(2).toInt)
        }

      val math    = loadScores("file:///E:/data/student/result_math.txt")
      val bigdata = loadScores("file:///E:/data/student/result_bigdata.txt")

      // Keep only perfect scores from either subject, merge the two RDDs,
      // then dedupe by student id (a student acing both subjects appears once).
      val perfect    = math.filter(_._3 == 100).union(bigdata.filter(_._3 == 100))
      val studentIds = perfect.map(_._1).distinct().collect()

      println(studentIds.mkString(" "))
    } finally {
      // Original code leaked the context; always release Spark resources,
      // even when the job throws (e.g. missing input file).
      sc.stop()
    }
  }
}