import org.apache.spark.{SparkConf, SparkContext}

object home0330 {
  /** Entry point: prints the IDs of students who scored 100 in either the
    * math or the big-data exam.
    *
    * Input files are plain text, one record per line, space-separated:
    * "<studentId> <name> <score>".
    *
    * @param args optional overrides: args(0) = math result file path,
    *             args(1) = big-data result file path. Defaults preserve the
    *             original hard-coded Windows paths for backward compatibility.
    */
  def main(args: Array[String]): Unit = {
    val mathPath    = if (args.length > 0) args(0) else "D:\\sparktest\\student\\result_math.txt"
    val bigdataPath = if (args.length > 1) args(1) else "D:\\sparktest\\student\\result_bigdata.txt"

    val conf = new SparkConf().setMaster("local").setAppName("home0330")
    val sc = new SparkContext(conf)
    try {
      // Parse one "<id> <name> <score>" line into a typed triple.
      // NOTE(review): assumes well-formed 3-field lines; malformed input
      // would throw on indexing or toInt, as in the original code.
      def parseRecord(line: String): (String, String, Int) = {
        val fields = line.split(" ")
        (fields(0), fields(1), fields(2).toInt)
      }

      // Filtering after the union is equivalent to filtering each RDD first
      // (filter distributes over union), but avoids duplicating the predicate.
      val perfectScoreIds = sc.textFile(mathPath).map(parseRecord)
        .union(sc.textFile(bigdataPath).map(parseRecord))
        .filter(_._3 == 100)
        .map(_._1)
        .distinct()

      perfectScoreIds.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws —
      // the original leaked it.
      sc.stop()
    }
  }
}
