// Compute the average score per student (here: Tom) from a CSV file whose
// lines look like "name,course,score".
val lines = sc.textFile("file:///home/ubuntu1/spark/work6//chapter5-data1.txt")
// Keep only the records belonging to Tom (first comma-separated field).
val words = lines.filter(_.split(",")(0) == "Tom")
//words.foreach(println)
// Parse each line exactly once into a (name, score) pair.
// NOTE(review): assumes field 2 is always a well-formed integer — a malformed
// line would throw NumberFormatException inside the task; confirm input is clean.
val rdd = words.map { line =>
  val fields = line.split(",")
  (fields(0), fields(2).toInt)
}
// Average per key: tag each score with a count of 1, sum both components,
// then divide. FIX: use toDouble so the average is not truncated by integer
// division (e.g. sum=26, count=4 must yield 6.5, not 6).
val rdd1 = rdd
  .mapValues(score => (score, 1))
  .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2))
  .mapValues { case (sum, count) => sum.toDouble / count }
  .collect()
// Print the (name, average) result on the driver.
rdd1.foreach(println)

