import org.apache.spark.{SparkContext,SparkConf}
// Spark job: take the integers 1..100, keep those > 50, double them,
// and print the doubled values (102,104,...,200) comma-separated.
// NOTE(review): this SparkConf sets no app name or master; spark-submit
// normally injects them — running this standalone would throw. Confirm
// the intended launch mode.
val conf = new SparkConf()
val sc = new SparkContext(conf)
try {
  val rdd = sc.parallelize(1 to 100)
  // The original trailing `.filter(_ > 100)` was redundant: any x > 50
  // doubled is >= 102, so the predicate always passed. Dropped with no
  // change to the output.
  val data = rdd.filter(_ > 50).map(_ * 2)
  val result = data.collect()
  println(result.mkString(","))
} finally {
  // Always release the SparkContext (executors, driver resources),
  // even if the job above fails — the original leaked it on any error.
  sc.stop()
}

