import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.api.java.JavaSparkContext

/**
 * Created by Administrator on 2016/6/9 0009.
 */
/**
 * Minimal Spark smoke test: parallelizes a small in-memory collection
 * into an RDD and prints its element count.
 */
object test {
  def main(args: Array[String]): Unit = {
    // Local single-threaded master: self-contained run, no cluster needed.
    val conf = new SparkConf().setAppName("hello").setMaster("local")
    // Use the native Scala SparkContext. JavaSparkContext.parallelize expects
    // a java.util.List and does not accept a Scala Seq, so the original call
    // could not type-check; SparkContext.parallelize takes a Seq directly.
    val sc = new SparkContext(conf)
    try {
      val text = sc.parallelize(Seq("a", "b", "c")) // RDD of three strings
      val cc = text.count()
      println(cc) // expected: 3
    } finally {
      // Always release the context so executors and UI threads shut down,
      // even if the job above throws.
      sc.stop()
    }
  }
}
