package com.asiainfo.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object WordCount {

  /** Simple Spark word count over a comma-separated text file.
    *
    * Usage: `WordCount [inputPath]` — defaults to `"data/user.text"` when
    * no argument is supplied, preserving the original behavior.
    *
    * Prints the `(word, count)` pairs to stdout as a comma-joined string.
    */
  def main(args: Array[String]): Unit = {
    // Input path is configurable via the first CLI argument;
    // falls back to the original hard-coded path for compatibility.
    val inputPath = args.headOption.getOrElse("data/user.text")

    // Create the Spark execution context. "local" runs in-process,
    // which is intended for development; override the master via
    // spark-submit when deploying for real.
    val conf = new SparkConf().setMaster("local").setAppName("spark_jobName")
    val sparkContext: SparkContext = new SparkContext(conf)

    try {
      // Read the file as an RDD (resilient distributed dataset) of lines.
      val fileRdd: RDD[String] = sparkContext.textFile(inputPath)

      // Classic word count: split each line on commas, pair each token
      // with 1, then sum the counts per token.
      val counts = fileRdd
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)

      // collect() pulls all results to the driver — acceptable here only
      // because the expected input is small.
      println(counts.collect().mkString(","))
    } finally {
      // Always release the SparkContext, even if the job fails,
      // so the local cluster resources are not leaked.
      sparkContext.stop()
    }
  }
}
