import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
//import org.apache.spark.sql.SparkSession
/**
  * Created by DELL on 2017/9/21.
  */
/**
  * Word-count example (note: the object name `SparkPi` is historical — the
  * actual job counts word occurrences, it does not estimate Pi).
  *
  * Reads `README.md` from the working directory with a local-mode
  * SparkContext, splits each line on single spaces, counts occurrences of
  * each token, and prints every (word, count) pair to stdout.
  */
object SparkPi {
  def main(args: Array[String]): Unit = { // explicit `: Unit =` — procedure syntax is deprecated (removed in Scala 3)
    val conf = new SparkConf().setMaster("local").setAppName("Wordcount")
    val sc = new SparkContext(conf)
    try {
      val data = sc.textFile("README.md") // location of the input text file
      data.flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext's resources, even if the job fails.
      sc.stop()
    }
  }
}
