
import org.apache.spark.SparkContext._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{Path, FileSystem}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by root on 2015/3/3.
 */
// NOTE(review): empty placeholder class generated alongside the companion
// object below; it carries no state or behavior and appears unused.
class test {

}

object test {

  /**
   * Spark word-count driver.
   *
   * Reads text from `hdfs:///test/workCount`, counts whitespace-separated
   * words, and writes "word,count" lines to `hdfs:///test/res` (deleting
   * any previous output first). Afterwards it references several
   * whole-text-file directories, caching one of them.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val filePath = "hdfs:///test/res"
    val sparkConf = new SparkConf().setAppName("workCount")
    val sc = new SparkContext(sparkConf)
    try {
      // Use the context's Hadoop configuration (instead of a bare
      // `new Configuration()`) so fs.defaultFS and other cluster
      // settings match the Spark deployment.
      val hdfs = FileSystem.get(sc.hadoopConfiguration)
      val outPath = new Path(filePath)
      // saveAsTextFile fails if the output directory already exists,
      // so recursively delete any previous run's result first.
      if (hdfs.exists(outPath)) hdfs.delete(outPath, true)

      sc.textFile("hdfs:///test/workCount")
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .map { case (word, count) => s"$word,$count" }
        .saveAsTextFile(filePath)

      // NOTE(review): this RDD is created but never acted on — RDDs are
      // lazy, so this line has no effect; presumably a leftover. Confirm
      // before removing.
      sc.wholeTextFiles("hdfs:///flume/cmd/")

      // FIX: the original called bare `persist(...)` as a standalone
      // statement (a compile error — no such function in scope); the
      // intent was to persist this RDD, so chain the call onto it.
      sc.wholeTextFiles("hdfs:///hive/core_device/")
        .persist(StorageLevel.MEMORY_AND_DISK_SER_2)

      // NOTE(review): also unused — see comment above.
      sc.wholeTextFiles("hdfs:///analyzedb/core_device/")
    } finally {
      // Release cluster resources even if the job fails.
      sc.stop()
    }
  }
}
