package study.wsn

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

object Products {
  /**
   * Reads a tab-separated products file, drops any record containing an
   * empty field, prints the surviving records, and writes them back out.
   */
  def main(args: Array[String]): Unit = {
    // Set up the Spark context (local mode for development runs).
    val conf = new SparkConf().setAppName("Products").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Input path; switch to the HDFS URI for cluster runs.
//    val inputFile = "hdfs://namenode:8020/input/products.txt"
      val inputFile = "file:///A:/input/products.txt"
      val textFile = sc.textFile(inputFile)

      // Keep only lines where every tab-separated field is non-empty.
      // NOTE: split("\t", -1) preserves trailing empty fields — the
      // no-limit overload silently drops them, which would let a line
      // ending in '\t' (empty last field) slip through the filter.
      val rdd = textFile.filter(_.split("\t", -1).forall(_.nonEmpty))

      rdd.foreach(println)
//    rdd.saveAsTextFile("hdfs://namenode:8020/output2/products_clear")
      rdd.saveAsTextFile("file:///A:/output2/products_clear")
    } finally {
      sc.stop() // release cluster resources even if the job fails
    }
  }
}