package com.inspur

import org.apache.spark.{SparkConf, SparkContext}

object DataClear {

  // Default HDFS locations; can be overridden by passing
  // <inputBase> <outputBase> as program arguments (backward-compatible:
  // with no args the original hard-coded paths are used).
  private val DefaultInputBase  = "hdfs://192.168.66.88:8020/0616/data"
  private val DefaultOutputBase = "hdfs://192.168.66.88:8020/0616/clear_data"

  /** Returns true when every tab-separated field of `line` is non-empty.
    *
    * Uses `split("\t", -1)` so trailing empty fields are NOT silently
    * dropped — the original `split("\t")` let lines ending in a tab
    * (i.e. with an empty last column) pass the filter.
    */
  private def hasNoEmptyField(line: String): Boolean =
    line.split("\t", -1).forall(_.nonEmpty)

  /** Data cleaning for one dataset: reads `name` from `inputBase`, drops
    * rows containing any empty field, writes the result under `outputBase`.
    */
  private def cleanDataset(sc: SparkContext, inputBase: String, outputBase: String, name: String): Unit =
    sc.textFile(s"$inputBase/$name")
      .filter(hasNoEmptyField)
      .saveAsTextFile(s"$outputBase/$name")

  def main(args: Array[String]): Unit = {
    val inputBase  = if (args.length > 0) args(0) else DefaultInputBase
    val outputBase = if (args.length > 1) args(1) else DefaultOutputBase

    val conf = new SparkConf()
//      .setMaster("spark://192.168.66.88:7077")
      .setMaster("local")
      .setAppName("data clear")
    val sc = new SparkContext(conf)

    try {
      // Clean each dataset with the shared pipeline (was duplicated inline).
      Seq("order_goods", "products").foreach(cleanDataset(sc, inputBase, outputBase, _))
    } finally {
      sc.stop() // always release the context, even if a job fails
    }
  }

}
