package com.shujia.demo

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Telecom (dianxin) data-cleaning job.
 *
 * Reads raw CSV records from /data/dianxin, drops rows whose 5th field
 * (timestamp) is the null marker "\N" or that are too short to have one,
 * and writes the result to /data/staypoint/day=&lt;day&gt;, overwriting any
 * previous output for that day.
 *
 * Usage: spark-submit ... Demo1DataFIlter &lt;day&gt;
 */
object Demo1DataFIlter {
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    if (args.length < 1) {
      System.err.println("Usage: Demo1DataFIlter <day>")
      sys.exit(1)
    }
    val day = args(0)
    val outputPath = s"/data/staypoint/day=$day"
    println(s"Output path: $outputPath")

    val conf = new SparkConf().setAppName("Demo1DataFIlter")
    val sc = new SparkContext(conf)

    try {
      val data = sc.textFile("/data/dianxin")

      // Data cleaning: keep only records that actually have a 5th column
      // (guards against malformed/short lines) and whose timestamp is not
      // the Hive/MySQL null marker "\N".
      val filteredRDD = data.filter { line =>
        val fields = line.split(",")
        fields.length > 4 && !"\\N".equals(fields(4))
      }

      // Delete the output path if it already exists so the job is re-runnable.
      // FileSystem.newInstance returns a non-cached instance that we must close.
      val fileSystem = FileSystem.newInstance(new Configuration())
      try {
        val output = new Path(outputPath)
        if (fileSystem.exists(output)) {
          fileSystem.delete(output, true) // recursive delete
        }
      } finally {
        fileSystem.close()
      }

      filteredRDD.saveAsTextFile(outputPath)
    } finally {
      // Release cluster resources even if the job fails.
      sc.stop()
    }
  }
}
