package chapter03

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of basic file operations with Spark and the Hadoop FileSystem API:
 * reads a local text file via an RDD, checks for / reads a file on HDFS,
 * then copies a file from HDFS to the local filesystem and uploads one back.
 *
 * NOTE(review): the HDFS address 192.168.100.131:9000 and the "root" user are
 * hard-coded for a lab environment — confirm before reusing elsewhere.
 */
object Test02_FileOprator {
  def main(args: Array[String]): Unit = {
    // Hadoop client impersonates this user when talking to HDFS.
    System.setProperty("HADOOP_USER_NAME","root")
    // Build the Spark configuration: local mode using all available cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Fileop")
    // Create the Spark context (entry point for RDD operations).
    val sc = new SparkContext(sparkConf)
    try {
      // Read a local text file as an RDD of lines.
      val value = sc.textFile("input/word.txt")
      // collect() is an action: it triggers execution and pulls results to the driver.
      val strings = value.collect()
      println(strings.mkString("Array(", ", ", ")"))
      // Obtain a FileSystem handle for the HDFS cluster.
      val system = new Path("hdfs://192.168.100.131:9000")
        .getFileSystem(new Configuration())
      try {
        // Check whether /word.txt exists on HDFS before reading it.
        val bool = system.exists(new Path("/word.txt"))
        println(bool)
        // Read the HDFS file through Spark as well.
        val value1 = sc.textFile("hdfs://192.168.100.131:9000/word.txt")
        println(value1.collect().toList)
        // Download the HDFS file to the local filesystem.
        system.copyToLocalFile(new Path("hdfs://192.168.100.131:9000/word.txt"),
          new Path("input/ts.txt"))
        // Upload a local file to the HDFS root directory.
        system.copyFromLocalFile(
          new Path("input/wordlocal.txt"),
          new Path("hdfs://192.168.100.131:9000/"))
      } finally {
        // Release the Hadoop FileSystem handle even if an operation failed.
        system.close()
      }
    } finally {
      // Always stop the SparkContext so the application shuts down cleanly.
      sc.stop()
    }
  }
}
