package com.aigis.big4.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.net.URI;

public class HdfsOperateMain {

    /**
     * Uploads every file from a fixed local directory into an HDFS input
     * directory, creating both the input and output directories first.
     *
     * <p>Connects to a single-node HDFS at {@code hdfs://127.0.0.1:9000}.
     *
     * @param args unused
     * @throws Exception if the HDFS connection, directory creation, or any
     *                   file copy fails
     */
    public static void main(String[] args) throws Exception {
        Configuration config = new Configuration();
        // "fs.defaultFS" is the current name of the deprecated "fs.default.name" key.
        // (The old "mapred.jop.tracker" setting was both a typo of
        // "mapred.job.tracker" and unnecessary for pure HDFS operations.)
        config.set("fs.defaultFS", "hdfs://127.0.0.1:9000");

        String localDirPath = "D:/hadoop/study/countstr/";
        File localDir = new File(localDirPath);
        File[] localFiles = localDir.listFiles();
        if (localFiles == null) {
            // listFiles() returns null when the path does not exist, is not a
            // directory, or cannot be read — fail loudly instead of NPE-ing.
            throw new IllegalStateException("Not a readable directory: " + localDirPath);
        }

        String hdfsPathStr = "hdfs://127.0.0.1:9000/test/multiFileCount/input";
        String outPathStr = "hdfs://127.0.0.1:9000/test/multiFileCount/output";

        // try-with-resources closes the FileSystem handle even on failure
        // (the original leaked it, and also opened a second unused instance).
        try (FileSystem fileSystem = FileSystem.get(URI.create(hdfsPathStr), config)) {
            Path hdfsPath = new Path(hdfsPathStr);
            fileSystem.mkdirs(hdfsPath);
            fileSystem.mkdirs(new Path(outPathStr));
            for (File localFile : localFiles) {
                fileSystem.copyFromLocalFile(new Path(localFile.getAbsolutePath()), hdfsPath);
            }
        }
    }
}
