package com.jc.demo.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import static org.apache.hadoop.fs.FileSystem.DEFAULT_FS;
import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;

/**
 * Copies a local file into a Hadoop {@link FileSystem} destination (writing it), creating
 * any missing parent directories automatically.
 * <p>
 * Note: {@code FileSystem} can also operate on the local file system — if the destination
 * path carries no scheme, it resolves against the default filesystem from configuration
 * (so a bare path like {@code /opt/java/} would target the local directory).
 */
public class FileCopyWithProgressNoaaWeather {
    /**
     * Entry point.
     *
     * @param args {@code args[0]} = local source file path,
     *             {@code args[1]} = destination path (e.g. an HDFS path or URI)
     * @throws Exception on any configuration or I/O failure
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: FileCopyWithProgressNoaaWeather <localSrc> <dst>");
            System.exit(1);
        }
        String localSrc = args[0];
        String dst = args[1];

        Configuration conf = new Configuration();
        // Layer the cluster-specific settings on top of Hadoop's built-in defaults.
        conf.addDefaultResource("hadoop-cluster.xml");
        System.out.println(conf.get(FS_DEFAULT_NAME_KEY, DEFAULT_FS));
        System.out.println(conf.get("dfs.ha.namenodes.ns"));

        FileSystem fs = FileSystem.get(conf);
        // try-with-resources guarantees both streams are closed even when fs.create()
        // or the copy throws (the original opened `in` first and leaked it on failure).
        try (InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
             OutputStream out = fs.create(new Path(dst), new Progressable() {
                 public void progress() {
                     // Invoked by the client as data blocks are flushed to the cluster;
                     // uncomment to print a dot per callback as a crude progress bar.
                     // System.out.print(".");
                 }
             })) {
            // Streams are owned by try-with-resources, so tell copyBytes NOT to close them.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }
}
