package hadoop.ch03.v17124080112;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Creates a file on HDFS, writes "Hello World" into it, and then downloads
 * it to the local filesystem (a Windows drive path by default).
 *
 * <p>Connects to the NameNode at the hard-coded URI as user {@code "hadoop"}.
 */
public class DownloadHDFSFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // NameNode address — adjust to match your NameNode IP/port.
        URI uri = new URI("hdfs://192.168.30.131:8020");
        // Obtain the FileSystem as user "hadoop". try-with-resources
        // guarantees fs is closed even when an operation below throws
        // (the original leaked it on any exception before fs.close()).
        try (FileSystem fs = FileSystem.get(uri, conf, "hadoop")) {
            // Path of the file to create on HDFS.
            Path dfs = new Path("/17124080112/test5.txt");
            // overwrite=true: replace the file if it already exists.
            try (FSDataOutputStream os = fs.create(dfs, true)) {
                // Encode explicitly as UTF-8; writeBytes() drops the high
                // byte of each char and is only safe for pure ASCII.
                os.write("Hello World".getBytes(StandardCharsets.UTF_8));
            }
            // HDFS source file
            Path src = new Path("/17124080112/test5.txt");
            // Local destination file
            Path dst = new Path("e:\\test5.txt");
            // On Linux:
            //fs.copyToLocalFile(src,dst);
            // On Windows: useRawLocalFileSystem=true writes via the raw
            // local FS, avoiding the native winutils checksum dependency.
            fs.copyToLocalFile(false, src, dst, true);
        }

        System.out.println("Download Successfully!");
    }
}
