package cc.lucien.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * Demonstrates basic HDFS file-management operations (upload, download, delete)
 * via the Hadoop {@code FileSystem} API.
 */
public class HDFSFileOpr {

    /**
     * Uploads a fixed local file to a fixed HDFS path.
     *
     * <p>Copies {@code D:\Users\xiaoma\ludingji.txt} to
     * {@code /user/hadoop/demo/ludingji.txt} on the cluster addressed by {@code url}.
     *
     * @param url the HDFS NameNode URI, e.g. {@code hdfs://host:9000}
     * @throws IOException if the cluster is unreachable or the copy fails
     */
    public void upload(String url) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", url);
        // newInstance (not get) returns a private, uncached handle that is safe to
        // close; try-with-resources guarantees it is released even on failure.
        // The previous code used FileSystem.get and never closed the handle.
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            // Copy the local file up to the cluster.
            fs.copyFromLocalFile(new Path("D:\\Users\\xiaoma\\ludingji.txt"),
                    new Path("/user/hadoop/demo/ludingji.txt"));
        }
        System.out.println("文件上传成功！");
    }

    /**
     * Downloads a fixed HDFS file to a fixed local path.
     *
     * <p>Copies {@code /user/hadoop/demo/ludingji.txt} from the cluster to
     * {@code D:/Users/xiaoma/download/ludingji.txt}.
     *
     * @param url the HDFS NameNode URI, e.g. {@code hdfs://host:9000}
     * @throws IOException if the cluster is unreachable or the copy fails
     */
    public void download(String url) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", url);
        // Close the private FileSystem handle deterministically (was leaked before).
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            fs.copyToLocalFile(new Path("/user/hadoop/demo/ludingji.txt"),
                    new Path("D:/Users/xiaoma/download/ludingji.txt"));
        }
        System.out.println("下载成功！");
    }

    /**
     * Recursively deletes the {@code /user/hadoop/demo} directory on HDFS.
     *
     * <p>NOTE: despite the method name, this removes the whole demo directory
     * (the {@code recursive=true} flag), not a single file.
     *
     * @param url the HDFS NameNode URI, e.g. {@code hdfs://host:9000}
     * @throws IOException if the cluster is unreachable or the delete fails
     */
    public void removeFile(String url) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", url);
        // Close the private FileSystem handle deterministically (was leaked before).
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            // true => recursive delete of the directory and its contents.
            fs.delete(new Path("/user/hadoop/demo"), true);
        }
        System.out.println("删除成功！");
    }

    public static void main(String[] args) throws IOException {
        // Identify as the "hadoop" user to HDFS (simple-auth clusters read this property).
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        String url = "hdfs://10.24.130.132:9000";
        HDFSFileOpr hdfsFileOpr = new HDFSFileOpr();
        // Uncomment the operation to run:
        // hdfsFileOpr.upload(url);
        hdfsFileOpr.download(url);
        // hdfsFileOpr.removeFile(url);
    }
}
