package com.bishe.cyh.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * @Author: Poppin
 * @Date: 2022/1/12 13:07
 * @Version: 1.0
 */
/**
 * Static helpers for moving files between the local file system and a fixed
 * HDFS cluster ({@value #HDFS_URI}), plus a recursive local-folder delete.
 *
 * <p>Not thread-safe beyond what the Hadoop {@link FileSystem} cache provides;
 * each call opens and closes its own {@link FileSystem} handle.
 */
public final class HDFSUtils {

    /** NameNode address of the target HDFS cluster. */
    private static final String HDFS_URI = "hdfs://192.168.66.157:9000";

    /** User name impersonated for all HDFS operations. */
    private static final String HDFS_USER = "fuck";

    /** Utility class — not instantiable. */
    private HDFSUtils() {
    }

    /**
     * Builds a {@link Configuration} pointing at the fixed NameNode, forcing
     * the client to address DataNodes by hostname (needed when the cluster's
     * internal IPs are not routable from this machine), and sets the HDFS user.
     */
    private static Configuration newConfiguration() {
        Configuration conf = new Configuration();
        conf.set("dfs.client.use.datanode.hostname", "true");
        conf.set("fs.defaultFS", HDFS_URI);
        System.setProperty("HADOOP_USER_NAME", HDFS_USER);
        return conf;
    }

    /**
     * Copies {@code srcPath} from the local file system into the HDFS root
     * directory ({@code "/"}), then recursively deletes the local copy.
     *
     * <p>Failures are printed and swallowed (best-effort, preserving the
     * original caller contract); on failure the local file is NOT deleted.
     * TODO(review): route errors through a logger instead of stderr.
     *
     * @param srcPath local path of the file or directory to upload
     */
    public static void CopyFromLocalFile(String srcPath) {
        Configuration conf = newConfiguration();
        // try-with-resources: the FileSystem handle is released even if the
        // copy throws (the original closed it manually and leaked on error).
        try (FileSystem fs = FileSystem.get(conf)) {
            fs.copyFromLocalFile(new Path(srcPath), new Path("/"));
            // Remove the local source only after a successful upload.
            deleteFolder(new File(srcPath));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively deletes {@code "/" + srcPath} from HDFS.
     *
     * @param srcPath path relative to the HDFS root to delete
     * @throws URISyntaxException if the NameNode URI is malformed
     * @throws IOException if the delete fails
     * @throws InterruptedException if the connecting user thread is interrupted
     */
    public static void Delete(String srcPath) throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = newConfiguration();
        // try-with-resources fixes the original's FileSystem leak (fs was
        // never closed). delete(path, true) is the non-deprecated equivalent
        // of the single-arg delete(Path), which always recursed.
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER)) {
            fs.delete(new Path("/" + srcPath), true);
        }
    }

    /**
     * Recursively deletes a local file or directory tree.
     *
     * <p>Best-effort: individual {@link File#delete()} failures are ignored,
     * matching the original behavior; a non-empty directory whose children
     * could not be removed will simply fail to delete silently.
     *
     * @param file root file or directory to delete; a plain file is deleted directly
     */
    public static void deleteFolder(File file) {
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children) {
                if (child.isDirectory()) {
                    deleteFolder(child);
                } else {
                    child.delete();
                }
            }
        }
        file.delete();
    }
}
