package HDFS;

import com.application.fabric.users.config.HDFS.HDFSConfiguration;
import com.application.fabric.users.config.HDFS.HadoopTemplate;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import javax.annotation.Resource;
import java.io.IOException;
import java.io.OutputStream;

@Repository // NOTE(review): @Repository on a JUnit test class is unusual — tests are not normally Spring beans; confirm this is intentional.
@Slf4j
public class zsgc {

    // Local sample file for manual upload experiments (referenced only by commented-out code historically).
    private static String srcFile = "src/main/java/com/application/fabric/fabriccontrol/existevidence/capool/JGn2xPOvimQHbbdc6b882d7f4930923653c122b7db72.jpg";
    // Target directory on HDFS used by upload experiments.
    private static String hdfsPath = "/fabric";

    @Resource
    private HDFSConfiguration hdfsConfiguration;

    /**
     * Smoke test: constructs a fresh {@link HDFSConfiguration} by hand and
     * verifies that a FileSystem created from it can answer an exists() query
     * for {@code /test1} without throwing.
     *
     * @throws Exception if the FileSystem cannot be created or queried
     */
    @Test
    public void config() throws Exception {
        Path path = new Path("/test1");
        HDFSConfiguration hdfsConfiguration1 = new HDFSConfiguration();
        hdfsConfiguration1.createFs().exists(path);
    }

    // NOTE(review): hard-coded credentials committed to source control — move
    // these to externalized test configuration or environment variables.
    private static String user = "hatelys";
    private static String password = "123456";
    private static String hdfsSite = "/home/hatelys/HDFS/hadoop-3.2.2/etc/hadoop/hdfs-site.xml";
    private static String coreSite = "/home/hatelys/HDFS/hadoop-3.2.2/etc/hadoop/core-site.xml";

    /**
     * Connects to a local HDFS instance directly (bypassing Spring wiring)
     * and prints whether {@code /test1} exists.
     *
     * @throws IOException if the FileSystem cannot be obtained or queried
     */
    @Test
    public void HdfsExample() throws IOException {
        System.setProperty("HADOOP_USER_NAME", user);
        System.setProperty("HADOOP_USER_PASSWORD", password);
        Configuration conf = new Configuration();
        System.setProperty("HADOOP_HOME", "/home/hatelys/HDFS/hadoop-3.2.2");
        System.setProperty("hadoop.home.dir", "/home/hatelys/HDFS/hadoop-3.2.2");
        conf.set("fs.defaultFS", "hdfs://localhost:9000"); // set the HDFS URL
        // FIX: Configuration.addResource(String) treats its argument as a
        // CLASSPATH resource name, so the absolute filesystem paths above were
        // silently ignored. Wrapping them in Path loads them from the local
        // filesystem as intended.
        conf.addResource(new Path(coreSite));
        conf.addResource(new Path(hdfsSite));

        // FIX: close the FileSystem when done instead of leaking the handle.
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path("/test1");
            System.out.println(fs.exists(path));
        }
    }

    @Autowired
    private HadoopTemplate hadoopTemplate;

    /**
     * Prints the injected {@link HDFSConfiguration} bean so a developer can
     * eyeball that Spring wiring resolved it (null would indicate a wiring
     * problem).
     *
     * @throws Exception declared for parity with earlier cleanup experiments
     */
    @Test
    public void Test() throws Exception {
        System.out.println(hdfsConfiguration);
    }
}
