package org.jst4me.hadoop;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;

import java.io.IOException;

/**
 * Central place for the HDFS client configuration used by the shell-practice
 * examples: the cluster address, the local working directory, and a factory
 * method that builds a ready-to-use {@link FileSystem} client.
 */
public class HDFS_Configuration {
    /** NameNode URI (host and RPC port) of the target HDFS cluster. */
    public final static String HDFS_Server_And_Port = "hdfs://jst4me.master:9000";

    /** Local directory used as the working area for file transfers. */
    public final static String Local_Work_Dir = "D:\\dtinone\\shell_practise";

    /**
     * Returns a fresh Hadoop {@link Configuration} with the default resources
     * (core-default.xml, core-site.xml, ...) loaded.
     */
    private static Configuration getDefaultConfiguration(){
        return new Configuration();
    }

    /**
     * Builds a {@link FileSystem} client bound to {@link #HDFS_Server_And_Port}.
     *
     * @return a {@code FileSystem} connected to the configured HDFS cluster
     * @throws IOException if the file system cannot be instantiated or reached
     */
    public static FileSystem getFileSystem() throws IOException {
        Configuration configuration = getDefaultConfiguration();
        configuration.set("fs.defaultFS", HDFS_Server_And_Port);
        // NOTE(review): enable=true turns the replace-datanode-on-failure
        // feature on, but policy=NEVER tells the client never to actually
        // replace a failed datanode in the write pipeline — the combination
        // effectively disables replacement. That is common for small/dev
        // clusters with < 3 datanodes, but confirm it is intended here.
        configuration.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        configuration.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        // Pin the scheme-to-implementation mappings explicitly; this avoids
        // "No FileSystem for scheme: hdfs" errors when running from a shaded
        // jar whose META-INF/services entries were merged incorrectly.
        configuration.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
        configuration.set("fs.file.impl", LocalFileSystem.class.getName());

        return FileSystem.get(configuration);
    }
}
