package org.test.hadoop.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

/**
 * Utility for obtaining a Hadoop {@link FileSystem} handle rooted at a
 * {@code viewfs:///} mount table that federates two HA HDFS nameservices
 * ({@code hadoopcluster} and {@code hadoopcluster2}).
 *
 * <p>NOTE(review): NameNode addresses are hard-coded below; in production these
 * settings normally come from {@code core-site.xml}/{@code hdfs-site.xml} on the
 * classpath rather than programmatic {@code conf.set} calls — confirm intent.
 */
public class HdfsUtil {

	/** Non-instantiable utility class. */
	private HdfsUtil() {
	}

	/**
	 * Creates a {@link FileSystem} client for the ViewFS namespace.
	 *
	 * @return a {@link FileSystem} bound to {@code viewfs:///} with the two HA
	 *         clusters mounted at {@code /cluster} and {@code /cluster2}
	 * @throws IOException if the filesystem cannot be created
	 * @throws URISyntaxException never in practice ({@code viewfs:///} is a
	 *         constant, valid URI); kept for interface compatibility
	 */
	public static FileSystem getFs() throws IOException, URISyntaxException {
		// The Hadoop client reads this property to pick the effective user.
		System.setProperty("HADOOP_USER_NAME", "hadoop");

		Configuration conf = new Configuration();

		// ViewFS root with one mount point per HA cluster.
		conf.set("fs.defaultFS", "viewfs:///");
		conf.set("fs.viewfs.mounttable.default.link./cluster", "hdfs://hadoopcluster/");
		conf.set("fs.viewfs.mounttable.default.link./cluster2", "hdfs://hadoopcluster2/");
		conf.set("dfs.nameservices", "hadoopcluster,hadoopcluster2");

		// HA nameservice "hadoopcluster": two NameNodes, client-side failover.
		conf.set("dfs.ha.namenodes.hadoopcluster", "nn1,nn2");
		conf.set("dfs.namenode.rpc-address.hadoopcluster.nn1", "192.168.128.132:8020");
		conf.set("dfs.namenode.rpc-address.hadoopcluster.nn2", "192.168.128.134:8020");
		conf.set("dfs.client.failover.proxy.provider.hadoopcluster",
				"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

		// HA nameservice "hadoopcluster2": two NameNodes, client-side failover.
		conf.set("dfs.ha.namenodes.hadoopcluster2", "nn3,nn4");
		conf.set("dfs.namenode.rpc-address.hadoopcluster2.nn3", "192.168.128.136:8020");
		conf.set("dfs.namenode.rpc-address.hadoopcluster2.nn4", "192.168.128.138:8020");
		conf.set("dfs.client.failover.proxy.provider.hadoopcluster2",
				"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

		return FileSystem.get(new URI("viewfs:///"), conf);
	}

}
