package org.apache.hadoop.contrib.ftp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.log4j.Logger;

import java.io.IOException;
import java.net.URI;

/**
 * Holds the single shared {@link DistributedFileSystem} connection used by the
 * FTP-over-HDFS server. Configure {@link #setHDFS_URI(String)} and (optionally)
 * {@link #setSuperuser(String)} before the first call to {@link #getDfs()}.
 */
public class HdfsOverFtpSystem {

    // Lazily-initialized shared DFS client; stays null until a connection
    // has been successfully established (see hdfsInit()).
    private static DistributedFileSystem dfs = null;

    /** URI of the HDFS namenode, e.g. {@code hdfs://host:9000}. Must be set before getDfs(). */
    public static String HDFS_URI = "";

    // Identity written into the "hadoop.job.ugi" property when connecting.
    private static String superuser = "superuser";
    private static String supergroup = "supergroup";

    private final static Logger log = Logger.getLogger(HdfsOverFtpSystem.class);


    /**
     * Connects to HDFS at {@link #HDFS_URI} as {@code superuser},{@code supergroup}.
     * On failure the error is logged and {@link #dfs} is left null, so a later
     * call to {@link #getDfs()} can retry the connection.
     */
    private static void hdfsInit() {
        DistributedFileSystem fs = new DistributedFileSystem();
        Configuration conf = new Configuration();
        conf.set("hadoop.job.ugi", superuser + "," + supergroup);
        try {
            log.info("user:" + superuser + "\t group:" + supergroup);
            fs.initialize(new URI(HDFS_URI), conf);
            // Publish the filesystem only after initialize() succeeds, so
            // callers never observe a constructed-but-uninitialized instance.
            dfs = fs;
        } catch (Exception e) {
            log.error("DFS Initialization error", e);
        }
    }

    /**
     * Sets the HDFS namenode URI to connect to.
     *
     * @param HDFS_URI namenode URI, e.g. {@code hdfs://host:9000}
     */
    public static void setHDFS_URI(String HDFS_URI) {
        HdfsOverFtpSystem.HDFS_URI = HDFS_URI;
    }

    /**
     * Returns the shared DFS connection, establishing it on first use.
     * Synchronized so concurrent FTP sessions cannot race the lazy
     * initialization and double-connect.
     *
     * @return the connected filesystem, or {@code null} if initialization failed
     *         (the failure is logged; the next call will retry)
     */
    public static synchronized DistributedFileSystem getDfs() {
        if (dfs == null) {
            hdfsInit();
        }
        return dfs;
    }

    /**
     * Set superuser. and we connect to DFS as a superuser
     *
     * @param superuser user name to connect to DFS as
     */
    public static void setSuperuser(String superuser) {
        HdfsOverFtpSystem.superuser = superuser;
    }
}
