/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package cn.ac.iie.di.ban.data.exchange.runner.master;

import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 *
 * @author Administrator
 */
/**
 * Static helpers for working with HDFS: obtaining a {@link FileSystem},
 * (re)creating directories, listing paths, and opening files as streams.
 *
 * <p>Failure contract: {@code getHDFS}, {@code getHDFSPathList} and
 * {@code readFileAsStream} return {@code null} on failure (logged);
 * {@code createHDFSPath} logs and rethrows.
 */
public class HDFSUtil {

    static final Logger logger = Logger.getLogger(HDFSUtil.class);

    /**
     * Resolves a {@link FileSystem} for the given HDFS path/URI string.
     *
     * @param conf            Hadoop configuration used to resolve the filesystem
     * @param pHDFSFilePathStr an HDFS URI string, e.g. {@code hdfs://nameservice1/dir}
     * @return the filesystem, or {@code null} if the string is empty/null or
     *         resolution fails (failure is logged at WARN)
     */
    public static FileSystem getHDFS(Configuration conf, String pHDFSFilePathStr) {
        if (pHDFSFilePathStr == null || pHDFSFilePathStr.isEmpty()) {
            return null;
        }
        URI hdfsURI = URI.create(pHDFSFilePathStr);
        try {
            return FileSystem.get(hdfsURI, conf);
        } catch (Exception ex) {
            // Fixed message: original fused the path and the word "unsuccessfully".
            logger.warn("get hdfs of " + pHDFSFilePathStr + " unsuccessfully for " + ex.getMessage(), ex);
            return null;
        }
    }

    /**
     * Recreates the given HDFS directory: deletes it (recursively) if present,
     * then creates it empty. NOTE: any existing content is destroyed.
     *
     * @param conf      Hadoop configuration
     * @param pHDFSPath directory to (re)create
     * @throws Exception if the filesystem cannot be resolved or the
     *                   delete/mkdirs operation fails
     */
    public static void createHDFSPath(Configuration conf, Path pHDFSPath) throws Exception {
        logger.info("create hdfsPath is " + pHDFSPath);
        FileSystem fs = getHDFS(conf, pHDFSPath.toString());
        if (fs == null) {
            // getHDFS returns null on failure; the original would NPE here.
            throw new IllegalStateException("cannot resolve filesystem for " + pHDFSPath);
        }
        try {
            fs.delete(pHDFSPath, true);
            fs.mkdirs(pHDFSPath);
        } catch (Exception ex) {
            // Include the exception so the stack trace is not lost (it is rethrown,
            // but callers may swallow it; log the cause at this boundary too).
            logger.error("HDFS:create " + pHDFSPath + " failed ", ex);
            throw ex;
        }
    }

    /**
     * Lists the immediate children of {@code path}. If {@code path} does not
     * refer to a file, it is created as a directory first (so listing an
     * absent directory yields an empty array rather than an error).
     *
     * @param conf Hadoop configuration
     * @param path HDFS path string to list
     * @return the child paths, or {@code null} if the operation failed
     *         (failure is logged at ERROR)
     * @throws Exception never thrown in practice; kept for interface compatibility
     */
    public static Path[] getHDFSPathList(Configuration conf, String path) throws Exception {
        FileSystem fs = getHDFS(conf, path);
        Path[] p = null;
        try {
            Path hdfsPath = new Path(path);
            // Original had an empty then-branch; create the dir only when it is
            // not an existing file.
            if (!fs.isFile(hdfsPath)) {
                fs.mkdirs(hdfsPath);
            }
            FileStatus[] s = fs.listStatus(hdfsPath);
            p = FileUtil.stat2Paths(s);
        } catch (Exception e) {
            // Log the cause instead of silently swallowing it.
            logger.error("HDFS:getHDFSPathList " + path + " failed ", e);
        }
        return p;
    }

    /**
     * Opens an HDFS file as an {@link InputStream}.
     *
     * @param conf     Hadoop configuration
     * @param filePath full HDFS URI of the file to read
     * @return an open stream (caller must close it), or {@code null} if the
     *         path is not a file or opening failed (failure is logged at ERROR)
     * @throws Exception if the filesystem URI is invalid
     */
    public static InputStream readFileAsStream(Configuration conf, String filePath) throws Exception {
        logger.info("inputStream + path + " + System.currentTimeMillis() + "   " + filePath);
        FileSystem fs = FileSystem.get(new URI(filePath), conf);
        Path srcPath = new Path(filePath);
        InputStream in = null;
        try {
            if (fs.isFile(srcPath)) {
                in = fs.open(srcPath);
            }
        } catch (Exception e) {
            // Log the cause instead of silently swallowing it.
            logger.error("HDFS:readFileAsStream " + filePath + " failed ", e);
        }
        return in;
    }

    /**
     * Manual smoke test against an HA nameservice; not part of the public API.
     * NOTE(review): addresses/ports below look like test-environment values.
     */
    public static void main(String[] args) throws Exception {
        System.setProperty("hadoop.home.dir", "C:\\HADOOP\\hadoop-common-2.2.0-bin-master");
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://nameservice1");
        conf.set("dfs.nameservices", "nameservice1");
        conf.set("dfs.ha.namenodes.nameservice1", "namenode1,namenode2");
        conf.set("dfs.namenode.rpc-address.nameservice1.namenode1", "192.168.11.96:1111");
        conf.set("dfs.namenode.rpc-address.nameservice1.namenode2", "192.168.11.95:1111");
        conf.set("dfs.client.failover.proxy.provider.nameservice1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        conf.set("dfs.client.failover.connection.retries", "1");
        conf.set("dfs.client.failover.connection.retries.on.timeouts", "1");

        HDFSUtil.createHDFSPath(conf, new Path("hdfs://nameservice1/ji"));
    }

}
