/**  
 * @Title:  CdnHdfsFileMan.java
 * @Package com.hugedata.cdnserver.util
 * @Description: Utility for reading files from HDFS and merging their
 *               contents into local files, and for listing HDFS directories.
 * @author Fengsen
 * @date  2013-8-20 下午4:35:58
 * @version V1.0  
 * ****************************************************
 * Update Logs:
 * Name:
 * Date:
 * Description:
 ******************************************************
 */
package com.hugedata.cdnserver.util;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 * @ClassName: CdnHdfsFileMan
 * @Description: Reads files from HDFS and merges them into local files;
 *               also lists the contents of HDFS directories.
 * @author Fengsen
 * @date 2013-8-20 下午4:35:58
 * 
 */
public class CdnHdfsFileMan
{
    private static final Logger LOGGER = Logger
            .getLogger(CdnHdfsFileMan.class);

    /**
     * 读取HDFS文件: reads every regular file directly under the HDFS directory
     * {@code filePath} and appends their lines, in listing order, to the
     * local file {@code dest}.
     * 
     * @param conf
     *            Hadoop configuration used to open the file system
     * @param filePath
     *            源文件路径 (source HDFS directory path)
     * @param dest
     *            写入文件路径 (local destination file path)
     * @return the local destination file, or {@code null} when the HDFS
     *         directory listing is unavailable (the local file is still
     *         created, possibly empty, in that case)
     * @throws IOException
     *             on any HDFS or local I/O failure
     */
    public File readFromHdfs(Configuration conf, String filePath, String dest)
            throws IOException
    {
        File file = new File(dest);
        // 写入本地文件系统
        OutputStream out = null;
        OutputStreamWriter outSW = null;
        BufferedWriter bw = null;
        try
        {
            out = new FileOutputStream(file);
            outSW = new OutputStreamWriter(out);
            bw = new BufferedWriter(outSW);

            FileStatus[] directoryFromHdfs = getDirectoryFromHdfs(conf,
                    filePath);
            if (directoryFromHdfs == null)
            {
                LOGGER.info("读取文件错误,此目录下可能没有文件.");
                return null;
            }

            for (FileStatus fst : directoryFromHdfs)
            {
                // 只读取普通文件, 跳过子目录
                if (!fst.isDir())
                {
                    appendHdfsFile(conf, fst.getPath().toString(), bw);
                }
            }
        }
        finally
        {
            // Close in reverse order of creation; closeStream swallows
            // close-time IOExceptions and tolerates nulls.
            org.apache.zookeeper.common.IOUtils.closeStream(bw);
            org.apache.zookeeper.common.IOUtils.closeStream(outSW);
            org.apache.zookeeper.common.IOUtils.closeStream(out);
        }
        return file;
    }

    /**
     * 读取HDFS文件: merges into the local file {@code dest} those files from
     * {@code filePath} whose name contains {@code ispcode} — or, for codes
     * other than "10"/"11", whose name contains "other" (同一个运营商合并文件).
     * 
     * @param conf
     *            Hadoop configuration used to open the file system
     * @param filePath
     *            源文件路径 (HDFS file statuses to consider)
     * @param dest
     *            写入文件路径 (local destination file path)
     * @param ispcode
     *            ISP code used to select which files are merged
     * @return the local destination file (created even if nothing matched)
     * @throws IOException
     *             on any HDFS or local I/O failure
     */
    public File readfileFromHdfs(Configuration conf, FileStatus[] filePath,
            String dest, String ispcode) throws IOException
    {
        File file = new File(dest);
        // 写入本地文件系统
        OutputStream out = null;
        OutputStreamWriter outSW = null;
        BufferedWriter bw = null;
        try
        {
            out = new FileOutputStream(file);
            outSW = new OutputStreamWriter(out);
            bw = new BufferedWriter(outSW);

            for (FileStatus fst : filePath)
            {
                if (fst.isDir())
                {
                    continue;
                }
                String path = fst.getPath().toString();
                String ispname = path.substring(path.lastIndexOf('/') + 1);
                // 同一个运营商合并文件: direct ISP-code match, or the "other"
                // bucket fallback for codes outside "10"/"11".
                boolean matchesIsp = ispname.indexOf(ispcode) != -1;
                boolean matchesOther = !ispcode.equals("10")
                        && !ispcode.equals("11")
                        && ispname.indexOf("other") != -1;
                if (matchesIsp || matchesOther)
                {
                    LOGGER.debug("path:" + path + "--->" + "isp:" + ispcode
                            + "," + "dest:" + dest);
                    appendHdfsFile(conf, path, bw);
                }
            }
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(bw);
            org.apache.zookeeper.common.IOUtils.closeStream(outSW);
            org.apache.zookeeper.common.IOUtils.closeStream(out);
        }
        return file;
    }

    /**
     * 列出HDFS目录: lists the entries directly under the given HDFS path.
     * 
     * @param conf
     *            Hadoop configuration used to open the file system
     * @param path
     *            路径 (HDFS directory path)
     * @return the statuses of the entries under {@code path}, or {@code null}
     *         if the listing failed without throwing
     * @throws IOException
     *             on any HDFS I/O failure
     */
    public FileStatus[] getDirectoryFromHdfs(Configuration conf, String path)
            throws IOException
    {
        // 获取HDFS文件系统
        FileSystem fs = null;
        FileStatus fileList[] = null;
        try
        {
            fs = FileSystem.newInstance(URI.create(path), conf);
            // 获取指定路径下的文件
            fileList = fs.listStatus(new Path(path));
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(fs);
        }

        return fileList;
    }

    /**
     * Opens the HDFS file at {@code path} and appends each of its lines,
     * followed by the platform line separator, to {@code bw}. The
     * FileSystem and stream opened here are always closed before returning,
     * so calling this once per file inside a loop leaks no handles (the
     * previous code re-assigned a single fs/stream pair per iteration and
     * only closed the last one).
     * 
     * @throws IOException
     *             on any HDFS read or local write failure
     */
    private void appendHdfsFile(Configuration conf, String path,
            BufferedWriter bw) throws IOException
    {
        String lineStr = System.getProperty("line.separator");
        FileSystem fs = null;
        FSDataInputStream hdfsInStream = null;
        BufferedReader reader = null;
        try
        {
            // 获取HDFS文件系统
            fs = FileSystem.newInstance(URI.create(path), conf);
            // 打开文件流
            hdfsInStream = fs.open(new Path(path));
            // 按行读取. BufferedReader.readLine() replaces the deprecated
            // DataInputStream.readLine(), which mis-decodes non-ASCII bytes.
            reader = new BufferedReader(new InputStreamReader(hdfsInStream));
            String line = reader.readLine();
            while (null != line)
            {
                bw.write(line + lineStr);
                line = reader.readLine();
            }
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(reader);
            org.apache.zookeeper.common.IOUtils.closeStream(hdfsInStream);
            org.apache.zookeeper.common.IOUtils.closeStream(fs);
        }
    }
}
