package com.hugedata.cdnserver.util;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.net.URI;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.Vector;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;

import com.hugedata.cdn.util.PropUtils;

public class HdfsFileUtils
{
    /** log */
    private static final Log LOG = LogFactory.getLog(HdfsFileUtils.class);
    
    /**
     * Uploads a local file to HDFS.
     * 
     * @param config
     *            Hadoop configuration
     * @param local
     *            local source file path
     * @param hdfs
     *            destination HDFS path (also used to select the file system)
     * @return true on success, false on any failure (logged)
     */
    public boolean uploadToHdfs(Configuration config, String local, String hdfs)
    {
        FileInputStream fis = null;
        try
        {
            // Obtain the HDFS file system for the destination URI
            FileSystem fs = FileSystem.get(URI.create(hdfs), config);

            // Open the local source file
            fis = new FileInputStream(new File(local));
            OutputStream os = fs.create(new Path(hdfs));

            // copyBytes(..., true) closes both streams when the copy ends,
            // so no explicit close of os/fis is needed on the success path
            IOUtils.copyBytes(fis, os, 4096, true);

            LOG.info("finish copy...");
            return true;
        }
        catch (Exception e)
        {
            // log with the full stack trace instead of only e.toString()
            LOG.error("upload to hdfs failed: " + local + " -> " + hdfs, e);
            return false;
        }
        finally
        {
            // release the local stream even if fs.create() threw before copyBytes
            IOUtils.closeStream(fis);
        }
    }

    /**
     * Uploads every file in a local directory into a single HDFS file,
     * concatenating their contents in listing order.
     * 
     * @param config
     *            Hadoop configuration
     * @param local
     *            local input directory
     * @param hdfs
     *            HDFS output file (created/overwritten)
     * @return true on success, false on any failure (logged)
     */
    public boolean uploadToHdfsFromLocDir(Configuration config, String local,
            String hdfs)
    {
        FSDataOutputStream out = null;
        try
        {
            // HDFS and local file systems
            FileSystem hdfsFileSys = FileSystem.get(config);
            FileSystem localFileSys = FileSystem.getLocal(config);

            Path inputDir = new Path(local);
            Path hdfsFile = new Path(hdfs);

            // entries of the local input directory
            FileStatus[] inputFiles = localFileSys.listStatus(inputDir);

            // create the HDFS output file
            out = hdfsFileSys.create(hdfsFile);

            for (int i = 0; i < inputFiles.length; i++)
            {
                FSDataInputStream in = localFileSys.open(inputFiles[i]
                        .getPath());
                try
                {
                    // byte-oriented copy (1 KB chunks)
                    byte[] ioBuffer = new byte[1024];
                    int readLen = in.read(ioBuffer);
                    while (-1 != readLen)
                    {
                        out.write(ioBuffer, 0, readLen);
                        readLen = in.read(ioBuffer);
                    }
                }
                finally
                {
                    // close EVERY input stream (the original closed only the
                    // last one, and NPE'd when the directory was empty)
                    in.close();
                }
            }
            LOG.info("文件复制完成.....");
            return true;
        }
        catch (IOException e)
        {
            LOG.error("复制本地文件到Hdfs出错了" + e.toString());
            return false;
        }
        finally
        {
            // ensure the HDFS output stream is released on every path
            IOUtils.closeStream(out);
        }
    }

    /**
     * Reads every regular file under an HDFS path and writes the concatenated
     * contents to a local file.
     * 
     * @param conf
     *            Hadoop configuration
     * @param filePath
     *            source HDFS path (its directory listing is copied)
     * @param dest
     *            local destination path; if it is an existing directory, a
     *            file named "result" is created inside it
     * @return true on success, false on any failure (logged)
     */
    public boolean readFromHdfs(Configuration conf, String filePath, String dest)
    {
        OutputStream out = null;
        FileSystem fs = null;
        try
        {
            File file = new File(dest);
            if (!file.exists())
            {
                // refuse to create missing parent directories
                File parentFile = file.getParentFile();
                if (!parentFile.exists())
                {
                    LOG.error("output path directory doesn't exist");
                    return false;
                }
            }
            if (file.isDirectory())
            {
                // write into <dest>/result when dest is a directory
                String path;
                if (dest.endsWith(File.separator))
                {
                    path = dest + "result";
                }
                else
                {
                    path = dest + File.separator + "result";
                }
                file = new File(path);
                LOG.info("the output path is a directory,use path:" + path);
            }

            // local output file
            out = new FileOutputStream(file);

            FileStatus[] directoryFromHdfs = getDirectoryFromHdfs(conf,
                    filePath);
            for (FileStatus fst : directoryFromHdfs)
            {
                if (fst.isDir())
                {
                    continue; // only regular files are copied
                }
                String path = fst.getPath().toString();
                fs = FileSystem.get(URI.create(path), conf);
                FSDataInputStream hdfsInStream = fs.open(new Path(path));
                try
                {
                    byte[] ioBuffer = new byte[1024];
                    int readLen = hdfsInStream.read(ioBuffer);
                    while (-1 != readLen)
                    {
                        out.write(ioBuffer, 0, readLen);
                        readLen = hdfsInStream.read(ioBuffer);
                    }
                }
                finally
                {
                    // close each input stream, not only the last one
                    hdfsInStream.close();
                }
            }
            return true;
        }
        catch (Exception e)
        {
            // the original swallowed this with no trace at all
            LOG.error("read from hdfs " + filePath + " to " + dest + " failed",
                    e);
            return false;
        }
        finally
        {
            IOUtils.closeStream(out);
            // NOTE(review): the original also closed fs on success; fs may come
            // from FileSystem.get's shared cache — confirm no concurrent users
            IOUtils.closeStream(fs);
        }
    }

    /**
     * Reads an entire HDFS file into a byte array.
     * 
     * @param dst
     *            HDFS path of the file
     * @param jobconf
     *            Hadoop configuration
     * @return the file contents, or an empty byte array when the file does
     *         not exist or reading fails (logged)
     */
    static public byte[] readHDFSFile(String dst, Configuration jobconf)
    {
        try
        {
            FileSystem fs = FileSystem.get(jobconf);

            // check if the file exists
            Path path = new Path(dst);
            if (!fs.exists(path))
            {
                return Bytes.toBytes("");
            }

            FSDataInputStream is = fs.open(path);
            try
            {
                // size the buffer from the file length; a plain cast replaces
                // the original parseInt(String.valueOf(...)) round-trip.
                // NOTE(review): files >= 2 GB would overflow the int cast —
                // same limitation as the original code.
                FileStatus stat = fs.getFileStatus(path);
                byte[] buffer = new byte[(int) stat.getLen()];
                is.readFully(0, buffer);
                return buffer;
            }
            finally
            {
                // close the stream even when readFully throws
                is.close();
                // fs is intentionally not closed (shared cache instance)
            }
        }
        catch (Exception e)
        {
            // the original swallowed this silently
            LOG.error("read hdfs file failed: " + dst, e);
            return Bytes.toBytes("");
        }
    }

    /**
     * Lists the entries directly under an HDFS path.
     * 
     * @param conf
     *            Hadoop configuration
     * @param path
     *            HDFS path whose children are listed
     * @return the statuses of the entries under the path
     * @throws IOException
     *             when the file system cannot be reached or listed
     */
    public FileStatus[] getDirectoryFromHdfs(Configuration conf, String path)
            throws IOException
    {
        // resolve the file system from the path URI, then list its children
        FileSystem fileSystem = FileSystem.get(URI.create(path), conf);
        Path target = new Path(path);
        return fileSystem.listStatus(target);
    }

    /**
     * Copies one HDFS file to another HDFS location on the same file system.
     * 
     * @param conf
     *            Hadoop configuration
     * @param input
     *            HDFS source file
     * @param output
     *            HDFS destination file (created/overwritten)
     */
    public void copyFromHdfsToHdfs(Configuration conf, String input,
            String output)
    {
        try
        {
            // file system resolved from the input URI
            FileSystem fs = FileSystem.get(URI.create(input), conf);

            FSDataInputStream hdfsInStream = fs.open(new Path(input));
            OutputStream hdfsOutputStream = fs.create(new Path(output));

            // copyBytes(..., true) closes both streams when done
            IOUtils.copyBytes(hdfsInStream, hdfsOutputStream, 4096, true);
        }
        catch (IOException e)
        {
            // the original swallowed this silently; at least record it
            LOG.error("copy from " + input + " to " + output + " failed", e);
        }
    }

    /**
     * 
     * @Title: delHadoopPath
     * @Description: Recursively deletes a Hadoop path if it exists.
     * @param conf
     *            Hadoop configuration
     * @param filePath
     *            path to delete
     * @return true when the path was deleted or did not exist; false when the
     *         delete failed or threw
     * @throws
     */

    public static boolean delHadoopPath(Configuration conf, String filePath)
    {

        FileSystem fs = null;
        boolean delFlag = false;
        Path path = new Path(filePath);
        try
        {

            fs = path.getFileSystem(conf);
            if (fs.exists(path))
            {
                delFlag = fs.delete(path, true);
                // only claim success when delete() actually succeeded
                // (the original logged success unconditionally, with a typo)
                if (delFlag)
                {
                    LOG.info("delete temp file: " + filePath + " successfully");
                }
                else
                {
                    LOG.warn("delete temp file: " + filePath + " failed");
                }
            }
            else
            {
                // nothing to delete counts as success
                LOG.info("output path: " + filePath + " does not exist");
                delFlag = true;
            }

        }
        catch (IOException e)
        {
            // keep the stack trace instead of string-concatenating it
            LOG.error("Delete hadoop file path fail", e);
            delFlag = false;
        }
        finally
        {
            // NOTE(review): this closes a FileSystem that may come from the
            // shared cache (path.getFileSystem) — confirm other users are
            // unaffected; behavior kept from the original
            IOUtils.closeStream(fs);
        }
        return delFlag;
    }

    /**
     * 
     * @Title: isHtableContainsFamily
     * @Description: Checks whether an HBase table contains the given column
     *               family.
     * @param conf
     *            HBase configuration
     * @param Htable
     *            HBase table name
     * @param family
     *            column family name
     * @return true when the table exists and declares the family; false
     *         otherwise or on any connection error (logged)
     * @throws
     */

    public boolean isHtableContainsFamily(Configuration conf, String Htable,
            String family)
    {
        HBaseAdmin admin = null;
        try
        {
            admin = new HBaseAdmin(conf);
            HTableDescriptor htd = admin.getTableDescriptor(Bytes
                    .toBytes(Htable));

            // compare the requested family against each declared family
            HColumnDescriptor[] colFamilies = htd.getColumnFamilies();
            for (int i = 0; i < colFamilies.length; i++)
            {
                if (family.equals(colFamilies[i].getNameAsString()))
                {
                    return true;
                }
            }
            return false;
        }
        catch (MasterNotRunningException e)
        {
            LOG.error("Hbase master is not running");
            return false;
        }
        catch (ZooKeeperConnectionException e)
        {
            LOG.error("can not connect zookeeper");
            return false;
        }
        catch (IOException e)
        {
            LOG.error("Hbase does not contain the table");
            return false;
        }
        finally
        {
            // release the admin connection (the original leaked it)
            if (admin != null)
            {
                try
                {
                    admin.close();
                }
                catch (IOException e)
                {
                    LOG.warn("close HBaseAdmin failed", e);
                }
            }
        }
    }

    /**
     * Returns true when the given path does NOT exist on HDFS.
     * Negation of {@link #checkPathExists(Configuration, String)}; note that
     * checkPathExists returns false on a blank path or an I/O error, so this
     * method returns true in those cases as well.
     *
     * @param conf
     *            Hadoop configuration
     * @param path
     *            path to check
     * @return true if the path is absent (or the existence check failed)
     */
    public boolean checkPathIsNotExists(Configuration conf, final String path)
    {
        return !checkPathExists(conf, path);
    }

    /**
     * Checks whether the given path exists on HDFS.
     * (Original javadoc was mojibake for "directory exists".)
     *
     * @param conf
     *            Hadoop configuration
     * @param path
     *            path to check; null or empty yields false
     * @return true only when the path exists; false on blank input or I/O
     *         error (logged)
     */
    public static boolean checkPathExists(Configuration conf, final String path)
    {
        if (isEmpty(path))
        {
            LOG.error("checkPathExists file path is null ");
            return false;
        }

        FileSystem hdfs = null;
        try
        {
            Path dst_path = new Path(path);
            // newInstance (not get) so the close below cannot disturb the
            // shared FileSystem cache
            hdfs = FileSystem.newInstance(dst_path.toUri(), conf);
            return hdfs.exists(dst_path);
        }
        catch (IOException e)
        {
            // fixed copy-paste message (was "Delete hadoop file path fail")
            LOG.error("check hadoop path exists fail: " + path, e);
            return false;
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(hdfs);
        }
    }

    /**
     * Tests whether the given string is null or the empty string.
     *
     * @param str
     *            the string to test, may be null
     * @return true when {@code str} is null or has zero length
     */
    public static boolean isEmpty(String str)
    {
        if (str == null)
        {
            return true;
        }
        return str.length() == 0;
    }

    /**
     * Tests that the string is neither null nor the empty string.
     * (Original javadoc was mojibake for "check string is not empty".)
     * 
     * @param str
     *            input param
     * @return boolean true when {@code str} is neither null nor ""
     */
    public static boolean isNotEmpty(String str)
    {
        return !isEmpty(str);
    }

    /**
     * Merges the files at the given local paths, in order, into one output
     * file. Delegates to {@link #FileCombination(File[], String)} after
     * validating every path.
     *
     * @param filepath
     *            local file paths to merge; null (or containing null) fails
     * @param outputPath
     *            path of the merged result
     * @return true on success, false on any failure (logged)
     */
    public static boolean FileCombination(String[] filepath, String outputPath)
    {
        if (filepath == null)
        {
            LOG.error("file path is null,combination failed");
            return false;
        }

        File[] files = new File[filepath.length];
        for (int idx = 0; idx < filepath.length; idx++)
        {
            String one = filepath[idx];
            if (one == null)
            {
                LOG.error("some of the filepath is null,combination failed");
                return false;
            }
            files[idx] = new File(one);
        }
        return FileCombination(files, outputPath);
    }

    /**
     * Concatenates the given local files, in array order, into one output
     * file.
     *
     * @param files
     *            files to merge; must be non-null with non-null entries
     * @param outputPath
     *            local path of the merged result
     * @return true on success, false on any failure (logged)
     */
    public static boolean FileCombination(File[] files, String outputPath)
    {
        if (files == null)
        {
            LOG.error("file path is null,combination failed");
            return false;
        }

        SequenceInputStream iStream = null;
        BufferedOutputStream bStream = null;
        Vector<InputStream> vector = new Vector<InputStream>();
        try
        {
            for (int i = 0; i < files.length; i++)
            {
                if (files[i] == null)
                {
                    LOG.error("some of the filepath is null,combination failed");
                    return false;
                }
                vector.addElement(new FileInputStream(files[i]));
            }
            iStream = new SequenceInputStream(vector.elements());
            bStream = new BufferedOutputStream(new FileOutputStream(new File(
                    outputPath)));
            byte[] arr = new byte[1024 * 20];
            int len;
            while ((len = iStream.read(arr)) != -1)
            {
                bStream.write(arr, 0, len);
            }
            // one flush after the loop (the original flushed every chunk,
            // defeating the purpose of the buffer)
            bStream.flush();
            LOG.info("file combination succeed,please check the file"
                    + outputPath);
            return true;
        }
        catch (FileNotFoundException e)
        {
            LOG.error(
                    "file not found,please check your filepath!combination failed",
                    e);
            return false;
        }
        catch (IOException e)
        {
            LOG.error("stream reads or writes exception,combination failed", e);
            return false;
        }
        finally
        {
            // close everything on EVERY path (the original leaked all opened
            // streams on the error returns)
            IOUtils.closeStream(bStream);
            if (iStream != null)
            {
                // closing the SequenceInputStream closes its remaining inputs
                IOUtils.closeStream(iStream);
            }
            else
            {
                // sequence never built: close each opened input individually
                for (InputStream is : vector)
                {
                    IOUtils.closeStream(is);
                }
            }
        }
    }

    /**
     * Merges every file directly inside a local folder into one output file.
     *
     * @param folderPath
     *            local directory whose files are merged
     * @param outputPath
     *            path of the merged result
     * @return true on success, false on any failure (logged by the delegate)
     */
    public static boolean FileCombination(String folderPath, String outputPath)
    {
        // listFiles() yields null when folderPath is not a readable directory;
        // the delegate logs and returns false in that case
        File[] children = new File(folderPath).listFiles();
        return FileCombination(children, outputPath);
    }

    /**
     * Copies all regular files in a source directory into one destination
     * file (merge), optionally writing a separator string after each file and
     * deleting the source afterwards.
     *
     * @param srcFS
     *            source file system
     * @param srcDir
     *            source directory (must be a directory)
     * @param dstFS
     *            destination file system
     * @param dstFile
     *            destination file; adjusted by checkDest when it names an
     *            existing directory
     * @param deleteSource
     *            when true, srcDir is recursively deleted after the merge
     * @param conf
     *            Hadoop configuration (supplies the copy buffer size)
     * @param addString
     *            optional separator appended (UTF-8) after each file; null
     *            for none
     * @return true on success, false on any failure (logged)
     */
    public boolean copyMerge(FileSystem srcFS, Path srcDir, FileSystem dstFS,
            Path dstFile, boolean deleteSource, Configuration conf,
            String addString)
    {
        try
        {

            dstFile = checkDest(srcDir.getName(), dstFS, dstFile, false);

            if (!srcFS.getFileStatus(srcDir).isDir())
            {
                // fixed typo in the log string (was "dictory")
                LOG.error("the input path is not a directory");
                return false;
            }

            OutputStream out = dstFS.create(dstFile);

            try
            {
                FileStatus contents[] = srcFS.listStatus(srcDir);
                for (int i = 0; i < contents.length; i++)
                {
                    if (!contents[i].isDir())
                    {
                        InputStream in = srcFS.open(contents[i].getPath());
                        try
                        {
                            // false: keep both streams open across iterations
                            IOUtils.copyBytes(in, out, conf, false);
                            if (addString != null)
                            {
                                out.write(addString.getBytes("UTF-8"));
                            }
                        }
                        finally
                        {
                            in.close();
                        }
                    }
                }
            }
            finally
            {
                out.close();
            }

            if (deleteSource)
            {
                return srcFS.delete(srcDir, true);
            }
            else
            {
                LOG.info("file combination succeed,please check the file "
                        + dstFile.getName());
                return true;
            }
        }
        catch (IOException e)
        {
            // add message context (the original logged the bare exception)
            LOG.error("copy merge to " + dstFile + " failed", e);
            return false;
        }
    }

    /**
     * Resolves the effective merge destination: descends one level into an
     * existing directory (appending srcName) and rejects an existing file
     * unless overwrite is allowed.
     *
     * @param srcName
     *            name appended when dst is a directory; null means "dst must
     *            not be a directory"
     * @param dstFS
     *            destination file system
     * @param dst
     *            candidate destination path
     * @param overwrite
     *            whether an existing file may be replaced
     * @return the path the merge should write to
     * @throws IOException
     *             when dst is a directory with no srcName, or an existing
     *             file with overwrite disabled
     */
    private static Path checkDest(String srcName, FileSystem dstFS, Path dst,
            boolean overwrite) throws IOException
    {
        // a non-existent destination is always acceptable as-is
        if (!dstFS.exists(dst))
        {
            return dst;
        }

        FileStatus sdst = dstFS.getFileStatus(dst);
        if (sdst.isDir())
        {
            if (srcName == null)
            {
                throw new IOException("Target " + dst + " is a directory");
            }
            // recurse once with srcName consumed so a nested directory fails
            return checkDest(null, dstFS, new Path(dst, srcName), overwrite);
        }
        if (!overwrite)
        {
            throw new IOException("Target " + dst + " already exists");
        }
        return dst;
    }

    /**
     * Merges every file under an HDFS folder into a single HDFS file via
     * {@link #copyMerge}.
     *
     * @param conf
     *            Hadoop configuration
     * @param hdfsInputFolderPath
     *            HDFS directory whose files are merged
     * @param hdfsOutputFile
     *            HDFS file receiving the merged contents
     * @param deleteSource
     *            when true, the input directory is deleted after the merge
     * @return true on success, false on any failure (logged)
     */
    public boolean mergeHdfsFiles(Configuration conf,
            String hdfsInputFolderPath, String hdfsOutputFile,
            boolean deleteSource)
    {
        Path source = new Path(hdfsInputFolderPath);
        Path target = new Path(hdfsOutputFile);
        try
        {
            FileSystem sourceFs = source.getFileSystem(conf);
            FileSystem targetFs = target.getFileSystem(conf);
            return copyMerge(sourceFs, source, targetFs, target, deleteSource,
                    conf, null);
        }
        catch (IOException e)
        {
            LOG.error("get file system errors", e);
            return false;
        }
    }
    
    /**
     * Merges every file under an HDFS folder into a single HDFS file using
     * Hadoop's {@code FileUtil.copyMerge}, closing both file systems when
     * done.
     *
     * @param conf
     *            Hadoop configuration
     * @param hdfsInputFolderPath
     *            HDFS directory whose files are merged
     * @param hdfsOutputFile
     *            HDFS file receiving the merged contents
     * @param deleteSource
     *            when true, the input directory is deleted after the merge
     * @return true on success, false on any failure (logged)
     */
    public static boolean mergeFilesInOne(Configuration conf,
            String hdfsInputFolderPath, String hdfsOutputFile,
            boolean deleteSource)
    {
        Path srcDir = new Path(hdfsInputFolderPath);
        Path dstFile = new Path(hdfsOutputFile);

        FileSystem srcFS = null;
        FileSystem dstFS = null;
        try
        {
            // dedicated instances so closing them below cannot disturb the
            // shared FileSystem cache
            srcFS = FileSystem.newInstance(srcDir.toUri(), conf);
            dstFS = FileSystem.newInstance(dstFile.toUri(), conf);

            return FileUtil.copyMerge(srcFS, srcDir, dstFS, dstFile,
                    deleteSource, conf, null);
        }
        catch (IOException e)
        {
            LOG.error("merge files exception", e);
            return false;
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(dstFS);
            org.apache.zookeeper.common.IOUtils.closeStream(srcFS);
        }
    }
    
    /**
     * 
    * @Title: mergeFilesInOneToLocal
    * @Description: Merges an HDFS directory into one temporary HDFS file and
    *               copies that file down to the local file system.
    * @author wangxiaoming
    * @param conf Hadoop configuration
    * @param hdfsInputFolderPath HDFS directory whose files are merged
    * @param localFilePath local destination path (presumably a directory,
    *                      since the returned path appends the generated file
    *                      name — TODO confirm against callers)
    * @param fileType tag embedded in the temporary merge file name
    * @param deleteSource when true, the HDFS input directory is deleted after
    *                     the merge
    * @return the expected local path of the downloaded file
    *         (localFilePath + "/tmpmerge..."), or "" on any failure
    * @throws
     */
    public static String mergeFilesInOneToLocal(Configuration conf,
            String hdfsInputFolderPath, String localFilePath, String fileType,
            boolean deleteSource)
    {
        Path srcDir = new Path(hdfsInputFolderPath);
        // unique temp file name: "/tmpmerge" + fileType + <last path segment>
        // + "_" + current timestamp, rooted at the "hdfsMergeTmp" property
        Date currentDay = Calendar.getInstance().getTime();
        String timeStr = DateFormatUtils.format(currentDay, "yyyyMMddHHmmss");
        int start = StringUtils.lastIndexOf(hdfsInputFolderPath, "/");
        String timeTmpStr = StringUtils.substring(hdfsInputFolderPath, start + 1);
        String fileName = StringUtils.join(new String[]{"/tmpmerge", fileType, timeTmpStr, "_", timeStr});
//        String fileName = StringUtils.join(new String[]{"/tmpmerge", fileType, timeStr});
        String mergeFilePath = PropUtils.getProp("hdfsMergeTmp") + fileName;
        Path dstFile = new Path(mergeFilePath);
        
        FileSystem srcFS = null;
        FileSystem dstFS = null;
//        boolean flag = false;
        try
        {
            // newInstance: private FS handles, safe to close in finally
            srcFS = FileSystem.newInstance(srcDir.toUri(), conf);
            dstFS = FileSystem.newInstance(dstFile.toUri(), conf);
            
            // step 1: merge the HDFS directory into the temp HDFS file
            boolean flag = FileUtil.copyMerge(srcFS, srcDir, dstFS, dstFile, deleteSource, conf, null);
            if (!flag)
            {
                return "";
            }
        }
        catch (IOException e)
        {
           LOG.error("merge files exception", e);
           return "";
        }
        finally
        {
            org.apache.zookeeper.common.IOUtils.closeStream(dstFS);
            org.apache.zookeeper.common.IOUtils.closeStream(srcFS);
        }
        
        // step 2: download the merged temp file to the local file system.
        // NOTE(review): copyToLocalFile(false, ...) keeps the temp merge file
        // on HDFS; nothing here deletes it — confirm whether cleanup is done
        // elsewhere or temp files accumulate under "hdfsMergeTmp"
        FileSystem fs = null;
        try
        {
            fs = FileSystem.newInstance(conf);
            Path p1 =new Path(mergeFilePath);  
            Path p2 =new Path(localFilePath);  
            fs.copyToLocalFile(false, p1, p2);  
        }
        catch (IOException e)
        {
            LOG.error("merge files exception", e);
            return "";
        }  
        finally
        {
            IOUtils.closeStream(fs);
        }
        return localFilePath + fileName;
    }

    /**
     * Checks whether an HBase table exists.
     *
     * @param conf
     *            HBase configuration
     * @param tableName
     *            name of the table to look up
     * @return true when the table exists
     * @throws IOException
     *             when HBase cannot be reached
     */
    public boolean checkTableExist(Configuration conf, String tableName)
            throws IOException
    {
        HBaseAdmin hBaseAdmin = new HBaseAdmin(conf);
        try
        {
            return hBaseAdmin.tableExists(tableName);
        }
        finally
        {
            // release the admin connection (the original leaked it)
            hBaseAdmin.close();
        }
    }

}
