package com.zhangwoo.analyser.process;
 
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

import com.zhangwoo.util.Config;
 
 
/**
 * Merges every regular file of a local directory into a single file on HDFS,
 * one HDFS file per input directory, named after the directory.
 *
 * <p>Usage: each command-line argument is a local directory path to merge.
 * The HDFS target location comes from {@link Config#getHdfsPath()} and the
 * NameNode host from {@link Config#getHdfshost()}.
 */
public class PutMerge {

    private static Logger logger = Logger.getLogger(PutMerge.class);

    /** Local directory whose files will be concatenated into one HDFS file. */
    private String singleDir;

    /**
     * @param singleDir absolute or relative path of the local directory to merge
     */
    public PutMerge(String singleDir) {
        this.singleDir = singleDir;
    }

    /**
     * Entry point. Re-points the log4j file appender to logs/PutMerge.log,
     * then merges each directory given on the command line.
     *
     * @param args local directory paths; exits with -1 when none are given
     * @throws IOException never thrown in practice (all I/O errors are caught
     *         and logged inside {@link #mergeTheDirectory()})
     */
    public static void main(String[] args) throws IOException {
        if (args.length == 0) {
            System.exit(-1);
        }

        Properties props = new Properties();
        InputStream istream = null;
        try {
            istream = PutMerge.class.getResourceAsStream("/log4j.properties");
            // getResourceAsStream returns null when the resource is missing;
            // guard to avoid an NPE in props.load.
            if (istream != null) {
                props.load(istream);
                props.setProperty("log4j.appender.logfile.File",
                        "logs/PutMerge.log");

                // After re-configuration, log output goes to the new file.
                PropertyConfigurator.configure(props);
            }
        } catch (Exception e) {
            logger.error("装入属性文件异常 Exception ", e);
        } finally {
            // Close the resource stream even when loading fails.
            if (istream != null) {
                try {
                    istream.close();
                } catch (IOException ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
        }

        for (String singleDir : args) {
            logger.debug("******* merge path is : " + singleDir);
            new PutMerge(singleDir).mergeTheDirectory();
        }
    }

    /**
     * Concatenates all regular files of {@link #singleDir} into one HDFS file
     * named after the directory, separated by the platform line separator.
     * Skips the directory entirely when the HDFS target already exists.
     * All errors are logged, never propagated.
     */
    public void mergeTheDirectory() {
        try {
            String host = Config.getHdfshost();

            Configuration conf = new Configuration();
            conf.set("fs.default.name", "hdfs://" + host + ":9000");
            conf.set("mapred.job.tracker", host + ":9001");

            FileSystem hdfs = FileSystem.get(conf);
            FileSystem local = FileSystem.getLocal(conf);
            int filesProcessed = 0;

            // Base directory holding the files to merge.
            File dic = new File(singleDir);

            Path inputDir = new Path(dic.getAbsolutePath());
            Path hdfsFile = new Path(Config.getHdfsPath() + dic.getName());

            logger.debug("converting dir ... " + dic.getAbsolutePath());

            try {
                FileStatus[] inputFiles = local.listStatus(inputDir);
                // Idempotence: directory was already merged on a previous run.
                if (hdfs.exists(hdfsFile)) {
                    return;
                }
                FSDataOutputStream out = hdfs.create(hdfsFile);
                try {
                    for (int i = 0; i < inputFiles.length; i++) {
                        if (inputFiles[i].isDir()) {
                            continue; // only merge regular files, not subdirs
                        }
                        logger.debug("now processing <"
                                + inputFiles[i].getPath().getName() + ">");
                        FSDataInputStream in = local.open(inputFiles[i]
                                .getPath());
                        try {
                            byte buffer[] = new byte[4096];
                            int bytesRead = 0;
                            while ((bytesRead = in.read(buffer)) > 0) {
                                out.write(buffer, 0, bytesRead);
                            }
                            // BUG FIX: writeUTF prepends a 2-byte length header
                            // (DataOutput contract), injecting binary garbage
                            // between files; writeBytes emits the separator
                            // characters verbatim.
                            out.writeBytes(System.getProperty("line.separator"));
                            filesProcessed++;
                        } finally {
                            in.close(); // close even if the copy loop throws
                        }
                    }
                } finally {
                    out.close(); // always release the HDFS output stream
                }
                logger.debug("Successfully merged " + filesProcessed
                        + " local files and written to <" + hdfsFile.getName()
                        + "> in HDFS.");
            } catch (Exception ioe) {
                logger.error("write in hdfs error!", ioe);
            }
        } catch (Exception e) {
            logger.error("put merge thread error!", e);
        }
    }
}