package cas.ihep.hadoop.tr;

import com.google.common.base.Strings;
import com.google.common.io.Closer;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.net.URI;
import java.util.Formatter;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Command-line tool that uploads local directories/files into HDFS using a
 * fixed-size worker pool, printing a live transfer-rate display. Also supports
 * a read-test mode (-z) that only verifies local files are readable.
 *
 * <p>Not thread-safe itself; one instance drives one transfer session.
 */
public class Put implements Closeable{

    /** Size of the per-thread copy buffer, in bytes. */
    private static final int BUFFER_SIZE = 8192;

    private boolean overwrite;          // overwrite existing HDFS files (-f)
    private Closer closer;              // owns every resource opened by this instance
    private FileSystem hdfs;            // destination file system
    private AtomicLong bytes;           // total bytes copied, across all workers
    private ExecutorService pool;       // transfer workers; null in test mode (-z)
    private PrintWriter log;            // warning/error log file
    private ThreadLocal<byte[]> cache;  // per-thread copy buffer

    /**
     * Releases the HDFS connection, config streams and log writer.
     * Also shuts the worker pool down in case {@link #doTransfer()} was never
     * reached (e.g. the constructor threw) so worker threads cannot keep the
     * JVM alive.
     */
    @Override
    public void close() throws IOException {
        if (pool != null) {
            pool.shutdown();   // idempotent; harmless after doTransfer()
        }
        closer.close();
    }

    /**
     * Copies one local file into the given HDFS directory, keeping its name.
     * Runs on a worker thread; errors are logged rather than thrown so a
     * single bad file does not abort the whole batch.
     *
     * @param from  local source file
     * @param todir existing HDFS destination directory
     */
    private void transferData(File from, Path todir){
        byte[] buf = cache.get();
        int each;
        try (FileInputStream fis = new FileInputStream(from);
             OutputStream hout = hdfs.create(new Path(todir, from.getName()), overwrite)) {
            while ((each = fis.read(buf)) != -1) {
                hout.write(buf, 0, each);
                bytes.addAndGet(each);   // feeds the progress display in doTransfer()
            }
        } catch (IOException e) {
            // getPath() cannot throw (unlike getCanonicalPath()), so the error
            // is always recorded instead of being silently swallowed.
            log.println("Error in transfering " + from.getPath() + ": " + e.getMessage());
        }
    }

    /**
     * Warns when a file exceeds the HDFS default block size, then either
     * performs a read test (-z) or schedules the actual transfer on the pool.
     * Shared by {@link #addTask} and the constructor's top-level loop.
     */
    private void handleFile(File f, Path targetDir, boolean performTest) throws IOException {
        if (f.length() > hdfs.getDefaultBlockSize(targetDir)) {
            log.println("Warning: the size of " + f.getCanonicalPath() + " is bigger than default block size.");
        }
        if (performTest) {
            try (FileInputStream fis = new FileInputStream(f)) {
                //noinspection ResultOfMethodCallIgnored
                fis.read();   // one-byte probe: is the file readable at all?
            } catch (Exception e) {
                log.println("Error in testing " + f.getCanonicalPath());
            }
        } else {
            pool.submit(() -> transferData(f, targetDir));
        }
    }

    /**
     * Recursively walks a local directory, mirroring its structure under
     * {@code to} on HDFS and scheduling (or testing) every regular file.
     *
     * @param checkName on the first level only: if the local directory name
     *                  equals the destination's last path component, upload
     *                  INTO {@code to} instead of creating a nested copy
     * @throws IOException if a destination path exists but is not a directory
     */
    private void addTask(File from, Path to, boolean checkName, boolean performTest) throws IOException {
        String dirname = from.getName();
        Path targetDir;
        if (checkName && dirname.equals(to.getName())) {
            targetDir = to;
        } else {
            targetDir = new Path(to, dirname);
        }
        if (!hdfs.exists(targetDir)) {
            hdfs.mkdirs(targetDir);
        } else if (!hdfs.isDirectory(targetDir)) {
            throw new IOException(targetDir.toString() + " not a directory");
        }
        File[] subfiles = from.listFiles();   // null on I/O error or permission denial
        if (subfiles != null) {
            for (File f : subfiles) {
                if (f.isDirectory()) {
                    addTask(f, targetDir, false, performTest);
                } else {
                    handleFile(f, targetDir, performTest);
                }
            }
        }
    }

    /**
     * Turns the destination argument into an HDFS {@link Path}. Full
     * {@code hdfs://} URIs pass through; plain paths are canonicalized and a
     * leading {@code /hdfs/} mount prefix is stripped (keeping the slash).
     */
    private static Path resolveDst(String p) throws Exception {
        URI uri = new URI(p);
        if ("hdfs".equals(uri.getScheme())) {
            return new Path(uri);
        } else {
            File f = new File(p);
            p = f.getCanonicalPath();
            if (p.startsWith("/hdfs/")) {
                p = p.substring(5);   // "/hdfs/x" -> "/x"
            }
            return new Path(p);
        }
    }

    /**
     * Connects to HDFS using {@code hdpConfDir}'s core/hdfs site files and
     * schedules every source argument (the last argument is the destination).
     *
     * @param thrs        worker thread count
     * @param o           overwrite existing files
     * @param performTest read-test only, no transfer (pool stays null)
     * @param argv        positional arguments: sources..., destination
     * @param sz          argv.size(), pre-validated to be >= 2
     */
    private Put(int thrs, boolean o, boolean performTest, List<String> argv, int sz,
                String hdpConfDir, String logfile) throws Exception {
        System.out.println("hadoop data transmission tool v0.1");
        closer = Closer.create();
        overwrite = o;
        bytes = new AtomicLong(0);
        Configuration hConf = new Configuration();
        hConf.addResource(closer.register(new FileInputStream(hdpConfDir + "/etc/hadoop/core-site.xml")));
        hConf.addResource(closer.register(new FileInputStream(hdpConfDir + "/etc/hadoop/hdfs-site.xml")));
        hdfs = closer.register(FileSystem.get(hConf));
        Path basepath = resolveDst(argv.get(sz - 1));
        pool = performTest ? null : Executors.newFixedThreadPool(thrs);
        cache = ThreadLocal.withInitial(() -> new byte[BUFFER_SIZE]);
        log = closer.register(new PrintWriter(new File(logfile)));
        for (int i = 0; i < sz - 1; i++) {
            File basedir = new File(argv.get(i));
            System.out.println(basedir.getCanonicalPath() + "\t====>\t" + basepath.toString());
            if (basedir.isDirectory()) {
                addTask(basedir, basepath, true, performTest);
            } else {
                handleFile(basedir, basepath, performTest);
            }
        }
    }

    /**
     * Waits for all scheduled transfers to finish, refreshing a one-line
     * rate/total display every two seconds. No-op in test mode.
     */
    private void doTransfer() {
        if (pool == null) {
            return;   // test mode (-z): nothing was scheduled
        }
        long last = 0;
        pool.shutdown();   // stop accepting tasks; drain the queue
        StringBuilder line = new StringBuilder();
        StringBuilder scratch = new StringBuilder();
        Formatter formatter = new Formatter(scratch);
        while (!pool.isTerminated()) {
            long cur = bytes.get();
            // BUG FIX: the line buffer was never cleared, so every refresh
            // reprinted all previous progress lines and the buffer grew
            // without bound. Reset it before building the new line.
            line.setLength(0);
            // ">> 1" divides by the 2-second polling interval to get bytes/s.
            line.append("\rCurrent:    ").append(normalize((cur - last) >> 1, scratch, formatter));
            scratch.setLength(0);
            line.append("/s    Total:    ").append(normalize(cur, scratch, formatter)).append("    ");
            scratch.setLength(0);
            System.out.print(line);
            last = cur;
            try {
                if (pool.awaitTermination(2, TimeUnit.SECONDS)) {
                    break;
                }
            } catch (InterruptedException e) {
                // Preserve the interrupt status for the caller and stop waiting.
                Thread.currentThread().interrupt();
                break;
            }
        }
        System.out.println();
        log.flush();
    }

    /**
     * Renders a byte count with a binary-unit suffix (B/KiB/MiB/GiB/TiB) into
     * {@code builder}. Note: {@code fmt} must wrap the same {@code builder} —
     * the sub-KiB branch appends directly, all others write via the formatter.
     */
    private static StringBuilder normalize(long val, StringBuilder builder, Formatter fmt) {
        if (val < 1024) {
            builder.append(val).append(" B");
        } else if (val < 1024 * 1024) {
            fmt.format("%.2f KiB", ((double) val) / 1024);
        } else if (val < 1024 * 1024 * 1024) {
            fmt.format("%.2f MiB", ((double) val) / 1024 / 1024);
        } else if (val < 1024L * 1024 * 1024 * 1024) {
            fmt.format("%.2f GiB", ((double) val) / 1024 / 1024 / 1024);
        } else {
            fmt.format("%.2f TiB", ((double) val) / 1024 / 1024 / 1024 / 1024);
        }
        return builder;
    }

    /** Thread count from -t, defaulting to 1 (may throw NumberFormatException on bad input). */
    private static int optInt(CommandLine cmdline) {
        if (cmdline.hasOption('t')) {
            return Integer.parseInt(cmdline.getOptionValue('t'));
        }
        return 1;
    }

    /** Hadoop config root from -c, falling back to $HADOOP_HOME (empty if unset). */
    private static String optHadoopHome(CommandLine cmdline) {
        if (cmdline.hasOption('c')) {
            return cmdline.getOptionValue('c');
        }
        return Strings.nullToEmpty(System.getenv("HADOOP_HOME"));
    }

    /** Log file path from -l, defaulting to a timestamped file in the temp dir. */
    private static String optLogfile(CommandLine cmdline) {
        if (cmdline.hasOption('l')) {
            return cmdline.getOptionValue('l');
        }
        return System.getProperty("java.io.tmpdir") + "/hadoop-transmission-" + System.currentTimeMillis() + ".log";
    }

    /**
     * Entry point: parses options, prints usage when fewer than two positional
     * arguments (need at least one source plus the destination) or -h is given,
     * otherwise runs the transfer and closes all resources on exit.
     */
    public static void main(String[] args) throws Exception {
        Options opts = new Options();
        opts.addOption("f", "force", false, "Overwrites the destination if it already exists");
        opts.addOption("h", "help", false, "Show this message");
        opts.addOption("t", "threads", true, "Specify the number of threads");
        opts.addOption("c", "conf", true, "Specify the config directory of hadoop");
        opts.addOption("z", "test", false, "Perform input/output test");
        opts.addOption("l", "log", true, "Specify the log file of transmission");

        CommandLine cmdline = new DefaultParser().parse(opts, args);
        List<String> argv = cmdline.getArgList();
        int sz = argv.size();
        if (sz < 2 || cmdline.hasOption('h')) {
            HelpFormatter fmt = new HelpFormatter();
            fmt.setSyntaxPrefix("Usage: ");
            fmt.printHelp("hadoop-put", "hadoop data transmission tool v0.1", opts, null, true);
            return;
        }
        try (Put put = new Put(optInt(cmdline), cmdline.hasOption('f'), cmdline.hasOption('z'),
                               argv, sz, optHadoopHome(cmdline), optLogfile(cmdline))) {
            put.doTransfer();
        }
    }
}
