package cas.ihep.hadoop.util;

import cas.ihep.util.FileAD;
import cas.ihep.util.RandomGenerator;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
import org.apache.commons.logging.Log;
import org.apache.hadoop.hdfs.DFSClient;
import org.luaj.vm2.LuaValue;

import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.OutputStream;
import java.net.URI;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

/**
 * Skeleton implementation of {@link HadoopLuaEngine} that materializes a shell
 * command into a throw-away script, executes it via the configured shell, and
 * redirects the subprocess's stdout/stderr to one of three destinations:
 * the inherited console, local files, or files on HDFS.
 *
 * <p>Subclasses supply the HDFS client, logger, default file system URI and
 * log-name prefix through the abstract accessors below.
 */
public abstract class HadoopLuaAbstractEngine implements HadoopLuaEngine {

    /** @return the HDFS client used to create log files on HDFS. */
    protected abstract DFSClient getDFSClient();

    /** @return the logger used for redirect/diagnostic messages. */
    protected abstract Log getLogger();

    /** @return the default file system URI prefix, e.g. {@code hdfs://namenode:port}. */
    protected abstract String getDefaultFS();

    /** @return the prefix used to derive default log file names ({@code <prefix>.log/.err}). */
    protected abstract String getLogPrefix();

    /**
     * {@inheritDoc}
     *
     * <p>This base implementation exposes no values; subclasses may override.
     */
    @Override
    public LuaValue hadoop_value(String key) {
        return LuaValue.NIL;
    }

    /** Hook invoked immediately before the subprocess is started. No-op by default. */
    protected void preRun(){}

    /** Hook invoked when the subprocess's cleanup closeable is closed. No-op by default. */
    protected void postRun(){}

    /**
     * Writes {@code cmd} — prefixed with one {@code source <file>} line per
     * configured source — into a temporary script under {@code /dev/shm}, then
     * runs it with {@code <shell> -l <script>} and redirects output according
     * to {@code config}.
     *
     * @param cmd    the shell command body to execute
     * @param config execution configuration (shell, working dir, env, log targets)
     * @return the subprocess exit code
     * @throws Exception if the script cannot be written, the process cannot be
     *         started, or log redirection fails
     */
    public int hadoop_run(String cmd, HadoopRunConfig config) throws Exception {
        final String defaultFS = getDefaultFS();
        try (Closer curCloser = Closer.create()) {
            // Assemble the script: "source" each configured file, then the command.
            StringBuilder sb = new StringBuilder();
            for (String str : config.sources()) {
                sb.append("source ").append(str).append('\n');
            }
            sb.append(cmd);
            // /dev/shm keeps the throw-away script in memory; the FileAD is
            // registered with the closer — presumably it deletes itself on
            // close (TODO confirm against cas.ihep.util.FileAD).
            File tmpFile = curCloser.register(
                    new FileAD("/dev/shm", RandomGenerator.instance.next(HadoopLuaEngine.class)));
            try (FileWriter writer = new FileWriter(tmpFile)) {
                writer.write(sb.toString());
            }
            ProcessBuilder builder =
                    new ProcessBuilder(Arrays.asList(config.shell(), "-l", tmpFile.getCanonicalPath()));
            builder.directory(new File(config.workingDirectory()));
            List<String> logNames = config.logs();
            config.updateEnv(builder.environment());
            preRun();
            if (config.logInherited()) {
                return processInheritedLog(builder, curCloser);
            }
            // Resolve stdout/stderr file names; fall back to names derived from
            // the log prefix when fewer than two names are configured.
            String stdoutfile, stderrfile;
            int ssz = logNames.size();
            if (ssz >= 2) {
                stdoutfile = logNames.get(0);
                stderrfile = logNames.get(1);
            } else if (ssz == 1) {
                stdoutfile = logNames.get(0);
                stderrfile = getLogPrefix() + ".err";
            } else {
                String tmp = getLogPrefix();
                stdoutfile = tmp + ".log";
                stderrfile = tmp + ".err";
            }
            String logDirectory = config.logDirectory();
            if (!logDirectory.startsWith("hdfs://")) {
                File logDir = new File(logDirectory).getCanonicalFile();
                logDirectory = logDir.getCanonicalPath();
                if (logDirectory.startsWith("/hdfs/")) {
                    // A /hdfs/... path is a local mount of the default FS:
                    // strip the "/hdfs" prefix and prepend the FS URI.
                    // BUG FIX: the original discarded this call's result and
                    // fell through below, launching the process a second time
                    // with a local path passed as an HDFS URL.
                    return processHdfsLog(defaultFS + logDirectory.substring(5),
                            stdoutfile, stderrfile, builder, curCloser);
                }
                return processLocalLog(logDir, stdoutfile, stderrfile, builder, curCloser);
            }
            return processHdfsLog(logDirectory, stdoutfile, stderrfile, builder, curCloser);
        }
    }

    /**
     * Registers a cleanup closeable that destroys {@code subprocess} and fires
     * {@link #postRun()} when {@code closer} is closed. Registration order
     * matters: Closer closes in LIFO order, so anything registered before this
     * call is closed after the process has been destroyed.
     */
    private void registerProcessCleanup(final Process subprocess, Closer closer) {
        closer.register(new Closeable() {
            @Override
            public void close() {
                subprocess.destroy();
                postRun();
            }
        });
    }

    /**
     * Runs the process with stdout/stderr inherited from this JVM.
     *
     * @return the subprocess exit code
     */
    private int processInheritedLog(ProcessBuilder builder, Closer closer) throws Exception {
        builder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
        builder.redirectError(ProcessBuilder.Redirect.INHERIT);
        final Process subprocess = builder.start();
        registerProcessCleanup(subprocess, closer);
        return subprocess.waitFor();
    }

    /**
     * Runs the process with stdout/stderr redirected to files in a local
     * directory.
     *
     * @param localDir   destination directory for both log files
     * @param stdoutfile file name for standard output
     * @param stderrfile file name for standard error
     * @return the subprocess exit code
     */
    private int processLocalLog(File localDir, String stdoutfile, String stderrfile,
                                ProcessBuilder builder, Closer closer) throws Exception {
        File stdout = new File(localDir, stdoutfile), stderr = new File(localDir, stderrfile);
        builder.redirectOutput(stdout);
        builder.redirectError(stderr);
        Log LOG = getLogger();
        LOG.info("Redirect stdout to " + stdout.getCanonicalPath());
        LOG.info("Redirect stderr to " + stderr.getCanonicalPath());
        final Process subprocess = builder.start();
        registerProcessCleanup(subprocess, closer);
        return subprocess.waitFor();
    }

    /**
     * Runs the process with stdout/stderr pumped into files created on HDFS.
     * stdout is copied on a pool thread while stderr is copied on the calling
     * thread; both copies complete before the exit code is returned.
     *
     * @param url        scheme-qualified HDFS directory URL, e.g. {@code hdfs://nn/logs}
     * @param stdoutfile file name for standard output within {@code url}
     * @param stderrfile file name for standard error within {@code url}
     * @return the subprocess exit code
     */
    private int processHdfsLog(String url, String stdoutfile, String stderrfile,
                               ProcessBuilder builder, final Closer closer) throws Exception {
        Log LOG = getLogger();
        final DFSClient dfsClient = getDFSClient();
        // NOTE(review): mkdirs receives the scheme-qualified URL while create()
        // below receives only the URI path — confirm DFSClient.mkdirs accepts
        // "hdfs://..." strings, otherwise this should also use URI#getPath().
        dfsClient.mkdirs(url, null, true);
        URI uriOut = new URI(url + '/' + stdoutfile);
        URI uriErr = new URI(url + '/' + stderrfile);
        builder.redirectOutput(ProcessBuilder.Redirect.PIPE);
        builder.redirectError(ProcessBuilder.Redirect.PIPE);
        LOG.info("Redirect stdout to " + uriOut.toString());
        LOG.info("Redirect stderr to " + uriErr.toString());
        // Streams are registered BEFORE the cleanup closeable so that the
        // LIFO-closing Closer destroys the process first, then closes streams.
        final OutputStream stdout = closer.register(dfsClient.create(uriOut.getPath(), true)),
                stderr = closer.register(dfsClient.create(uriErr.getPath(), true));
        final Process subprocess = builder.start();
        registerProcessCleanup(subprocess, closer);
        Future<Long> future = YbjThreadPool.pool.submit(new Callable<Long>() {
            @Override
            public Long call() throws Exception {
                return ByteStreams.copy(closer.register(subprocess.getInputStream()), stdout);
            }
        });
        ByteStreams.copy(closer.register(subprocess.getErrorStream()), stderr);
        future.get(); // propagate any stdout-copy failure before reporting exit code
        return subprocess.waitFor();
    }
}
