package cas.ihep.hadoop.mc;

import cas.ihep.hadoop.util.HadoopLuaEval;
import cas.ihep.hadoop.util.Utils;
import cas.ihep.util.MapBindings;
import cas.ihep.util.RandomGenerator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.luaj.vm2.LuaBoolean;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.StandardOpenOption;

/**
 * Hadoop MapReduce driver for YBJ Monte-Carlo production.
 *
 * <p>Evaluates a Lua job-option file, stages the job "run file" on HDFS,
 * writes one small input file per requested map task, then submits a
 * map-reduce job ({@code YbjMCMapper3}/{@code YbjMCReducer3}).
 *
 * <p>Usage: {@code YbjMC3 <job option file> <job run file>}
 */
public class YbjMC3 extends Configured implements Tool {
    private static final Log LOG = LogFactory.getLog(YbjMC3.class);
    // Per-job scratch directory on HDFS under which the generated map-input
    // files are staged (one subdirectory per job name).
    static private final Path TMP_DIR = new Path("/tmp/input/"+YbjMC3.class.getSimpleName());

    /** Prints the command-line synopsis. */
    private static void usage(){
        System.out.println("YbjMC3 <job option file> <job run file>");
    }

    // Map tasks are created for indices [startNum, startNum + numMaps);
    // both values come from the Lua option file ("Index" and "Times").
    private int startNum,numMaps;
    // Filesystem authority prefix (e.g. "hdfs://host:8020") prepended to HDFS paths.
    private String defaultFS;
    // When true (Lua "Hadoop_WaitFor"), run() blocks until the job finishes.
    private boolean waitFor=false;

    /**
     * Evaluates the Lua option file, pulls out the job-control bindings
     * (queue name, wait flag, Index, Times) and serializes the remaining
     * bindings to a temporary "global value" file on HDFS for the tasks.
     *
     * @param optionfile local path of the Lua option script
     * @param conf       job configuration to populate
     * @param fs         filesystem used to write the serialized bindings
     * @throws IOException        on read/write failure, a missing luaj engine,
     *                            or missing mandatory bindings
     * @throws ScriptException    if the Lua script fails to evaluate
     * @throws URISyntaxException declared for signature compatibility
     */
    private void readingOptionFile(String optionfile,Configuration conf,FileSystem fs) throws IOException, ScriptException, URISyntaxException {
        LOG.info("using option file: "+optionfile);
        ScriptEngineManager sem=new ScriptEngineManager();
        ScriptEngine e=sem.getEngineByName("luaj");
        if(e==null){
            // Fail fast with a clear message rather than an NPE at eval time.
            throw new IOException("luaj script engine not found on the classpath");
        }
        String tmpoptfile=Utils.GlobalValuePrefix+RandomGenerator.instance.next(YbjMC3.class);
        conf.set(Utils.GlobalValueKey,tmpoptfile);
        tmpoptfile=defaultFS+tmpoptfile;
        Path tmpOpt=new Path(tmpoptfile);
        LOG.info("using global value: "+tmpoptfile);
        // Read the option script with an explicit charset (a bare FileReader
        // would use the platform default) and stream the serialized bindings
        // straight to HDFS.
        try(Reader optreader=new InputStreamReader(new FileInputStream(optionfile),StandardCharsets.UTF_8);
            ObjectOutputStream oos=new ObjectOutputStream(fs.create(
                    tmpOpt,true))){
            MapBindings bd=new MapBindings();
            HadoopLuaEval.addExternalVariable(bd,conf);
            e.eval(optreader,bd);
            // Optional binding: target scheduler queue.
            Object value=bd.remove("Hadoop_Queue");
            if(value!=null) {
                conf.set("mapred.job.queue.name",(String)value);
            }
            // Optional binding: whether the driver should wait for completion.
            value=bd.remove("Hadoop_WaitFor");
            if(value!=null){
                waitFor=((LuaBoolean)value).toboolean();
            }
            // Mandatory bindings; a missing one previously surfaced as an
            // opaque NullPointerException on unboxing.
            Object index=bd.remove("Index");
            Object times=bd.remove("Times");
            if(index==null||times==null){
                throw new IOException("option file must define both 'Index' and 'Times'");
            }
            startNum=(Integer)index;
            numMaps=(Integer)times;
            Utils.mkdirOutputPath(bd,defaultFS,fs);
            // Everything left in the bindings becomes the tasks' global values.
            oos.writeObject(HadoopLuaEval.convertForSerialize(bd));
            oos.flush();
        }
        LOG.info("submitting to queue: " + conf.get("mapred.job.queue.name"));
    }

    /**
     * Resolves the job run file and records its HDFS location in the
     * configuration. Three cases: an explicit {@code hdfs:} URI, a local
     * path under an {@code /hdfs/} mount, or an ordinary local file that is
     * copied to a temporary HDFS location.
     *
     * @param runfile path or URI of the run file
     * @param jobconf job configuration to record the run-file location in
     * @param fs      filesystem used for the temporary copy
     * @throws IOException        if the local-to-HDFS copy fails
     * @throws URISyntaxException if {@code runfile} is not a valid URI
     */
    private void processingRunfile(String runfile,Configuration jobconf,FileSystem fs)throws IOException,URISyntaxException{
        URI jofUri=new URI(runfile);
        if("hdfs".equals(jofUri.getScheme())){
            // Already on HDFS: just record its path.
            String hdfsOpt=jofUri.getPath();
            LOG.info("using run file: "+hdfsOpt);
            jobconf.set(Utils.RunfileKey,hdfsOpt);
        }else{
            File jOptionFile=new File(runfile);
            runfile=jOptionFile.getAbsolutePath();
            if(runfile.startsWith("/hdfs/")){
                // Local view of an HDFS fuse mount; store the mounted path and
                // strip the leading "/hdfs" (5 chars, keeping the slash) to get
                // the real HDFS path.
                jobconf.set(Utils.RunfileKey,runfile);
                runfile=runfile.substring(5);
                LOG.info("using run file: "+defaultFS+runfile);
            }else{
                // Plain local file: copy it to a temporary HDFS location and
                // mark it for cleanup via RunfileUsetmp.
                String tmprunfile="/tmp/"+ RandomGenerator.instance.next(YbjMC3.class);
                jobconf.set(Utils.RunfileKey,tmprunfile);
                jobconf.set(Utils.RunfileUsetmp,"true");
                runfile=defaultFS+tmprunfile;
                LOG.info("using tmp run file: "+runfile);
                Path tmpPath=new Path(runfile);
                try(FileChannel is=FileChannel.open(jOptionFile.toPath(), StandardOpenOption.READ);
                    WritableByteChannel os= Channels.newChannel(fs.create(tmpPath,true))){
                    is.transferTo(0,is.size(),os);
                }
            }
        }
    }

    /**
     * Driver entry point: processes the option and run files, generates one
     * input file per map task, and submits the job.
     *
     * @param args {@code args[0]} = job option file, {@code args[1]} = job run file
     * @return 0 on success (or successful submission when not waiting), 1 on
     *         bad arguments or a failed job when waiting for completion
     * @throws Exception on any configuration, filesystem, or submission error
     */
    @Override
    public int run(String[] args) throws Exception {
        if(args.length<2){
            LOG.error("Missing <Job Option> and <Job Run>");
            usage();
            return 1;
        }
        Configuration jobconf = getConf();
        FileSystem fs=FileSystem.get(jobconf);
        String jobName = jobconf.get("ybj.jobName", RandomGenerator.instance.next(YbjMC3.class));
        defaultFS=jobconf.get("fs.defaultFS","hdfs://hadoop06.ihep.ac.cn:8020");
        readingOptionFile(args[0],jobconf,fs);
        processingRunfile(args[1],jobconf,fs);
        LOG.info("Index="+startNum);
        LOG.info("Times="+numMaps);
        // LOGNAME may be unset (e.g. non-login shells); Configuration.set
        // rejects null values, so only set it when available.
        String logname=System.getenv("LOGNAME");
        if(logname!=null){
            jobconf.set("user.name",logname);
        }else{
            LOG.warn("LOGNAME environment variable not set; leaving user.name unchanged");
        }

        final Path inDir = new Path(TMP_DIR, jobName);
        if (fs.exists(inDir)) {
            throw new IOException("Tmp directory " + fs.makeQualified(inDir)
                    + " already exists.  Please remove it or change your Job Name!.");
        }
        if (!fs.mkdirs(inDir)) {
            throw new IOException("Cannot create input directory " + inDir);
        }
        jobconf.set(Utils.TmpInputDirKey,inDir.toUri().toString());
        // Generate one single-line input file per map task; each file carries
        // the task's index, which the mapper reads as its work unit.
        for(long i=startNum; i < startNum+numMaps; ++i) {
            String s=Long.toString(i);
            final Path file = new Path(inDir, s+".txt");
            // Replication 1 is enough for tiny, short-lived input stubs.
            try(PrintWriter out = new PrintWriter(
                    new OutputStreamWriter(fs.create(file,(short)1),StandardCharsets.UTF_8))){
                out.print(s);
            }
            LOG.info("Wrote input for I #"+i);
        }
        Job job = Job.getInstance(jobconf);
        job.setJarByClass(YbjMC3.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setMapperClass(YbjMCMapper3.class);
        job.setReducerClass(YbjMCReducer3.class);
        job.setNumReduceTasks(1);
        FileInputFormat.setInputPaths(job, inDir);
        job.submit();
        if(waitFor) {
            // Propagate the job outcome instead of always reporting success;
            // the job is already submitted, so this call only monitors it.
            return job.waitForCompletion(true) ? 0 : 1;
        }
        return 0;
    }

    public static void main(String args[]) throws Exception{
        System.exit(ToolRunner.run(new YbjMC3(), args));
    }
}
