package cas.ihep.hadoop.da;

import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.hadoop.util.HadoopLuaEval;
import cas.ihep.hadoop.util.HepFileInputFormat;
import cas.ihep.hadoop.util.Utils;
import cas.ihep.util.MapBindings;
import cas.ihep.util.RandomGenerator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.luaj.vm2.LuaBoolean;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.StandardOpenOption;

/**
 * Hadoop {@link Tool} driver that submits a MapReduce job configured by a Lua
 * option file. Usage: {@code Ybj5 <optionFile> <runFile>} — args[0] is a local
 * Lua option file evaluated to build the job configuration, args[1] is the run
 * file made available to the tasks (copied to HDFS if it is a plain local path).
 */
public class Ybj5 extends Configured implements Tool {
    // Fixed: the logger was previously created with Ybj3.class, which mislabeled
    // every log line emitted by this driver.
    private static final Log LOG = LogFactory.getLog(Ybj5.class);
    // Value of fs.defaultFS for the target cluster; cached by run() and used to
    // build absolute HDFS URIs for the temporary option/run files.
    private String defaultFS;
    // Set from the Lua option "Hadoop_WaitFor": when true, run() blocks until
    // the submitted job completes and reflects its success in the exit code.
    private boolean waitFor = false;

    /**
     * Evaluates the Lua option file and transfers its bindings into the job.
     * <p>
     * Side effects on {@code conf}: sets {@link Utils#GlobalValueKey} to a fresh
     * temporary HDFS path, optionally sets the queue ("Hadoop_Queue" binding),
     * records {@code waitFor} ("Hadoop_WaitFor" binding), adds input paths and
     * creates the output directory via {@link Utils}. The remaining bindings are
     * serialized to the temporary HDFS file so tasks can read them.
     *
     * @param optionFile local path of the Lua option file
     * @param conf       job configuration to populate
     * @param fs         HDFS file system used when resolving input paths
     * @param ufs        unified file system used to create the temp global-value stream
     * @throws IOException        on read/write failure
     * @throws ScriptException    if the Lua script fails to evaluate
     * @throws URISyntaxException if the generated temp path is not a valid URI
     */
    private void readingOptionFile(String optionFile, Configuration conf, FileSystem fs, UnifiedFileSystem ufs) throws IOException, ScriptException, URISyntaxException {
        LOG.info("using option file: " + optionFile);
        ScriptEngineManager sem = new ScriptEngineManager();
        ScriptEngine e = sem.getEngineByName("luaj");
        // Unique per-submission path so concurrent submissions do not collide.
        String tmpoptfile = defaultFS + Utils.GlobalValuePrefix + RandomGenerator.instance.next(Ybj5.class);
        conf.set(Utils.GlobalValueKey, tmpoptfile);
        URI tmpOpt = new URI(tmpoptfile);
        LOG.info("using global value: " + tmpoptfile);
        try (Reader optreader = new FileReader(optionFile);
             ObjectOutputStream oos = new ObjectOutputStream(ufs.createStream(
                     tmpOpt))) {
            MapBindings bd = new MapBindings();
            HadoopLuaEval.addExternalVariable(bd, conf);
            e.eval(optreader, bd);
            // Bindings consumed here are removed so they are not serialized below.
            Object value = bd.remove("Hadoop_Queue");
            if (value != null) {
                conf.set("mapred.job.queue.name", (String) value);
            }
            value = bd.remove("Hadoop_WaitFor");
            if (value != null) {
                waitFor = ((LuaBoolean) value).toboolean();
            }
            Utils.addInputPaths(conf, bd, fs, defaultFS);
            Utils.mkdirOutputPath(bd, defaultFS, ufs);
            oos.writeObject(HadoopLuaEval.convertForSerialize(bd));
            oos.flush();
        }
        LOG.info("submiting to queue: " + conf.get("mapred.job.queue.name"));
    }

    /**
     * Resolves the run file into a cluster-visible URI and records it in the
     * job configuration under {@link Utils#RunfileKey}.
     * <ul>
     *   <li>already a URI with a scheme (contains "://") — used as-is;</li>
     *   <li>local path under the {@code /hdfs/} mount — rewritten to the
     *       {@code defaultFS} URI;</li>
     *   <li>any other local path — copied to a temporary HDFS file (and
     *       {@link Utils#RunfileUsetmp} set so it can be cleaned up later).</li>
     * </ul>
     *
     * @param optionFile run-file path or URI (args[1])
     * @param jobconf    job configuration to update
     * @param ufs        unified file system used to upload the temp copy
     * @throws IOException        on copy failure
     * @throws URISyntaxException if a constructed URI is invalid
     */
    private void processingRunfile(String optionFile, Configuration jobconf, UnifiedFileSystem ufs) throws IOException, URISyntaxException {
        int idx = optionFile.indexOf("://");
        if (idx >= 0) {
            URI jofUri = new URI(optionFile);
            optionFile = jofUri.toString();
            LOG.info("using run file: " + optionFile);
            jobconf.set(Utils.RunfileKey, optionFile);
        } else {
            File jOptionFile = new File(optionFile);
            optionFile = jOptionFile.getCanonicalPath();
            if (optionFile.startsWith("/hdfs/")) {
                // "/hdfs/x" is a local mount of HDFS; substring(5) keeps the
                // leading '/' so the result is defaultFS + "/x".
                optionFile = defaultFS + optionFile.substring(5);
                LOG.info("using run file: " + optionFile);
                jobconf.set(Utils.RunfileKey, optionFile);
            } else {
                String tmpoptfile = defaultFS + "/tmp/" + RandomGenerator.instance.next(Ybj5.class);
                jobconf.set(Utils.RunfileKey, tmpoptfile);
                jobconf.set(Utils.RunfileUsetmp, "true");
                LOG.info("using tmp run file: " + tmpoptfile);
                URI tmpOpt = new URI(tmpoptfile);
                try (FileChannel is = FileChannel.open(jOptionFile.toPath(), StandardOpenOption.READ);
                     WritableByteChannel os = Channels.newChannel(ufs.createStream(tmpOpt))) {
                    // transferTo may move fewer bytes than requested (see its
                    // Javadoc); loop until the whole file has been copied.
                    long size = is.size();
                    long pos = 0;
                    while (pos < size) {
                        long n = is.transferTo(pos, size - pos, os);
                        if (n <= 0) {
                            throw new IOException("transferTo stalled at " + pos + "/" + size + " for " + tmpoptfile);
                        }
                        pos += n;
                    }
                }
            }
        }
    }


    /**
     * Builds and submits the job.
     *
     * @param args args[0] = Lua option file, args[1] = run file
     * @return 0 on success, 1 on missing arguments or (when waiting) job failure
     * @throws Exception propagated from configuration, upload, or submission
     */
    @Override
    public int run(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Missing option file and run file");
            return 1;
        }
        UnifiedFileSystem.Configuration uConf = UnifiedFileSystem.newConfiguration(getConf());
        Configuration jobconf = uConf.hadoopConf();
        defaultFS = jobconf.get("fs.defaultFS", "hdfs://hadoop06.ihep.ac.cn:8020");
        UnifiedFileSystem ufs = UnifiedFileSystem.get(uConf);
        readingOptionFile(args[0], jobconf, ufs.hdfsSystem(), ufs);
        processingRunfile(args[1], jobconf, ufs);
        // LOGNAME is unset in some environments (non-login shells, Windows) and
        // Configuration.set rejects null values; fall back to the JVM user name.
        String userName = System.getenv("LOGNAME");
        if (userName == null) {
            userName = System.getProperty("user.name");
        }
        if (userName != null) {
            jobconf.set("user.name", userName);
        }
        Job job = Job.getInstance(jobconf);
        job.setJarByClass(Ybj5.class);
        job.setInputFormatClass(HepFileInputFormat.class);
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setMapperClass(YbjMapper5.class);
        job.setReducerClass(YbjReducer5.class);
        job.setNumReduceTasks(1);
        job.submit();
        if (waitFor) {
            // Previously always returned 0; now the exit code reflects whether
            // the job actually succeeded when the caller asked to wait for it.
            return job.waitForCompletion(true) ? 0 : 1;
        }
        return 0;
    }

    public static void main(String args[]) throws Exception {
        System.exit(ToolRunner.run(new JobConf(), new Ybj5(), args));
    }
}
