package cas.ihep.spark.mc;

import cas.ihep.hadoop.util.*;
import cas.ihep.util.ByteArrayOutputStreamZC;
import cas.ihep.util.MapBindings;
import cas.ihep.util.RandomGenerator;
import cas.ihep.util.StaticCloser;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.luaj.vm2.LuaValue;
import scala.Tuple2;

import javax.script.Compilable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;

public class YbjMC4 implements Serializable{
    private static final Log LOG= LogFactory.getLog(YbjMC4.class);

    private static FileSystem hdfs;
    private static DFSClient dfsClient;
    private static Configuration hdpConf;
    private static String defaultFS;
    private static Service4Hep s4h=null;

    /**
     * Lazily creates and caches the shared {@link Service4Hep} instance,
     * registering it with {@code StaticCloser} for JVM-shutdown cleanup.
     *
     * <p>The original used double-checked locking, but {@code s4h} is not
     * {@code volatile}, so under the Java Memory Model a thread could observe
     * a partially-constructed object. A plain {@code synchronized} method is
     * simpler and correct; this is called rarely, so contention is a non-issue.
     *
     * @return the process-wide Service4Hep instance
     */
    private static synchronized Service4Hep serviceForHep(){
        if(s4h==null){
            s4h= StaticCloser.instance.register(new Service4HepAC(dfsClient,hdpConf));
        }
        return s4h;
    }

    static{
        // One-time Hadoop bootstrap: load core-site/hdfs-site from $HADOOP_HOME
        // (or -Dhadoop.home), then open the process-wide FileSystem/DFSClient.
        hdpConf=new Configuration();
        try {
            String hdpHome=System.getenv("HADOOP_HOME");
            if(hdpHome==null){
                hdpHome=System.getProperty("hadoop.home");
                if (hdpHome==null){
                    throw new RuntimeException("hadoop home must be defined");
                }
            }
            // The streams are registered with StaticCloser instead of being
            // closed here: Configuration.addResource(InputStream) consumes the
            // stream lazily, so it must remain open until first property read.
            FileInputStream fis = StaticCloser.instance.register(new FileInputStream(hdpHome+ "/etc/hadoop/core-site.xml"));
            hdpConf.addResource(fis);
            fis=StaticCloser.instance.register(new FileInputStream(hdpHome+"/etc/hadoop/hdfs-site.xml"));
            hdpConf.addResource(fis);
            defaultFS=hdpConf.get("fs.defaultFS");
            hdfs=StaticCloser.instance.register(FileSystem.get(hdpConf));
            if(hdfs instanceof DistributedFileSystem){
                // Reuse the client embedded in the DistributedFileSystem.
                dfsClient=((DistributedFileSystem)hdfs).getClient();
            }else{
                dfsClient=StaticCloser.instance.register(new DFSClient(new URI(defaultFS),hdpConf));
            }
            LOG.info("FileSystem: "+hdfs);
            LOG.info("DFSClient: "+dfsClient);
        }catch (RuntimeException e){
            throw e;
        }catch (Exception e){
            // Previously only FileNotFoundException was rethrown while every
            // other exception was merely logged, leaving hdfs/dfsClient/defaultFS
            // null and guaranteeing NPEs later. Fail fast with the cause instead.
            LOG.error("Exception in initialization: ",e);
            throw new RuntimeException("Failed to initialize Hadoop environment",e);
        }
    }

    /**
     * Evaluates the Lua job-option file into {@code bd} and extracts the
     * starting job index ("Index") and the number of map tasks ("Times"),
     * removing both keys from the bindings. A "Hadoop_OutputDir" beginning
     * with "/hdfs/" is rewritten to a fully-qualified URI on fs.defaultFS.
     *
     * @param optionfile local path of the Lua option script
     * @param bd         bindings populated as a side effect of the evaluation
     * @return (startNum, numMaps)
     * @throws IOException     if the option file cannot be read
     * @throws ScriptException if the Lua script fails to evaluate
     */
    private static Tuple2<Integer,Integer> readOptionFile(String optionfile, final MapBindings bd) throws IOException, ScriptException {
        LOG.info("using option file: "+optionfile);
        ScriptEngineManager sem=new ScriptEngineManager();
        ScriptEngine e=sem.getEngineByName("luaj");
        if(e==null){
            // Without this guard a missing luaj jar surfaced as a bare NPE.
            throw new IllegalStateException("luaj script engine not found on classpath");
        }
        // Read with an explicit charset: FileReader uses the platform default,
        // which can differ between the submitting host and cluster nodes.
        try(Reader reader=new InputStreamReader(new FileInputStream(optionfile), StandardCharsets.UTF_8)){
            e.eval(reader,bd);
            int startNum=(Integer)bd.remove("Index");
            int numMaps=(Integer)bd.remove("Times");
            String outputdir = (String) bd.get("Hadoop_OutputDir");
            if(outputdir!=null && outputdir.startsWith("/hdfs/")){
                // "/hdfs/foo" -> "<fs.defaultFS>/foo" (keep the leading slash).
                outputdir=defaultFS+ outputdir.substring(5);
                bd.put("Hadoop_OutputDir", outputdir);
            }
            return new Tuple2<>(startNum,numMaps);
        }
    }

    /**
     * Loads the Lua run script fully into memory. Three sources are supported:
     * an explicit hdfs:// URI, a local-looking path under "/hdfs/" (resolved
     * against fs.defaultFS), or a plain local file.
     *
     * @param runfile path or URI of the run script
     * @return the raw bytes of the script
     * @throws IOException        if the file cannot be read, or is too large
     *                            to buffer in a byte array (&ge; 2 GiB)
     * @throws URISyntaxException if {@code runfile} is not a valid URI
     */
    private static byte[] readRunfile(String runfile)throws IOException,URISyntaxException{
        URI jofUri=new URI(runfile);
        InputStream is;
        ByteArrayOutputStream bos;
        try(Closer closer=Closer.create()) {
            if ("hdfs".equals(jofUri.getScheme())) {
                Path path=new Path(jofUri);
                is=closer.register(hdfs.open(path));
                // Pre-size the buffer to the exact file length (zero-copy stream).
                bos=new ByteArrayOutputStreamZC(checkedSize(hdfs.getFileStatus(path).getLen(),runfile));
                LOG.info("using run file: "+jofUri.toString());
            } else {
                File jOptionFile = new File(runfile);
                runfile = jOptionFile.getAbsolutePath();
                if (runfile.startsWith("/hdfs/")) {
                    // "/hdfs/foo" -> "<fs.defaultFS>/foo": treat as an HDFS path.
                    runfile = runfile.substring(5);
                    Path path=new Path(defaultFS+runfile);
                    is=closer.register(hdfs.open(path));
                    bos=new ByteArrayOutputStreamZC(checkedSize(hdfs.getFileStatus(path).getLen(),runfile));
                    LOG.info("using run file: "+path.toString());
                } else {
                    is=closer.register(new FileInputStream(jOptionFile));
                    bos=new ByteArrayOutputStreamZC(checkedSize(jOptionFile.length(),runfile));
                    LOG.info("using run file: "+jOptionFile.getAbsolutePath());
                }
            }
            ByteStreams.copy(is,bos);
        }
        return bos.toByteArray();
    }

    /**
     * Narrows a file length to int for buffer sizing. The original cast
     * {@code (int)sz} silently truncated lengths &ge; 2 GiB, which would
     * corrupt the buffered script; fail loudly instead.
     */
    private static int checkedSize(long len,String name)throws IOException{
        if(len<0 || len>Integer.MAX_VALUE){
            throw new IOException("run file too large to buffer in memory ("+len+" bytes): "+name);
        }
        return (int)len;
    }

    /**
     * Driver entry point.
     *
     * <p>args[0] is the Lua job-option file (local path) that defines
     * {@code Index}, {@code Times} and {@code Hadoop_OutputDir}; args[1] is
     * the Lua run script (local path or hdfs:// URI). The option bindings and
     * the raw script bytes are broadcast, then one Spark map task is run per
     * job index, each task evaluating the script through the luaj engine.
     */
    public static void main(String args[])throws Exception{
        if(args.length<2){
            LOG.error("Missing <Job Option> and <Job Run>");
            return;
        }
        final SparkConf conf=new SparkConf();
        try(Closer mainCloser=Closer.create()){
            MapBindings bd=new MapBindings();
            Tuple2<Integer,Integer> ret=readOptionFile(args[0],bd);
            int startNum=ret._1(),numMaps=ret._2();
            byte[] runfileStreams=readRunfile(args[1]);
            JavaSparkContext jctx=mainCloser.register(new JavaSparkContext(conf));
            // One element per map task: job indices startNum..startNum+numMaps-1.
            ArrayList<Integer> l=new ArrayList<>(numMaps);
            for(int i=0;i<numMaps;i++){
                int val=startNum+i;
                l.add(val);
                LOG.info("Wrote input for I #"+val);
            }
            // numMaps partitions -> exactly one index per partition/task.
            JavaRDD<Integer> rdd=jctx.parallelize(l,numMaps);
            // Make the bindings serializable before broadcasting them.
            HadoopLuaEval.convertForSerialize(bd);
            final Broadcast<MapBindings> bdbroadcast=jctx.broadcast(bd);
            final Broadcast<byte[]> scriptBroadcast=jctx.broadcast(runfileStreams);
            final String jobName= RandomGenerator.instance.next(YbjMC4.class);
            long cnt=rdd.map(new Function<Integer, Integer>() {
                @Override
                public Integer call(Integer iValue) throws Exception {
                    LOG.info("I="+iValue);
                    try(final Closer closer=Closer.create()) {
                        // Per-task copy: the broadcast bindings are shared and
                        // must not be mutated directly.
                        final MapBindings bindings = new MapBindings(bdbroadcast.getValue());
                        HadoopLuaEval.convertForBindings(bindings);
                        String outputdirt = (String) bindings.get("Hadoop_OutputDir");
                        // Normalize to a trailing slash so names concatenate cleanly.
                        final String outputDir = outputdirt.charAt(outputdirt.length() - 1) == '/' ? outputdirt : outputdirt + '/';
                        final boolean isWrite2Hdfs=outputDir.startsWith("hdfs://");
                        bindings.put("Index", iValue);
                        // Callbacks exposed to the Lua script under the hadoop_* names.
                        HadoopLuaEngine engine=new HadoopLuaAbstractEngine() {
                            // Directory the user program writes into; set on the
                            // first hadoop_addOutput call.
                            String tempdir=null;
                            ArrayList<String> outputfiles=new ArrayList<>();

                            public String hadoop_getInputFile(int idx){
                                return "/";
                            }

                            public String hadoop_getInput(int i){
                                return iValue+".txt";
                            }

                            // NOTE: 1-based index, matching Lua convention.
                            public String hadoop_getOutput(int i){
                                return outputfiles.get(i-1);
                            }

                            protected String getLogPrefix(){
                                return jobName+iValue;
                            }

                            protected String getDefaultFS(){
                                return defaultFS;
                            }

                            @Override
                            protected DFSClient getDFSClient() {
                                return dfsClient;
                            }

                            protected Log getLogger(){
                                return LOG;
                            }

                            @Override
                            public void hadoop_addOutput(String args) throws Exception {
                                // Declares an output file name relative to outputDir.
                                outputfiles.add(args);
                                String outputFile=outputDir+args;
                                LOG.info("adding output file: "+outputFile);
                                URI outuri=new URI(outputFile);
                                if(isWrite2Hdfs){
                                    // NOTE(review): this appears to pre-create the HDFS
                                    // file and allocate its first block so the MC program
                                    // can write the block's local backing temp file
                                    // directly, bypassing the normal write pipeline —
                                    // confirm against Service4Hep before relying on
                                    // this description.
                                    HdfsFileCloserMc file=closer.register(new HdfsFileCloserMc(dfsClient,serviceForHep(),outuri.getPath()));
                                    // Args presumably (overwrite=true, replication=1,
                                    // blockSize=2 GiB) — verify Service4Hep.createFile.
                                    file.hfstatus= file.s4h.createFile(file.outputFile,true,(short)1,2147483648L);
                                    file.lb= file.s4h.addBlock(file.outputFile,file.hfstatus.getFileId());
                                    String tempfile= file.s4h.getTmpFile(file.lb);
                                    if (tempfile.charAt(tempfile.length()-1)!='/'){
                                        tempfile=tempfile+'/';
                                    }
                                    // All outputs of this task share the first block's
                                    // temp directory.
                                    if(tempdir==null){
                                        tempdir=tempfile;
                                    }
                                    file.opath=tempdir+args; //full absolute path used by user program
                                    file.cpfile=tempfile+file.lb.getBlock().getBlockName();// full absolute path
                                    file.rfs.put(file.outputFile,file.hfstatus.getFileId(),file.opath,file.dfs);
                                }else if(tempdir==null){
                                    // Non-HDFS output: write straight into outputDir.
                                    tempdir=outputDir;
                                }
                            }

                            public String hadoop_getOutputFile(int idx){
                                return tempdir;
                            }

                            // Remove stale files from a previous run that would
                            // collide with the declared output names.
                            protected void preRun(){
                                if(tempdir!=null && !tempdir.isEmpty()){
                                    File dir=new File(tempdir);
                                    if(dir.isDirectory()) {
                                        File[] files = dir.listFiles();
                                        if(files!=null) {
                                            for (File f1 : files) {
                                                for (String f2 : outputfiles) {
                                                    String f1s=f1.getName();
                                                    if (f1s.equals(f2) && f1.delete()) {
                                                        LOG.info("Deleting existed file: "+f1s);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }

                            @Override
                            public LuaValue hadoop_value(String name) {
                                // Bridges binding values into Lua; NIL for absent keys.
                                Object obj=bindings.get(name);
                                return obj!=null?HadoopLuaEval.convertObjectValue(obj):LuaValue.NIL;
                            }
                        };
                        Reader optreader=closer.register(new InputStreamReader(new ByteArrayInputStream(scriptBroadcast.getValue())));
                        ScriptEngineManager sem=new ScriptEngineManager();
                        ScriptEngine e=sem.getEngineByName("luaj");
                        // Compile once, then evaluate with the per-task engine/bindings.
                        HadoopLuaEval.eval(((Compilable)e).compile(optreader),engine,bindings);
                        return iValue;
                    }
                }
            }).count();
            // count() forces evaluation of every partition.
            LOG.info(cnt+" maps has been completed.");
        }
    }

}
