package cas.ihep.hadoop.mc;

import cas.ihep.alluxio.AlluxioSystem;
import cas.ihep.hadoop.util.*;
import com.google.common.io.Closer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import javax.script.*;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

/**
 * Mapper that runs one Lua-scripted Monte-Carlo step per input record.
 *
 * <p>Each record's value is an integer index. The mapper loads a shared map of
 * "global values" (Java-serialized on HDFS, cached once per JVM), injects the
 * record index as {@code Index}, and evaluates the job's Lua run-file
 * ({@code ybj.runfile.hdfs}) through a {@code HadoopLuaEngine} that resolves
 * input/output file names against HDFS (or, eventually, Alluxio).
 *
 * <p>Thread-safety: Hadoop invokes {@code setup}/{@code map}/{@code cleanup}
 * on a single thread per task; the only cross-task shared state is the
 * {@code gVals} cache, which is published safely via volatile + class lock.
 */
public class YbjMCMapper3 extends Mapper<LongWritable, Text, Text, Text> {

    private static final Log LOG = LogFactory.getLog(YbjMCMapper3.class);

    private DFSClient dfsClient;
    private Service4Hep serviceForHep;
    private String defaultFS;

    /**
     * Opens a {@link DFSClient} against {@code fs.defaultFS} and wraps it in a
     * {@code Service4HepAC} helper used for temp-file allocation.
     *
     * @throws IOException if the name-node URI is malformed or the client
     *                     cannot be created
     */
    @Override
    protected void setup(Context context) throws IOException {
        Configuration conf = context.getConfiguration();
        defaultFS = conf.get("fs.defaultFS", "hdfs://hadoop06.ihep.ac.cn:8020");
        URI nn;
        try {
            nn = new URI(defaultFS);
        } catch (URISyntaxException e) {
            // Surface a malformed fs.defaultFS as an IOException with cause kept.
            throw new IOException(e);
        }
        dfsClient = new DFSClient(nn, conf);
        serviceForHep = new Service4HepAC(dfsClient, conf);
    }

    /**
     * Releases both clients opened in {@link #setup}.
     *
     * <p>try/finally guarantees {@code dfsClient} is closed even when
     * {@code serviceForHep.close()} throws; null guards tolerate a
     * {@code setup} that failed before assigning the fields.
     */
    @Override
    protected void cleanup(Context context) throws IOException {
        try {
            if (serviceForHep != null) {
                serviceForHep.close();
            }
        } finally {
            if (dfsClient != null) {
                dfsClient.close();
            }
        }
    }

    /**
     * Evaluates the job's Lua run-file for one record.
     *
     * @param key   record offset (unused)
     * @param value the record index as text; must parse as an int
     * @throws IOException on any failure (non-IO causes are wrapped)
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException {
        try (Closer closer = Closer.create()) {
            Configuration conf = context.getConfiguration();
            final String jobName = conf.get("mapreduce.job.name");
            final String iValue = value.toString();
            // The global-values file is identical for every record: load it once
            // per JVM through the cached loader instead of re-deserializing it
            // from HDFS on each map() call. Copy before mutating so the shared
            // cache is never polluted with this record's Index.
            final Map<String, Object> hValues =
                    new HashMap<>(globalValues(dfsClient, conf.get("ybj.global.value")));
            hValues.put("Index", Integer.parseInt(iValue));
            String outputDirt = (String) hValues.get("Hadoop_OutputDir");
            // Normalize to a trailing '/' so file names can be appended directly.
            final String outputDir =
                    outputDirt.charAt(outputDirt.length() - 1) == '/' ? outputDirt : outputDirt + '/';
            HadoopLuaEngine engine = new HadoopLuaAbstractEngine() {
                // Temp directory chosen for the first HDFS output; reused for all
                // subsequent outputs of this record so they land together.
                String tempdir = null;
                // Output file names in the order the Lua script registered them
                // (hadoop_getOutput is 1-based from the script's point of view).
                final ArrayList<String> outputfiles = new ArrayList<>();

                public String hadoop_getInputFile(int idx) {
                    return iValue + ".txt";
                }

                public String hadoop_getInput(int i) {
                    return iValue + ".txt";
                }

                public String hadoop_getOutput(int i) {
                    // Lua uses 1-based indices.
                    return outputfiles.get(i - 1);
                }

                protected String getDefaultFS() {
                    return defaultFS;
                }

                protected String getLogPrefix() {
                    return jobName + iValue;
                }

                @Override
                protected DFSClient getDFSClient() {
                    return dfsClient;
                }

                protected Log getLogger() {
                    return LOG;
                }

                /**
                 * Registers one output file. For hdfs:// outputs, allocates a
                 * local temp path via Service4Hep and records the mapping so the
                 * finished file is copied back to its HDFS block.
                 */
                @Override
                public void hadoop_addOutput(String args) throws Exception {
                    outputfiles.add(args);
                    String outputFile = outputDir + args;
                    LOG.info("Output file is " + outputFile);
                    URI outuri = new URI(outputFile);
                    String scheme = outuri.getScheme();
                    if ("hdfs".equals(scheme)) {
                        HdfsFileCloserMc file = closer.register(
                                new HdfsFileCloserMc(dfsClient, serviceForHep, outuri.getPath()));
                        String tempfile = serviceForHep.getTmpFile(file.lb);
                        if (tempfile.charAt(tempfile.length() - 1) != '/') {
                            tempfile = tempfile + '/';
                        }
                        // Pin the temp dir on the first output so every output of
                        // this record shares one directory.
                        if (tempdir == null) {
                            tempdir = tempfile;
                        }
                        file.opath = tempdir + args; //full absolute path used by user program
                        file.cpfile = tempfile + file.lb.getBlock().getBlockName();// full absolute path
                        file.rfs.put(file.outputFile, file.hfstatus.getFileId(), file.opath, file.dfs);
                    } else if ("alluxio".equals(scheme)) {
                        // TODO(review): alluxio outputs are not wired up yet — the
                        // system handle is obtained but never used. Confirm intent.
                        AlluxioSystem alluxioSystem = AlluxioSystem.get();
                    }
                }

                public String hadoop_getOutputFile(int idx) {
                    return tempdir;
                }

                /**
                 * Before running the user program, delete any stale local files
                 * left in the temp dir that clash with registered output names,
                 * so a rerun does not append to / reuse old results.
                 */
                protected void preRun() {
                    if (tempdir != null && !tempdir.isEmpty()) {
                        File dir = new File(tempdir);
                        if (dir.isDirectory()) {
                            File[] files = dir.listFiles();
                            if (files != null) {
                                for (File f1 : files) {
                                    for (String f2 : outputfiles) {
                                        String f1s = f1.getName();
                                        if (f1s.equals(f2)) {
                                            //noinspection ResultOfMethodCallIgnored
                                            f1.delete();
                                            LOG.info("Deleting existed file: " + f1s);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

            };
            Reader optreader = closer.register(
                    new InputStreamReader(dfsClient.open(conf.get("ybj.runfile.hdfs"))));
            ScriptEngineManager sem = new ScriptEngineManager();
            ScriptEngine e = sem.getEngineByName("luaj");
            CompiledScript cs = ((Compilable) e).compile(optreader);
            HadoopLuaEval.eval(cs, engine, new SimpleBindings(hValues));
        } catch (IOException e) {
            LOG.error(e);
            throw e;
        } catch (Exception e) {
            LOG.error(e);
            // Wrap non-IO failures (script, URI, parse) with cause preserved.
            throw new IOException(e);
        }
    }

    /**
     * Loads the Lua global-values map from HDFS once per JVM (double-checked
     * locking over the volatile {@link #gVals} cache) and converts it for use
     * as script bindings.
     *
     * <p>NOTE(review): this Java-native deserializes a file from HDFS — assumed
     * to be a trusted job artifact written by the driver; confirm no untrusted
     * party can write {@code ybj.global.value}.
     *
     * @param dfsc client used to open the file
     * @param path HDFS path of the serialized map
     * @throws IOException            if the file cannot be read
     * @throws ClassNotFoundException if the stream references an unknown class
     */
    private static Map<String, Object> globalValues(DFSClient dfsc, String path)
            throws IOException, ClassNotFoundException {
        if (gVals == null) {
            synchronized (YbjMCMapper3.class) {
                if (gVals == null) {
                    try (ObjectInputStream ois = new ObjectInputStream(dfsc.open(path))) {
                        //noinspection unchecked
                        gVals = HadoopLuaEval.convertForBindings((Map<String, Object>) ois.readObject());
                    }
                }
            }
        }
        return gVals;
    }

    // volatile is required for double-checked locking: without it another thread
    // may observe a non-null reference to a partially published map (JLS 17.4).
    private static volatile Map<String, Object> gVals = null;

}
