package cas.ihep.hadoop.da;

import cas.ihep.hadoop.util.*;
import com.google.common.io.Closer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import javax.script.*;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;

/**
 * Mapper that executes a compiled Lua ("luaj") script against each input file.
 *
 * <p>The whole-file input is delivered as a single {@code (JobID, Path)} record
 * (see {@link #run}). The Lua script and a serialized map of global values are
 * read from HDFS locations named by the job configuration keys
 * {@code ybj.runfile.hdfs} and {@code ybj.global.value} respectively.
 */
public class YbjMapper3 extends Mapper<JobID, Path, Text, Text> {
    private static final Log LOG = LogFactory.getLog(YbjMapper3.class);

    /** Raw HDFS client opened in {@link #setup}; closed in {@link #cleanup}. */
    private DFSClient dfsClient;
    /** Project-local HDFS helper service; closed in {@link #cleanup}. */
    private Service4Hep serviceForHep;
    /** Filesystem URI from {@code fs.defaultFS}, exposed to the Lua engine. */
    private String defaultFS;

    /**
     * Drives the mapper lifecycle for a single whole-file split: setup, one
     * map() call on the split's path, then cleanup.
     *
     * <p>Fix: cleanup now runs in a {@code finally} block so that the
     * {@code DFSClient} and {@code Service4Hep} opened in setup() are released
     * even when map() throws; the original leaked them on failure.
     */
    @Override
    public void run(Context context) throws IOException {
        setup(context);
        try {
            JobID jobId = context.getJobID();
            FileSplit split = (FileSplit) context.getInputSplit();
            Path path = split.getPath();
            map(jobId, path, context);
        } finally {
            cleanup(context);
        }
    }

    /**
     * Opens the HDFS client and helper service used by {@link #map}.
     *
     * @throws IOException if {@code fs.defaultFS} is not a valid URI or the
     *         client cannot be created
     */
    protected void setup(Context context) throws IOException {
        Configuration conf = context.getConfiguration();
        defaultFS = conf.get("fs.defaultFS", "hdfs://hadoop06.ihep.ac.cn:8020");
        URI nn;
        try {
            nn = new URI(defaultFS);
        } catch (URISyntaxException e) {
            // Preserve the cause so the bad URI is visible in the job log.
            throw new IOException(e);
        }
        dfsClient = new DFSClient(nn, conf);
        serviceForHep = new Service4HepAC(dfsClient, conf);
    }

    /**
     * Releases the resources opened in {@link #setup}.
     *
     * <p>Fix: the two close() calls are decoupled with try/finally so a
     * failure closing the service no longer leaks the {@code DFSClient}, and
     * null-guards make cleanup safe when setup() failed partway through.
     */
    protected void cleanup(Context context) throws IOException {
        try {
            if (serviceForHep != null) {
                serviceForHep.close();
            }
        } finally {
            if (dfsClient != null) {
                dfsClient.close();
            }
        }
    }

    /**
     * Processes one input file: deserializes the shared global-value map,
     * builds a Lua engine bound to this file, then compiles and evaluates the
     * job's Lua script with those bindings.
     *
     * <p>All HDFS streams and any output files registered by the script are
     * tracked by a Guava {@link Closer} and released via try-with-resources.
     *
     * @param jobId   id of the enclosing job (unused directly; kept for the
     *                mapper signature)
     * @param path    HDFS path of the input file for this split
     * @param context task context supplying the job configuration
     * @throws IOException on any failure; non-IO failures are logged and
     *         wrapped
     */
    @Override
    public void map(JobID jobId, Path path, Context context) throws IOException {
        try (Closer closer = Closer.create()) {
            final String inputfilename = path.getName();
            Configuration conf = context.getConfiguration();
            // Shared per-job values were serialized to HDFS by the driver.
            ObjectInputStream ois = closer.register(
                    new ObjectInputStream(dfsClient.open(conf.get("ybj.global.value"))));
            @SuppressWarnings("unchecked")
            final HashMap<String, Object> hValues = (HashMap<String, Object>) ois.readObject();
            HadoopLuaEval.convertForBindings(hValues);
            // Normalize the output directory so it always ends with '/'.
            String outputdirt = (String) hValues.get("Hadoop_OutputDir");
            final String outputdir =
                    outputdirt.charAt(outputdirt.length() - 1) == '/' ? outputdirt : outputdirt + '/';
            // Local (staged) copy of the input file, resolved by the helper service.
            final String realinputfilename = serviceForHep.getFile(path.toUri().getPath());
            final HadoopLuaEngine engine = new HadoopLuaAbstractEngine() {
                final ArrayList<String> filearray = new ArrayList<>();   // absolute file path
                final ArrayList<String> outputfiles = new ArrayList<>(); // logical file name

                protected String getLogPrefix() {
                    return inputfilename;
                }

                protected Log getLogger() {
                    return LOG;
                }

                protected String getDefaultFS() {
                    return defaultFS;
                }

                protected DFSClient getDFSClient() {
                    return dfsClient;
                }

                /**
                 * Registers a script output. For hdfs:// targets a block-backed
                 * temp file is allocated through HdfsFileCloser (closed with the
                 * outer Closer); otherwise the path is used as a local file.
                 */
                @Override
                public void hadoop_addOutput(String args) throws Exception {
                    String out;
                    URI tmp = new URI(outputdir + args);
                    if ("hdfs".equals(tmp.getScheme())) {
                        HdfsFileCloser hfile = closer.register(
                                new HdfsFileCloser(dfsClient, serviceForHep, tmp.getPath()));
                        // Stage the output next to its allocated HDFS block.
                        hfile.opath = hfile.s4h.getTmpFile(hfile.lb)
                                + '/' + hfile.lb.getBlock().getBlockName();
                        hfile.rfs.put(hfile.outputFile, hfile.hfstatus.getFileId(),
                                hfile.opath, hfile.dfs);
                        out = hfile.opath;
                    } else {
                        out = new File(tmp.getPath()).getAbsolutePath();
                    }
                    filearray.add(out);
                    outputfiles.add(args);
                }

                // NOTE: Lua-facing accessors use 1-based indices.
                public String hadoop_getOutputFile(int i) {
                    return filearray.get(i - 1);
                }

                // Index is ignored: a split carries exactly one input file.
                public String hadoop_getInputFile(int i) {
                    return realinputfilename;
                }

                public String hadoop_getInput(int i) {
                    return inputfilename;
                }

                public String hadoop_getOutput(int i) {
                    return outputfiles.get(i - 1);
                }

            };
            // NOTE(review): reader uses the platform default charset — confirm
            // the Lua scripts are ASCII/platform-encoded before changing this.
            Reader optreader = closer.register(
                    new InputStreamReader(dfsClient.open(conf.get("ybj.runfile.hdfs"))));
            ScriptEngineManager sem = new ScriptEngineManager();
            ScriptEngine e = sem.getEngineByName("luaj");
            CompiledScript cs = ((Compilable) e).compile(optreader);
            HadoopLuaEval.eval(cs, engine, new SimpleBindings(hValues));
        } catch (IOException e) {
            LOG.error(e);
            throw e;
        } catch (Exception e) {
            // Wrap script/reflection failures so the framework sees IOException.
            LOG.error(e);
            throw new IOException(e);
        }
    }

}
