package cas.ihep.hadoop.da;

import cas.ihep.fs.UnifiedFile;
import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.hadoop.util.*;
import cas.ihep.util.MapBindings;
import com.google.common.io.Closer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;

/**
 * Mapper that processes one whole input file per split by evaluating a Lua
 * ("luaj") script against it. The script sees the split's file as its input
 * and may register any number of output files via {@code hadoop_addOutput}.
 *
 * <p>Lifecycle is driven manually in {@link #run}: {@code setup} opens a
 * {@link UnifiedFileSystem}, {@code map} does all the work for the single
 * split, and {@code cleanup} releases the filesystem.
 */
public class YbjMapper5 extends Mapper<JobID, Path, Text, Text> {


    private static final Log LOG = LogFactory.getLog(YbjMapper5.class);

    /** Default filesystem URI, read from {@code fs.defaultFS} in setup(). */
    private String defaultFS;
    /** Unified HDFS/Alluxio view; opened in setup(), closed in cleanup(). */
    private UnifiedFileSystem ufs;

    /**
     * Runs the map lifecycle for this split's single file.
     *
     * <p>cleanup() is invoked in a {@code finally} block so the
     * {@link UnifiedFileSystem} is released even when map() throws
     * (previously a failing map() skipped cleanup and leaked it).
     */
    @Override
    public void run(Context context) throws IOException {
        setup(context);
        try {
            JobID jobId = context.getJobID();
            FileSplit split = (FileSplit) context.getInputSplit();
            map(jobId, split.getPath(), context);
        } finally {
            cleanup(context);
        }
    }

    /** Reads the job configuration and opens the unified filesystem. */
    @Override
    protected void setup(Context context) {
        Configuration conf = context.getConfiguration();
        // NOTE(review): hard-coded fallback namenode address — confirm this
        // is intentional for jobs submitted without fs.defaultFS.
        defaultFS = conf.get("fs.defaultFS", "hdfs://hadoop06.ihep.ac.cn:8020");
        ufs = UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(conf));
    }

    /** Releases the unified filesystem opened in setup(). */
    @Override
    protected void cleanup(Context context) throws IOException {
        ufs.close();
    }

    /**
     * Processes one input file: deserializes the shared Lua bindings published
     * by the driver, opens the split's file (falling back to an explicit
     * Alluxio URI when the block has no resolvable local path), then evaluates
     * the configured Lua run-script against it. Output files registered by the
     * script are completed when the Closer unwinds.
     *
     * @param id      the job id (unused beyond the Mapper key contract)
     * @param path    the input file for this split
     * @param context supplies the job configuration
     * @throws IOException on any I/O failure, or wrapping URI/script/class
     *                     resolution errors
     */
    protected void map(JobID id, Path path, Context context) throws IOException {
        try (final Closer closer = Closer.create()) {
            Configuration conf = context.getConfiguration();

            // Shared bindings serialized by the driver. NOTE(review): native
            // Java deserialization — acceptable only because the stream comes
            // from this job's own staging area, not external input.
            ObjectInputStream ois = closer.register(new ObjectInputStream(
                    ufs.openStream(new URI(conf.get(Utils.GlobalValueKey)),
                            UnifiedFileSystem.defaultOpenOptions())));
            final MapBindings hValues = (MapBindings) ois.readObject();
            HadoopLuaEval.convertForBindings(hValues);

            // Base directory for outputs registered by the script.
            final URI outputDirUri = new URI((String) hValues.get("Hadoop_OutputDir")).normalize();
            final ArrayList<HadoopLuaOopEngine.HepFile> outputlist = new ArrayList<>();

            URI uri = path.toUri();
            UnifiedFile hdfsfile = ufs.open(uri);
            // No resolvable local path for the first block: remap /alluxio
            // paths onto the explicit Alluxio endpoint and reopen.
            if (hdfsfile.block(0).absolutePath() == null) {
                String rawpath = uri.getPath();
                if (rawpath.startsWith("/alluxio")) {
                    // NOTE(review): substring(8) keeps the leading '/', so the
                    // rebuilt URI contains ":19998//" — confirm Alluxio
                    // tolerates the doubled slash, or normalize here.
                    uri = new URI("alluxio://hadoop06.ihep.ac.cn:19998/" + rawpath.substring(8));
                }
                hdfsfile.close();
                hdfsfile = ufs.open(uri);
            }

            // The script's input; reopen is forbidden because the split's file
            // is opened exactly once per task.
            final HadoopHepFile input = closer.register(new HadoopHepFile(hdfsfile, true) {
                public void reopen() {
                    throw new RuntimeException("Never call this");
                }
            });
            LOG.info("Processing file: " + hdfsfile.path());

            // Bridge between the Lua script and this task's filesystem state.
            HadoopLuaOopEngine engine = new HadoopLuaOopAbstractEngine() {
                @Override
                protected DFSClient getDFSClient() {
                    return ufs.dfsClient();
                }

                @Override
                protected Log getLogger() {
                    return LOG;
                }

                @Override
                protected String getDefaultFS() {
                    return defaultFS;
                }

                @Override
                protected String getLogPrefix() {
                    return input.baseName();
                }

                /**
                 * Registers an output file for the script. {@code name} may be
                 * a full URI, an absolute path (resolved against the output
                 * directory's authority), or a relative name (resolved under
                 * the output directory). Best-effort: failures are logged and
                 * the output is skipped rather than aborting the task.
                 */
                @Override
                public void hadoop_addOutput(String name) {
                    name = name.trim();
                    if (name.isEmpty()) {
                        return;
                    }
                    int index = name.indexOf("://");
                    try {
                        final URI output;
                        if (index < 0) {
                            if (name.charAt(0) == '/') {
                                // Absolute path: keep the output dir's
                                // scheme/authority, substitute the path.
                                output = new URI(outputDirUri.getScheme(), outputDirUri.getUserInfo(),
                                        outputDirUri.getHost(), outputDirUri.getPort(), name,
                                        outputDirUri.getQuery(), outputDirUri.getFragment());
                            } else {
                                // Relative name: resolve under the output dir.
                                output = new URI(outputDirUri.toString() + '/' + name).normalize();
                            }
                        } else {
                            output = new URI(name).normalize();
                        }
                        outputlist.add(new HadoopHepFile(ufs.create(output), false) {
                            public void reopen() {
                                try {
                                    file = ufs.open(output);
                                    block = file.block(0);
                                    isRead = true;
                                    isOpen = true;
                                } catch (IOException e) {
                                    throw new RuntimeException(e);
                                }
                            }
                        });
                    } catch (URISyntaxException e) {
                        // Previously swallowed silently; keep the best-effort
                        // contract but record why the output was dropped.
                        LOG.warn("Skipping output with malformed URI: " + name, e);
                    } catch (IOException e) {
                        // Previously e.printStackTrace(); route through the job log.
                        LOG.error("Failed to create output file: " + name, e);
                    }
                }

                @Override
                public HepFile hadoop_getInput(int index) {
                    return input;
                }

                /** Script-side indexing is 1-based. */
                @Override
                public HepFile hadoop_getOutput(int index) {
                    return outputlist.get(index - 1);
                }
            };

            // Complete every registered output when the Closer unwinds;
            // individual failures are aggregated as suppressed exceptions.
            closer.register(new Closeable() {
                @Override
                public void close() throws IOException {
                    IOException e = null;
                    for (HadoopLuaOopEngine.HepFile hepFile : outputlist) {
                        try {
                            hepFile.complete();
                        } catch (Exception ex) {
                            if (e == null) {
                                e = new IOException("Failed to complete one or more output files");
                            }
                            e.addSuppressed(ex);
                        }
                    }
                    if (e != null) {
                        throw e;
                    }
                }
            });

            // The Lua run-script; decode explicitly as UTF-8 instead of the
            // platform-default charset.
            Reader optreader = closer.register(new InputStreamReader(
                    ufs.openStream(new URI(conf.get(Utils.RunfileKey))), StandardCharsets.UTF_8));
            ScriptEngineManager sem = new ScriptEngineManager();
            ScriptEngine e = sem.getEngineByName("luaj");
            HadoopLuaEval.evalOop(e, optreader, engine, hValues);
        } catch (URISyntaxException | ClassNotFoundException | ScriptException e) {
            throw new IOException(e);
        } catch (IOException e) {
            // Record which input failed, keeping the cause in the log entry
            // (previously only the path string was logged).
            LOG.error("File: " + path, e);
            throw e;
        }
    }
}
