package cas.ihep.spark.da;

import cas.ihep.hadoop.util.HadoopLuaEval;
import cas.ihep.hadoop.util.Service4HepAC;
import cas.ihep.hadoop.util.Utils;
import cas.ihep.spark.mc.YbjMC4;
import cas.ihep.spark.rdd.FileBasedRDD;
import cas.ihep.util.ByteArrayOutputStreamZC;
import cas.ihep.util.MapBindings;
import cas.ihep.util.StaticCloser;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.spark.Partition;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaNewHadoopRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.input.PortableDataStream;
import org.apache.spark.input.StreamInputFormat;
import org.apache.spark.rdd.RDD;
import scala.Tuple2;
import scala.collection.JavaConversions;
import scala.collection.Seq;

import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.List;

/**
 * Spark driver for YBJ data analysis. Loads a Lua job-option file and a Lua
 * run script, broadcasts them to the cluster, and maps over the configured
 * HDFS input files, printing each file's logical and physical path.
 *
 * <p>Usage: {@code Ybj4 [-D key value ...] <job option file> <job run file>}
 */
public class Ybj4 implements Serializable{

    private static final Log LOG= LogFactory.getLog(Ybj4.class);

    // Process-wide Hadoop handles, initialized once in the static block below.
    private static FileSystem hdfs;
    private static DFSClient dfsClient;
    private static Configuration hdpConf;
    private static String defaultFS;
    // Lazily created by serviceForHep(); volatile is required for the
    // double-checked locking pattern to be correct under the JMM.
    private static volatile Service4Hep s4h=null;

    /**
     * Returns the process-wide {@link Service4Hep}, creating it on first use.
     * The instance is registered with {@link StaticCloser} so it is released
     * at JVM shutdown.
     */
    private static Service4Hep serviceForHep(){
        if(s4h==null){
            // Fix: lock on this class; the original synchronized on
            // YbjMC4.class (copy-paste from the sibling driver).
            synchronized (Ybj4.class){
                if(s4h==null){
                    s4h= StaticCloser.instance.register(new Service4HepAC(dfsClient,hdpConf));
                }
            }
        }
        return s4h;
    }

    static{
        hdpConf=new Configuration();
        try {
            // HADOOP_HOME (env) or hadoop.home (system property) must point
            // at the Hadoop installation so its site files can be loaded.
            String hdpHome=System.getenv("HADOOP_HOME");
            if(hdpHome==null){
                hdpHome=System.getProperty("hadoop.home");
                if (hdpHome==null){
                    throw new RuntimeException("hadoop home must be defined");
                }
            }
            // The streams are registered with StaticCloser rather than closed
            // here: Configuration reads addResource() streams lazily.
            FileInputStream fis = StaticCloser.instance.register(new FileInputStream(hdpHome+ "/etc/hadoop/core-site.xml"));
            hdpConf.addResource(fis);
            fis=StaticCloser.instance.register(new FileInputStream(hdpHome+"/etc/hadoop/hdfs-site.xml"));
            hdpConf.addResource(fis);
            defaultFS=hdpConf.get("fs.defaultFS");
            hdfs=StaticCloser.instance.register(FileSystem.get(hdpConf));
            if(hdfs instanceof DistributedFileSystem){
                dfsClient=((DistributedFileSystem)hdfs).getClient();
            }else{
                // Default FS is not HDFS: open an explicit client to it.
                dfsClient=StaticCloser.instance.register(new DFSClient(new URI(defaultFS),hdpConf));
            }
            LOG.info("FileSystem: "+hdfs);
            LOG.info("DFSClient: "+dfsClient);
        }catch (Exception e){
            // Fix: the original rethrew only FileNotFoundException and merely
            // logged every other failure, leaving hdfs/dfsClient/defaultFS
            // null and causing opaque NPEs later. Fail fast instead.
            LOG.error("Exception in initialization: ",e);
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Evaluates the Lua job-option file into {@code bd}, then registers the
     * configured input paths and creates the configured output directory.
     *
     * @param optionfile local path of the Lua option script
     * @param bd         bindings the script populates (Hadoop_* variables)
     * @return the input file URIs derived from the bindings
     */
    private static List<URI> readOptionFile(String optionfile, MapBindings bd)throws IOException,ScriptException,URISyntaxException{
        LOG.info("using option file: "+optionfile);
        ScriptEngineManager sem=new ScriptEngineManager();
        ScriptEngine e=sem.getEngineByName("luaj");
        // Fix: FileReader used the platform default charset; read as UTF-8.
        try(Reader optreader=new InputStreamReader(new FileInputStream(optionfile), StandardCharsets.UTF_8)){
            e.eval(optreader,bd);
            List<URI> ret=Utils.addInputPaths(hdpConf,bd,hdfs,defaultFS);
            Utils.mkdirOutputPath(bd,defaultFS,hdfs);
            return ret;
        }
    }

    /**
     * Loads the Lua run script fully into memory. Accepts an explicit
     * {@code hdfs://} URI, a local path under the {@code /hdfs/} mount
     * (rewritten onto the default filesystem), or a plain local file.
     *
     * @param runfile path or URI of the run script
     * @return the raw bytes of the script
     */
    private static byte[] readRunfile(String runfile)throws IOException,URISyntaxException {
        URI jofUri=new URI(runfile);
        InputStream is;
        ByteArrayOutputStream bos;
        try(Closer closer=Closer.create()) {
            if ("hdfs".equals(jofUri.getScheme())) {
                Path path=new Path(jofUri);
                is=closer.register(hdfs.open(path));
                // Presize the buffer to the file length to avoid reallocation.
                bos=new ByteArrayOutputStreamZC((int)hdfs.getFileStatus(path).getLen());
            } else {
                File jOptionFile = new File(runfile);
                runfile = jOptionFile.getAbsolutePath();
                if (runfile.startsWith("/hdfs/")) {
                    // "/hdfs/a/b" -> "<defaultFS>/a/b"; substring(5) keeps the slash.
                    Path path=new Path(defaultFS+runfile.substring(5));
                    is=closer.register(hdfs.open(path));
                    bos=new ByteArrayOutputStreamZC((int)hdfs.getFileStatus(path).getLen());
                } else {
                    is=closer.register(new FileInputStream(jOptionFile));
                    bos=new ByteArrayOutputStreamZC((int)jOptionFile.length());
                }
            }
            ByteStreams.copy(is,bos);
        }
        return bos.toByteArray();
    }

    /**
     * Copies {@code -D} command-line values into the bindings. Values are
     * consumed pairwise (key, value); a trailing key without a value is
     * ignored. Values are coerced via {@link HadoopLuaEval#tryParse}.
     */
    private static void addVariable(Bindings bd,String values[]){
        if(values==null){
            return;
        }
        for(int i=0;i+1<values.length;i+=2){
            bd.put(values[i],HadoopLuaEval.tryParse(values[i+1]));
        }
    }

    /**
     * Entry point. Parses {@code -D} properties and the two positional
     * arguments (job option file, job run file), evaluates the option file,
     * broadcasts the run script and bindings, and counts the input files
     * while printing each file's logical and physical path.
     */
    public static void main(String args[])throws Exception{
        Options opts=new Options();
        Option prop=new Option("D",true,"");
        prop.setArgName("property=value");
        prop.setArgs(2); // each -D consumes two tokens: key then value
        opts.addOption(prop);
        CommandLine cmdline=new DefaultParser().parse(opts,args);
        List<String> argv=cmdline.getArgList();
        if(argv.size()<2){
            LOG.error("Missing <Job Option> and <Job Run>");
            return;
        }
        final SparkConf conf=new SparkConf();
        try(Closer mainCloser=Closer.create()){
            JavaSparkContext jctx=mainCloser.register(new JavaSparkContext(conf));
            MapBindings bd=new MapBindings();
            addVariable(bd,cmdline.getOptionValues('D'));
            List<URI> partitions=readOptionFile(argv.get(0),bd);
            byte[] scriptStream=readRunfile(argv.get(1));
            // Broadcasts are distributed now even though only the path-listing
            // job below consumes them; executors fetch them lazily.
            final Broadcast<byte[]> scriptBroadcast=jctx.broadcast(scriptStream);
            HadoopLuaEval.convertForSerialize(bd);
            final Broadcast<MapBindings> bdBroadcast=jctx.broadcast(bd);
            // Group input files by the host holding their first block so that
            // tasks are scheduled close to the data; any metadata failure
            // falls back to "localhost".
            JavaRDD<URI> uriRdd= FileBasedRDD.newRDD(partitions, 5, new FileBasedRDD.Spliter() {
                @Override
                public String groupBy(URI uri) {
                    try {
                        String path = uri.getPath();
                        return dfsClient.getBlockLocations(path, 0, dfsClient.getFileInfo(path).getLen())[0].getHosts()[0];
                    }catch (Exception e){
                        return "localhost";
                    }
                }
            },jctx);
            long cnt=uriRdd.map(new Function<URI, Integer>() {
                @Override
                public Integer call(URI uri) throws Exception {
                    String path=uri.getPath();
                    System.out.println("Logical path: "+uri.toString());
                    System.out.println("Physical path: "+serviceForHep().getFile(path));
                    return 0;
                }
            }).count();
            System.out.println("Count: "+cnt);
        }
    }
}
