package cas.ihep.hep.analysis;

import cas.ihep.hep.util.EventBlock;
import cas.ihep.hep.util.MyApplication;
import cas.ihep.hep.util.NativeBlock;
import cas.ihep.util.MemoryBuffer;
import com.google.common.io.Closer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.freehep.math.minuit.FCNBase;
import org.freehep.math.minuit.FunctionMinimum;
import org.freehep.math.minuit.MnMigrad;
import org.freehep.math.minuit.MnUserParameters;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;
import scala.Tuple2;
import scala.Tuple3;

import javax.script.*;
import java.io.*;
import java.net.URI;
import java.util.Formatter;
import java.util.List;

@SuppressWarnings("UnstableApiUsage")
public class RDDPWA extends MyApplication {

    private MnUserParameters mParameters;
    private Bindings values;

    /**
     * Parses the Lua option script given as {@code args[0]} and builds the
     * initial Minuit parameter set from its global {@code option} table.
     *
     * Each entry of the table is expected to have the shape
     * {@code name = {value, error, lowerLimit, upperLimit}} where a negative
     * error marks the parameter as fixed and the sentinel value 999 means
     * "no limit on that side".
     *
     * @param args command-line arguments; args[0] is the option-script path
     * @throws IOException     if the option script cannot be read
     * @throws ScriptException if the Lua script fails to evaluate
     */
    public RDDPWA(String[] args) throws IOException, ScriptException {
        super(args);
        mParameters=new MnUserParameters();
        ScriptEngineManager sem=new ScriptEngineManager();
        // NOTE(review): getEngineByName returns null when the luaj engine is not on
        // the classpath; eval below would then fail with an NPE rather than a clear error.
        ScriptEngine engine=sem.getEngineByName("luaj");
        values=new SimpleBindings();
        try(Reader reader=new BufferedReader(new FileReader(args[0]))){
            // Evaluating the script populates 'values' with the script's globals
            // ("option", "data", "mc", "record", "slices"... used later in srun).
            engine.eval(reader,values);
        }
        LuaTable optionObj=(LuaTable) values.get("option");
        // Standard luaj table traversal: next(NIL) yields the first pair and a
        // NIL key signals the end of the table.
        for(LuaValue i = LuaValue.NIL;;){
            Varargs kv=optionObj.next(i);
            i=kv.arg1();
            if(i.isnil()){
                break;
            }
            LuaValue vvv=kv.arg(2);
            // Only string keys mapping to tables count as parameter definitions;
            // anything else in the option table is silently skipped.
            if(i.isstring() && vvv.istable()){
                String pname=i.tojstring();
                LuaTable tb=(LuaTable)vvv;
                double v=tb.get(1).todouble();  // start value
                double e=tb.get(2).todouble();  // initial step size / error
                double l=tb.get(3).todouble();  // lower limit (999 = none)
                double u=tb.get(4).todouble();  // upper limit (999 = none)
                mParameters.add(pname,v,e);
                if (e<0){
                    // Negative error is the script's convention for a fixed parameter.
                    mParameters.fix(pname);
                }else{
                    if(l!=999 && u!=999) {
                        mParameters.setLimits(pname, l, u);
                    }else if(l!=999){
                        mParameters.setLowerLimit(pname,l);
                    }else if(u!=999){
                        mParameters.setUpperLimit(pname,u);
                    }
                }
            }
        }
    }

    /**
     * Spark map task: runs the native PWA computation for one (data, MC) pair of
     * event blocks using the broadcast fit parameters, and reports the two
     * partial sums together with the time spent in the native kernel.
     */
    private static class MapFunction implements Function<Tuple2<EventBlock, EventBlock>, Result> {
        Broadcast<Parameter> paramBcast;

        MapFunction(Broadcast<Parameter> bc) {
            paramBcast = bc;
        }

        public Result call(Tuple2<EventBlock, EventBlock> pair) throws Exception {
            final double[] fitParams = paramBcast.getValue().par;
            final long begin = System.nanoTime();
            final double[] partial;
            // Both native blocks are released as soon as the computation is done.
            try (NativeBlock dataBlk = pair._1().nativeData();
                 NativeBlock mcBlk = pair._2().nativeData()) {
                MemoryBuffer dataBuf = dataBlk.getBuffer().mark();
                MemoryBuffer mcBuf = mcBlk.getBuffer().mark();
                partial = NewPWACalculator.compute(fitParams, dataBuf, pair._1().count(),
                        mcBuf, pair._2().count(), 0);
            }
            double elapsedMillis = (System.nanoTime() - begin) / 1e6;
            return new Result(partial[0], partial[1], elapsedMillis);
        }
    }

    /**
     * Serializable wrapper so the raw Minuit parameter vector can be
     * broadcast to the Spark executors.
     */
    private static class Parameter implements Serializable {
        // The array is shared, not copied; callers must not mutate it after handing it in.
        final double[] par;

        Parameter(double[] p) {
            this.par = p;
        }
    }

    /**
     * Serializable per-block result: the two partial likelihood sums produced by
     * the native kernel plus the wall time (ms) spent computing them.
     */
    private static class Result implements Serializable {
        double p1, p2;        // partial sums from the native computation
        double totalMillis;   // native compute time in milliseconds

        Result(double p, double q, double millis) {
            this.p1 = p;
            this.p2 = q;
            this.totalMillis = millis;
        }
    }

    /**
     * Minuit objective function that evaluates the likelihood by distributing
     * the native computation over a cached Spark RDD of event-block pairs.
     * Each evaluated value is appended to a record file, and simple timing
     * statistics are accumulated and printed on {@link #close()}.
     */
    private static class RDDFcnBase implements FCNBase, Closeable {
        /**
         * @param ctx        active Spark context used to broadcast parameters
         * @param r          cached RDD of (data, MC) event-block pairs
         * @param dt         total number of data events
         * @param mc         total number of MC events
         * @param recordFile path of the per-call likelihood record file
         * @throws IOException if the record file cannot be opened
         */
        RDDFcnBase(JavaSparkContext ctx, JavaRDD<Tuple2<EventBlock, EventBlock>> r,
                   long dt, long mc, String recordFile) throws IOException {
            jsc = ctx;
            sourceRdd = r;
            mcNumber = mc;
            dtNumber = dt;
            numOfCalls = 0;
            avgTimes = 0;
            avgValidTime = 0;
            output = new PrintWriter(new FileWriter(recordFile));
        }

        JavaSparkContext jsc;
        JavaRDD<Tuple2<EventBlock, EventBlock>> sourceRdd;
        long mcNumber, dtNumber;
        int numOfCalls;                 // number of valueOf() invocations so far
        double avgTimes, avgValidTime;  // running time totals (ms); averaged in close()
        PrintWriter output;             // per-call likelihood record file

        /** Prints the timing summary and closes the record file. */
        public void close() {
            // Guard: with no calls the averages would all be 0/0 = NaN.
            if (numOfCalls > 0) {
                // printf instead of a discarded Formatter instance (same output).
                System.out.printf("Average valid time: %.02f\nAverage total time: %.02f\n" +
                        "Average valid computing rate: %.02f\n" +
                        "Number of function calls: %d\n",
                        avgValidTime / numOfCalls, avgTimes / numOfCalls,
                        avgValidTime / avgTimes, numOfCalls);
            }
            output.close();
        }

        /**
         * One likelihood evaluation: broadcasts the parameter vector, maps the
         * native computation over every block pair and reduces the partial sums.
         */
        public double valueOf(double[] par) {
            numOfCalls++;
            long start = System.nanoTime();
            final Broadcast<Parameter> paramBcast = jsc.broadcast(new Parameter(par));
            JavaRDD<Result> resultRdd = sourceRdd.map(new MapFunction(paramBcast));
            Result result = resultRdd.reduce(reduceSumup);
            avgTimes += (System.nanoTime() - start) / 1e6;
            // NOTE(review): 50.0 looks like a hard-coded task count used to turn the
            // summed per-task time into a per-call figure -- confirm against job config.
            avgValidTime += result.totalMillis / 50.0;
            paramBcast.destroy();
            // Likelihood value combining the data-side sum with the MC normalisation;
            // NOTE(review): presumably an extended-likelihood form -- confirm with NewPWACalculator.
            double ret = result.p1 + Math.log(result.p2 / mcNumber) * dtNumber;
            output.println(ret);
            // Flush per call so the record survives if the fit aborts mid-run.
            output.flush();
            return ret;
        }
    }

    /**
     * Commutative, associative reducer that sums two partial {@link Result}s
     * component-wise. Deliberately an anonymous class rather than a lambda
     * (see the suppression below).
     */
    @SuppressWarnings("Convert2Lambda")
    private static final Function2<Result, Result, Result> reduceSumup = new Function2<Result, Result, Result>() {
        @Override
        public Result call(Result left, Result right) {
            double sum1 = left.p1 + right.p1;
            double sum2 = left.p2 + right.p2;
            double millis = left.totalMillis + right.totalMillis;
            return new Result(sum1, sum2, millis);
        }
    };

    /**
     * Runs the distributed fit: loads the data/MC event blocks, parallelises
     * them into a cached RDD, minimises the likelihood with MIGRAD and prints
     * a result summary.
     *
     * @param values      Lua globals from the option script; expects "data",
     *                    "mc", "record" and "slices" entries
     * @param mParameters initial Minuit parameter set built by the constructor
     * @throws Exception on any I/O, Spark or minimisation failure
     */
    private static void srun(Bindings values, MnUserParameters mParameters) throws Exception {
        SparkConf sconf = new SparkConf();
        LuaTable dataf = (LuaTable) values.get("data");
        LuaTable mcf = (LuaTable) values.get("mc");
        String recordFile = (String) values.get("record");
        final int partitions = (Integer) values.get("slices");
        try (Closer closer = Closer.create()) {
            // source = (list of (data, mc) block pairs, data count, MC count);
            // NOTE(review): the order of the two counts is inferred from the
            // RDDFcnBase(dt, mc) call below -- confirm against NewLoopPWA.parseInput.
            Tuple3<List<Tuple2<EventBlock, EventBlock>>, Long, Long> source = NewLoopPWA.parseInput(
                    new URI(dataf.get(LuaValue.valueOf("file")).tojstring()).normalize(),
                    new URI(mcf.get(LuaValue.valueOf("file")).tojstring()).normalize(), partitions);
            JavaSparkContext jsc = closer.register(new JavaSparkContext(sconf));
            JavaRDD<Tuple2<EventBlock, EventBlock>> sourcerdd = jsc.parallelize(source._1(), partitions);
            long start = System.nanoTime();
            // Strategy 1 is Minuit's default speed/accuracy trade-off.
            MnMigrad migrad = new MnMigrad(
                    closer.register(new RDDFcnBase(jsc, sourcerdd.cache(), source._2(), source._3(), recordFile)),
                    mParameters, 1);
            migrad.setPrecision(1e-11);
            FunctionMinimum minimum = migrad.minimize(100000, 200);  // max calls, tolerance
            long elapsed = System.nanoTime() - start;
            MnUserParameters resultpars = minimum.userParameters();
            // NOTE(review): only parameters 2, 3, 6 and 7 are reported; the
            // hard-coded indices must match the option script's parameter order.
            double pvec2 = resultpars.value(2);
            double pvec3 = resultpars.value(3);
            double pvec6 = resultpars.value(6);
            double pvec7 = resultpars.value(7);
            // printf instead of a discarded Formatter; %s renders the boolean as
            // "true"/"false" just like the previous ternary did.
            System.out.printf("Exiting spark application ...\nIsValid = %s\n" +
                            "likelihood = %f\nUser Parameters: %f %f %f %f\n" +
                            "Using time: %f ms\n",
                    minimum.isValid(), minimum.fval(), pvec2, pvec3, pvec6, pvec7, elapsed / 1e6);
            // Release the native memory backing every event block.
            for (Tuple2<EventBlock, EventBlock> t : source._1()) {
                t._1().close();
                t._2().close();
            }
        }
    }

    /**
     * Application entry point invoked by the framework: starts the distributed
     * fit using the option values and parameters parsed in the constructor.
     */
    public void run()throws Exception{
        srun(values,mParameters);
    }
}
