package cas.ihep.hep.analysis;

import cas.ihep.hep.util.EventBlock;
import cas.ihep.hep.util.MTRandom;
import cas.ihep.hep.util.MyApplication;
import cas.ihep.hep.util.NativeBlock;
import cas.ihep.spark.util.GlobalConfiguration;
import cas.ihep.util.Finalizer;
import cas.ihep.util.MemoryBuffer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;
import scala.Tuple2;
import scala.Tuple3;

import javax.script.*;
import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.List;

public class RDDDemo extends MyApplication {
    /** Bindings produced by evaluating the Lua configuration script given as {@code args[0]}. */
    private final Bindings values;

    /**
     * Evaluates the Lua configuration file named by {@code args[0]} and keeps the
     * resulting bindings for {@link #run()}.
     *
     * @param args command-line arguments; {@code args[0]} is the Lua config file path
     * @throws IOException     if the configuration file cannot be read
     * @throws ScriptException if the Lua script fails to evaluate
     */
    public RDDDemo(String[] args)  throws IOException, ScriptException {
        super(args);
        ScriptEngineManager sem=new ScriptEngineManager();
        ScriptEngine engine=sem.getEngineByName("luaj");
        values=new SimpleBindings();
        // Read the script as UTF-8 explicitly: a bare FileReader would use the
        // platform default charset, which differs between driver machines.
        try(Reader reader=new BufferedReader(
                new InputStreamReader(new FileInputStream(args[0]), StandardCharsets.UTF_8))){
            engine.eval(reader,values);
        }
    }

    /** Entry point invoked by the framework; delegates to the static job runner. */
    @Override
    public void run() throws Exception {
        srun(values);
    }

    /** Broadcast payload: one random parameter vector plus its creation wall-clock time. */
    private static class Parameter implements Serializable{
        double [] par;
        // NOTE(review): written but never read anywhere in this file; kept for
        // serialization compatibility — confirm before removing.
        long currentMillis;
        Parameter(double [] p){
            par=p;
            currentMillis=System.currentTimeMillis();
        }
    }

    /** Per-task (and, after reduce, aggregated) computation result. */
    private static class Result implements Serializable{
        double p1,p2;           // the two accumulated PWA output values
        double currentMillis;   // wall-clock completion time; min over tasks after reduce
        double totalMillis;     // task compute time in ms; summed over tasks after reduce

        Result(double a,double b,double millis,double time){
            p1=a;
            p2=b;
            totalMillis=millis;
            currentMillis=time;
        }

        /** Convenience constructor stamping the current wall-clock time as completion time. */
        Result(double p,double q,double millis){
            p1=p;
            p2=q;
            totalMillis=millis;
            currentMillis=System.currentTimeMillis();
        }
    }

    /** Returns the value bound to {@code key}, or {@code defVal} when the key is absent. */
    private static Object bindingsGetOrDefault(Bindings values,String key,Object defVal){
        return values.containsKey(key) ? values.get(key) : defVal;
    }

    /**
     * Runs the PWA benchmark: parses the data/MC input files, parallelizes the event
     * blocks over {@code slices} partitions, then performs {@code iterations} rounds of
     * map/reduce with freshly drawn random parameters and prints timing statistics.
     *
     * @param values Lua bindings holding "data", "mc", "slices", "iterations", "seed"
     *               and the optional "LoopType", "LogLevel" and "waitfor" keys
     * @throws Exception if input parsing, the Spark job, or resource cleanup fails
     */
    private static void srun(Bindings values)throws Exception{
        SparkConf sconf=new SparkConf();
        sconf.setAppName("RDDDemo");
        LuaTable dataf=(LuaTable) values.get("data");
        LuaTable mcf=(LuaTable)values.get("mc");
        final int partitions=(Integer)values.get("slices");
        final int iterationNumber=(Integer)values.get("iterations");
        final int seed=(Integer)values.get("seed");
        MTRandom rand=new MTRandom(seed);
        GlobalConfiguration.setProperty("spark.loop.type",(String)bindingsGetOrDefault(values,"LoopType","udp"));
        GlobalConfiguration.setProperty("spark.loop.log",(String)bindingsGetOrDefault(values,"LogLevel","Info"));
        final int waitForTime=(Integer)bindingsGetOrDefault(values,"waitfor",0);
        try(Finalizer closer= Finalizer.create()){
            Tuple3<List<Tuple2<EventBlock,EventBlock>>,Long,Long> source=NewLoopPWA.parseInputData(
                    new URI(dataf.get(LuaValue.valueOf("file")).tojstring()).normalize(),
                    new URI(mcf.get(LuaValue.valueOf("file")).tojstring()).normalize(),partitions);
            JavaSparkContext jsc=closer.register(new JavaSparkContext(sconf));
            final long dtNumber=source._2(),mcNumber=source._3();
            long start=System.nanoTime();
            // Cache the event blocks: the same RDD is traversed once per iteration.
            JavaRDD<Tuple2<EventBlock,EventBlock>> sourceRdd=jsc.parallelize(source._1(),partitions).cache();
            double sum0=0,sum1=0;   // accumulated p1/p2 over all iterations
            double avgSched=0;      // total per-iteration wall time (ms), including scheduling
            double ei=0;            // summed task compute time, scaled by 1/50
            for(int i=0;i<iterationNumber;i++){
                // Draw a fresh 8-dimensional random parameter vector for this iteration.
                double[] par=new double[8];
                for(int j=0;j<8;j++){
                    par[j]=rand.nextDouble();
                }
                long start0=System.nanoTime();
                final Broadcast<Parameter> bcast=jsc.broadcast(new Parameter(par));
                //noinspection Convert2Lambda
                Result value=sourceRdd.map(new Function<Tuple2<EventBlock, EventBlock>, Result>() {
                    @Override
                    public Result call(Tuple2<EventBlock, EventBlock> tp) throws Exception {
                        Parameter bt=bcast.getValue();
                        long taskStart=System.nanoTime();
                        double[] param=bt.par;
                        NativeBlock blk0=tp._1().nativeData();
                        NativeBlock blk1=tp._2().nativeData();
                        MemoryBuffer buf0=blk0.getBuffer();
                        MemoryBuffer buf1=blk1.getBuffer();
                        // NOTE(review): the buffers are mark()ed but never reset in this
                        // method — presumably NewPWACalculator.compute consumes from the
                        // mark; confirm against MemoryBuffer's contract.
                        buf0.mark();
                        buf1.mark();
                        double[] ret=NewPWACalculator.compute(param, buf0, tp._1().count(),
                                buf1, tp._2().count(),waitForTime);
                        // totalMillis = this task's compute time in milliseconds.
                        return new Result(ret[0],ret[1],(System.nanoTime()-taskStart)/1e6);
                    }
                }).reduce(new Function2<Result, Result, Result>() {
                    @Override
                    public Result call(Result r1, Result r2) {
                        // Sum physics values and compute times; keep the earliest
                        // wall-clock completion time of any task.
                        return new Result(r1.p1+r2.p1,r1.p2+r2.p2,
                                r1.totalMillis+r2.totalMillis, Math.min(r1.currentMillis,r2.currentMillis));
                    }
                });
                avgSched+=(System.nanoTime()-start0)/1e6;
                // NOTE(review): 50.0 looks like an assumed task count — consider deriving
                // it from `partitions` instead of hard-coding.
                ei+=value.totalMillis/50.0;
                bcast.destroy();
                sum0+=value.p1;
                sum1+=value.p2;
            }
            start=System.nanoTime()-start;
            // Print directly via PrintStream.printf: the previous
            // `new Formatter(System.out)` buffered its output in an internal
            // BufferedWriter that was never flushed, so the report could be lost.
            System.out.printf("Processing %d mc events and %d data events\n" +
                            "Final result: %f %f\n"+
                            "Duration: %.02f\nValid computing rate: %.04f\nEach iteration: %.02fms\n",
                    mcNumber,dtNumber,sum0,sum1,(double)start/1e6,ei/avgSched,ei/iterationNumber);
            // Release the native event blocks held on the driver side.
            for(Tuple2<EventBlock,EventBlock> t:source._1()){
                t._1().close();
                t._2().close();
            }
        }
    }
}
