package cas.ihep.hep.analysis;

import cas.ihep.hep.util.EventBlock;
import cas.ihep.hep.util.MTRandom;
import cas.ihep.hep.util.MyApplication;
import cas.ihep.hep.util.NativeBlock;
import cas.ihep.spark.util.GlobalConfiguration;
import cas.ihep.util.Finalizer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;
import scala.Tuple2;
import scala.Tuple3;

import javax.script.*;
import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class DataInJVM extends MyApplication {
    private Bindings values;
    public DataInJVM(String[] args)throws IOException, ScriptException {
        super(args);
        ScriptEngineManager sem=new ScriptEngineManager();
        ScriptEngine engine=sem.getEngineByName("luaj");
        values=new SimpleBindings();
        try(Reader reader=new BufferedReader(new FileReader(args[0]))){
            engine.eval(reader,values);
        }
    }

    @Override
    public void run() throws Exception {
        srun(values);
    }

    private static class Parameter implements Serializable {
        double [] par;
        long currentMillis;
        Parameter(double [] p){
            par=p;
            currentMillis=System.currentTimeMillis();
        }
    }
    private static class Result implements Serializable{
        double dt=0,mc=0;
    }

    private static Object bindingsGetOrDefault(Bindings values,String key,Object defVal){
        if(values.containsKey(key)){
            return values.get(key);
        }
        return defVal;
    }

    private static void srun(Bindings values)throws Exception{
        SparkConf sconf=new SparkConf();
        sconf.setAppName("DataInJVM");
        LuaTable dataf=(LuaTable) values.get("data");
        LuaTable mcf=(LuaTable)values.get("mc");
        final int partitions=(Integer)values.get("slices");
        sconf.set("spark.default.parallelism",String.valueOf(partitions));
        final int iterationNumber=(Integer)values.get("iterations");
        final int seed=(Integer)values.get("seed");
        MTRandom rand=new MTRandom(seed);
        GlobalConfiguration.setProperty("spark.loop.type",(String)bindingsGetOrDefault(values,"LoopType","udp"));
        GlobalConfiguration.setProperty("spark.loop.log",(String)bindingsGetOrDefault(values,"LogLevel","Info"));
        try(Finalizer closer= Finalizer.create()){
            JavaSparkContext jsc=closer.register(new JavaSparkContext(sconf));
            Tuple3<List<Tuple2<EventBlock,EventBlock>>,Long,Long> source=NewLoopPWA.parseInput(
                    new URI(dataf.get(LuaValue.valueOf("file")).tojstring()).normalize(),
                    new URI(mcf.get(LuaValue.valueOf("file")).tojstring()).normalize(),partitions);
            JavaPairRDD<EventBlock,EventBlock> sourceRdd=jsc.parallelizePairs(source._1(),partitions);
            JavaRDD<NewPWACalculator.Data> dataRdd=sourceRdd.flatMap(new FlatMapFunction<Tuple2<EventBlock, EventBlock>, NewPWACalculator.Data>() {
                @Override
                public Iterator<NewPWACalculator.Data> call(Tuple2<EventBlock, EventBlock> tp) throws Exception {
                    try(NativeBlock blk1=tp._1().nativeData();NativeBlock blk2=tp._2().nativeData()){
                        ArrayList<NewPWACalculator.Data> ret=new ArrayList<>(tp._1().count()+tp._2().count());
                        for(int i=0;i<tp._1().count();i++){
                            ret.add(NewPWACalculator.Data.readFrom(blk1.getBuffer()));
                        }
                        for(int i=0;i<tp._2().count();i++){
                            NewPWACalculator.Data mc= NewPWACalculator.Data.readFrom(blk2.getBuffer());
                            mc.isMc=true;
                            ret.add(mc);
                        }
                        return ret.iterator();
                    }
                }
            }).cache();

            /*final long dtNumber=dt._2(),mcNumber=mc._2();
            double sum0=0,sum1=0;
            double avgSched=0;
            double ei=0;*/
            double[] par=new double[8];
            long start=System.nanoTime();
            for(int i=0;i<iterationNumber;i++){
                for(int j=0;j<8;j++){
                    par[j]=rand.nextDouble();
                }
                //long start0=System.nanoTime();
                final Broadcast<Parameter> bcast=jsc.broadcast(new Parameter(par));
                dataRdd.mapPartitions(new FlatMapFunction<Iterator<NewPWACalculator.Data>, Double>() {
                    @Override
                    public Iterator<Double> call(Iterator<NewPWACalculator.Data> dataIterator) throws Exception {
                        return null;
                    }
                });
                dataRdd.map(new Function<NewPWACalculator.Data, Result>() {
                    @Override
                    public Result call(NewPWACalculator.Data data) {
                        Parameter bt=bcast.getValue();
                        Result ret=new Result();
                        if(data.isMc){
                            ret.mc=NewPWACalculator.computeMc(bt.par,data.par);
                        }else{
                            ret.dt=NewPWACalculator.computeDt(bt.par,data.par);
                        }
                        return ret;
                    }
                }).reduce(new Function2<Result, Result, Result>() {
                    @Override
                    public Result call(Result r1, Result r2) {
                        Result ret=new Result();
                        ret.dt=r1.dt+r2.dt;
                        ret.mc=r1.mc+r2.mc;
                        return ret;
                    }
                });
                //avgSched+=(System.nanoTime()-start0)/1e6;
                bcast.destroy();
            }
            start=System.nanoTime()-start;
            System.out.println("Duration: "+(1.0*start/1e6));
            for(Tuple2<EventBlock,EventBlock> t:source._1()){
                t._1().close();
                t._2().close();
            }
        }
    }

}
