package cas.ihep.spark;

import cas.ihep.alluxio.AlluxioSystem;
import cas.ihep.converter.Root2Alluxio;
import cas.ihep.pwa.entity.ComputeResultMc;
import cas.ihep.pwa.entity.PwaParameter;
import cas.ihep.pwa.evtgen.EvtPDL;
import cas.ihep.pwa.fcn.*;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.freehep.math.minuit.FCNBase;
import org.freehep.math.minuit.FunctionMinimum;
import org.freehep.math.minuit.MnSimplex;
import org.freehep.math.minuit.MnUserParameters;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.List;

/**
 * Driver for a partial-wave-analysis (PWA) maximum-likelihood fit.
 *
 * <p>The fit is steered by FreeHEP Minuit ({@link MnSimplex}); each objective
 * evaluation is delegated to Spark map/reduce passes over three event samples
 * (signal MC, background, data) loaded from ROOT files via Alluxio.
 */
public class PwaRun {

    /** Path of the job option file consumed by {@link JobParser}. */
    private String optionfile;

    /**
     * Creates a run for the given option file.
     *
     * @param optfile path of the option file describing the PWA fit job
     */
    public PwaRun(String optfile){
        optionfile=optfile;
    }

    /**
     * Minuit objective function: computes the negative log-likelihood of a
     * parameter vector by mapping the per-event amplitude functors
     * ({@code McFcn}, {@code BgFcn}, {@code DtFcn}) over the cached RDDs and
     * tree-reducing the partial sums.
     *
     * <p>Not thread-safe; intended to be driven sequentially by Minuit.
     */
    static class PwaFCN implements FCNBase{
        JavaSparkContext jctxt;
        JobParser parser;
        EvtPDL pdl;
        String decay;
        // Cached event samples: signal MC, background, data.
        JavaRDD<String> mcrdd,bgrdd,dtrdd;
        // Event counts for the three samples.
        // NOTE(review): mcevts is assigned in run() but never read here — confirm it is still needed.
        long mcevts,bgevts,dtevts;
        ArrayList<Double> rval;
        // NOTE(review): broadcast is created in run() but never read in this class — verify before removing.
        Broadcast<String> decaypath;
        // MC amplitudes evaluated once at the "basic" Vimp parameter point,
        // cached and joined against on every subsequent evaluation.
        JavaPairRDD<Long,ComputeResultMc> ximpRdd;
        boolean inited=false;

        /**
         * Evaluates {@code -lnL} for the given parameter vector.
         *
         * @param doubles current Minuit parameter values
         * @return negative log-likelihood (Minuit minimizes, so the sign is flipped)
         */
        @Override
        public double valueOf(double[] doubles) {
            // Lazily build (exactly once) the reference MC RDD used by the
            // Vimp join path below.
            if(!inited && parser.Vimp().size()==parser.theparnameSize()){
                ximpRdd=mcrdd.flatMapToPair(new McFcn(pdl,parser,parser.basicVimp(),decay)).cache();
                inited=true;
            }
            // NOTE(review): 'parser' is intentionally(?) passed twice — confirm
            // against SparkFCN.preset's signature.
            SparkFCN.preset(doubles,pdl,parser,parser,rval);
            // Expected yield per constrained mode (column 1 of each constraint line).
            ArrayList<Double> sys=new ArrayList<>();
            for(int i=0;i<parser.modeConstrainSize();i++){
                sys.add(Double.parseDouble(parser.modeConstrain(i,1)));
            }
            // Per-event likelihood terms at the current parameter point.
            JavaPairRDD<Long,ComputeResultMc> mcnext=mcrdd.flatMapToPair(new McFcn(pdl,parser,doubles,decay));
            JavaPairRDD<Double,Double> bgnext=bgrdd.mapToPair(new BgFcn(pdl,parser,doubles,decay));
            JavaPairRDD<Double,Double> dtnext=dtrdd.mapToPair(new DtFcn(pdl,parser,doubles,decay));
            ReduceTupleFcn reduceFcn=new ReduceTupleFcn();
            Tuple2<Double,Double> dtResult=dtnext.treeReduce(reduceFcn);
            double lnLdt=dtResult._1();
            Tuple2<Double,Double> bgResult=bgnext.treeReduce(reduceFcn);
            double lnLbg=bgResult._1()*parser.backgroundWeight();
            // When every fit parameter has a Vimp counterpart, combine the
            // current MC amplitudes with the cached reference point.
            if(parser.Vimp().size()==parser.theparnameSize()){
                JavaPairRDD<Long,Tuple2<ComputeResultMc,ComputeResultMc>> joinrdd=mcnext.join(ximpRdd);
                mcnext=joinrdd.mapValues(new McMapValuesFcn());
            }
            Tuple2<Long,ComputeResultMc> lnmc=mcnext.treeReduce(new ReduceCRFcn());
            double mcAmps=lnmc._2().totalAmpSquare;
            double totalMCx=lnmc._2().Amps;
            double[] msig=lnmc._2().msig; // Java-style array declaration (was C-style "double msig[]")
            // Relative interference residual of the MC normalization.
            totalMCx=Math.abs(mcAmps-totalMCx)/mcAmps;
            // Half-chi-square penalty pulling each constrained mode's fitted
            // yield xs toward its expected yield sy.
            double chisq=0;
            for(int i=0;i<parser.modeConstrainSize();i++){
                double sy=sys.get(i);
                double xs=msig[i]/mcAmps*(dtevts-bgevts*parser.backgroundWeight());
                double chis=(xs-sy)/Math.sqrt(sy);
                chisq+=sy*chis*chis*0.5;
            }
            double lnL=lnLdt-lnLbg;
            if(parser.depressInter()){
                // Suppress large interference terms by penalizing the squared
                // normalization residual, scaled by the background-subtracted
                // data yield.
                lnL-=(dtevts-bgevts*parser.backgroundWeight())*totalMCx*totalMCx;
            }
            lnL-=chisq;
            return -lnL; // Minuit minimizes, so return the negated log-likelihood.
        }
    }

    /**
     * Parses the option file, stages the ROOT samples into Alluxio, builds the
     * Spark RDDs, and runs the simplex minimization.
     *
     * <p>Indices of {@code sourceData}: 0 = decay description, 1 = signal MC,
     * 2 = background, 3 = data; each tuple carries the event lines and count.
     *
     * @return 0 on completion
     * @throws Exception propagated from parsing, conversion, or Spark setup
     */
    public int run()throws Exception{
        final JobParser parser=new JobParser();
        parser.parse(optionfile);
        Root2Alluxio root2Alluxio=new Root2Alluxio();
        Tuple2<List<String>,Long>[] sourceData=root2Alluxio.convert(AlluxioSystem.get(), parser.m_cvtcommand, parser.m_jobname,
                parser.slices, parser.rootFileMcs(),parser.rootFileBgs(),parser.rootFileDts());
        final EvtPDL pdl=new EvtPDL(parser.m_pdttable,parser.m_decayname,parser.m_userdecay);
        if (!parser.PipiSwavePhase().isEmpty()) {
            ArrayList<Double> rval = new ArrayList<>();
            rval.add(0.5);
            int resid=pdl.getId("PipiSWave");
            pdl.setVpars(resid,rval);
        }
        // Register every fit parameter (value, step, bounds) with Minuit.
        int sz=parser.theparnameSize();
        MnUserParameters params=new MnUserParameters();
        for(int i=0;i<sz;i++){
            PwaParameter p=parser.parameter(i);
            params.add(p.parname,p.value,p.error,p.low,p.up);
        }
        SparkConf sconf=new SparkConf();
        JavaSparkContext jctx=new JavaSparkContext(sconf);
        try{
            PwaFCN fcn=new PwaFCN();
            fcn.jctxt=jctx;
            fcn.parser=parser;
            fcn.pdl=pdl;
            fcn.rval=new ArrayList<>();
            fcn.decay=sourceData[0]._1().get(0);
            fcn.mcevts=sourceData[1]._2();
            fcn.bgevts=sourceData[2]._2();
            fcn.dtevts=sourceData[3]._2();
            fcn.decaypath=jctx.broadcast(fcn.decay);
            fcn.mcrdd=jctx.parallelize(sourceData[1]._1(),parser.slices).cache();
            fcn.bgrdd=jctx.parallelize(sourceData[2]._1(),parser.slices).cache();
            fcn.dtrdd=jctx.parallelize(sourceData[3]._1(),parser.slices).cache();
            MnSimplex simplex=new MnSimplex(fcn,params,parser.fitstrategy);
            // 0.5 is the standard error definition for a log-likelihood fit.
            simplex.setErrorDef(0.5);
            FunctionMinimum minimum=simplex.minimize();
            System.out.println("Exiting spark application ...\nIsValid ="+minimum.isValid());
            System.out.println("likelihood ="+minimum.fval());
            System.out.println(minimum);
            return 0;
        }finally{
            // Fix: the context was previously never stopped, leaking the Spark
            // application on every run.
            jctx.stop();
        }
    }
}
