package cas.ihep.hep.analysis;

import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.hep.config.Config;
import cas.ihep.hep.convert.Root2Alluxio;
import cas.ihep.hep.function.PWACalculator;
import com.google.common.io.Closer;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.freehep.math.minuit.FCNBase;
import org.freehep.math.minuit.FunctionMinimum;
import org.freehep.math.minuit.MnMigrad;
import org.freehep.math.minuit.MnUserParameters;
import scala.Tuple2;

import java.io.Closeable;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;

/**
 * Partial-wave analysis driver: converts ROOT event samples into Alluxio, then runs a
 * Minuit MIGRAD minimization in which every FCN evaluation computes the PWA likelihood
 * on a Spark cluster via {@link PWACalculator#operate}.
 *
 * <p>NOTE(review): the class name misspells "Partial"; kept as-is because renaming
 * would break external callers.
 */
public class PartitialWaveAnalysis implements Runnable{
    /** Sentinel used in the config files to mean "no bound on this side". */
    private static final double NO_LIMIT = 999;

    private final SparkConf mConf;
    private final MnUserParameters mParameters;
    private final String dataf;
    private final String mcf;

    /**
     * Builds the Minuit parameter set from the JSON option file.
     *
     * <p>Each name in the option file's {@code parameterName} list is looked up in the
     * {@code ParameterFile} when the name contains "mag" or "phase", otherwise in the
     * {@code ResonanceFile}. Each entry must provide four doubles:
     * {@code [value, error, lowerLimit, upperLimit]}. A negative error marks the
     * parameter as fixed; a limit equal to {@code 999} means "unbounded on that side".
     *
     * @param conf       Spark configuration, used later by {@link #run()}
     * @param optionFile JSON file naming "ParameterFile", "ResonanceFile" and "parameterName"
     * @param datafile   JSON config describing the data sample to convert
     * @param mcfile     JSON config describing the MC sample to convert
     * @throws IOException if a config file cannot be read or a parameter entry is malformed
     */
    public PartitialWaveAnalysis(SparkConf conf,String optionFile,String datafile,String mcfile)throws IOException{
        mConf=conf;
        dataf=datafile;
        mcf=mcfile;
        mParameters=new MnUserParameters();
        try(Closer closer=Closer.create()){
            Config jobj=Config.newJSONConfig(closer.register(new FileInputStream(optionFile)));
            String para=jobj.getString("ParameterFile");
            String res=jobj.getString("ResonanceFile");
            Config paraobj=Config.newJSONConfig(closer.register(new FileInputStream(para)));
            Config resobj=Config.newJSONConfig(closer.register(new FileInputStream(res)));
            List<String> parNames=jobj.getStringArray("parameterName");
            for(String pname:parNames){
                // Magnitude/phase parameters live in the parameter file; everything
                // else (resonance properties) comes from the resonance file.
                boolean isMagOrPhase=pname.contains("mag") || pname.contains("phase");
                List<Double> pcf=isMagOrPhase
                        ? paraobj.getDoubleArray(pname)
                        : resobj.getDoubleArray(pname);
                if(pcf.size()<4){
                    // Fail with the offending parameter name instead of a bare
                    // IndexOutOfBoundsException from pcf.get(...) below.
                    throw new IOException("Parameter '"+pname
                            +"' needs 4 entries (value, error, lower, upper) but has "+pcf.size());
                }
                double value=pcf.get(0);
                double error=pcf.get(1);
                double lower=pcf.get(2);
                double upper=pcf.get(3);
                mParameters.add(pname,value,error);
                if (error<0){
                    // Negative error is the config convention for a fixed parameter.
                    mParameters.fix(pname);
                }else{
                    if(lower!=NO_LIMIT && upper!=NO_LIMIT){
                        mParameters.setLimits(pname,lower,upper);
                    }else if(lower!=NO_LIMIT){
                        mParameters.setLowerLimit(pname,lower);
                    }else if(upper!=NO_LIMIT){
                        mParameters.setUpperLimit(pname,upper);
                    }
                }
            }
        }
    }

    /** Deletes an Alluxio path on close, cleaning up the temporary converted events. */
    static class AlluxioDeleter implements Closeable {
        private final String mpath;
        private final UnifiedFileSystem afs;

        AlluxioDeleter(UnifiedFileSystem sys,String str){
            mpath=str;
            afs=sys;
        }

        @Override
        public void close()throws IOException{
            try {
                afs.delete(new URI(mpath));
            } catch (URISyntaxException e1){
                // Wrap with the cause preserved so the bad path stays visible.
                throw new IOException(e1);
            }
        }
    }

    /**
     * Converts the data and MC samples, parallelizes them as cached Spark RDDs, and runs
     * a MIGRAD minimization whose FCN is evaluated on the cluster. Prints the minimum,
     * the likelihood, selected parameter values and the wall-clock run time to stdout;
     * progress messages go to stderr.
     */
    @Override
    public void run() {
        UnifiedFileSystem ufs=UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(new Configuration()));
        Root2Alluxio converter=new Root2Alluxio();
        try(Closer closer=Closer.create()){
            System.err.println("Serializing events ...");
            Tuple2<List<String>,Long> data=converter.convert(ufs,Config.newJSONConfig(closer.register(new FileInputStream(dataf))));
            Tuple2<List<String>,Long> mc=converter.convert(ufs,Config.newJSONConfig(closer.register(new FileInputStream(mcf))));
            long start=System.currentTimeMillis();
            // Registered before the Spark context: Closer closes in LIFO order, so the
            // context is stopped first and the temporary Alluxio directory deleted after.
            closer.register(new AlluxioDeleter(ufs,converter.basedir));
            System.err.println("Starting spark application ...");
            final JavaSparkContext jsc=closer.register(new JavaSparkContext(mConf));
            final int partitions=mConf.getInt("spark.default.parallelism",96);
            final Tuple2<JavaRDD<String>,Long> datardd=new Tuple2<>(
                    jsc.parallelize(data._1(),partitions).cache(),data._2());
            final Tuple2<JavaRDD<String>,Long> mcrdd=new Tuple2<>(
                    jsc.parallelize(mc._1(),partitions).cache(),mc._2());
            MnMigrad migrad=new MnMigrad(new FCNBase() {
                @Override
                public double valueOf(double[] par) {
                    // Distributed likelihood evaluation for this parameter vector.
                    return PWACalculator.operate(par, jsc,datardd,mcrdd);
                }
            }, mParameters, 1);
            migrad.setPrecision(1e-11);
            FunctionMinimum minimum=migrad.minimize(100000,200);
            System.out.println("Exiting spark application ...\nIsValid ="+minimum.isValid());
            System.out.println("likelihood ="+minimum.fval());
            System.out.println(minimum);
            // NOTE(review): indices 2,3,6,7 are hard-coded picks of interesting fit
            // parameters; confirm they match the parameterName order in the option file.
            MnUserParameters resultpars = minimum.userParameters();
            System.out.print(resultpars.value(2));
            System.out.print(' ');
            System.out.print(resultpars.value(3));
            System.out.print(' ');
            System.out.print(resultpars.value(6));
            System.out.print(' ');
            System.out.println(resultpars.value(7));
            System.out.println("Run time: "+((System.currentTimeMillis()-start)/1000.)+'s');
        }catch (Exception e){
            // Runnable.run cannot throw checked exceptions; surface the failure on
            // stderr (no logging framework is available in this project).
            e.printStackTrace();
        }
    }
}
