package cas.ihep.hep.function;

import cas.ihep.fs.UnifiedBlock;
import cas.ihep.fs.UnifiedFile;
import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.hep.unsafe.Unsafe;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.DoubleFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import scala.Tuple2;

import java.net.URI;

/**
 * Distributed evaluator for a partial-wave-analysis (PWA) objective function.
 *
 * <p>{@link #operate} broadcasts the current fit parameters to the cluster,
 * maps every Monte-Carlo and data partition through a native kernel
 * ({@link Unsafe#computePartitionAlluxio}), and combines the partial sums into
 * a single scalar for the calling optimizer.
 */
public class PWACalculator {

    /**
     * Per-partition kernel for the Monte-Carlo sample: opens the file named by
     * the input URI and runs the native computation with the MC flag set.
     */
    static class MCFunction implements DoubleFunction<String> {
        // One file-system handle per executor JVM, shared by all tasks.
        static final UnifiedFileSystem ufs =
                UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(new Configuration()));
        private final Broadcast<double[]> broadcastParams;

        MCFunction(Broadcast<double[]> b) {
            broadcastParams = b;
        }

        /**
         * @param iter URI string of the partition file to process
         * @return the native kernel's partial result for this partition
         * @throws Exception on URI syntax or file-system errors
         */
        @Override
        public double call(String iter) throws Exception {
            double[] params = broadcastParams.getValue();
            // try-with-resources guarantees the file handle is closed even if
            // the native call throws.
            try (UnifiedFile input = ufs.open(new URI(iter))) {
                UnifiedBlock blk = input.block(0);
                // final argument true selects the Monte-Carlo code path.
                return Unsafe.computePartitionAlluxio(params, blk.absolutePath(), true);
            }
        }
    }

    /**
     * Per-partition kernel for the measured-data sample; identical to
     * {@link MCFunction} except the native MC flag is false.
     */
    static class DataFunction implements DoubleFunction<String> {
        static final UnifiedFileSystem ufs =
                UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(new Configuration()));
        private final Broadcast<double[]> broadcastParams;

        DataFunction(Broadcast<double[]> b) {
            broadcastParams = b;
        }

        /**
         * @param iter URI string of the partition file to process
         * @return the native kernel's partial result for this partition
         * @throws Exception on URI syntax or file-system errors
         */
        @Override
        public double call(String iter) throws Exception {
            double[] params = broadcastParams.getValue();
            try (UnifiedFile input = ufs.open(new URI(iter))) {
                UnifiedBlock blk = input.block(0);
                return Unsafe.computePartitionAlluxio(params, blk.absolutePath(), false);
            }
        }
    }

    /** Commutative, associative addition for {@code treeReduce}. */
    private static class SumupHelper implements Function2<Double, Double, Double> {
        @Override
        public Double call(Double a, Double b) throws Exception {
            return a + b;
        }
    }

    private static final SumupHelper sumup = new SumupHelper();

    /**
     * Evaluates the objective function for one optimizer iteration.
     *
     * <p>Computes {@code sum(log dcs) + log(sum(mc)/Nmc) * Ndata}, where the
     * per-partition sums are produced on the cluster.
     *
     * @param pars    current fit parameters, broadcast to all executors
     * @param jsc     Spark context used to create the broadcast variable
     * @param datardd data-sample file URIs paired with the event count Ndata
     * @param mcrdd   MC-sample file URIs paired with the event count Nmc
     * @return the objective value, or {@code 0} if any stage failed
     *         (NOTE(review): 0 is indistinguishable from a genuine result —
     *         callers should be aware failures are swallowed here)
     */
    public static double operate(double[] pars, JavaSparkContext jsc,
                                 Tuple2<JavaRDD<String>, Long> datardd,
                                 Tuple2<JavaRDD<String>, Long> mcrdd) {
        Broadcast<double[]> broadcast = null;
        try {
            broadcast = jsc.broadcast(pars);
            JavaDoubleRDD mctcsRdd = mcrdd._1().mapToDouble(new MCFunction(broadcast));
            // Mean of the MC integrand; NOTE(review): if this is <= 0 the
            // Math.log below yields NaN/-Infinity — assumed non-occurring.
            final double mctcs = mctcsRdd.treeReduce(sumup) / mcrdd._2();
            JavaDoubleRDD sumlogdcsRdd = datardd._1().mapToDouble(new DataFunction(broadcast));
            return sumlogdcsRdd.treeReduce(sumup) + Math.log(mctcs) * datardd._2();
        } catch (Exception e) {
            e.printStackTrace();
            return 0;
        } finally {
            // Fix: the original only destroyed the broadcast on the success
            // path, leaking one broadcast variable per failed iteration.
            if (broadcast != null) {
                broadcast.destroy();
            }
        }
    }
}
