package cas.ihep.spark.context;

import cas.ihep.spark.rdd.JIterativePairRDD;
import cas.ihep.spark.rdd.JIterativeRDD;
import cas.ihep.spark.rdd.ParallelCollectionRDDExt;
import cas.ihep.spark.rdd.UserRDD;
import cas.ihep.spark.util.PartitionLocator;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.JavaSparkContext$;
import org.apache.spark.rdd.RDD;
import scala.Tuple2;
import scala.collection.JavaConversions;
import scala.collection.Seq;

import java.util.*;

@SuppressWarnings("WeakerAccess")
public class SparkContextExt extends JavaSparkContext {

    /** Closes the underlying Spark context. Delegates directly to {@link JavaSparkContext#close()}. */
    @Override
    public void close(){
        super.close();
    }

    /**
     * Creates a context extension from the given Spark configuration.
     *
     * @param conf the Spark configuration used to initialize the underlying context
     */
    public SparkContextExt(SparkConf conf){
        super(conf);
    }

    /**
     * Distributes {@code src} as a single logical collection with custom partition placement.
     *
     * @param src the data to distribute
     * @param loc partition locator controlling placement; may be {@code null}
     *            (NOTE(review): exact locator semantics live in ParallelCollectionRDDExt — confirm there)
     * @return an iterative RDD view over the data
     */
    public <T> JavaRDD<T> wholeList(List<T> src, PartitionLocator<T> loc){
        sc().assertNotStopped();
        RDD<T> rdd = new ParallelCollectionRDDExt<>(sc(), src, loc, JavaSparkContext$.MODULE$.<T>fakeClassTag());
        return JIterativeRDD.from(rdd);
    }

    /**
     * Distributes {@code src} as a single logical collection with default placement.
     *
     * @param src the data to distribute
     * @return an iterative RDD view over the data
     */
    public <T> JavaRDD<T> wholeList(List<T> src){
        return wholeList(src, null);
    }

    /**
     * Distributes a list of key-value tuples with custom partition placement.
     *
     * @param data the tuples to distribute
     * @param loc  partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the tuples
     */
    public <K,V> JavaPairRDD<K,V> wholeMap(List<Tuple2<K,V>> data, PartitionLocator<Tuple2<K,V>> loc){
        sc().assertNotStopped();
        RDD<Tuple2<K,V>> rdd = new ParallelCollectionRDDExt<>(sc(), data, loc,
                JavaSparkContext$.MODULE$.<Tuple2<K,V>>fakeClassTag());
        return JIterativePairRDD.from(rdd);
    }

    /**
     * Distributes a list of key-value tuples with default placement.
     *
     * @param data the tuples to distribute
     * @return an iterative pair-RDD view over the tuples
     */
    public <K,V> JavaPairRDD<K,V> wholeMap(List<Tuple2<K,V>> data){
        return wholeMap(data, null);
    }

    /**
     * Distributes the entries of a map with custom partition placement.
     *
     * @param data the map whose entries are distributed as tuples
     * @param loc  partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> wholeMap(Map<K,V> data, PartitionLocator<Tuple2<K,V>> loc){
        sc().assertNotStopped();
        RDD<Tuple2<K,V>> rdd = new ParallelCollectionRDDExt<>(sc(), map2List(data), loc,
                JavaSparkContext$.MODULE$.<Tuple2<K,V>>fakeClassTag());
        return JIterativePairRDD.from(rdd);
    }

    /**
     * Distributes the entries of a map with default placement.
     *
     * @param data the map whose entries are distributed as tuples
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> wholeMap(Map<K,V> data){
        return wholeMap(data, null);
    }

    /**
     * Parallelizes a list into {@code numOfSlices} partitions with custom placement.
     *
     * @param src         the data to parallelize
     * @param numOfSlices number of partitions to split the data into
     * @param loc         partition locator controlling placement; may be {@code null}
     * @return an iterative RDD view over the data
     */
    public <T> JavaRDD<T> parallelize(List<T> src, int numOfSlices, PartitionLocator<T> loc){
        SparkContext ctx = sc();
        ctx.assertNotStopped();
        RDD<T> rdd = new ParallelCollectionRDDExt<>(ctx, JavaConversions.asScalaBuffer(src), numOfSlices, loc,
                JavaSparkContext$.MODULE$.<T>fakeClassTag());
        return JIterativeRDD.from(rdd);
    }

    /**
     * Parallelizes a list using the context's default parallelism.
     *
     * @param src the data to parallelize
     * @param loc partition locator controlling placement; may be {@code null}
     * @return an iterative RDD view over the data
     */
    public <T> JavaRDD<T> parallelize(List<T> src, PartitionLocator<T> loc){
        return parallelize(src, defaultParallelism(), loc);
    }

    /**
     * Parallelizes a list of tuples into {@code numOfSlices} partitions with custom placement.
     *
     * @param data        the tuples to parallelize
     * @param numOfSlices number of partitions to split the data into
     * @param loc         partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the tuples
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(List<Tuple2<K,V>> data, int numOfSlices,
                                                   PartitionLocator<Tuple2<K,V>> loc){
        sc().assertNotStopped();
        RDD<Tuple2<K,V>> rdd = new ParallelCollectionRDDExt<>(sc(), JavaConversions.asScalaBuffer(data),
                numOfSlices, loc, JavaSparkContext$.MODULE$.<Tuple2<K,V>>fakeClassTag());
        return JIterativePairRDD.from(rdd);
    }

    /**
     * Parallelizes a list of tuples using the context's default parallelism.
     *
     * @param data the tuples to parallelize
     * @param loc  partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the tuples
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(List<Tuple2<K,V>> data, PartitionLocator<Tuple2<K,V>> loc){
        return parallelizePairs(data, defaultParallelism(), loc);
    }

    /**
     * Parallelizes the entries of a map into {@code numOfSlices} partitions with custom placement.
     *
     * @param data        the map whose entries are parallelized as tuples
     * @param numOfSlices number of partitions to split the data into
     * @param loc         partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(Map<K,V> data, int numOfSlices,
                                                   PartitionLocator<Tuple2<K,V>> loc){
        sc().assertNotStopped();
        RDD<Tuple2<K,V>> rdd = new ParallelCollectionRDDExt<>(sc(), map2Seq(data), numOfSlices, loc,
                JavaSparkContext$.MODULE$.<Tuple2<K,V>>fakeClassTag());
        return JIterativePairRDD.from(rdd);
    }

    /**
     * Parallelizes the entries of a map using the context's default parallelism.
     *
     * @param data the map whose entries are parallelized as tuples
     * @param loc  partition locator controlling placement; may be {@code null}
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(Map<K,V> data, PartitionLocator<Tuple2<K,V>> loc){
        return parallelizePairs(data, defaultParallelism(), loc);
    }

    /**
     * Parallelizes the entries of a map with default parallelism and default placement.
     *
     * @param data the map whose entries are parallelized as tuples
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(Map<K,V> data){
        return parallelizePairs(data, defaultParallelism(), null);
    }

    /**
     * Parallelizes the entries of a map into {@code numSlices} partitions with default placement.
     *
     * @param data      the map whose entries are parallelized as tuples
     * @param numSlices number of partitions to split the data into
     * @return an iterative pair-RDD view over the map entries
     */
    public <K,V> JavaPairRDD<K,V> parallelizePairs(Map<K,V> data, int numSlices){
        return parallelizePairs(data, numSlices, null);
    }

    /**
     * Wraps an existing RDD in the iterative RDD facade.
     *
     * @param rdd the RDD to wrap
     * @return the iterative view of {@code rdd}
     */
    public <T> JIterativeRDD<T> forIterative(JavaRDD<T> rdd){
        return JIterativeRDD.from(rdd);
    }

    /**
     * Wraps an existing pair RDD in the iterative pair-RDD facade.
     *
     * @param rdd the pair RDD to wrap
     * @return the iterative view of {@code rdd}
     */
    public <K,V> JIterativePairRDD<K,V> forIterative(JavaPairRDD<K,V> rdd){
        return JIterativePairRDD.from(rdd);
    }

    /**
     * Converts a map into a list of key-value tuples.
     * Uses the type-safe {@link Collections#emptyList()} instead of the raw
     * {@code Collections.EMPTY_LIST} cast, so no unchecked suppression is needed.
     *
     * @param data source map; may be {@code null}
     * @return an immutable empty list when {@code data} is null/empty, otherwise a fresh mutable list
     */
    private static <K,V> List<Tuple2<K,V>> map2List(Map<K,V> data){
        if(data == null || data.isEmpty()){
            return Collections.emptyList();
        }
        ArrayList<Tuple2<K,V>> ret = new ArrayList<>(data.size());
        for(Map.Entry<K,V> entry : data.entrySet()){
            ret.add(new Tuple2<>(entry.getKey(), entry.getValue()));
        }
        return ret;
    }

    /**
     * Converts a map into a Scala {@code Seq} of key-value tuples.
     *
     * @param data source map; may be {@code null}
     * @return a Scala sequence view of the map's entries
     */
    private static <K,V> Seq<Tuple2<K,V>> map2Seq(Map<K,V> data){
        return JavaConversions.asScalaBuffer(map2List(data));
    }

    /**
     * Converts a list of tuples back into a map. Duplicate keys keep the last occurrence
     * (standard {@link HashMap#put} semantics).
     *
     * <p>NOTE(review): this helper has no caller within this class; kept for
     * API symmetry with {@link #map2List(Map)} — confirm it is still wanted.
     *
     * @param data source tuples; may be {@code null}
     * @return an immutable empty map when {@code data} is null/empty, otherwise a fresh mutable map
     */
    private static <K,V> Map<K,V> list2Map(List<Tuple2<K,V>> data){
        if(data == null || data.isEmpty()){
            return Collections.emptyMap();
        }
        HashMap<K,V> ret = new HashMap<>(data.size());
        for(Tuple2<K,V> tp : data){
            ret.put(tp._1(), tp._2());
        }
        return ret;
    }

    /**
     * Builds an RDD from elements paired with per-element preference strings
     * (presumably preferred host locations, per Spark's {@code makeRDD(Seq[(T, Seq[String])])}
     * overload — TODO confirm against SparkContext docs).
     *
     * @param data list of (element, preference strings) pairs
     * @return an iterative RDD over the elements
     */
    public <T> JavaRDD<T> makeRDD(List<Tuple2<T,List<String>>> data){
        ArrayList<Tuple2<T,Seq<String>>> adata = new ArrayList<>(data.size());
        for(Tuple2<T,List<String>> tp : data){
            adata.add(new Tuple2<>(tp._1(), JavaConversions.asScalaBuffer(tp._2())));
        }
        return JIterativeRDD.from(sc().makeRDD(JavaConversions.asScalaBuffer(adata),
                JavaSparkContext$.MODULE$.<T>fakeClassTag()));
    }

    /**
     * Creates a user-defined RDD from an arbitrary collection.
     *
     * @param data source elements
     * @return an iterative RDD over the elements
     */
    public <T> JavaRDD<T> createRDD(Collection<T> data){
        sc().assertNotStopped();
        RDD<T> rdd = new UserRDD<>(sc(), data, JavaSparkContext$.MODULE$.<T>fakeClassTag());
        return JIterativeRDD.from(rdd);
    }
}
