package cas.ihep.spark.rdd;

import cas.ihep.spark.util.PartitionLocator;
import org.apache.spark.*;
import org.apache.spark.api.java.JavaSparkContext$;
import org.apache.spark.rdd.ParallelCollectionPartition;
import org.apache.spark.rdd.ParallelCollectionRDD;
import org.apache.spark.serializer.DeserializationStream;
import org.apache.spark.serializer.JavaSerializer;
import org.apache.spark.serializer.SerializationStream;
import org.apache.spark.serializer.Serializer;
import scala.collection.JavaConversions;
import scala.collection.Map;
import scala.collection.Seq;
import scala.reflect.ClassTag;

import java.io.*;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;

/**
 * A {@link ParallelCollectionRDD} variant with two extensions:
 * <ul>
 *   <li>pluggable, data-aware partition placement via a {@link PartitionLocator}
 *       (see {@link #getPreferredLocations(Partition)});</li>
 *   <li>an optional one-object-per-partition layout ({@link SingleObjectPartition})
 *       whose payload is (de)serialized with the configured Spark serializer
 *       (e.g. Kryo) rather than plain Java serialization, so {@code T} need not
 *       implement {@link Serializable} in that mode.</li>
 * </ul>
 */
public class ParallelCollectionRDDExt<T> extends ParallelCollectionRDD<T> {
    // Transient: partitions live on the driver; tasks receive their own
    // partition through task serialization, not through the RDD object.
    private transient Partition[] parts;
    private transient PartitionLocator<T> locator;

    /**
     * Keeps the enclosing {@link ObjectOutputStream} open when a wrapped
     * {@link SerializationStream} is closed: close() only flushes.
     */
    private static class DelegateOutputStream extends FilterOutputStream{

        DelegateOutputStream(OutputStream out) {
            super(out);
        }

        @Override
        public void close()throws IOException{
            // Intentionally do NOT close the underlying stream — it belongs to
            // the ObjectOutputStream still being used by writeObject().
            out.flush();
        }
    }

    /**
     * Keeps the enclosing {@link ObjectInputStream} open when a wrapped
     * {@link DeserializationStream} is closed: close() is a no-op.
     */
    private static class DelegateInputStream extends FilterInputStream{

        DelegateInputStream(InputStream in) {
            super(in);
        }

        @Override
        public void close(){
            // Intentionally a no-op; the ObjectInputStream is still in use.
        }
    }

    /**
     * A partition carrying exactly one element.
     *
     * <p>Identity (equals/hashCode) is based on {@code (rddId, slice)} only,
     * mirroring Spark's own partition classes; the payload is ignored.
     * Custom {@code writeObject}/{@code readObject} route the payload through
     * {@link SparkEnv}'s configured serializer unless that serializer is the
     * plain {@link JavaSerializer}.</p>
     */
    private static class SingleObjectPartition<T> implements Partition, Serializable{
        private long rddId;
        private int slice;
        private T object;

        SingleObjectPartition(long id, int idx, T obj){
            rddId=id;
            slice=idx;
            object=obj;
        }

        @Override
        public int index() {
            return slice;
        }

        @Override
        public boolean equals(Object obj){
            // Identity is (rddId, slice); payload deliberately not compared.
            if(obj instanceof ParallelCollectionRDDExt.SingleObjectPartition){
                SingleObjectPartition<?> other=(SingleObjectPartition<?>) obj;
                return rddId==other.rddId && slice==other.slice;
            }
            return false;
        }

        @Override
        public int hashCode(){
            // Consistent with equals(): derived from rddId and slice only.
            return (int) (41 * (41 + rddId) + slice);
        }

        private void writeObject(ObjectOutputStream oos)throws IOException{
            Serializer ser=SparkEnv.get().serializer();
            if(ser instanceof JavaSerializer){
                // Plain Java serialization handles all fields directly.
                oos.defaultWriteObject();
            }else{
                // Write the identity fields with the object stream, then hand
                // the payload to the configured serializer. The delegate keeps
                // `oos` open when the SerializationStream is closed.
                oos.writeLong(rddId);
                oos.writeInt(slice);
                OutputStream delegate=new DelegateOutputStream(oos);
                try(SerializationStream ss=ser.newInstance().serializeStream(delegate)){
                    ss.writeObject(object, JavaSparkContext$.MODULE$.fakeClassTag());
                }
            }
        }

        private void readObject(ObjectInputStream ois)throws IOException,ClassNotFoundException{
            Serializer sfactory=SparkEnv.get().serializer();
            if(sfactory instanceof JavaSerializer){
                ois.defaultReadObject();
            }else{
                // Read order must mirror writeObject() exactly.
                rddId=ois.readLong();
                slice=ois.readInt();
                InputStream delegate=new DelegateInputStream(ois);
                try(DeserializationStream ds=sfactory.newInstance().deserializeStream(delegate)){
                    object=ds.readObject(JavaSparkContext$.MODULE$.<T>fakeClassTag());
                }
            }
        }
    }

    /**
     * Creates an RDD with exactly one {@link SingleObjectPartition} per
     * element of {@code data}. The parent never sees the data: it is given an
     * empty sequence, and {@link #getPartitions()} is overridden.
     *
     * @param sc   the Spark context
     * @param data elements, one per partition
     * @param loc  optional placement policy; may be {@code null}
     * @param tag  class tag for {@code T}
     */
    @SuppressWarnings("unchecked")
    public ParallelCollectionRDDExt(SparkContext sc, List<T> data, PartitionLocator<T> loc, ClassTag<T> tag){
        super(sc,(Seq<T>)EMPTY_SEQ,data.size(),EMPTY_PREDEFS,tag);
        int sz=data.size();
        parts=new Partition[sz];
        long id=id();
        for(int i=0;i<sz;i++){
            parts[i]=new SingleObjectPartition<>(id,i,data.get(i));
        }
        locator=loc;
    }

    /**
     * Creates an RDD that slices {@code data} into {@code n} partitions just
     * like the parent class, adding only locator-driven placement.
     */
    public ParallelCollectionRDDExt(SparkContext sc, Seq<T> data, int n,PartitionLocator<T> loc, ClassTag<T> evidence$2) {
        super(sc, data, n, EMPTY_PREDEFS, evidence$2);
        parts=super.getPartitions();
        locator=loc;
    }

    /** Returns the partition array precomputed in the constructor. */
    @Override
    public final Partition[] getPartitions(){
        return parts;
    }

    /**
     * Asks the {@link PartitionLocator} where the partition's data should run,
     * resolves each returned host name and prefixes it with
     * {@code LOCATION_PREFIX}. Unknown hosts are skipped (best effort);
     * duplicates that resolve to the same address are removed while
     * preserving first-seen order.
     *
     * @return preferred-location strings, or an empty sequence when no locator
     *         is set, the partition type is unknown, or nothing resolves
     */
    @Override
    @SuppressWarnings("unchecked")
    public Seq<String> getPreferredLocations(Partition p){
        if(locator==null){
            return EMPTY_SEQ;
        }
        List<String> locations;
        if(p instanceof ParallelCollectionPartition){
            ParallelCollectionPartition pp=(ParallelCollectionPartition)p;
            locations=locator.getPreferredLocations(JavaConversions.seqAsJavaList(pp.values()));
        }else if(p instanceof ParallelCollectionRDDExt.SingleObjectPartition){
            SingleObjectPartition<T> pp=(SingleObjectPartition<T>)p;
            locations=locator.getPreferredLocations(Collections.singletonList(pp.object));
        }else{
            return EMPTY_SEQ;
        }
        if(locations==null || locations.isEmpty()){
            return EMPTY_SEQ;
        }
        // LinkedHashSet de-duplicates hosts resolving to the same address
        // while keeping the locator's preference order.
        LinkedHashSet<String> resolved=new LinkedHashSet<>();
        for(String host:locations){
            try {
                resolved.add(LOCATION_PREFIX+InetAddress.getByName(host).getHostAddress());
            } catch (UnknownHostException e) {
                // Best effort: an unresolvable host is dropped, not fatal.
                e.printStackTrace();
            }
        }
        if(resolved.isEmpty()){
            return EMPTY_SEQ;
        }
        return JavaConversions.asScalaBuffer(new ArrayList<>(resolved));
    }

    /**
     * A cooperative single-element iterator: yields exactly one object and
     * checks for task interruption on each {@code hasNext()} call. The
     * delegate passed to the parent is unused because both iterator methods
     * are overridden here.
     */
    private static class ComputeIterator<T> extends InterruptibleIterator<T>{
        private final T object;
        private boolean consumed;

        ComputeIterator(TaskContext context,  T obj) {
            super(context, null);
            object=obj;
            consumed=false;
        }

        @Override
        public boolean hasNext(){
            context().killTaskIfInterrupted();
            return !consumed;
        }

        @Override
        public T next(){
            if(consumed){
                throw new NoSuchElementException();
            }
            consumed=true;
            return object;
        }
    }

    /**
     * Iterates a partition's elements: a single-object iterator for
     * {@link SingleObjectPartition}, the parent behavior for
     * {@link ParallelCollectionPartition}.
     *
     * @throws UnsupportedOperationException for any other partition type
     */
    @Override
    @SuppressWarnings("unchecked")
    public scala.collection.Iterator<T> compute(Partition s, TaskContext context){
        if(s instanceof ParallelCollectionRDDExt.SingleObjectPartition){
            return new ComputeIterator<>(context, ((SingleObjectPartition<T>) s).object);
        }else if(s instanceof ParallelCollectionPartition){
            return new InterruptibleIterator<>(context,((ParallelCollectionPartition) s).iterator());
        }else{
            throw new UnsupportedOperationException(s.getClass().getName()+" is not supported by ParallelCollectionRDD");
        }
    }

    /** Prefix marking locations as resolvable HDFS-cache host addresses. */
    private static final String LOCATION_PREFIX="hdfs_cache_";
    @SuppressWarnings("unchecked")
    private static final Seq<String> EMPTY_SEQ=JavaConversions.asScalaBuffer((List<String>)Collections.EMPTY_LIST);
    @SuppressWarnings("unchecked")
    private static final Map<Object,Seq<String>> EMPTY_PREDEFS=JavaConversions.mapAsScalaMap(Collections.EMPTY_MAP);
}
