package cas.ihep.spark.test;

import cas.ihep.fs.UnifiedBlock;
import cas.ihep.fs.UnifiedFile;
import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.util.MemoryBuffer;
import com.google.common.io.Closer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.input.PortableDataStream;
import scala.Tuple2;

import java.io.*;
import java.net.URI;
import java.util.List;

public class ReadRandom {

    /**
     * Benchmark variant 0: consume the file straight through Spark's
     * {@link PortableDataStream}. Reads 4-byte ints until EOF, folding each into
     * a tiny checksum (printed afterwards) so the read loop cannot be optimized away.
     *
     * @param tuple (path, stream) pair produced by {@code JavaSparkContext.binaryFiles}
     * @return (0, elapsedNanos); the first slot is unused here so the shape matches
     *         the two-phase results of the other test variants
     * @throws Exception if the stream cannot be opened or read
     */
    private static Tuple2<Long,Long> test0(Tuple2<String,PortableDataStream> tuple)throws Exception{
        try (Closer closer = Closer.create()) {
            DataInputStream in = closer.register(tuple._2().open());
            long begin = System.nanoTime();
            int checksum = 0;
            for (;;) {
                int next;
                try {
                    next = in.readInt();
                } catch (EOFException ignored) {
                    break; // readInt signals normal end-of-stream via EOFException
                }
                checksum = (checksum + next) % 1024;
            }
            long elapsed = System.nanoTime() - begin;
            System.out.println(checksum);
            return new Tuple2<>(0L, elapsed);
        }
    }

    /**
     * Benchmark variant 1: re-open the same path through UnifiedFileSystem and
     * read it as a buffered int stream until EOF, accumulating the same
     * mod-1024 checksum as the other variants.
     *
     * @param tuple (path, stream) pair; only the path (_1) is used here
     * @return (0, elapsedNanos); first slot unused, kept for a uniform result shape
     * @throws Exception if the filesystem or stream cannot be opened
     */
    private static Tuple2<Long,Long> test1(Tuple2<String,PortableDataStream> tuple)throws Exception{
        try (Closer closer = Closer.create()) {
            UnifiedFileSystem fs = closer.register(
                    UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(System.getenv("HADOOP_HOME"))));
            DataInputStream in = closer.register(
                    new DataInputStream(new BufferedInputStream(fs.openStream(new URI(tuple._1())))));
            long begin = System.nanoTime();
            int checksum = 0;
            for (;;) {
                int next;
                try {
                    next = in.readInt();
                } catch (EOFException ignored) {
                    break; // normal end-of-stream
                }
                checksum = (checksum + next) % 1024;
            }
            long elapsed = System.nanoTime() - begin;
            System.out.println(checksum);
            return new Tuple2<>(0L, elapsed);
        }
    }

    /**
     * Benchmark variant 2: resolve the file's first block to a local path and
     * read it with a plain buffered FileInputStream, reporting setup and read
     * phases separately.
     *
     * @param tuple (path, stream) pair; only the path (_1) is used here
     * @return (setupNanos, readNanos): time to locate/open the block file, then
     *         time to scan it to EOF
     * @throws Exception if the filesystem, file, or block cannot be opened
     */
    private static Tuple2<Long,Long> test2(Tuple2<String,PortableDataStream> tuple)throws Exception{
        try (Closer closer = Closer.create()) {
            UnifiedFileSystem fs = closer.register(
                    UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(System.getenv("HADOOP_HOME"))));
            UnifiedFile file = closer.register(fs.open(new URI(tuple._1())));
            long begin = System.nanoTime();
            UnifiedBlock block = file.block(0);
            DataInputStream in = closer.register(
                    new DataInputStream(new BufferedInputStream(new FileInputStream(block.absolutePath()))));
            long opened = System.nanoTime(); // setup finished; reading starts here
            int checksum = 0;
            for (;;) {
                int next;
                try {
                    next = in.readInt();
                } catch (EOFException ignored) {
                    break; // normal end-of-stream
                }
                checksum = (checksum + next) % 1024;
            }
            long done = System.nanoTime();
            System.out.println(checksum);
            return new Tuple2<>(opened - begin, done - opened);
        }
    }

    /**
     * Benchmark variant 3: resolve the file's first block to a local path, wrap
     * it in a {@code MemoryBuffer}, and read a fixed number of ints from it,
     * reporting setup and read phases separately.
     *
     * NOTE(review): unlike the other variants, this loop does NOT read until
     * EOF — it reads exactly 1 GiB worth of ints (2^30 / 4). That hard-codes
     * the assumption that the block holds at least 1 GiB of data; confirm
     * against the test fixture, and whether MemoryBuffer exposes a length that
     * could replace the constant.
     *
     * @param tuple (path, stream) pair; only the path (_1) is used here
     * @return (setupNanos, readNanos): time to locate/map the block, then time
     *         to read the fixed number of ints
     * @throws Exception if the filesystem, file, or block cannot be opened
     */
    private static Tuple2<Long,Long> test3(Tuple2<String,PortableDataStream> tuple)throws Exception{
        try(Closer closer=Closer.create()){
            UnifiedFileSystem ufs=closer.register(UnifiedFileSystem.get(UnifiedFileSystem.newConfiguration(System.getenv("HADOOP_HOME"))));
            UnifiedFile file=closer.register(ufs.open(new URI(tuple._1())));
            long startTime=System.nanoTime();
            UnifiedBlock block=file.block(0);
            // presumably memory-maps the local block file — verify MemoryBuffer's contract
            MemoryBuffer mbuf=closer.register(new MemoryBuffer(new File(block.absolutePath())));
            long midTime=System.nanoTime();
            int sum=0;
            // fixed 1 GiB scan (see NOTE above); same mod-1024 checksum as the other variants
            for(int i=0;i<1024*1024*1024/4;i++){
                int value=mbuf.getInt();
                sum+=value;
                sum%=1024;
            }
            long endTime=System.nanoTime();
            System.out.println(sum);
            return new Tuple2<>(midTime-startTime,endTime-midTime);
        }
    }

    /**
     * Entry point.
     *
     * <p>Usage: {@code ReadRandom <partitionCount> <testType>} where
     * {@code testType} 0/1/2 selects test0/test1/test2 and any other value runs
     * test3. Reads every file under the hard-coded Alluxio test directory, runs
     * the chosen benchmark on each partition, and prints the mean phase-1,
     * phase-2, and total times in milliseconds.
     */
    public static void main(String[] args) {
        // Fail fast with a usage message instead of ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("usage: ReadRandom <partitionCount> <testType>");
            System.exit(1);
        }
        SparkConf conf = new SparkConf();
        int count = Integer.parseInt(args[0]);
        final int type = Integer.parseInt(args[1]); // final: captured by the PairFunction below
        try (JavaSparkContext jctx = new JavaSparkContext(conf)) {
            JavaPairRDD<String, PortableDataStream> srcRdd =
                    jctx.binaryFiles("alluxio://hadoop06.ihep.ac.cn:19998/test/", count);
            JavaPairRDD<Long, Long> resultRdd = srcRdd.mapToPair(
                    new PairFunction<Tuple2<String, PortableDataStream>, Long, Long>() {
                        @Override
                        public Tuple2<Long, Long> call(Tuple2<String, PortableDataStream> tuple) throws Exception {
                            switch (type) {
                                case 0:
                                    return test0(tuple);
                                case 1:
                                    return test1(tuple);
                                case 2:
                                    return test2(tuple);
                                default:
                                    return test3(tuple);
                            }
                        }
                    });
            List<Tuple2<Long, Long>> result = resultRdd.collect();
            int sz = result.size();
            // Guard: an empty input directory would otherwise print NaN (0.0/0).
            if (sz == 0) {
                System.err.println("no input files found");
                return;
            }
            double part1 = 0, part2 = 0, sum = 0;
            for (Tuple2<Long, Long> t : result) {
                long p1 = t._1(), p2 = t._2();
                part1 += p1;
                part2 += p2;
                sum += p1 + p2;
            }
            System.out.printf("%fms %fms %fms\n", part1 / sz / 1e6, part2 / sz / 1e6, sum / sz / 1e6);
        }
    }
}
