package cas.ihep.test;

import cas.ihep.functions.FunctionTuple2;
import cas.ihep.hdfs.HdfsBlock;
import cas.ihep.hdfs.HdfsFile;
import cas.ihep.hdfs.HdfsSystem;
import com.google.common.io.Closer;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.input.PortableDataStream;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;

/**
 * Spark driver that transfers HDFS files to a remote GridFTP endpoint.
 *
 * <p>Usage: {@code SparkMS <path-list-file> <target-directory>}
 * <ul>
 *   <li>{@code args[0]} — local text file listing one HDFS path/glob per line</li>
 *   <li>{@code args[1]} — target directory on the remote server</li>
 * </ul>
 *
 * <p>For each matched file the job resolves the local path of HDFS block 0 and
 * shells out to {@code globus-url-copy} on the executor to push it to the
 * remote FTP endpoint, collecting a per-file status string on the driver.
 */
public class SparkMS {
    /** Partition count after union — bounds concurrent globus-url-copy processes. */
    private static final int NUM_PARTITIONS = 9;

    public static void main(String[] args) throws Exception {
        // Fix: validate arguments up front instead of failing later with
        // ArrayIndexOutOfBoundsException on args[1].
        if (args.length < 2) {
            System.err.println("Usage: SparkMS <path-list-file> <target-directory>");
            return;
        }
        try (Closer closer = Closer.create()) {
            for (String str : args) {
                System.out.println(str);
            }
            final SparkConf conf = new SparkConf();
            // Config map captured by the map() closure below; HashMap is
            // Serializable, so Spark can ship it to executors.
            final HashMap<String, String> mc = new HashMap<>();
            mc.put("target.directory", args[1]);

            JavaSparkContext jsc = closer.register(new JavaSparkContext(conf));

            // Build one binary-files RDD per non-empty input line and union them.
            FileInputStream lfis = closer.register(new FileInputStream(args[0]));
            // Fix: name the charset explicitly rather than relying on the
            // platform default encoding.
            InputStreamReader lireader =
                    closer.register(new InputStreamReader(lfis, StandardCharsets.UTF_8));
            BufferedReader linput = closer.register(new BufferedReader(lireader));
            String line;
            JavaPairRDD<String, PortableDataStream> rdd0 = null;
            while ((line = linput.readLine()) != null) {
                if (line.length() > 0) {
                    rdd0 = (rdd0 == null) ? jsc.binaryFiles(line) : rdd0.union(jsc.binaryFiles(line));
                }
            }
            if (rdd0 == null) {
                // Input list was empty — nothing to transfer.
                System.out.println("Shutdown");
                return;
            }
            rdd0 = rdd0.coalesce(NUM_PARTITIONS);

            // Each record is (file URI, stream); the stream itself is unused —
            // the transfer reads the underlying HDFS block file directly.
            JavaRDD<String> result = rdd0.map(new FunctionTuple2<String, PortableDataStream, String>() {
                @Override
                public String call(String s, PortableDataStream portableDataStream) {
                    try (Closer closer = Closer.create()) {
                        Configuration lconf = new Configuration();
                        HdfsSystem hdfs = closer.register(
                                new HdfsSystem("hdfs://hadoop06.ihep.ac.cn:8020", lconf));
                        java.net.URI path = new java.net.URI(s);
                        String p = path.getPath();
                        HdfsFile file = closer.register(hdfs.open(path.getPath()));
                        int idx = p.lastIndexOf('/');
                        // NOTE(review): only block 0 is transferred, so multi-block
                        // files would be copied partially — confirm all inputs are
                        // single-block files.
                        HdfsBlock blk = closer.register(file.getBlock(0));
                        String blkpath = blk.getBlockPath();
                        // SECURITY: credentials are hard-coded in the URL and visible
                        // in process listings; move them to a credential store/.netrc.
                        ProcessBuilder builder = new ProcessBuilder();
                        builder.command("globus-url-copy", blkpath,
                                "ftp://helion:123456@helion01.ihep.ac.cn:5555/"
                                        + mc.get("target.directory") + p.substring(idx + 1));
                        // Fix: File.createTempFile treats its first argument as a
                        // file-NAME prefix, not a path. "/tmp/job" would resolve under
                        // {java.io.tmpdir}/tmp/ and throw IOException; pass the target
                        // directory as the third argument instead.
                        builder.redirectError(File.createTempFile("job", ".err", new File("/tmp")));
                        builder.redirectOutput(File.createTempFile("job", ".out", new File("/tmp")));
                        Process process = builder.start();
                        int exitValue = process.waitFor();
                        if (exitValue == 0) {
                            return "Success";
                        }
                        System.err.println("Error");
                        return "Error";
                    } catch (Exception e) {
                        // Fix: restore the interrupt flag if waitFor() was interrupted.
                        if (e instanceof InterruptedException) {
                            Thread.currentThread().interrupt();
                        }
                        e.printStackTrace();
                        return e.getMessage();
                    }
                }
            });

            // collect() pulls every per-file status string back to the driver.
            List<String> rets = result.collect();
            for (String str : rets) {
                System.out.println(str);
            }
        }
    }
}
