package cas.ihep.hdfsIn;

/**
 * Created by chocolate on 2017/11/24.
 */
import cas.ihep.util.RedirectIO;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.io.Text;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.net.URI;
import java.net.URISyntaxException;

import cas.ihep.util.RefreshFileSize;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.util.Shell.ExitCodeException;
/**
 * Mapper that ingests remote files into HDFS, one file per input line.
 *
 * <p>For each input line (a source file path) it: creates the destination
 * HDFS file entry, allocates a block, runs an external {@code globus-url-copy}
 * process that writes straight into the block's physical path on the local
 * datanode, then finalizes the block (or deletes the entry on failure).
 *
 * <p>Job configuration keys (all optional, defaults preserve legacy behavior):
 * {@code fs.default.name}, {@code target.directory} (required),
 * {@code ftp.source.prefix}, {@code gridftp.copy.command}.
 */
public class inMapper extends Mapper<LongWritable, Text, Text, Text> {
    private static final Log LOG = LogFactory.getLog(inMapper.class);

    DFSClient dfs = null;
    Service4Hep s4h = null;

    /**
     * Transfers one remote file into HDFS.
     *
     * @param lno     byte offset of the line (unused)
     * @param line    source file path to fetch
     * @param context task context supplying the job configuration
     * @throws IOException if HDFS operations fail or the external copy exits non-zero
     * @throws InterruptedException if interrupted while waiting on the copy process
     */
    public void map(LongWritable lno, Text line, Context context) throws IOException, InterruptedException, ExitCodeException {
        String path = line.toString();                                        // one source file path per input line
        Configuration conf = context.getConfiguration();
        String nn = conf.get("fs.default.name", "hdfs://192.168.60.41:8020"); // namenode URI
        String targetdir = conf.get("target.directory");                      // destination HDFS directory
        // Source URL prefix and copy tool are configurable; defaults keep the original
        // hard-coded values. NOTE(review): the default prefix embeds plaintext credentials —
        // these should be supplied via job configuration or a credential store instead.
        String srcPrefix = conf.get("ftp.source.prefix", "ftp://xingg:123456@helion02.ihep.ac.cn:5555/");
        String copyTool = conf.get("gridftp.copy.command", "/workfs/cc/weizc/globus/bin/globus-url-copy");

        URI uri;
        try {
            uri = new URI(nn);
        } catch (URISyntaxException e) {
            // A malformed namenode URI is a configuration error, not an I/O condition.
            throw new IllegalArgumentException(e);
        }

        // Destination logical path: targetdir + '/' + source file name.
        String dest = targetdir + '/' + path.substring(path.lastIndexOf('/') + 1);

        RefreshFileSize rfs = null;
        try {
            dfs = new DFSClient(uri, conf);
            s4h = new Service4Hep(dfs, conf);
            rfs = new RefreshFileSize(dfs);

            // Create the file entry (overwrite, 1 replica, 2 GB block size) and allocate its block.
            HdfsFileStatus hfstatus = s4h.createFile(dest, true, (short) 1, 2147483648L);
            LocatedBlock lb = s4h.addBlock(dest, hfstatus.getFileId());
            // Physical path of the block file on the local datanode — the copy tool writes here directly.
            String opath = s4h.getTmpFile(lb) + "/" + lb.getBlock().getBlockName();
            // Register so the file size is refreshed while the external copy runs.
            rfs.put(dest, hfstatus.getFileId(), opath);

            int exitValue = runCopy(copyTool, srcPrefix + path, opath);

            rfs.remove(dest);
            if (exitValue == 0) {
                File f = new File(opath);
                if (f.exists()) {
                    // Compute checksums, record the real length, then complete the file on the NN.
                    s4h.calculatefileCheckSums(opath, lb);
                    lb.getBlock().setNumBytes(f.length());
                    s4h.close(lb, dest, hfstatus.getFileId());
                } else {
                    // Copy reported success but produced no local file — drop the dangling entry.
                    dfs.delete(dest, true);
                }
            } else {
                // Failed transfer: remove the half-created file and fail the task so the
                // framework retries, instead of silently recording success (original behavior).
                dfs.delete(dest, true);
                throw new IOException("globus-url-copy exited with code " + exitValue + " for source " + path);
            }
        } finally {
            // Always release clients, in reverse creation order — the original leaked all
            // three on every failure path. Cleanup failures are logged, not propagated,
            // so they cannot mask the real exception.
            if (rfs != null) {
                try { rfs.close(); } catch (Exception e) { LOG.warn("failed to close RefreshFileSize", e); }
            }
            if (s4h != null) {
                try { s4h.close(); } catch (Exception e) { LOG.warn("failed to close Service4Hep", e); }
            }
            if (dfs != null) {
                try { dfs.close(); } catch (Exception e) { LOG.warn("failed to close DFSClient", e); }
            }
        }
    }

    /**
     * Runs the external copy tool under {@code time}, mirroring its stdout and
     * stderr into this task's streams, and returns the process exit code.
     *
     * @param tool     absolute path of the copy executable
     * @param srcUrl   fully-qualified source URL
     * @param destPath local filesystem path to write to
     * @return the copy process's exit value (0 on success)
     */
    private int runCopy(String tool, String srcUrl, String destPath) throws IOException, InterruptedException {
        ProcessBuilder builder = new ProcessBuilder("time", tool, "-vb", "-p", "3", srcUrl, destPath);
        Process process = builder.start();
        // Pump stdout on a worker thread while draining stderr on the current thread,
        // so neither pipe fills up and blocks the child process.
        Thread stdoutPump = new Thread(new RedirectIO(process.getInputStream(), System.out));
        stdoutPump.start();
        new RedirectIO(process.getErrorStream(), System.err).run();
        stdoutPump.join();
        return process.waitFor(); // wait for the process to finish
    }
}
