package cas.ihep.hadoop.util;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.Service4Hep;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;

/**
 * Finalizes (or discards) an HDFS file whose contents are produced as a local
 * file and registered through {@link Service4Hep}. The constructor creates the
 * HDFS file entry and allocates its first block; {@link #close()} either
 * commits that block using the local file's checksums and length, or deletes
 * the HDFS entry when the local file never materialized.
 *
 * <p>NOTE(review): {@link #opath} (the local file path) is never assigned in
 * this class — callers are expected to set the public field before calling
 * {@code close()}.
 */
public class HdfsFileCloser implements Closeable {

    // Presumably a 2 GiB block size for the new HDFS file — confirm against
    // Service4Hep.createFile's blockSize parameter semantics.
    private static final long BLOCK_SIZE_BYTES = 2147483648L;

    // Fields stay public for backward compatibility with existing callers that
    // access them directly (opath in particular must be set externally).
    public RefreshFileSize rfs;
    public Service4Hep s4h;
    public LocatedBlock lb;
    public String outputFile, opath;
    public DFSClient dfs;
    public HdfsFileStatus hfstatus;

    // Guards against double-close; Closeable requires repeated close() to be a no-op.
    private boolean closed;

    /**
     * Creates the HDFS file entry and allocates its first block.
     *
     * @param dfsClient   client used to delete the HDFS file if it is abandoned
     * @param service4Hep helper performing file creation and block management
     * @param outfile     HDFS path of the output file
     * @throws IOException if file creation or block allocation fails
     */
    public HdfsFileCloser(DFSClient dfsClient, Service4Hep service4Hep, String outfile) throws IOException {
        dfs = dfsClient;
        s4h = service4Hep;
        rfs = RefreshFileSize.instance();
        outputFile = outfile;
        // overwrite=true, replication=1 (single replica assumed — confirm
        // against Service4Hep.createFile's parameter order).
        hfstatus = service4Hep.createFile(outfile, true, (short) 1, BLOCK_SIZE_BYTES);
        lb = service4Hep.addBlock(outfile, hfstatus.getFileId());
    }

    /**
     * Commits the written block to HDFS, or deletes the HDFS entry when the
     * local file does not exist. Safe to call more than once; only the first
     * call has any effect (per the {@link Closeable} contract).
     *
     * @throws IOException           if checksum computation, block close, or delete fails
     * @throws IllegalStateException if {@link #opath} was never set by the caller
     */
    @Override
    public void close() throws IOException {
        if (closed) {
            return;
        }
        closed = true;
        rfs.remove(outputFile);
        // Fail fast with a clear message instead of the opaque NPE that the
        // original `new File(null)` would have thrown.
        if (opath == null) {
            throw new IllegalStateException(
                    "opath (local file path) must be set before close() for " + outputFile);
        }
        File f = new File(opath);
        if (f.exists()) {
            // Finalize: compute checksums from the local copy, record the real
            // length on the block, then close the file on the namenode side.
            s4h.calculatefileCheckSums(opath, lb);
            lb.getBlock().setNumBytes(f.length());
            s4h.close(lb, outputFile, hfstatus.getFileId());
        } else {
            // Local file never appeared — remove the dangling HDFS entry.
            dfs.delete(outputFile, true);
        }
    }
}
