package cas.ihep.fs.hdfs;

import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader;
import org.apache.hadoop.util.DataChecksum;

import java.io.*;

/**
 * Computes HDFS-style chunked checksums for a local file and writes them out
 * as a block metadata (.meta) file: a {@link BlockMetadataHeader} followed by
 * one checksum per {@code bytesPerChecksum}-sized chunk of the source file.
 *
 * <p>Not thread-safe: instances reuse internal chunk/checksum buffers.
 */
class HdfsFileCheckSum {
    /** Checksum algorithm (type, bytes-per-chunk, checksum width) to apply. */
    private final DataChecksum checksum;
    /** Reusable buffer holding one full data chunk. */
    private final byte[] buf;
    /** Reusable buffer holding the checksum of one chunk. */
    private final byte[] chksum;

    /**
     * @param sum the checksum algorithm; its {@code bytesPerChecksum} fixes the
     *            chunk size used by {@link #calculateChunkedSums(File, File)}
     */
    HdfsFileCheckSum(DataChecksum sum) {
        this.checksum = sum;
        this.buf = new byte[sum.getBytesPerChecksum()];
        this.chksum = new byte[this.getChecksumSize()];
    }

    /**
     * Reads {@code srcFile} chunk by chunk and writes its block metadata
     * (header + per-chunk checksums) to {@code metaFile}, overwriting it.
     *
     * @param srcFile  the data file to checksum
     * @param metaFile the metadata file to create/overwrite
     * @throws EOFException if {@code srcFile} ends before the length observed
     *                      at the start of the call (e.g. concurrent truncation)
     * @throws IOException  on any other read/write failure
     */
    public void calculateChunkedSums(File srcFile, File metaFile) throws IOException {
        // try-with-resources: the original leaked all three streams if any
        // read/write threw mid-copy.
        try (InputStream in = new FileInputStream(srcFile);
             DataOutputStream checksumOut = new DataOutputStream(
                     new BufferedOutputStream(new FileOutputStream(metaFile),
                             HdfsConstants.SMALL_BUFFER_SIZE))) {
            BlockMetadataHeader.writeHeader(checksumOut, this.checksum);

            for (long remain = srcFile.length(); remain > 0L; ) {
                // Fill a whole chunk (or the final partial chunk) before
                // checksumming: InputStream.read may return fewer bytes than
                // requested, and a short read mid-file would misalign every
                // subsequent checksum chunk.
                int toRead = (int) Math.min(remain, (long) this.buf.length);
                int filled = 0;
                while (filled < toRead) {
                    int n = in.read(this.buf, filled, toRead - filled);
                    if (n < 0) {
                        throw new EOFException("Unexpected end of file: " + srcFile
                                + " (expected " + remain + " more bytes)");
                    }
                    filled += n;
                }
                this.checksum.calculateChunkedSums(this.buf, 0, filled, this.chksum, 0);
                checksumOut.write(this.chksum);
                remain -= filled;
            }
            checksumOut.flush();
        }
    }

    /** @return the width in bytes of a single checksum value. */
    private int getChecksumSize() {
        return this.checksum.getChecksumSize();
    }
}

