package org.apache.hadoop.hdfs.server.datanode;

import org.apache.hadoop.hdfs.protocol.Block;

/**
 * A contiguous slice of a block's data, identified by the owning {@link Block}
 * and a pair of byte offsets, together with the checksum bytes covering the
 * slice.
 *
 * <p>Equality and hashing are based solely on the block id and the two
 * offsets; the {@code data} and {@code checksum} payloads are deliberately
 * excluded so two chunks describing the same region compare equal.
 *
 * <p>Note: this class stores and returns its byte arrays by reference (no
 * defensive copies), and it is not thread-safe.
 */
public class Chunk {
	
	// Byte offset of the chunk's first byte within the block.
	private long startOffset;
	// Byte offset marking the end of the chunk.
	// NOTE(review): whether this is inclusive or exclusive is not visible
	// here -- confirm against the callers that create chunks.
	private long endOffset;
	// Raw chunk payload (held by reference, never copied).
	private byte[] data;
	// Checksum bytes covering this chunk's data.
	private byte[] checksum;
	// The block this chunk is a slice of.
	private Block block;
	
	
	/**
	 * Creates a chunk describing the region of {@code block} between the
	 * given offsets.
	 *
	 * @param block       the block this chunk belongs to
	 * @param data        the raw chunk bytes (stored by reference, not copied)
	 * @param startOffset offset of the chunk's first byte within the block
	 * @param endOffset   offset marking the end of the chunk within the block
	 * @param checksum    checksum bytes covering {@code data}
	 *                    (stored by reference, not copied)
	 */
	public Chunk(Block block, byte[] data, long startOffset, long endOffset, byte[] checksum) {
		this.block = block;
		this.data = data;
		this.startOffset = startOffset;
		this.endOffset = endOffset;
		this.checksum = checksum; // fixed: parameter was previously misspelled "checkum"
	}
	
	/** @return the id of the block this chunk belongs to */
	public long getBlockID() {
		return this.block.getBlockId();
	}
	
	/** @return the raw chunk payload (live reference, not a copy) */
	public byte[] getData() {
		return data;
	}
	public void setData(byte[] data) {
		this.data = data;
	}
	/** @return the end offset of this chunk within the block */
	public long getEndOffset() {
		return endOffset;
	}
	public void setEndOffset(long endOffset) {
		this.endOffset = endOffset;
	}
	/** @return the offset of this chunk's first byte within the block */
	public long getStartOffset() {
		return startOffset;
	}
	public void setStartOffset(long startOffset) {
		this.startOffset = startOffset;
	}
	
	@Override
	public int hashCode() {
		// Consistent with equals(): combines block id and both offsets.
		// The computation is kept bit-identical to the original; the % may
		// yield a negative value, which is a legal (if unusual) hash code.
		return (int) ((block.getBlockId() ^ startOffset ^ endOffset) % PrototypeUtilities.HASH_DIVISION_CONSTANT);
	}
	
	@Override
	public boolean equals(Object object) {
		if (object instanceof Chunk) {
			Chunk chunk = (Chunk) object;
			// Payload arrays are intentionally ignored: identity is the
			// (block id, startOffset, endOffset) triple.
			return chunk.getBlockID() == this.getBlockID()
				&& chunk.getStartOffset() == this.getStartOffset()
				&& chunk.getEndOffset() == this.getEndOffset();
		}
		return false;
	}
	
	/**
	 * Renders the chunk header followed by the payload bytes interpreted as
	 * characters (one byte per char; intended for debugging, not for binary
	 * data).
	 */
	@Override // fixed: annotation was missing, unlike hashCode/equals
	public String toString() {
		StringBuilder stringBuilder = new StringBuilder();
		
		// Chained appends instead of string concatenation inside append().
		stringBuilder.append("Chunk of Block: ").append(this.getBlockID())
			.append(" between offsets ").append(this.getStartOffset())
			.append(" and ").append(this.getEndOffset()).append("\n");
		stringBuilder.append("Data contained in block:\n");
		for (byte chunkByte : data) {
			stringBuilder.append((char) chunkByte);
		}
		
		
		return stringBuilder.toString();
	}

	/** @return the checksum bytes for this chunk (live reference, not a copy) */
	public byte[] getChecksum() {
		return checksum;
	}

	public void setChecksum(byte[] checksum) {
		this.checksum = checksum;
	}
	
}