package org.kumas.bigdata.hdfs3.fsdirectory;

import org.apache.hadoop.io.Writable;

import java.io.*;
import java.util.Arrays;

/*************************************************
 * TODO_Kumas
 *  Author: KumasZhang
 *  DateTime: 2021-12-02 15:27
 *  Description: Data block. The current implementation keeps block contents in memory;
 *  later, blocks can be serialized to disk per file, with Block holding only metadata.
 **/
public class Block implements Writable {
    /** Fixed block capacity: 1 KiB. */
    public static final int BLOCK_SIZE = 1024;
    /** Shared empty array, avoids allocating a new zero-length array per call site. */
    public static final Block[] EMPTY_ARRAY = {};

    /** Block payload; always exactly BLOCK_SIZE bytes of backing storage. */
    private byte[] bytes = new byte[BLOCK_SIZE];
    /** Number of valid bytes in {@link #bytes}; invariant: 0 <= numBytes <= BLOCK_SIZE. */
    private int numBytes;
    /** Block identifier; null until set via constructor or {@link #readFields}. */
    private String blockID;

    /** No-arg constructor required by the Writable deserialization contract. */
    public Block() {
    }

    public Block(String blockID) {
        this.blockID = blockID;
    }

    /** @return the block identifier, or null if this block has not been initialized */
    public String getBlockID() {
        return blockID;
    }

    /** @return the number of valid payload bytes, in [0, BLOCK_SIZE] */
    public int getNumBytes() {
        return numBytes;
    }

    /**
     * Sets the number of valid bytes, clamped into [0, BLOCK_SIZE].
     * Negative lengths are treated as 0 (the original code let them through,
     * causing IndexOutOfBoundsException later in setContent/write).
     *
     * @param len requested length; values outside [0, BLOCK_SIZE] are clamped
     */
    public void setNumBytes(int len) {
        this.numBytes = Math.max(0, Math.min(len, BLOCK_SIZE));
    }

    /**
     * Copies up to BLOCK_SIZE bytes from {@code src} into this block.
     * Bytes beyond BLOCK_SIZE are silently truncated.
     *
     * @param src source buffer; must contain at least {@code min(len, BLOCK_SIZE)} bytes
     * @param len number of bytes intended to be valid; clamped into [0, BLOCK_SIZE]
     */
    public void setContent(byte[] src, int len) {
        setNumBytes(len);
        // numBytes is already clamped, so it is a safe copy length.
        System.arraycopy(src, 0, bytes, 0, numBytes);
    }

    /** @return a defensive copy of the valid payload (length == numBytes) */
    public byte[] getContent() {
        return Arrays.copyOf(bytes, numBytes);
    }

    /**
     * Serializes this block as: UTF block ID, int length, then the payload bytes.
     *
     * @throws IOException on write failure, or NullPointerException if blockID is null
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(blockID);
        out.writeInt(numBytes);
        out.write(bytes, 0, numBytes);
    }

    /**
     * Deserializes a block written by {@link #write}. Validates the length field
     * so a corrupt or hostile stream raises IOException instead of an
     * ArrayIndexOutOfBoundsException from readFully.
     *
     * @throws IOException if the stream is truncated or the length field is out of range
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        blockID = in.readUTF();
        numBytes = in.readInt();
        if (numBytes < 0 || numBytes > BLOCK_SIZE) {
            throw new IOException(
                    "Corrupt block " + blockID + ": invalid payload length " + numBytes
                            + " (expected 0.." + BLOCK_SIZE + ")");
        }
        bytes = new byte[BLOCK_SIZE];
        in.readFully(bytes, 0, numBytes);
    }
}
