package org.apache.hadoop.hbase.regionserver;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryType;
import java.lang.management.RuntimeMXBean;

/**
 * Demonstrates HBase MemStore sizing: loads KeyValues into {@code DefaultMemStore}
 * instances and logs the estimated data/heap sizes.
 *
 * @author pizhihui
 * (created 2024-06-06)
 */

public class MemStoreDemo {

    private static final Logger LOG = LoggerFactory.getLogger(MemStoreDemo.class);

    /**
     * Loads fixed-size and variably-sized KeyValues into two {@code DefaultMemStore}
     * instances, logs the estimated memstore sizes, then pauses so a heap dump can
     * be captured.
     *
     * @param args unused
     * @throws IOException if HBase configuration resources cannot be loaded
     */
    public static void main(String[] args) throws IOException {

        Configuration conf = HBaseConfiguration.create();

        // The global memstore size bounds how large the chunk pool may grow.
        Pair<Long, MemoryType> pair = MemorySizeUtil.getGlobalMemStoreSize(conf);
        long globalMemStoreSize = pair.getFirst();
        // NOTE(review): hard-coded on-heap for this demo; a real region server
        // would consult regionServerAccounting.isOffheap().
        boolean offheap = false;
        // When off heap memstore in use, take full area for chunk pool.
        float poolSizePercentage = offheap
                ? 1.0F
                : conf.getFloat(MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY, MemStoreLAB.POOL_MAX_SIZE_DEFAULT);
        float initialCountPercentage = conf.getFloat(MemStoreLAB.CHUNK_POOL_INITIALSIZE_KEY,
                MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT);
        int chunkSize = conf.getInt(MemStoreLAB.CHUNK_SIZE_KEY, MemStoreLAB.CHUNK_SIZE_DEFAULT);
        float indexChunkSizePercent = conf.getFloat(MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_KEY,
                MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
        // The ChunkCreator singleton must be initialized before any MemStoreLAB use.
        ChunkCreator.initialize(chunkSize, offheap, globalMemStoreSize, poolSizePercentage,
                initialCountPercentage, null, indexChunkSizePercent);

        RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
        // Parameterized logging — no string concatenation when the level is disabled.
        LOG.info("vmName={}, vmVendor={}, vmVersion={}",
                runtime.getVmName(), runtime.getVmVendor(), runtime.getVmVersion());
        LOG.info("vmInputArguments={}", runtime.getInputArguments());

        DefaultMemStore memstore1 = new DefaultMemStore();

        // TODO: x32 vs x64
        final int count = 10000;
        byte[] fam = Bytes.toBytes("col");
        byte[] qf = Bytes.toBytes("umn");
        byte[] empty = new byte[0];
        MemStoreSizing memStoreSizing = new NonThreadSafeMemStoreSizing();
        for (int i = 0; i < count; i++) {
            // Give each cell its own ts so entries stay distinct.
            memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty), memStoreSizing);
        }
        LOG.info("memstore1 estimated size={}", memStoreSizing.getMemStoreSize().getDataSize()
                + memStoreSizing.getMemStoreSize().getHeapSize());
        // Second pass with identical data shows how duplicates affect the accounting.
        for (int i = 0; i < count; i++) {
            memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty), memStoreSizing);
        }
        LOG.info("memstore1 estimated size (2nd loading of same data)={}",
                memStoreSizing.getMemStoreSize().getDataSize()
                        + memStoreSizing.getMemStoreSize().getHeapSize());

        // Make a variably sized memstore: value length grows with the row index.
        DefaultMemStore memstore2 = new DefaultMemStore();
        memStoreSizing = new NonThreadSafeMemStoreSizing();
        for (int i = 0; i < count; i++) {
            memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, new byte[i]), memStoreSizing);
        }
        LOG.info("memstore2 estimated size={}", memStoreSizing.getMemStoreSize().getDataSize()
                + memStoreSizing.getMemStoreSize().getHeapSize());

        final int seconds = 30;
        LOG.info("Waiting {} seconds while heap dump is taken", seconds);
        try {
            // Fix: the original only logged the wait without sleeping, leaving
            // no window to actually capture a heap dump.
            Thread.sleep(seconds * 1000L);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and fall through to exit.
            Thread.currentThread().interrupt();
        }
        LOG.info("Exiting.");
    }

}
