package io.kiki.sba.registry.server.shared.util;


import io.kiki.sba.registry.common.model.dataserver.Datum;
import io.kiki.sba.registry.common.model.dataserver.DatumVersion;
import io.kiki.sba.registry.common.model.store.*;
import io.kiki.sba.registry.compress.CompressCachedExecutor;
import io.kiki.sba.registry.compress.CompressUtils;
import io.kiki.sba.registry.compress.CompressedItem;
import io.kiki.sba.registry.compress.Compressor;
import io.kiki.sba.registry.core.model.Data;
import io.kiki.sba.registry.util.StringUtil;
import io.kiki.sba.registry.util.SystemUtils;
import io.kiki.stack.netty.serialization.HessianSerializer;
import org.apache.commons.collections.CollectionUtils;

import java.util.*;
import java.util.Map.Entry;


/**
 * Static helpers for converting, compressing and decompressing datum models.
 *
 * <p>Compression and decompression both go through shared {@link CompressCachedExecutor}
 * instances so repeated work on the same datum (keyed by {@code SubDatum.compressKey})
 * is served from cache. Cache capacities are tunable via system properties.
 */
public final class DatumUtils {

    /** Shared serializer used for the publisher-list payload on both compress and decompress paths. */
    public static final HessianSerializer HESSIAN_SERIALIZER = new HessianSerializer();

    private static final String KEY_COMPRESS_DATUM_CACHE_CAPACITY = "registry.compress.datum.capacity";

    /** Caching executor for the compress path; capacity defaults to 128 MiB unless overridden. */
    public static final CompressCachedExecutor<CompressedItem> compressCachedExecutor =
            CompressUtils.newCachedExecutor(
                    "datum_compress",
                    60 * 1000,
                    SystemUtils.getSystemInteger(KEY_COMPRESS_DATUM_CACHE_CAPACITY, 1024 * 1024 * 128));

    private static final String KEY_DECOMPRESS_DATUM_CACHE_CAPACITY = "registry.decompress.datum.capacity";

    /** Caching executor for the decompress path; capacity defaults to 384 MiB unless overridden. */
    public static final CompressCachedExecutor<SubPublisherList> decompressCachedExecutor =
            CompressUtils.newCachedExecutor(
                    "datum_decompress",
                    60 * 1000,
                    SystemUtils.getSystemInteger(KEY_DECOMPRESS_DATUM_CACHE_CAPACITY, 1024 * 1024 * 384));

    /** Utility class — never instantiated. */
    private DatumUtils() {
    }

    /**
     * Returns a copy of the given map whose keys have been interned via {@link StringInterner},
     * so repeated dataInfoId strings share storage. Values are carried over unchanged.
     */
    public static Map<String, DatumVersion> intern(Map<String, DatumVersion> dataInfoIdToDatumVersionMap) {
        final Map<String, DatumVersion> interned = new HashMap<>(dataInfoIdToDatumVersionMap.size());
        for (Entry<String, DatumVersion> entry : dataInfoIdToDatumVersionMap.entrySet()) {
            interned.put(StringInterner.intern(entry.getKey()), entry.getValue());
        }
        return interned;
    }

    /**
     * Projects a dataInfoId-to-Datum map down to a dataInfoId-to-version map.
     */
    public static Map<String, Long> getDataInfoIdToDatumVersionMap(Map<String, Datum> dataInfoIdToDatumMap) {
        final Map<String, Long> versions = new HashMap<>(dataInfoIdToDatumMap.size());
        for (Entry<String, Datum> entry : dataInfoIdToDatumMap.entrySet()) {
            versions.put(entry.getKey(), entry.getValue().getVersion());
        }
        return versions;
    }

    /**
     * Builds a {@link DataCenterToSubDatumMap} for the subscriber that contains an empty
     * {@link SubDatum} (same version) for every data center in {@code datacenterSet}.
     */
    public static DataCenterToSubDatumMap newEmptyMultiSubDatum(Subscriber subscriber, Set<String> datacenterSet, long version) {
        final Map<String, SubDatum> perDataCenter = new HashMap<>(datacenterSet.size());
        datacenterSet.forEach(dc -> perDataCenter.put(dc, newEmptySubDatum(dc, subscriber, version)));
        return new DataCenterToSubDatumMap(subscriber.getDataInfoId(), perDataCenter);
    }

    /**
     * Creates an empty {@link SubDatum} for one data center, carrying the subscriber's
     * identity fields and the supplied version.
     */
    public static SubDatum newEmptySubDatum(String datacenter, Subscriber subscriber, long version) {
        return SubDatum.emptyOf(
                datacenter,
                subscriber.getDataInfoId(),
                subscriber.getDataId(),
                subscriber.getInstanceId(),
                subscriber.getGroup(),
                version);
    }

    /**
     * Converts a {@link Datum} into an (uncompressed) {@link SubDatum}, mapping every
     * registered {@link Publisher} to a {@link SubPublisher}.
     */
    public static SubDatum of(Datum datum) {
        final List<SubPublisher> publishers = new ArrayList<>(datum.publisherSize());
        for (Publisher publisher : datum.getRegisterIdToPublisherMap().values()) {
            // temp publisher the srcAddress maybe null
            final Url clientUrl = publisher.getClientUrl();
            String srcAddress = null;
            if (clientUrl != null) {
                srcAddress = clientUrl.buildAddressString();
            }
            publishers.add(new SubPublisher(
                    publisher.getRegisterId(),
                    publisher.getZone(),
                    publisher.getServerDataList(),
                    publisher.getClientId(),
                    publisher.getVersion(),
                    srcAddress,
                    publisher.getRegisterTimestamp(),
                    publisher.getPublishSource()));
        }
        return SubDatum.normalOf(
                datum.getDataCenter(),
                datum.getDataInfoId(),
                datum.getDataId(),
                datum.getInstanceId(),
                datum.getGroup(),
                datum.getVersion(),
                publishers,
                datum.getRecentVersions());
    }

    /**
     * Sums {@code getData().length()} over the list, skipping null entries and entries
     * whose payload is null. An empty or null list yields 0.
     */
    public static long getDataListSize(List<Data> dataList) {
        if (CollectionUtils.isEmpty(dataList)) {
            return 0;
        }
        long total = 0;
        for (Data item : dataList) {
            if (item != null && item.getData() != null) {
                total += item.getData().length();
            }
        }
        return total;
    }

    /**
     * Compresses the publisher list of {@code subDatum} with the given compressor,
     * going through {@link #compressCachedExecutor} so identical datums are compressed
     * at most once per cache window. Returns the input unchanged when either argument
     * is null.
     *
     * @throws RuntimeException wrapping any failure from serialization or compression
     */
    public static SubDatum compressSubDatum(SubDatum subDatum, Compressor compressor) {
        if (compressor == null || subDatum == null) {
            return subDatum;
        }
        final CompressedItem item;
        try {
            item = compressCachedExecutor.execute(subDatum.compressKey(compressor.getEncoding()), () -> {
                final List<SubPublisher> publishers = subDatum.getUnzippedSubPublisherList();
                final byte[] serialized = HESSIAN_SERIALIZER.serialize(new SubPublisherList(publishers));
                final byte[] zipped = compressor.compress(serialized);
                return new CompressedItem(zipped, serialized.length, compressor.getEncoding());
            });
        } catch (Throwable e) {
            throw new RuntimeException("compress publishers failed", e);
        }
        return SubDatum.zipOf(
                subDatum.getDataCenter(),
                subDatum.getDataInfoId(),
                subDatum.getDataId(),
                subDatum.getInstanceId(),
                subDatum.getGroup(),
                subDatum.getVersion(),
                subDatum.getRecentVersionList(),
                new ZipSubPublisherList(
                        item.getCompressedBytes(),
                        item.getOriginSize(),
                        item.getEncoding(),
                        subDatum.getSubPublisherListSize()));
    }

    /**
     * Decompresses every {@link SubDatum} in the map, returning a new
     * {@link DataCenterToSubDatumMap} with the same dataInfoId.
     */
    public static DataCenterToSubDatumMap decompressDataCenterToSubDatumMap(DataCenterToSubDatumMap dataCenterToSubDatumMap) {
        StringUtil.checkNotEmpty(dataCenterToSubDatumMap.getDataCenterToSubDatumMap(), "multiSubDatum.datumMap");
        final Map<String, SubDatum> decompressed =
                new HashMap<>(dataCenterToSubDatumMap.getDataCenterToSubDatumMap().size());
        dataCenterToSubDatumMap.getDataCenterToSubDatumMap()
                .forEach((dc, datum) -> decompressed.put(dc, decompressSubDatum(datum)));
        return new DataCenterToSubDatumMap(dataCenterToSubDatumMap.getDataInfoId(), decompressed);
    }

    /**
     * Inflates a compressed {@link SubDatum} back to its normal (unzipped) form, going
     * through {@link #decompressCachedExecutor}. A datum with no zipped payload is
     * returned as-is.
     *
     * @throws RuntimeException wrapping any failure from decompression or deserialization
     */
    public static SubDatum decompressSubDatum(SubDatum subDatum) {
        final ZipSubPublisherList zipped = subDatum.getZipSubPublisherList();
        if (zipped == null) {
            // already uncompressed — nothing to do
            return subDatum;
        }
        final Compressor compressor = CompressUtils.mustGet(zipped.getEncoding());
        final SubPublisherList publishers;
        try {
            publishers = decompressCachedExecutor.execute(subDatum.compressKey(compressor.getEncoding()), () -> {
                final byte[] raw = compressor.decompress(zipped.getCompressedBytes(), zipped.getOriginSize());
                return HESSIAN_SERIALIZER.deserialize(raw, SubPublisherList.className);
            });
        } catch (Throwable e) {
            throw new RuntimeException("decompress publishers failed", e);
        }
        return SubDatum.normalOf(
                subDatum.getDataCenter(),
                subDatum.getDataInfoId(),
                subDatum.getDataId(),
                subDatum.getInstanceId(),
                subDatum.getGroup(),
                subDatum.getVersion(),
                publishers.getSubPublisherList(),
                subDatum.getRecentVersionList());
    }
}
