package io.gitee.tziye.core.msg;

import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;
import io.gitee.tziye.common.Constants;
import io.gitee.tziye.core.instance.DefaultRenaiClient;
import io.gitee.tziye.jdbc.JdbcProxy;
import io.gitee.tziye.jdbc.ProcedureResult;
import io.vavr.Tuple2;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.jdbc.core.BeanPropertyRowMapper;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;

@Slf4j
/**
 * Database-backed message operator: produces into {@code renai_msg_produce},
 * consumes via the {@code MSG_CONSUME} stored procedure, and acknowledges /
 * replays / deletes through plain SQL on the renai_msg_* tables.
 *
 * <p>Messages are spread over {@link #PARTITIONS} partitions: keyed messages
 * hash deterministically (murmur3_128), unkeyed messages round-robin.
 */
@Slf4j
public class DefaultMsgOperator extends MsgOperator {

    /** Fixed number of partitions a topic is spread across. */
    private static final int PARTITIONS = 3;

    /** Round-robin counter for assigning partitions to messages without a key. */
    private final AtomicLong roundRobinCount = new AtomicLong(0);

    private final JdbcProxy jdbcProxy;

    public DefaultMsgOperator(DefaultRenaiClient renaiClient) {
        super(renaiClient);
        jdbcProxy = renaiClient.getJdbcProxy();
    }

    @Override
    public <T> void produce(String topic, String key, T value) {
        commonProduce(topic, List.of(new Tuple2<>(key, value)));
    }

    @Override
    public <T> void produce(String topic, List<Tuple2<String, T>> msgs) {
        commonProduce(topic, msgs);
    }

    /**
     * Batch-inserts the given (key, value) pairs into {@code renai_msg_produce},
     * one row per message, stamping each row with this instance's group and id.
     *
     * @param topic target topic; must not be blank
     * @param msgs  messages to insert; must be non-empty and contain no null values
     *              (a null/blank key is allowed — it falls back to round-robin partitioning)
     * @throws IllegalArgumentException if the topic is blank, the list is empty,
     *                                  or any message value is null
     */
    private <T> void commonProduce(String topic, List<Tuple2<String, T>> msgs) {
        // Validate eagerly: the previous `assert` checks are disabled unless the
        // JVM runs with -ea, so bad input would slip through in production.
        if (StringUtils.isBlank(topic)) {
            throw new IllegalArgumentException("topic must not be blank");
        }
        if (CollectionUtils.isEmpty(msgs)) {
            throw new IllegalArgumentException("msgs must not be empty");
        }
        if (msgs.stream().anyMatch(msg -> Objects.isNull(msg._2))) {
            throw new IllegalArgumentException("msg value must not be null");
        }
        String sql = "INSERT INTO renai_msg_produce(topic, pt, k, msg, g, producer) VALUES(?, ?, ?, ?, ?, ?)";
        // Hoist the per-batch constants out of the per-message mapping.
        String group = renaiClient.instance.getG() + Constants.DELIMITER + renaiClient.instance.getService();
        String producer = renaiClient.instance.getInstanceId();
        List<Object[]> params = msgs.stream()
                .map(msg -> new Object[]{topic, calPartition(msg._1), msg._1, msg._2, group, producer})
                .toList();
        jdbcProxy.batchUpdate(sql, params);
    }

    /**
     * Picks the partition for a message: a stable murmur3 hash of the key when
     * one is present, otherwise simple round-robin over this instance's counter.
     */
    private int calPartition(String key) {
        if (StringUtils.isNotBlank(key)) {
            // StandardCharsets.UTF_8 replaces Guava's deprecated Charsets.UTF_8;
            // it is the same charset, so existing keys keep their partitions.
            long hash = Hashing.murmur3_128().hashString(key, StandardCharsets.UTF_8).asLong();
            // hash % PARTITIONS is in [-(PARTITIONS-1), PARTITIONS-1], so Math.abs
            // is safe here (no Long.MIN_VALUE edge case). Kept as abs — switching
            // to floorMod would remap existing keys to different partitions.
            return (int) Math.abs(hash % PARTITIONS);
        }
        return (int) (roundRobinCount.getAndIncrement() % PARTITIONS);
    }

    /**
     * Fetches up to {@code count} messages for the given topic/group via the
     * {@code MSG_CONSUME} stored procedure.
     *
     * @param topic topic to consume from; must not be blank
     * @param group consumer group; must not be blank
     * @param count maximum number of messages; must be positive
     * @param start earliest message time to consider; {@code null} means "now"
     * @return the consumed messages (possibly empty)
     * @throws IllegalArgumentException if topic/group is blank or count is not positive
     */
    @Override
    public List<RenaiMsg> consume(String topic, String group, int count, Date start) {
        if (StringUtils.isBlank(topic) || StringUtils.isBlank(group) || count <= 0) {
            throw new IllegalArgumentException(
                    "topic and group must not be blank and count must be positive");
        }
        if (start == null) {
            start = new Date();
        }
        Map<String, Object> params = new HashMap<>();
        params.put(Constants.TOPIC, topic);
        params.put(Constants.GROUP, group);
        params.put(Constants.SIZE, count);
        params.put(Constants.START, start);
        params.put(Constants.INSTANCE, renaiClient.instance.getInstanceId());
        ProcedureResult result = jdbcProxy.call(Constants.MSG_CONSUME, params);
        List<RenaiMsg> renaiMsgs = result.list(RenaiMsg.class);
        logConsume(topic, group, renaiMsgs);
        return renaiMsgs;
    }

    /** Debug-logs a consume batch; guarded so the id list is only built when needed. */
    private void logConsume(String topic, String group, List<RenaiMsg> renaiMsgs) {
        if (log.isDebugEnabled()) {
            int pt = -1;
            List<Long> msgIds = List.of();
            if (CollectionUtils.isNotEmpty(renaiMsgs)) {
                // All messages in one batch come from a single partition.
                pt = renaiMsgs.get(0).getPt();
                msgIds = renaiMsgs.stream().map(RenaiMsg::getId).toList();
            }
            log.debug("[Consume] topic:{}, group:{}, pt:{}, msgId:{}", topic, group, pt, msgIds);
        }
    }

    /**
     * Acknowledges a consumed message by advancing {@code consumed_id}, but only
     * while this instance still occupies the partition and the ack is not stale
     * ({@code consumed_id <= msgId <= occupied_id}). A failed ack is logged, not thrown.
     *
     * @throws IllegalArgumentException if topic/group is blank or pt is out of range
     */
    @Override
    public void ack(String topic, int pt, String group, long msgId) {
        if (StringUtils.isBlank(topic) || pt < 0 || pt >= PARTITIONS || StringUtils.isBlank(group)) {
            throw new IllegalArgumentException(
                    "topic and group must not be blank and pt must be in [0, " + PARTITIONS + ")");
        }
        String sql = """
                UPDATE renai_msg_consume SET consumed_id = ?
                    WHERE topic = ? AND pt = ? AND g = ? AND consumer = ?
                      AND consumed_id <= ? AND occupied_id >= ?""";
        // Parameterized logging: no String.format cost when the level is disabled.
        log.debug("[Ack] topic:{}, pt:{}, group:{}, msgId:{}", topic, pt, group, msgId);
        int acked = jdbcProxy.update(sql, msgId, topic, pt, group, renaiClient.instance.getInstanceId(), msgId, msgId);
        if (acked == 0) {
            log.error("[Ack fail] topic:{}, pt:{}, group:{}, msgId:{}", topic, pt, group, msgId);
        }
    }

    /**
     * Replays messages of a topic within a creation-time window, paging forward
     * from {@code curId} (exclusive) in id order, at most {@code count} rows.
     */
    @Override
    public List<RenaiMsg> replay(String topic, Date startTime, Date endTime, int count, long curId) {
        String sql = """
                SELECT * FROM renai_msg_produce
                 WHERE topic = ? AND id > ? AND create_time >= ? AND create_time <= ?
                 ORDER BY id LIMIT ?""";
        return jdbcProxy.query(sql, new BeanPropertyRowMapper<>(RenaiMsg.class),
                topic, curId, startTime, endTime, count);
    }

    /**
     * Replays messages of a topic within an id window [minId, maxId], paging
     * forward from {@code curId} (exclusive) in id order, at most {@code count} rows.
     */
    @Override
    public List<RenaiMsg> replay(String topic, long minId, long maxId, int count, long curId) {
        String sql = """
                SELECT * FROM renai_msg_produce
                 WHERE topic = ? AND id > ? AND id >= ? AND id <= ?
                 ORDER BY id LIMIT ?""";
        return jdbcProxy.query(sql, new BeanPropertyRowMapper<>(RenaiMsg.class),
                topic, curId, minId, maxId, count);
    }

    /**
     * Deletes all data of a topic (consume offsets, archive, calibration and
     * produce rows) in a single transaction.
     *
     * @return the number of produce rows removed
     * @throws IllegalArgumentException if the topic is blank
     */
    @Override
    public int delete(String topic) {
        if (StringUtils.isBlank(topic)) {
            throw new IllegalArgumentException("topic must not be blank");
        }
        return jdbcProxy.doInTransaction(() -> {
            jdbcProxy.update("DELETE FROM renai_msg_consume WHERE topic = ?", topic);
            jdbcProxy.update("DELETE FROM renai_msg_archive WHERE topic = ?", topic);
            jdbcProxy.update("DELETE FROM renai_msg_produce_cal WHERE topic = ?", topic);
            return jdbcProxy.update("DELETE FROM renai_msg_produce WHERE topic = ?", topic);
        });
    }

}
