package cn.net.yugu.doraemon.datahub.worker;

import cn.net.yugu.doraemon.datahub.model.SendResult;
import com.aliyun.datahub.client.DatahubClient;
import com.aliyun.datahub.client.exception.DatahubClientException;
import com.aliyun.datahub.client.model.*;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.checkerframework.checker.units.qual.A;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ReflectionUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;

import static cn.net.yugu.doraemon.datahub.constant.CommonConstant.DATE_TIME_FORMAT;

/**
 * Buffers records and flushes them to an Aliyun DataHub topic in batches.
 *
 * <p>Records accumulate in a lock-free deque. A flush is triggered either when
 * the buffered count reaches {@code minBatchSize} or periodically every
 * {@code maxBatchSendLingerMs}. At most {@code maxBatchSize} records are
 * buffered; excess records are truncated/dropped with an error log. Only one
 * flush runs at a time ({@code tryLock} is the authoritative guard,
 * {@code isSending} a cheap fast-path hint).
 *
 * @author wanghongli
 * @date 2024/5/30 20:46
 * @description RecordSender
 **/
@Slf4j
public class SendRecordWorker {
    /** Topic-schema cache keyed by "project#topic" so getTopic is called once per topic. */
    private static final ConcurrentHashMap<String, RecordSchema> RECORD_SCHEMA_CONCURRENT_MAP = new ConcurrentHashMap<>(256);
    /** Buffered, not-yet-sent records. */
    private final ConcurrentLinkedDeque<Object> unSendList;
    /** Fast-path "a flush is already running" hint; {@link #reentrantLock} is the real mutual exclusion. */
    private volatile boolean isSending;
    /** Approximate element count of {@link #unSendList} (deque size() is O(n), so tracked separately). */
    private final AtomicInteger unSendRecordCount;
    private final ScheduledExecutorService scheduledExecutorService;
    private final ExecutorService executorService;
    private final int maxBatchSize;
    private final int minBatchSize;
    private final int maxSendSizePerRequest;
    private final ReentrantLock reentrantLock;
    private final DatahubClient datahubClient;
    private final String projectName;
    private final String topicName;
    /** Once true, no further flushes run and new sends are suppressed. */
    private volatile boolean isStop;

    /**
     * @param datahubClient         client used to push records
     * @param projectName           DataHub project name
     * @param topicName             DataHub topic name
     * @param minBatchSize          buffered count that triggers an immediate flush
     * @param maxBatchSize          maximum buffered records; beyond this, new records are dropped
     * @param maxSendSizePerRequest maximum records per putRecords request (floored at 100)
     * @param maxBatchSendLingerMs  period of the timer-driven flush, in milliseconds
     */
    public SendRecordWorker(DatahubClient datahubClient,
                            String projectName,
                            String topicName,
                            int minBatchSize,
                            int maxBatchSize,
                            int maxSendSizePerRequest,
                            long maxBatchSendLingerMs) {
        this.unSendRecordCount = new AtomicInteger(0);
        this.datahubClient = datahubClient;
        this.projectName = projectName;
        this.topicName = topicName;
        this.reentrantLock = new ReentrantLock(false);
        this.minBatchSize = minBatchSize;
        this.maxBatchSize = maxBatchSize;
        this.maxSendSizePerRequest = Math.max(maxSendSizePerRequest, 100);
        this.unSendList = new ConcurrentLinkedDeque<>();
        this.scheduledExecutorService = Executors.newScheduledThreadPool(1);
        // Timer-driven flush guarantees records are sent even when traffic is too low
        // to ever reach minBatchSize.
        this.scheduledExecutorService.scheduleAtFixedRate(this::doSendRecord, maxBatchSendLingerMs, maxBatchSendLingerMs, TimeUnit.MILLISECONDS);
        this.executorService = Executors.newFixedThreadPool(2);
    }

    /** Buffers a single record; flushes if the min-batch threshold is reached. */
    public void addRecord(Object record) {
        addRecords(Lists.newArrayList(record));
    }

    /**
     * Buffers records for asynchronous sending. Records beyond the remaining
     * {@code maxBatchSize} capacity are truncated; when the buffer is already
     * full the whole batch is dropped with an error log.
     */
    public void addRecords(List<Object> records) {
        if (CollectionUtils.isEmpty(records)) {
            return;
        }
        // >= (not >): at exactly maxBatchSize there is no remaining capacity either.
        if (this.unSendRecordCount.get() >= maxBatchSize) {
            log.error("cache record exceed max size, currentSize:{}", this.unSendRecordCount);
            return;
        }
        int retainingCount = maxBatchSize - this.unSendRecordCount.get();
        List<Object> subRecords = records.subList(0, Math.min(retainingCount, records.size()));
        this.unSendList.addAll(subRecords);
        int unSendSize = this.unSendRecordCount.getAndAdd(subRecords.size());
        if (unSendSize >= minBatchSize) {
            this.doSendRecord();
        }
    }

    /** Stops the worker; no further flushes are scheduled or executed. */
    public void stop() {
        this.isStop = true;
        this.scheduledExecutorService.shutdownNow();
        // The original leaked the send pool's non-daemon threads. shutdown() (not
        // shutdownNow) lets an in-flight flush finish instead of interrupting it.
        this.executorService.shutdown();
    }

    /**
     * Drains the buffer on a worker thread and pushes it in chunks of
     * {@code maxSendSizePerRequest}. Re-entrant calls while a flush is running
     * are no-ops; the running flush drains whatever is buffered.
     */
    private void doSendRecord() {
        if (isStop || isSending || this.unSendRecordCount.get() == 0) {
            return;
        }
        this.executorService.execute(() -> {
            if (isStop || isSending) {
                return;
            }
            if (!reentrantLock.tryLock()) {
                // Another flush holds the lock; it will pick up our records.
                return;
            }
            try {
                this.isSending = true;
                // Snapshot the count so records added concurrently stay for the
                // next flush and the counter stays consistent with the deque.
                int maxCount = this.unSendRecordCount.get();
                List<Object> objects = new ArrayList<>(maxCount);
                while (maxCount > 0) {
                    Object record = this.unSendList.poll();
                    if (record == null) {
                        break;
                    }
                    objects.add(record);
                    maxCount--;
                }
                // Decrement exactly by what was polled BEFORE any early exit. The
                // original returned mid-drain on stop without adjusting the counter,
                // leaving it permanently inflated and blocking all future addRecords.
                this.unSendRecordCount.getAndAdd(-objects.size());
                if (isStop) {
                    log.warn("worker stopped, dropping {} buffered records", objects.size());
                    return;
                }
                Lists.partition(objects, this.maxSendSizePerRequest).forEach(list -> {
                    if (!isStop) {
                        sendDataHubRecords(datahubClient, projectName, topicName, list);
                    }
                });
            } finally {
                this.isSending = false;
                reentrantLock.unlock();
            }
        });
    }

    /**
     * Converts {@code dataList} beans to tuple records (field values resolved via
     * getters derived from the schema field names, snake_case -> getCamelCase)
     * and pushes them to DataHub synchronously.
     *
     * @return a {@link SendResult} carrying total/failed counts and an error message, if any
     */
    public static SendResult sendDataHubRecords(DatahubClient datahubClient, String projectName, String topicName, List<Object> dataList) {
        if (CollectionUtils.isEmpty(dataList)) {
            return SendResult.build(0, 0, "send record is empty");
        }
        if (datahubClient == null) {
            log.error("push failed for datahubClient is null, projectNm:{} topicNm:{} ", projectName, topicName);
            return SendResult.build(dataList.size(), 0, "send error happened for not initializing datahub client");
        }
        RecordSchema recordSchema = getRecordSchemaBy(datahubClient, projectName, topicName);
        if (CollectionUtils.isEmpty(recordSchema.getFields())) {
            log.error("push failed for fields is empty, projectNm:{} topicNm:{}", projectName, topicName);
            return SendResult.build(dataList.size(), 0, "send error happened for not found related message topic");
        }

        long startTime = System.currentTimeMillis();
        List<Field> fields = recordSchema.getFields();
        List<RecordEntry> recordEntries = new ArrayList<>(dataList.size());
        for (Object object : dataList) {
            RecordEntry recordEntry = new RecordEntry();
            TupleRecordData data = new TupleRecordData(recordSchema);
            for (Field field : fields) {
                String name = field.getName();
                Object invoke = null;
                try {
                    Method method = findGetter(object.getClass(), name);
                    if (method == null) {
                        // Bean has no getter for this schema field; leave it unset.
                        continue;
                    }
                    invoke = method.invoke(object);
                } catch (Exception e) {
                    // Best-effort per field: log and push null rather than failing the batch.
                    log.error("data push error happened projectNm:{} topicNm:{} filedName:{} objectClass:{} message:{}", projectName, topicName, name, object.getClass().getSimpleName(), ExceptionUtils.getStackTrace(e));
                }
                if (log.isDebugEnabled()) {
                    log.debug("name:{}, value:{}", name, invoke);
                }
                data.setField(name, convertValue(field, invoke));
            }
            recordEntry.setRecordData(data);
            recordEntries.add(recordEntry);
        }
        try {
            PutRecordsResult result = datahubClient.putRecords(projectName, topicName, recordEntries);
            int failedCount = result.getFailedRecordCount();
            long endTime = System.currentTimeMillis();
            log.info("push data success, projectNm:{} topicNm:{} totalCount:{}, failedCount:{},cost(ms){}", projectName, topicName, dataList.size(), failedCount, endTime - startTime);
            return SendResult.build(recordEntries.size(), failedCount, "");
        } catch (DatahubClientException e) {
            log.error("push data failed for projectNm:{} topicNm:{} requestId：{}，message {}", projectName, topicName, e.getRequestId(), e.getErrorMessage());
            return SendResult.build(recordEntries.size(), recordEntries.size(), "send error happened for " + e.getErrorMessage());
        }
    }

    /**
     * Maps a {@link Date} value to the representation the field's DataHub type
     * expects (epoch millis for TIMESTAMP/BIGINT, formatted text for STRING);
     * every other value passes through unchanged.
     */
    private static Object convertValue(Field field, Object value) {
        if (!(value instanceof Date)) {
            return value;
        }
        Date date = (Date) value;
        FieldType type = field.getType();
        if (FieldType.TIMESTAMP.equals(type) || FieldType.BIGINT.equals(type)) {
            return date.getTime();
        }
        if (FieldType.STRING.equals(type)) {
            return DateFormatUtils.format(date, DATE_TIME_FORMAT);
        }
        return value;
    }

    /**
     * Resolves the getter for a (possibly snake_case) schema field name,
     * e.g. "user_id" -> getUserId. Returns {@code null} when no such method exists.
     */
    private static Method findGetter(Class<?> clazz, String name) {
        StringBuilder methodName = new StringBuilder("get");
        for (String part : name.split("_")) {
            if (part.isEmpty()) {
                // Tolerate leading/trailing/double underscores; the original threw
                // StringIndexOutOfBoundsException on an empty segment.
                continue;
            }
            methodName.append(Character.toUpperCase(part.charAt(0))).append(part.substring(1));
        }
        return ReflectionUtils.findMethod(clazz, methodName.toString());
    }

    /**
     * Returns the (cached) record schema for the topic. Falls back to an empty
     * schema when the topic cannot be resolved, so callers can safely test
     * {@code getFields()}.
     */
    private static RecordSchema getRecordSchemaBy(DatahubClient datahubClient, String projectName, String topicName) {
        // '#' separator prevents key collisions ("ab"+"c" vs "a"+"bc");
        // computeIfAbsent replaces the original racy check-then-put on an interned key.
        String key = projectName + "#" + topicName;
        return RECORD_SCHEMA_CONCURRENT_MAP.computeIfAbsent(key, k -> {
            GetTopicResult gr = datahubClient.getTopic(projectName, topicName);
            RecordSchema rs = gr == null ? null : gr.getRecordSchema();
            return rs == null ? new RecordSchema() : rs;
        });
    }
}
