package com.jzl.canal_consumer.etl.et.rocketmq.transform.impl;

import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.google.protobuf.InvalidProtocolBufferException;
import com.jzl.canal_consumer.bean.MergeSqlEntry;
import com.jzl.canal_consumer.bean.DdlEntry;
import com.jzl.canal_consumer.bean.DmlEntry;
import com.jzl.canal_consumer.bean.TableBatchMergeSqlEntry;
import com.jzl.canal_consumer.etl.et.rocketmq.transform.MqDataTransform;
import com.jzl.canal_consumer.util.MessageEntryUtil;
import com.jzl.util.CollectionUtil;
import com.jzl.util.LoggerUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.stream.Collectors;
import static com.alibaba.otter.canal.protocol.CanalEntry.EntryType.*;
import static com.alibaba.otter.canal.protocol.CanalEntry.EventType.*;

/**
 * Merge algorithm:
 * 1. Group records by table (full table name).
 * 2. Within a table, split vertically at each DDL statement.
 * 3. Within each split, group records by primary key (id).
 * 4. Run the merge algorithm over each primary-key group.
 * 5. After merging, group the per-key results within a table by DML operation type.
 *
 * @author ZhenWuWang
 */
@Component
public class MqMessageTransform implements MqDataTransform<Message, TableBatchMergeSqlEntry>
{

    private final MessageEntryUtil messageEntryUtil;

    @Autowired
    public MqMessageTransform(MessageEntryUtil messageEntryUtil)
    {
        this.messageEntryUtil = messageEntryUtil;
    }

    /**
     * Transforms a batch of canal MQ messages into per-table batch merge entries.
     *
     * @param schemaName target schema name stamped onto every entry header
     * @param messages   raw canal messages; nulls, ack-only messages (id == -1)
     *                   and messages without entries are skipped
     * @return one {@link TableBatchMergeSqlEntry} per table, in first-seen order;
     *         empty list when nothing remains after filtering
     */
    @Override
    public List<TableBatchMergeSqlEntry> transform(String schemaName, List<Message> messages)
    {
        messages = CollectionUtil.filterNull(messages);
        List<List<CanalEntry.Entry>> entryListList = messages
                .stream()
                // id == -1 marks an empty/ack message with no payload
                .filter(message -> message.getId() != -1)
                .map(Message::getEntries)
                .filter(entries -> entries != null && !entries.isEmpty())
                .map(entries -> resetSchemaName(schemaName, entries))
                .collect(Collectors.toList());
        if (entryListList.isEmpty())
        {
            return Collections.emptyList();
        }
        // LinkedHashMap keeps tables in the order they were first seen across messages.
        Map<String, TableBatchMergeSqlEntry> map = new LinkedHashMap<>();
        entryListList.forEach(entries ->
        {
            Map<String, TableBatchMergeSqlEntry> entryMap = transformCanalEntry(entries);
            if (entryMap != null && !entryMap.isEmpty())
            {
                entryMap.forEach((tableName, entry) ->
                {
                    TableBatchMergeSqlEntry tableBatchMergeSqlEntry = map.get(tableName);
                    if (tableBatchMergeSqlEntry == null)
                    {
                        map.put(tableName, entry);
                    } else
                    {
                        // Same table appeared in an earlier message: append instead of replacing.
                        tableBatchMergeSqlEntry.addMergeSqlEntries(entry.getMergeSqlEntries());
                        tableBatchMergeSqlEntry.addTransactionEntries(entry.getTransactionEntries());
                    }
                });
            }
        });
        return new ArrayList<>(map.values());
    }

    /**
     * Rewrites the schema name on every entry header in the list.
     */
    private List<CanalEntry.Entry> resetSchemaName(String schemaName, List<CanalEntry.Entry> entries)
    {
        return entries.stream().map(entry -> resetSchemaName(schemaName, entry)).collect(Collectors.toList());
    }

    /**
     * Rebuilds a single entry with its header's schema name replaced
     * (protobuf messages are immutable, so a builder round-trip is required).
     */
    private CanalEntry.Entry resetSchemaName(String schemaName, CanalEntry.Entry entry)
    {
        CanalEntry.Header header = entry.getHeader().toBuilder().setSchemaName(schemaName).build();
        return entry.toBuilder().setHeader(header).build();
    }

    /**
     * Groups one message's entries by table and builds a {@link TableBatchMergeSqlEntry}
     * per table, splitting the DML stream at every DDL boundary.
     *
     * @param entries entries of a single canal message, in binlog order
     * @return table name → merge entry; entries whose header is missing (key "-")
     *         and empty groups are skipped
     */
    private Map<String, TableBatchMergeSqlEntry> transformCanalEntry(List<CanalEntry.Entry> entries)
    {
        // Group by full table name
        Map<String, List<CanalEntry.Entry>> tableEntriesMap = groupByFullTableName(entries);
        Map<String, TableBatchMergeSqlEntry> map = new LinkedHashMap<>();
        for (Map.Entry<String, List<CanalEntry.Entry>> mapEntry : tableEntriesMap.entrySet())
        {
            String tableName = mapEntry.getKey();
            // "-" marks entries without a header; they cannot be attributed to a table.
            if ("-".equals(tableName))
            {
                continue;
            }
            List<CanalEntry.Entry> list = mapEntry.getValue();
            if (list == null || list.isEmpty())
            {
                continue;
            }

            TableBatchMergeSqlEntry tableBatchMergeSqlEntry = new TableBatchMergeSqlEntry(tableName);
            for (CanalEntry.Entry entry : list)
            {
                CanalEntry.EntryType entryType = entry.getEntryType();
                // The merge algorithm drops transaction sync; transaction statements
                // are generally only logged.
                if (entryType == TRANSACTIONBEGIN || entryType == TRANSACTIONEND)
                {
                    tableBatchMergeSqlEntry.addTransactionEntries(entry);
                    continue;
                }

                // When a DDL statement is present it must act as a batch boundary:
                // DML after the DDL must not be merged with DML before it — it can
                // only run once the DDL has been applied.
                if (entryType == ROWDATA)
                {
                    CanalEntry.RowChange rowChange;
                    try
                    {
                        rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
                    } catch (InvalidProtocolBufferException e)
                    {
                        // Skip the unparseable entry but keep processing the rest.
                        LoggerUtil.error("parse event has an error data:" + entry.toString(), e);
                        continue;
                    }
                    MergeSqlEntry mergeSqlEntry = tableBatchMergeSqlEntry.getLastMergeSqlEntry();
                    CanalEntry.EventType eventType = rowChange.getEventType();
                    // A non-DML event or an explicit DDL flag both start a new batch segment.
                    boolean ddlBoundary = !messageEntryUtil.isDML(eventType) || rowChange.getIsDdl();
                    if (mergeSqlEntry == null || ddlBoundary)
                    {
                        mergeSqlEntry = MergeSqlEntry.create();
                        tableBatchMergeSqlEntry.addMergeSqlEntry(mergeSqlEntry);
                    }
                    if (ddlBoundary)
                    {
                        // DDL: carry the raw SQL so it can be replayed verbatim.
                        DdlEntry ddlEntry = new DdlEntry();
                        ddlEntry.setEventType(eventType);
                        ddlEntry.setHeader(entry.getHeader());
                        ddlEntry.setSql(rowChange.getSql());
                        mergeSqlEntry.setDdlEntry(ddlEntry);
                    } else
                    {
                        // DML: group row data by primary key before merging.
                        mergeSqlEntry.addDmlEntries(groupByPks(entry.getHeader(), eventType, rowChange));
                    }
                }
            }
            map.put(tableName, tableBatchMergeSqlEntry);
        }
        return map;
    }

    /**
     * Groups entries by full table name, preserving the order in which
     * tables first appear (binlog order matters for the merge algorithm).
     */
    private Map<String, List<CanalEntry.Entry>> groupByFullTableName(List<CanalEntry.Entry> entries)
    {
        // LinkedHashMap: keep deterministic, binlog-ordered table iteration
        // (a plain HashMap would reorder tables arbitrarily).
        Map<String, List<CanalEntry.Entry>> tableEntriesMap = new LinkedHashMap<>();
        for (CanalEntry.Entry entry : entries)
        {
            String key = getFullTableName(entry);
            List<CanalEntry.Entry> list = tableEntriesMap.computeIfAbsent(key, k -> new ArrayList<>());
            list.add(entry);
        }
        return tableEntriesMap;
    }

    /**
     * Returns "schema.table" for an entry, or "-" when the header is missing.
     */
    private String getFullTableName(CanalEntry.Entry entry)
    {
        CanalEntry.Header header = entry.getHeader();
        if (header == null)
        {
            return "-";
        }
        return header.getSchemaName() + "." + header.getTableName();
    }

    /**
     * Groups the row changes of one DML event by primary key.
     * The key is the comma-joined values of all key columns (or a column
     * literally named "id"); rows of non-DML event types are dropped.
     *
     * @param header    entry header propagated onto every {@link DmlEntry}
     * @param eventType DML event type (INSERT/UPDATE/DELETE)
     * @param rowChange parsed row change holding the row data list
     * @return primary-key string → DML entries for that key, in row order
     */
    private Map<String, List<DmlEntry>> groupByPks(CanalEntry.Header header, CanalEntry.EventType eventType,
                                                   CanalEntry.RowChange rowChange)
    {
        // LinkedHashMap: merge/replay must follow the original row order per key.
        Map<String, List<DmlEntry>> dmlEntriesMap = new LinkedHashMap<>();
        for (CanalEntry.RowData rowData : rowChange.getRowDatasList())
        {
            // INSERT/UPDATE carry the new image; DELETE only has the old image.
            List<CanalEntry.Column> columns = Collections.emptyList();
            if (eventType == UPDATE || eventType == INSERT)
            {
                columns = rowData.getAfterColumnsList();
            } else if (eventType == DELETE)
            {
                columns = rowData.getBeforeColumnsList();
            }
            List<String> pkColumns = new ArrayList<>();
            for (CanalEntry.Column column : columns)
            {
                if (column.getIsKey() || "id".equals(column.getName()))
                {
                    pkColumns.add(column.getValue());
                }
            }
            String ik = String.join(",", pkColumns);
            DmlEntry dmlEntry = new DmlEntry();
            dmlEntry.setHeader(header);
            dmlEntry.setRowData(rowData);
            dmlEntry.setEventType(eventType);
            if (eventType == DELETE || eventType == INSERT || eventType == UPDATE)
            {
                List<DmlEntry> dmlEntries = dmlEntriesMap.computeIfAbsent(ik, k -> new ArrayList<>());
                dmlEntries.add(dmlEntry);
            }
        }
        return dmlEntriesMap;
    }
}
