package cn.buhler.sink;

import cn.buhler.jdbc.JDBCConfig;
import cn.hutool.core.util.*;
import cn.hutool.db.*;
import cn.hutool.json.JSONUtil;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.*;

import java.util.*;

public class RichJDBCSink extends AbstractSink implements Configurable {

    /** Event-header key that names the destination table for each record. */
    private static final String TARGET_TABLE = "targetTable";
    private static final Logger LOG = LoggerFactory.getLogger(RichJDBCSink.class);

    /** Maximum number of events drained from the channel per process() call. */
    private int batchSize;

    private HikariDataSource dataSource;

    /**
     * Reads the JDBC connection settings and batch size from the Flume context.
     * JDBCConfig performs its own validation and throws ConfigurationException
     * itself, so there is no need to catch-and-rethrow it here.
     *
     * @param context the Flume sink configuration
     */
    @Override
    public void configure(Context context) {
        JDBCConfig jdbcConfig = new JDBCConfig(context);
        this.dataSource = jdbcConfig.getDataSource();
        batchSize = context.getInteger("batchSize", 100);
    }

    /**
     * Drains up to {@code batchSize} events from the channel inside a Flume
     * transaction, groups them by target table and batch-inserts them.
     * The channel transaction is committed only after a successful insert, so a
     * database failure rolls back and leaves the events in the channel for
     * redelivery.
     *
     * @return {@code READY} when a batch was processed, {@code BACKOFF} when the
     *         channel was empty or an error occurred
     * @throws EventDeliveryException only when a fatal {@link Error} occurs
     */
    @Override
    public Status process() throws EventDeliveryException {
        LOG.debug("Executing JDBCSink.process()...");
        Status status = Status.READY;
        Channel channel = getChannel();
        Transaction txn = channel.getTransaction();
        List<Event> eventList = new ArrayList<>(batchSize);

        try {
            txn.begin();

            // Drain up to batchSize events; stop early when the channel is empty.
            for (int i = 0; i < batchSize; i++) {
                Event event = channel.take();
                if (event == null) {
                    break;
                }
                eventList.add(event);
            }

            if (eventList.isEmpty()) {
                status = Status.BACKOFF;
            } else {
                processEvents(eventList);
            }

            txn.commit();
        } catch (Throwable throwable) {
            LOG.error("Exception during process", throwable);
            txn.rollback();
            status = Status.BACKOFF;

            // Errors (OOM etc.) must not be swallowed; propagate to the runner.
            if (throwable instanceof Error) {
                throw new EventDeliveryException(throwable);
            }
        } finally {
            txn.close();
        }

        return status;
    }

    /**
     * Groups events by their {@code targetTable} header and batch-inserts each
     * group. Events without a target table are logged and skipped. Insert
     * failures are rethrown so the surrounding Flume transaction in
     * {@link #process()} rolls back and the batch is redelivered instead of
     * being silently dropped.
     *
     * @param eventList the non-empty batch of events taken from the channel
     */
    private void processEvents(List<Event> eventList) {
        Map<String, List<Entity>> tableEntityMap = new HashMap<>();

        for (Event event : eventList) {
            Map<String, String> headers = event.getHeaders();
            String table = headers.get(TARGET_TABLE);
            if (StrUtil.isEmpty(table)) {
                LOG.info("收到{},但是没有指定目标表，忽略该数据", JSONUtil.toJsonStr(event));
                // BUG FIX: previously fell through and built an Entity with a
                // null/empty table name; actually skip it as the log promises.
                continue;
            }

            // NOTE(review): the body is Java-native-serialized. This is only
            // safe if all upstream sources are trusted — confirm, or switch to
            // a structured format (JSON) end to end.
            Map<String, Object> recordInMap = ObjectUtil.unserialize(event.getBody());
            Entity entity = new Entity();
            entity.putAll(recordInMap);
            entity.setTableName(table);
            tableEntityMap.computeIfAbsent(table, k -> new ArrayList<>()).add(entity);
        }

        tableEntityMap.forEach((table, entityList) -> {
            try {
                int[] rows = Db.use(dataSource).insert(entityList);
                for (int row : rows) {
                    if (row > 0) {
                        // debug, not info: one line per inserted row is too noisy
                        LOG.debug("Insert success for table: {}", table);
                    } else {
                        LOG.debug("Possible duplicate data for table: {}", table);
                    }
                }
            } catch (Exception e) {
                // BUG FIX: swallowing this exception let process() commit the
                // channel transaction despite the DB failure, silently dropping
                // the batch. Rethrow so process() rolls back and backs off.
                throw new RuntimeException("Exception during insertion for table: " + table, e);
            }
        });
    }

    /** Starts the sink; the connection pool was already built in configure(). */
    @Override
    public synchronized void start() {
        super.start();
        LOG.info("------------Buhler SmarT FLume RichJDBC Sink 启动成功------------");
    }

    /** Stops the sink and releases the Hikari connection pool. */
    @Override
    public synchronized void stop() {
        if (dataSource != null) {
            dataSource.close();
        }

        super.stop();
        LOG.info("------------Buhler SmarT FLume RichJDBC Sink 准备关闭------------");
    }
}
