package cn.buhler.sink;


import cn.buhler.jdbc.JDBCConfig;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.db.Db;
import cn.hutool.db.Entity;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;


/**
 * Flume sink that deserializes event bodies (hutool {@code ObjectUtil.unserialize}
 * into a {@code Map<String, Object>}) and batch-inserts them into a JDBC table
 * through a HikariCP connection pool.
 *
 * <p>Configuration keys: {@code table} (required default target table),
 * {@code columns} (comma-separated column whitelist, {@code "*"} = all, default),
 * {@code batchSize} (events per transaction, default 100), plus whatever
 * {@link JDBCConfig} reads to build the datasource.
 *
 * <p>A per-event header named {@code table} overrides the configured target table.
 */
public class JDBCSink extends AbstractSink implements Configurable {

    private static final Logger LOG = LoggerFactory.getLogger(JDBCSink.class);

    /** Context key: comma-separated column whitelist; "*" means sink all columns. */
    private static final String COLUMNS = "columns";
    /** Context key and event-header key naming the destination table. */
    private static final String TABLE = "table";
    /** Context key: number of events taken from the channel per transaction. */
    private static final String BATCH_SIZE = "batchSize";
    private static final int DEFAULT_BATCH_SIZE = 100;

    private int batchSize;

    /** Raw "columns" config value; split lazily into {@link #targetColumnArray}. */
    private String columns;

    /** Cached result of {@code columns.split(",")}; null until first use. */
    private String[] targetColumnArray;

    /** Default target table, used when an event carries no "table" header. */
    private String table;

    private boolean sinkAllColumns;

    private HikariDataSource dataSource;

    @Override
    public void configure(Context context) {
        // JDBCConfig validates the JDBC settings and builds the pool; any
        // ConfigurationException it throws propagates unchanged (the original
        // catch-and-rethrow was a no-op and has been removed).
        JDBCConfig jdbcConfig = new JDBCConfig(context);
        this.dataSource = jdbcConfig.getDataSource();

        table = context.getString(TABLE);
        if (StrUtil.isBlank(table)) {
            throw new ConfigurationException("table参数为空，要写入的表未设置");
        }

        columns = context.getString(COLUMNS, "*");
        if ("*".equals(columns)) {
            sinkAllColumns = true;
        }

        batchSize = context.getInteger(BATCH_SIZE, DEFAULT_BATCH_SIZE);
        if (batchSize <= 0) {
            // A non-positive batch size would make process() take nothing forever.
            throw new ConfigurationException("batchSize must be > 0, got: " + batchSize);
        }
    }

    /**
     * Takes up to {@code batchSize} events from the channel inside one channel
     * transaction, writes them to the database, and commits.
     *
     * @return {@link Status#READY} when events were processed,
     *         {@link Status#BACKOFF} when the channel was empty or an error occurred
     * @throws EventDeliveryException never directly; {@link FlumeException} is
     *         rethrown for {@link Error}s so fatal conditions are not swallowed
     */
    @Override
    public Status process() throws EventDeliveryException {
        LOG.debug("Executing JDBCSink.process()...");
        Status status = Status.READY;
        Channel channel = getChannel();
        Transaction txn = channel.getTransaction();
        List<Event> eventList = new ArrayList<>(batchSize);
        try {
            txn.begin();
            for (int i = 0; i < batchSize; i++) {
                Event event = channel.take();
                if (event == null) {
                    // Channel drained; process whatever we have so far.
                    break;
                }
                eventList.add(event);
            }

            if (eventList.isEmpty()) {
                // Nothing to do; ask the sink runner to back off before retrying.
                status = Status.BACKOFF;
            } else {
                processEvents(eventList);
            }
            txn.commit();
        } catch (Throwable throwable) {
            LOG.error("Exception during process", throwable);
            txn.rollback();
            status = Status.BACKOFF;
            // Errors (OutOfMemoryError etc.) must propagate; only Exceptions
            // are absorbed into a BACKOFF status.
            if (throwable instanceof Error) {
                throw new FlumeException(throwable);
            }
        } finally {
            txn.close();
        }
        return status;
    }

    /**
     * Converts each event body into a hutool {@link Entity} and batch-inserts
     * the whole list. The destination table is the event's "table" header when
     * present, otherwise the configured default table.
     */
    private void processEvents(List<Event> eventList) {
        List<Entity> entityList = new ArrayList<>(eventList.size());
        for (Event event : eventList) {
            // Per-event override of the target table; fall back to the configured
            // table so we never hand a null table name to hutool (the original
            // used the raw header value, which could be null).
            String targetTable = event.getHeaders().get(TABLE);
            if (StrUtil.isBlank(targetTable)) {
                targetTable = this.table;
            }

            Map<String, Object> recordInMap = ObjectUtil.unserialize(event.getBody());
            Entity entity = new Entity();
            if (sinkAllColumns) {
                entity.putAll(recordInMap);
            } else {
                if (targetColumnArray == null) {
                    // Lazily split and cache the configured column whitelist.
                    targetColumnArray = columns.split(",");
                }
                for (String column : targetColumnArray) {
                    if (recordInMap.containsKey(column)) {
                        entity.set(column, recordInMap.get(column));
                    }
                }
            }
            entity.setTableName(targetTable);
            entityList.add(entity);
        }

        if (!entityList.isEmpty()) {
            try {
                int[] rows = Db.use(dataSource).insert(entityList);
                if (Arrays.stream(rows).sum() < rows.length) {
                    LOG.info("向表: {}中插入数据失败，可能存在重复数据", table);
                } else {
                    LOG.info("向表: {}中插入数据成功", table);
                }
            } catch (Exception e) {
                // Fixed SLF4J usage: with the old trailing "{}" placeholder the
                // exception filled the placeholder and the stack trace was lost.
                LOG.error("向表: {}中插入数据失败", table, e);
                // NOTE(review): swallowing here means the channel transaction
                // still commits and these events are dropped on DB failure.
                // Kept as deliberate best-effort — confirm whether a rethrow
                // (triggering rollback/redelivery) is the intended semantics.
            }
        }
    }

    @Override
    public synchronized void start() {
        // The datasource is built in configure(); nothing else to initialise.
        // super.start() flips the AbstractSink lifecycle state to START
        // (the original omitted it, leaving the sink's reported state stale).
        super.start();
        LOG.info("JDBC Sink ActiveRecordContext started ");
    }

    @Override
    public synchronized void stop() {
        try {
            if (this.dataSource != null) {
                this.dataSource.close();
            }
            LOG.info("buhler smart jdbc source connection pool stop success");
        } catch (Exception e) {
            throw new FlumeException("Failed to close SQL Server connection", e);
        } finally {
            // Always update the lifecycle state, even if closing the pool failed.
            super.stop();
        }
    }
}