package com.alibaba.datax.plugin.writer.hiverdbmswriter;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.spi.Writer;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.rdbms.util.DBUtil;
import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode;
import com.alibaba.datax.plugin.rdbms.util.DataBaseType;
import com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter;
import com.alibaba.datax.plugin.rdbms.writer.Constant;
import com.mchange.v2.c3p0.ComboPooledDataSource;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Description:
 * Author: 无事人
 * Date: 2024/6/12
 */
public class HiveRdbmsWriter extends Writer {

    private static final DataBaseType DATABASE_TYPE = DataBaseType.Hive;
    // NOTE(review): logger is keyed on the nested Job class, not the outer class.
    // Kept as-is so any log configuration keyed on that logger name keeps working.
    private static final Logger LOG = LoggerFactory.getLogger(HiveRdbmsWriter.Job.class);

    /**
     * Job-level lifecycle. Delegates configuration checking, preparation,
     * splitting and post-processing to {@link CommonRdbmsWriter.Job}.
     */
    public static class Job extends Writer.Job {

        private Configuration originalConfig = null;
        private CommonRdbmsWriter.Job commonRdbmsWriterJob;

        @Override
        public void preCheck() {
            this.init();
            this.commonRdbmsWriterJob.writerPreCheck(this.originalConfig, DATABASE_TYPE);
        }

        @Override
        public void init() {
            this.originalConfig = this.getPluginJobConf();
            LOG.info("hive writer params:{}", originalConfig.toJSON());
            this.commonRdbmsWriterJob = new CommonRdbmsWriter.Job(DATABASE_TYPE);
            this.commonRdbmsWriterJob.init(this.originalConfig);
        }

        @Override
        public void prepare() {
            this.commonRdbmsWriterJob.prepare(this.originalConfig);
        }

        @Override
        public List<Configuration> split(int mandatoryNumber) {
            return this.commonRdbmsWriterJob.split(this.originalConfig, mandatoryNumber);
        }

        @Override
        public void post() {
            this.commonRdbmsWriterJob.post(this.originalConfig);
        }

        @Override
        public void destroy() {
            // FIX: previously a no-op, leaving the delegate job's resources
            // un-released; mirror the other lifecycle methods and delegate.
            if (this.commonRdbmsWriterJob != null) {
                this.commonRdbmsWriterJob.destroy(this.originalConfig);
            }
        }
    }

    /**
     * Task-level writer. Buffers incoming records and flushes them to Hive as
     * multi-row {@code INSERT INTO ... VALUES (?,...),(?,...)} statements over
     * a single HiveServer2 JDBC connection.
     */
    public static class Task extends Writer.Task {

        private Configuration writerSliceConfig;

        private Connection connection;
        // Expected column count per record; set in prepare() from the "column" config.
        private int columnNumber = 0;
        // Flush thresholds: record count and accumulated record memory size.
        private int batchSize;
        private int batchByteSize;
        // "INSERT INTO TABLE t [PARTITION (...)] (cols) VALUES " prefix built in prepare().
        private StringBuilder writeRecordSql;

        @Override
        public void init() {
            this.writerSliceConfig = super.getPluginJobConf();
            this.batchSize = writerSliceConfig.getInt(Key.BATCH_SIZE, Constant.DEFAULT_BATCH_SIZE);
            this.batchByteSize = writerSliceConfig.getInt(Key.BATCH_BYTE_SIZE, Constant.DEFAULT_BATCH_BYTE_SIZE);
            String username = writerSliceConfig.getString(Key.USERNAME);
            String password = writerSliceConfig.getString(Key.PASSWORD);
            String jdbcUrl = writerSliceConfig.getString(Key.JDBC_URL);
            this.connection = HiveServer2ConnectUtil.getConnection(username, password, jdbcUrl);
        }

        @Override
        public void prepare() {
            String table = this.writerSliceConfig.getString(Key.TABLE);
            String partition = this.writerSliceConfig.getString(Key.PARTITION);
            List<String> columns = this.writerSliceConfig.getList(Key.COLUMN, String.class);
            this.columnNumber = columns.size();
            // Build the shared INSERT prefix once; doBatchInsert appends the
            // per-batch "(?,...),(?,...)" placeholder list to it.
            StringBuilder sql = new StringBuilder("INSERT INTO TABLE ").append(table);
            if (StringUtils.isNotBlank(partition)) {
                sql.append(" PARTITION (").append(partition).append(")");
            }
            sql.append(" (").append(StringUtils.join(columns, ",")).append(") VALUES ");
            this.writeRecordSql = sql;
        }

        @Override
        public void destroy() {
            // Defensive close: startWrite() closes the connection in its
            // finally block, but if the task fails before startWrite runs
            // (e.g. prepare() throws) the connection would otherwise leak.
            // DBUtil.closeDBResources is null-safe.
            DBUtil.closeDBResources(null, this.connection);
        }

        /**
         * Pulls records from the reader, validates the column count, and
         * flushes a batch whenever either the record-count or byte-size
         * threshold is reached. The connection is always closed on exit.
         */
        @Override
        public void startWrite(RecordReceiver lineReceiver) {
            LOG.info("begin do write...");
            List<Record> writeBuffer = new ArrayList<>(this.batchSize);
            int bufferBytes = 0;
            try {
                Record record;
                while ((record = lineReceiver.getFromReader()) != null) {
                    if (record.getColumnNumber() != this.columnNumber) {
                        // 源头读取字段列数与目的表字段写入列数不相等，直接报错
                        throw DataXException
                                .asDataXException(
                                        DBUtilErrorCode.CONF_ERROR,
                                        String.format(
                                                "列配置信息有错误. 因为您配置的任务中，源头读取字段数:%s 与 目的表要写入的字段数:%s 不相等. 请检查您的配置并作出修改.",
                                                record.getColumnNumber(),
                                                this.columnNumber));
                    }
                    writeBuffer.add(record);
                    bufferBytes += record.getMemorySize();

                    if (writeBuffer.size() >= batchSize || bufferBytes >= batchByteSize) {
                        doBatchInsert(writeBuffer);
                        writeBuffer.clear();
                        bufferBytes = 0;
                    }
                }
                // Flush the trailing partial batch.
                if (!writeBuffer.isEmpty()) {
                    doBatchInsert(writeBuffer);
                    writeBuffer.clear();
                }
            } catch (Exception e) {
                throw DataXException.asDataXException(
                        DBUtilErrorCode.WRITE_DATA_ERROR, e);
            } finally {
                writeBuffer.clear();
                DBUtil.closeDBResources(null, connection);
            }
        }

        /**
         * Writes one buffer of records as a single multi-row
         * {@code INSERT ... VALUES (?,...),(?,...)} prepared statement.
         *
         * @param buffer records to write; each has {@code columnNumber} columns
         *               (validated by {@link #startWrite})
         * @throws DataXException if the insert fails for any reason
         */
        protected void doBatchInsert(List<Record> buffer) {
            PreparedStatement preparedStatement = null;
            List<String> rowPlaceholders = new ArrayList<>(buffer.size());
            List<String> parameters = new ArrayList<>(buffer.size() * Math.max(this.columnNumber, 1));
            try {
                for (Record record : buffer) {
                    int cols = record.getColumnNumber();
                    for (int i = 0; i < cols; i++) {
                        parameters.add(record.getColumn(i).asString());
                    }
                    rowPlaceholders.add("(" + StringUtils.repeat("?", ",", cols) + ")");
                }
                String sql = this.writeRecordSql + StringUtils.join(rowPlaceholders, ",");
                // The full statement can be very large; log at debug only.
                LOG.debug("插入语句：{}", sql);
                preparedStatement = this.connection.prepareStatement(sql);
                for (int i = 0; i < parameters.size(); i++) {
                    preparedStatement.setObject(i + 1, parameters.get(i));
                }
                preparedStatement.execute();
            } catch (SQLException e) {
                // FIX: previously only logged a warning and returned, silently
                // dropping the whole batch. Surface the failure so the job fails
                // instead of losing data.
                LOG.warn("数据写入失败. 因为:" + e.getMessage());
                throw DataXException.asDataXException(
                        DBUtilErrorCode.WRITE_DATA_ERROR, e);
            } catch (Exception e) {
                throw DataXException.asDataXException(
                        DBUtilErrorCode.WRITE_DATA_ERROR, e);
            } finally {
                DBUtil.closeDBResources(preparedStatement, null);
            }
        }
    }
}
