package com.thinkingdata.datax.plugin.writer;


import com.alibaba.datax.common.element.*;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.plugin.TaskPluginCollector;
import com.alibaba.datax.common.spi.Writer;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.common.util.RetryUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.github.rholder.retry.RetryException;
import com.github.rholder.retry.Retryer;

import com.thinkingdata.datax.plugin.writer.check.JobConfigCheck;
import com.thinkingdata.datax.plugin.writer.dto.EventDo;
import com.thinkingdata.datax.plugin.writer.dto.TaDataDo;
import com.thinkingdata.datax.plugin.writer.dto.UserDo;
import com.thinkingdata.datax.plugin.writer.exption.TaDataWriterErrorCode;
import com.thinkingdata.datax.plugin.writer.meta.TaDataWriterColumn;
import com.thinkingdata.datax.plugin.writer.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.AbstractHttpEntity;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.*;

/**
 * @author: Felix.Wang
 * @email: Felix@thinkingdata.cn
 * @date: 2019/8/12-14:43
 * @module: DATAX插件
 * @describe: 向TA集群写入数据。
 */
public class TaDataxWriter extends Writer {

    /**
     * Job side of the TA writer.
     * <p>
     * Validates the job-level configuration, generates a unique jobId for
     * tracing problem data, and fans the configuration out to the requested
     * number of tasks.
     *
     * @author: Felix.Wang
     * @email: Felix@thinkingdata.cn
     */
    public static class Job extends Writer.Job {

        private static final Logger logger = LoggerFactory.getLogger(Job.class);

        private Configuration writerSliceConfig = null;

        /** TA project appid the data is pushed to. */
        private String appid;

        /** Number of sender threads each task will spawn. */
        private int dataProcessorThreadNum;

        /** Data type being written: "user" or "event". */
        private String type;

        /** Unique id generated for this run; stamped onto every record for tracing. */
        private String jobId;

        /** TA receiver endpoint the data is POSTed to. */
        private String pushUrl;

        /** Maximum number of send retries per batch (validated here, used by Task). */
        private int retryNum;

        /**
         * Reads and validates the mandatory job-level settings and generates a
         * fresh jobId so problem data can be traced back to this run.
         */
        @Override
        public void init() {
            this.writerSliceConfig = this.getPluginJobConf();
            writerSliceConfig.set(Key.JOBID, StringUtil.generateTimeUuid());
            this.type = this.writerSliceConfig.getNecessaryValue(Key.TYPE, TaDataWriterErrorCode.TYPE_NULL);

            // receiver endpoint (mandatory)
            this.pushUrl = this.writerSliceConfig.getNecessaryValue(Key.PUSH_URL, TaDataWriterErrorCode.PUSH_URL);

            // retry count: the original validated presence but discarded the value,
            // leaving the retryNum field forever unassigned — keep the parsed value
            this.writerSliceConfig.getNecessaryValue(Key.RETRY_NUM, TaDataWriterErrorCode.RETRY_NUM);
            this.retryNum = this.writerSliceConfig.getInt(Key.RETRY_NUM, 5);

            this.appid = this.writerSliceConfig.getNecessaryValue(Key.APPID, TaDataWriterErrorCode.APPID_NULL);
            this.dataProcessorThreadNum = this.writerSliceConfig.getInt(Key.THREAD, 3);
            this.jobId = this.writerSliceConfig.getString(Key.JOBID);
        }

        /**
         * Global preparation: runs the configuration checks and announces the
         * jobId used to trace this run's data.
         */
        @Override
        public void prepare() {
            logger.info("==  job 参数校验 开始==");
            JobConfigCheck jobConfigCheck = new JobConfigCheck();
            jobConfigCheck.setAppid(this.appid);
            jobConfigCheck.checkMeta(writerSliceConfig);
            logger.info("==  job 参数校验 结束==");
            logger.info("====================================");
            logger.info("jobid:{},如果本次job数据有问题，可根据jobid进行处理。", jobId);
            logger.info("====================================");
        }

        /**
         * Global post-work: repeats the jobId so it is easy to find at the end
         * of the log as well.
         */
        @Override
        public void post() {
            logger.info("====================================");
            logger.info("jobid:{},如果本次job数据有问题，可根据jobid进行处理。", jobId);
            logger.info("====================================");
        }

        /**
         * Job-level cleanup. Nothing to release at the job level.
         */
        @Override
        public void destroy() {

        }

        /**
         * Splits the job into {@code mandatoryNumber} task configurations.
         * Each task receives its own clone so that later per-task mutation
         * cannot bleed across tasks through a shared Configuration instance.
         */
        @Override
        public List<Configuration> split(int mandatoryNumber) {
            logger.info("begin do split...");
            List<Configuration> writerSplitConfigs = new ArrayList<Configuration>();
            for (int i = 0; i < mandatoryNumber; i++) {
                writerSplitConfigs.add(this.writerSliceConfig.clone());
            }
            logger.info("end do split.");
            return writerSplitConfigs;
        }

    }

    /**
     * Task side of the TA writer: consumes records from DataX, converts them
     * to TA event/user objects, and pushes them in batches to the TA receiver
     * over HTTP using a configurable number of sender threads.
     *
     * @author: Felix.Wang
     * @email: Felix@thinkingdata.cn
     */
    public static class Task extends Writer.Task {

        private static final Logger LOG = LoggerFactory.getLogger(Task.class);

        private Configuration writerSliceConfig;

        /** Data type being written: "user" or "event". */
        private String type;

        /** TA project appid. */
        private String appid;

        /** Number of sender threads (one bounded queue per thread). */
        private int dataProcessorThreadNum;

        /** HTTP body compression: none / gzip / lzo / lz4 / snappy. */
        private String compress;

        /** TA receiver endpoint. */
        private String pushUrl;

        /** Maximum number of send retries per batch. */
        private int retryNum;

        /** Retryer used when enqueueing records (the target queue may be full). */
        private Retryer retryer = RetryerUtil.initRetryer();

        /** Per-sender-thread queues; a record is hashed onto one queue by its id. */
        private Map<Integer, ArrayBlockingQueue<TaDataDo>> taDataQueueMap = new HashMap<>();
        private final int DATA_QUEUE_SIZE = 1000;
        // volatile: written by the reader thread, read by every sender thread —
        // without it the shutdown signal may never become visible to the workers
        private volatile boolean needShutDown = false;
        private CloseableHttpClient httpClient = null;

        /** Column configs keyed by source column index; the null key holds constant columns. */
        private Map<Integer, ArrayList<TaDataWriterColumn>> indexColumnMap = new HashMap<>();

        protected TaskPluginCollector taskPluginCollector;
        private String jobId;

        /** Records accumulated per HTTP push. */
        private int batchSize = 1000;

        /**
         * Reads task settings from the configuration, builds the HTTP client
         * (supports both http and https) and parses the column mapping.
         */
        @Override
        public void init() {
            // adapter handles both http and https endpoints
            this.httpClient = HttpRequestUtil.getConnection();
            this.writerSliceConfig = this.getPluginJobConf();
            this.type = this.writerSliceConfig.getNecessaryValue(Key.TYPE, TaDataWriterErrorCode.TYPE_NULL);
            // receiver endpoint (mandatory)
            this.pushUrl = this.writerSliceConfig.getNecessaryValue(Key.PUSH_URL, TaDataWriterErrorCode.PUSH_URL);

            this.appid = this.writerSliceConfig.getNecessaryValue(Key.APPID, TaDataWriterErrorCode.APPID_NULL);
            this.dataProcessorThreadNum = this.writerSliceConfig.getInt(Key.THREAD, 3);
            this.jobId = this.writerSliceConfig.getString(Key.JOBID);
            this.batchSize = this.writerSliceConfig.getInt(Key.BATCHSIZE, 1000);

            // defaults
            this.compress = this.writerSliceConfig.getString(Key.COMPRESS, "none");
            this.retryNum = this.writerSliceConfig.getInt(Key.RETRY_NUM, 5);

            dealColumn(this.writerSliceConfig);
        }

        /**
         * Parses the column list into {@link #indexColumnMap}. Entries with an
         * "index" are keyed by that source column index (one source column may
         * feed several target columns); entries without an index are custom
         * constant columns, collected under the null key.
         */
        private void dealColumn(Configuration originalConfig) {
            List<JSONObject> columns = originalConfig.getList(Key.COLUMN, JSONObject.class);
            for (JSONObject column : columns) {
                // null index => constant column; HashMap permits the null key
                Integer index = column.getInteger("index");
                TaDataWriterColumn taDataWriterColumn =
                        JSONObject.parseObject(column.toJSONString(), TaDataWriterColumn.class);
                indexColumnMap.computeIfAbsent(index, k -> new ArrayList<>()).add(taDataWriterColumn);
            }
        }

        /**
         * Task-local preparation. Nothing to do.
         */
        @Override
        public void prepare() {

        }

        /**
         * Pulls records from the DataX channel, dispatches each onto a sender
         * queue, then signals shutdown and waits for the sender threads to
         * drain their queues and exit.
         */
        @Override
        public void startWrite(RecordReceiver lineReceiver) {
            this.taskPluginCollector = super.getTaskPluginCollector();
            // pool sized to the number of sender threads: the previous fixed
            // size of 50 dead-locked when dataProcessorThreadNum > 50 (extra
            // workers never ran, so their queues filled and producers blocked)
            ExecutorService exe = Executors.newFixedThreadPool(Math.max(1, dataProcessorThreadNum));
            for (int i = 0; i < dataProcessorThreadNum; i++) {
                ArrayBlockingQueue<TaDataDo> taDataQueue = new ArrayBlockingQueue<>(DATA_QUEUE_SIZE);
                taDataQueueMap.put(i, taDataQueue);
                exe.execute(new TaDataProcessThread(taDataQueue));
            }
            exe.shutdown();

            LOG.info("begin do write...");
            Record record;
            while ((record = lineReceiver.getFromReader()) != null) {
                try {
                    processTaData(record);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
            LOG.info("finish reading data from channel.");
            // no more records from DataX: let the senders flush and terminate
            needShutDown = true;
            try {
                while (!exe.awaitTermination(100, TimeUnit.MILLISECONDS)) {
                    // keep waiting until every sender thread has exited
                }
            } catch (InterruptedException e) {
                // preserve the interrupt status for the caller
                Thread.currentThread().interrupt();
            }

            LOG.info("end do write");
        }

        /**
         * Task-local post-work. Nothing to do.
         */
        @Override
        public void post() {

        }

        /**
         * Task-level cleanup. Nothing to release here.
         */
        @Override
        public void destroy() {

        }

        /**
         * Converts one DataX record and enqueues it. The target queue is
         * chosen by hashing accountId, then distinctId, then a random UUID,
         * so records for the same user stay ordered on one sender thread.
         * Unconvertible records are reported as dirty data.
         */
        public void processTaData(Record record) throws ExecutionException, RetryException {
            TaDataDo taDataDo = transferRecordToJsonArray(record);
            if (taDataDo != null) {
                int queueIndex;
                if (StringUtils.isNotBlank(taDataDo.getAccountId())) {
                    queueIndex = CommonUtil.hashIndexWithMurmur3(taDataDo.getAccountId(), dataProcessorThreadNum);
                } else if (StringUtils.isNotBlank(taDataDo.getDistinctId())) {
                    queueIndex = CommonUtil.hashIndexWithMurmur3(taDataDo.getDistinctId(), dataProcessorThreadNum);
                } else {
                    queueIndex = CommonUtil.hashIndexWithMurmur3(UUID.randomUUID().toString(), dataProcessorThreadNum);
                }
                // put() blocks while the queue is full; the retryer re-invokes on failure
                retryer.call(() -> {
                    taDataQueueMap.get(queueIndex).put(taDataDo);
                    return null;
                });
            } else {
                this.taskPluginCollector.collectDirtyRecord(record, new NullPointerException());
            }
        }

        /**
         * Maps one DataX record to a TaDataDo using the column configuration.
         * Unconfigured columns and null values are dropped; constant columns
         * and the jobId are appended. Returns null when the configured type is
         * neither "event" nor "user".
         */
        private TaDataDo transferRecordToJsonArray(Record record) {
            TaDataDo taDataDo = null;
            Map<String, Object> splitedRows = new HashMap<String, Object>();
            int recordLength = record.getColumnNumber();
            if (0 != recordLength) {
                Column column;
                for (int i = 0; i < recordLength; i++) {
                    // configs for this source index; absent => discarded column
                    List<TaDataWriterColumn> indexColumns = indexColumnMap.get(i);
                    column = record.getColumn(i);
                    if (indexColumns != null) {
                        // null values are dropped as well
                        if (column.getRawData() != null) {
                            // one source column may feed several target columns
                            for (TaDataWriterColumn taDataWriterColumn : indexColumns) {
                                splitedRows.put(taDataWriterColumn.getColTargetName(),
                                        ColumnAnalysis.castValueToAppointValue(taDataWriterColumn, column));
                            }
                        }
                    }
                }
                // user-defined constant columns (no source index)
                ArrayList<TaDataWriterColumn> addColumnList = indexColumnMap.get(null);
                if (addColumnList != null) {
                    for (TaDataWriterColumn addcolumn : addColumnList) {
                        splitedRows.put(addcolumn.getColTargetName(),
                                ColumnAnalysis.castValueToAppointValue(addcolumn.getType(), addcolumn.getValue()));
                    }
                }
                // stamp the jobId so problem data can be traced to this run
                splitedRows.put(Key.JOBID, jobId);
            }

            if (Constants.EVENT.equals(type)) {
                taDataDo = EventDo.toEvent(splitedRows);
            } else if (Constants.USER.equals(type)) {
                taDataDo = UserDo.toUser(splitedRows);
            }
            return taDataDo;
        }

        /**
         * Sender thread: drains its queue, batches records, and pushes each
         * batch to the TA receiver. A batch is flushed when it reaches
         * batchSize or roughly every 5 seconds, whichever comes first.
         */
        private class TaDataProcessThread implements Runnable {
            private final Logger LOG = LoggerFactory.getLogger(TaDataProcessThread.class);
            private ArrayBlockingQueue<TaDataDo> dataQueue;

            public TaDataProcessThread(ArrayBlockingQueue<TaDataDo> dataQueue) {
                this.dataQueue = dataQueue;
            }

            @Override
            public void run() {
                JSONArray dataArray = new JSONArray();
                long pushDatetime = System.currentTimeMillis();
                while (true) {
                    try {
                        // accumulate until the batch is full or 5s have elapsed
                        if (dataArray.size() < batchSize && (System.currentTimeMillis() - pushDatetime) / 1000 < 5) {
                            TaDataDo taDataDo = dataQueue.poll(200, TimeUnit.MILLISECONDS);
                            if (taDataDo != null) {
                                dataArray.add(taDataDo);
                            } else {
                                if (dataQueue.size() == 0) {
                                    Thread.sleep(1000);
                                }
                            }
                            continue;
                        }
                        if (dataArray.size() != 0) {
                            sendDataByRestfulApi(appid, dataArray);
                            dataArray.clear();
                        }
                        pushDatetime = System.currentTimeMillis();

                        // exit only when the reader is done AND the queue stays
                        // empty for a second (guards against a producer that is
                        // still in the middle of enqueueing)
                        if (needShutDown && dataQueue.size() == 0) {
                            Thread.sleep(1000);
                            if (dataQueue.size() == 0) {
                                LOG.info("thread name is " + Thread.currentThread().getName() + " ShutDown .");
                                break;
                            }
                        }
                    } catch (Throwable e) {
                        // never let a single bad batch kill the sender thread
                        LOG.error("ta data process error, thread: " + Thread.currentThread().getName(), e);
                    }
                }
            }

        }

        /**
         * POSTs one batch to the TA receiver, retrying up to the configured
         * retryNum times (the original hardcoded 5 and ignored the setting).
         * The HTTP response is always closed, and a missing "code" field in
         * the response now fails the attempt instead of throwing an NPE.
         * A batch that still fails after all retries is logged and dropped
         * (best-effort semantics, as before).
         */
        private void sendDataByRestfulApi(String appid, JSONArray dataArray) throws IOException {
            try {
                RetryUtil.executeWithRetry((Callable<Object>) () -> {
                    AbstractHttpEntity params = encodeRecord(dataArray);
                    RequestConfig requestConfig = RequestConfig.custom()
                            .setSocketTimeout(600000)
                            .setConnectTimeout(300000)
                            .build();
                    HttpPost httpPost = new HttpPost(pushUrl);
                    httpPost.addHeader("appid", appid);
                    httpPost.addHeader("user-agent", "datax-1.0");
                    httpPost.addHeader("compress", compress);
                    httpPost.setConfig(requestConfig);
                    httpPost.setEntity(params);

                    CloseableHttpResponse closeableHttpResponse = httpClient.execute(httpPost);
                    try {
                        if (closeableHttpResponse.getStatusLine().getStatusCode() != 200) {
                            throw new Exception("http post with error ,please check data receiver . .return code is not 200.");
                        }
                        HttpEntity responseEntity = closeableHttpResponse.getEntity();
                        String responseStr = EntityUtils.toString(responseEntity);
                        JSONObject obj = JSONObject.parseObject(responseStr);
                        Integer status = obj.getInteger("code");
                        // null check avoids an unboxing NPE when "code" is absent
                        if (status == null || status != 0) {
                            throw new Exception("http post with error ,please check data receiver .return code is not 0.");
                        }
                    } finally {
                        // close even when the status checks throw — the response
                        // used to leak on every failed attempt
                        closeableHttpResponse.close();
                    }
                    LOG.info("data send size :" + dataArray.size() + ",compress is " + compress + ",appid is " + appid);
                    return "";
                }, retryNum, 1000L, true);
            } catch (Exception e) {
                // best-effort: log with the full stack trace (not just getMessage)
                LOG.error("send data to TA receiver failed after retries", e);
            }
        }

        /**
         * Serialises the batch to UTF-8 JSON and applies the configured
         * compression. An unsupported compress value now fails fast with a
         * clear IOException instead of passing null into ByteArrayEntity.
         */
        private AbstractHttpEntity encodeRecord(JSONArray contents) throws IOException {
            String data = contents.toString();
            byte[] dataBytes = data.getBytes(StandardCharsets.UTF_8);

            byte[] dataCompressed;
            if ("gzip".equalsIgnoreCase(this.compress)) {
                dataCompressed = CompressUtil.gzipCompress(dataBytes);
            } else if ("lzo".equalsIgnoreCase(this.compress)) {
                dataCompressed = CompressUtil.lzoCompress(dataBytes);
            } else if ("lz4".equalsIgnoreCase(this.compress)) {
                dataCompressed = CompressUtil.lz4Compress(dataBytes);
            } else if ("snappy".equalsIgnoreCase(this.compress)) {
                dataCompressed = CompressUtil.snappyCompress(dataBytes);
            } else if ("none".equalsIgnoreCase(this.compress)) {
                dataCompressed = dataBytes;
            } else {
                throw new IOException("unsupported compress type: " + this.compress);
            }
            return new ByteArrayEntity(dataCompressed);
        }

    }
}
