package com.watchme.es.service;

import com.alibaba.fastjson.JSONObject;
import com.watchme.es.util.DbHelper;
import com.watchme.es.util.UUIDGenerator;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;

import java.io.IOException;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;

public class ElasticService {

    // --- MySQL defaults; each is overridden by the matching property in the constructor ---
    private String url = "jdbc:mysql://172.16.14.33:3306/test?characterEncoding=utf8";
    private String username = "root";
    private String password = "root";
    private String sql = "select * from dual";
    // JSON string mapping source column names to replacement ES field names, e.g. {"col":"esField"}.
    private String field_rule = "{}";

    // --- Elasticsearch defaults ---
    private String es_scheme = "http";
    private String es_address = "127.0.0.1";
    private int es_port = 9200;
    private String es_index = "";
    private String es_type = "";
    // Number of rows buffered locally before a batch is handed to the BulkProcessor.
    private int commitSize = 5000;

    /**
     * Builds the service from configuration; any blank or missing property keeps its default.
     *
     * Recognized keys: mysql_address, mysql_username, mysql_password, mysql_sql,
     * mysql_field_rule, es_scheme, es_address, es_port, es_index, es_type, es_commit_size.
     *
     * @param properties configuration source
     * @throws NumberFormatException if es_port or es_commit_size is present but not an integer
     */
    public ElasticService(Properties properties) {
        String mysql_address = properties.getProperty("mysql_address");
        String mysql_username = properties.getProperty("mysql_username");
        String mysql_password = properties.getProperty("mysql_password");
        String mysql_sql = properties.getProperty("mysql_sql");
        String field_rule = properties.getProperty("mysql_field_rule");
        String es_scheme = properties.getProperty("es_scheme");
        String es_address = properties.getProperty("es_address");
        String es_port = properties.getProperty("es_port");
        String es_index = properties.getProperty("es_index");
        String es_type = properties.getProperty("es_type");
        String commitSize = properties.getProperty("es_commit_size");

        if (StringUtils.isNotBlank(mysql_address)) {
            this.url = mysql_address;
        }
        if (StringUtils.isNotBlank(mysql_username)) {
            this.username = mysql_username;
        }
        if (StringUtils.isNotBlank(mysql_password)) {
            this.password = mysql_password;
        }
        if (StringUtils.isNotBlank(mysql_sql)) {
            this.sql = mysql_sql;
        }
        if (StringUtils.isNotBlank(field_rule)) {
            this.field_rule = field_rule;
        }
        if (StringUtils.isNotBlank(es_scheme)) {
            this.es_scheme = es_scheme;
        }
        if (StringUtils.isNotBlank(es_address)) {
            this.es_address = es_address;
        }
        if (StringUtils.isNotBlank(es_port)) {
            this.es_port = Integer.parseInt(es_port);
        }
        if (StringUtils.isNotBlank(es_index)) {
            this.es_index = es_index;
        }
        if (StringUtils.isNotBlank(es_type)) {
            this.es_type = es_type;
        }
        if (StringUtils.isNotBlank(commitSize)) {
            int size = Integer.parseInt(commitSize);
            // A zero or negative batch size would make `count % commitSize` throw / misbehave.
            if (size > 0) {
                this.commitSize = size;
            }
        }
    }

    /**
     * Lists all non-system index names on the configured cluster.
     * System indices (names starting with '.') are filtered out.
     *
     * @return index names; empty on failure (errors are printed, not thrown)
     */
    public List<String> selectIndex() {
        List<String> list = new ArrayList<>();
        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost(es_address, es_port, es_scheme)));
        try {
            GetIndexRequest request = new GetIndexRequest().indices("_all");
            GetIndexResponse getIndexResponse = client.indices().get(request, RequestOptions.DEFAULT);
            for (String str : getIndexResponse.getIndices()) {
                if (!str.trim().startsWith(".")) {
                    list.add(str);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return list;
    }

    /**
     * Creates the configured index (lower-cased, as ES requires) unless it already exists.
     * Index settings (shards/replicas/refresh interval) are left at cluster defaults;
     * tuning them can noticeably speed up bulk loading.
     */
    public void createIndex() {
        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost(es_address, es_port, es_scheme)));
        try {
            String indexName = es_index.toLowerCase();
            // Skip creation when the index is already present (the original left this as a TODO).
            if (client.indices().exists(new GetIndexRequest().indices(indexName), RequestOptions.DEFAULT)) {
                System.out.println("Index already exists: " + indexName);
                return;
            }
            CreateIndexRequest requestIndex = new CreateIndexRequest(indexName);
            requestIndex.settings(Settings.builder()
//                    .put("index.number_of_shards", 5)
//                    .put("index.number_of_replicas", 0)
//                    .put("index.refresh_interval", "-1")
            );
            client.indices().create(requestIndex, RequestOptions.DEFAULT);
            System.out.println("索引创建成功");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Streams every row of the configured SQL query from MySQL into Elasticsearch.
     *
     * Rows are read with a forward-only, read-only statement and
     * {@code fetchSize = Integer.MIN_VALUE} — the MySQL driver's row-by-row streaming
     * mode — so large tables never fully reside in memory. Columns are renamed
     * according to {@code field_rule}; {@link Timestamp} values become epoch millis.
     * Every {@code commitSize} rows the buffered batch is handed to the BulkProcessor,
     * which indexes asynchronously.
     */
    public void writeMysqlDataToES() {
        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost(es_address, es_port, es_scheme)));
        BulkProcessor bulkProcessor = getBulkProcessor(client);
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            DateTimeFormatter stampFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
            long beginTime = System.currentTimeMillis();
            System.out.println("开始处理数据 :" + LocalDateTime.now().format(stampFormat));
            conn = DbHelper.getConnection(url, username, password);
            ps = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            // MySQL-specific: Integer.MIN_VALUE switches the driver to streaming result sets.
            ps.setFetchSize(Integer.MIN_VALUE);
            rs = ps.executeQuery();

            ResultSetMetaData colData = rs.getMetaData();
            int columnCount = colData.getColumnCount();
            JSONObject rule = JSONObject.parseObject(field_rule);
            String indexName = es_index.toLowerCase();

            ArrayList<HashMap<String, Object>> dataList = new ArrayList<>(commitSize);
            int count = 0;
            while (rs.next()) {
                count++;
                HashMap<String, Object> row = new HashMap<>(columnCount * 2);
                for (int i = 1; i <= columnCount; i++) {
                    String columnName = colData.getColumnName(i);
                    String newColumnName = rule.getString(columnName);
                    String key = StringUtils.isNotBlank(newColumnName) ? newColumnName : columnName;
                    row.put(key, handlerValue(rs.getObject(i)));
                }
                dataList.add(row);
                // Submit every commitSize rows; the remainder is submitted after the loop.
                if (count % commitSize == 0) {
                    String time = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
                    System.out.println(time + "开始处理数据条数 : " + count);
                    submitBatch(bulkProcessor, dataList, indexName);
                    bulkProcessor.flush();
                }
            }

            // Rows left over from the last incomplete batch.
            submitBatch(bulkProcessor, dataList, indexName);

            System.out.println("一共插入数据条数：" + count);
            // flush() only queues the requests; visibility depends on the processor's flush interval.
            bulkProcessor.flush();
            long endTime = System.currentTimeMillis();
            System.out.println("本次操作一共使用时间：" + (endTime - beginTime) + " 毫秒");

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close each resource independently so one failure (or a null from an early
            // connect error) cannot prevent the others — the original chained calls here
            // would NPE and leak the ES client whenever the DB connection failed.
            closeQuietly(rs);
            closeQuietly(ps);
            closeQuietly(conn);
            try {
                bulkProcessor.awaitClose(150L, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            try {
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Hands every buffered row to the BulkProcessor as an IndexRequest, then clears the buffer. */
    private void submitBatch(BulkProcessor bulkProcessor, List<HashMap<String, Object>> batch, String indexName) {
        for (HashMap<String, Object> row : batch) {
            bulkProcessor.add(new IndexRequest(indexName, es_type, UUIDGenerator.getUUID()).source(row));
        }
        batch.clear();
    }

    /** Closes a resource, printing (not rethrowing) any error; null-safe. */
    private static void closeQuietly(AutoCloseable resource) {
        if (resource != null) {
            try {
                resource.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Builds a BulkProcessor bound to the given client: flushes every 1000 actions,
     * 50 MB, or 10 seconds, with up to 5 concurrent in-flight bulk requests and a
     * constant 1-second backoff retried 3 times on rejection.
     *
     * (The original wrapped construction in a try/catch whose recovery path called
     * awaitClose() on a still-null processor — a guaranteed NPE; none of these builder
     * calls throw checked exceptions, so the guard is simply removed.)
     */
    private static BulkProcessor getBulkProcessor(RestHighLevelClient client) {
        long begin = System.currentTimeMillis();
        BulkProcessor.Listener listener = new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {
                // no-op; enable for per-batch tracing:
//                System.out.println("Try to insert data number : " + request.numberOfActions());
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                System.out.println("成功插入数据条数：" + request.numberOfActions() + " , id: "
                        + executionId + " 使用时间：" + (System.currentTimeMillis() - begin) + " 毫秒");
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                System.out.println("Bulk is unsuccess : " + failure + ", executionId: " + executionId);
            }
        };

        BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer = (request, bulkListener) -> client
                .bulkAsync(request, RequestOptions.DEFAULT, bulkListener);

        BulkProcessor.Builder builder = BulkProcessor.builder(bulkConsumer, listener);
        builder.setBulkActions(1000);
        builder.setBulkSize(new ByteSizeValue(50L, ByteSizeUnit.MB));
        builder.setConcurrentRequests(5);
        builder.setFlushInterval(TimeValue.timeValueSeconds(10L));
        builder.setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(1L), 3));
        // build() must be called — configuring the builder alone has no effect.
        return builder.build();
    }

    /**
     * Normalizes a JDBC value for indexing: java.sql.Timestamp is converted to
     * epoch milliseconds; every other value is passed through unchanged.
     */
    private Object handlerValue(Object value) {
        if (value instanceof Timestamp) {
            return ((Timestamp) value).getTime();
        }
        return value;
    }
}
