package com.seari.influxdb;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Point.Builder;
import org.influxdb.dto.Pong;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Helper component wrapping an {@link InfluxDB} client: connection management,
 * database / retention-policy administration, and read/write operations.
 *
 * <p>NOTE(review): when created through the no-arg constructor (the Spring
 * path), the connection is NOT opened automatically — {@link #influxDbBuild()}
 * must be invoked before any other method. Confirm callers do so.
 */
@Component
public class InfluxDBConnection {

    @Value(value = "${influxdb.username}")
    private String username;
    @Value(value = "${influxdb.password}")
    private String password;
    @Value(value = "${influxdb.url}")
    private String openurl;
    // target database name
    @Value(value = "${influxdb.database}")
    private String database;
    // retention policy applied to writes
    @Value(value = "${influxdb.retentionPolicy}")
    private String retentionPolicy;

    private InfluxDB influxDB;

    /**
     * No-arg constructor for Spring; configuration is injected via {@code @Value}.
     */
    public InfluxDBConnection() {
    }

    /**
     * Creates the helper and opens the connection immediately.
     *
     * @param username        InfluxDB user name
     * @param password        InfluxDB password
     * @param openurl         server URL, e.g. {@code http://localhost:8086}
     * @param database        database to operate on
     * @param retentionPolicy retention policy used for writes; falls back to
     *                        {@code "autogen"} when null or empty
     */
    public InfluxDBConnection(String username, String password, String openurl, String database,
                              String retentionPolicy) {
        this.username = username;
        this.password = password;
        this.openurl = openurl;
        this.database = database;
        this.retentionPolicy = retentionPolicy == null || retentionPolicy.isEmpty() ? "autogen" : retentionPolicy;
        influxDbBuild();
    }

    /**
     * Creates a database.
     *
     * @param dbName database name
     */
    @SuppressWarnings("deprecation")
    public void createDB(String dbName) {
        influxDB.createDatabase(dbName);
    }

    /**
     * Drops a database.
     *
     * @param dbName database name
     */
    @SuppressWarnings("deprecation")
    public void deleteDB(String dbName) {
        influxDB.deleteDatabase(dbName);
    }

    /**
     * Tests whether the server is reachable.
     *
     * @return true when the server answered the ping
     */
    public boolean ping() {
        boolean isConnected = false;
        Pong pong;
        try {
            pong = influxDB.ping();
            if (pong != null) {
                isConnected = true;
            }
        } catch (Exception e) {
            // Best-effort probe: any failure is reported as "not connected".
            e.printStackTrace();
        }
        return isConnected;
    }

    /**
     * Connects to the time-series database, creating it when it does not exist.
     *
     * @return the connected client
     */
    public InfluxDB influxDbBuild() {
        if (influxDB == null) {
            influxDB = InfluxDBFactory.connect(openurl, username, password);
        }
        try {
            if (!influxDB.databaseExists(database)) {
                influxDB.createDatabase(database);
            }
        } catch (Exception e) {
            // The server may sit behind a proxy that forbids database creation;
            // deliberately ignore and assume the database already exists.
        } finally {
            influxDB.setRetentionPolicy(retentionPolicy);
        }
        influxDB.setLogLevel(InfluxDB.LogLevel.NONE);
        return influxDB;
    }

    /**
     * Creates a custom retention policy.
     *
     * @param policyName  policy name
     * @param duration    how long to keep data, e.g. {@code "30d"}
     * @param replication number of replicas to keep
     * @param isDefault   whether to make this the default policy; a null value
     *                    is treated as false
     */
    public void createRetentionPolicy(String policyName, String duration, int replication, Boolean isDefault) {
        String sql = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s ", policyName,
                database, duration, replication);
        // Boolean.TRUE.equals() avoids the NPE that auto-unboxing a null would throw.
        if (Boolean.TRUE.equals(isDefault)) {
            sql = sql + " DEFAULT";
        }
        this.query(sql);
    }

    /**
     * Creates the default retention policy.
     * <p>
     * Policy name: "default", duration: 30 days, replicas: 1,
     * marked as the database's default policy.
     */
    public void createDefaultRetentionPolicy() {
        String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                "default", database, "30d", 1);
        this.query(command);
    }

    /**
     * Runs a query against the configured database.
     *
     * @param command InfluxQL statement
     * @return the raw query result
     */
    public QueryResult query(String command) {
        return influxDB.query(new Query(command, database));
    }

    /**
     * Writes a single point.
     *
     * @param measurement measurement (table) name
     * @param tags        tag set
     * @param fields      field set
     * @param time        point timestamp; 0 means "let the server assign the time"
     * @param timeUnit    unit of {@code time}
     */
    public void insert(String measurement, Map<String, String> tags, Map<String, Object> fields, long time,
                       TimeUnit timeUnit) {
        Builder builder = Point.measurement(measurement);
        builder.tag(tags);
        builder.fields(fields);
        if (0 != time) {
            builder.time(time, timeUnit);
        }
        influxDB.write(database, retentionPolicy, builder.build());
    }

    /**
     * Writes a pre-assembled batch of points in a single call.
     *
     * @param batchPoints batch to write
     */
    public void batchInsert(BatchPoints batchPoints) {
        influxDB.write(batchPoints);
    }

    /**
     * Writes pre-serialized line-protocol records in a single call.
     *
     * @param database        target database
     * @param retentionPolicy retention policy
     * @param consistency     write consistency level
     * @param records         line-protocol records (one can be obtained via
     *                        {@code BatchPoints.lineProtocol()})
     */
    public void batchInsert(final String database, final String retentionPolicy, final ConsistencyLevel consistency,
                            final List<String> records) {
        influxDB.write(database, retentionPolicy, consistency, records);
    }

    /**
     * Executes a delete statement.
     *
     * @param command delete statement
     * @return the server error message, or null on success
     */
    public String deleteMeasurementData(String command) {
        QueryResult result = influxDB.query(new Query(command, database));
        return result.getError();
    }

    /**
     * Closes the underlying connection.
     */
    public void close() {
        influxDB.close();
    }

    /**
     * Builds a point with a millisecond-precision timestamp.
     *
     * @param measurement measurement name
     * @param time        timestamp in milliseconds
     * @param tags        tag set
     * @param fields      field set
     * @return the assembled point
     */
    public Point pointBuilder(String measurement, long time, Map<String, String> tags, Map<String, Object> fields) {
        return Point.measurement(measurement).time(time, TimeUnit.MILLISECONDS).tag(tags).fields(fields).build();
    }

    /**
     * Demo: query and unpack a result set.
     * <p>
     * The result is nested because InfluxDB can answer several statements in one
     * round trip: always null-check {@code getSeries()} before reading values,
     * then iterate the value rows of the series you need.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("root", "000000", "http://localhost:8086", "dbTest", "autogen");
        QueryResult results = influxDBConnection
                .query("SELECT * FROM meatests where IDS = '10016'  order by time desc limit 1000");
        // getResults() holds one entry per statement; we sent a single statement.
        QueryResult.Result oneResult = results.getResults().get(0);
        if (oneResult.getSeries() != null) {
            // Value rows of the first (and only) series.
            List<List<Object>> valueList = oneResult.getSeries().get(0).getValues();
            if (valueList != null && !valueList.isEmpty()) {
                for (List<Object> value : valueList) {
                    // Column 0 / column 1 of the row; columns may be null.
                    String field1 = value.get(0) == null ? null : value.get(0).toString();
                    String field2 = value.get(1) == null ? null : value.get(1).toString();
                    // TODO use the extracted fields in your own business logic
                }
            }
        }
    }

    /**
     * Demo: insert a single point through the helper.
     *
     * @param args unused
     */
    public static void main11(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("root", "000000", "http://localhost:8086", "dbTest", "autogen");
        Map<String, String> tags = new HashMap<>();
        tags.put("ID", "10010");
        Map<String, Object> fields = new HashMap<>();
        fields.put("Name", "Lee");
        // A field's type is fixed by the first value ever written for it.
        fields.put("Age", 18);
        // Timestamps are supplied in milliseconds.
        influxDBConnection.insert("meatests", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    }

    /**
     * Adds five sample points to the batch (measurement "meatests"), tagging
     * them with consecutive IDs starting at {@code firstId}.
     *
     * @param connection helper used to build the points
     * @param batch      batch the points are appended to
     * @param firstId    first ID tag value; subsequent points increment it
     */
    private static void addSamplePoints(InfluxDBConnection connection, BatchPoints batch, int firstId) {
        String[] names = {"Tian", "Sun", "Guo", "Liao", "Zhang"};
        for (int i = 0; i < names.length; i++) {
            Map<String, String> tags = new HashMap<>();
            tags.put("ID", String.valueOf(firstId + i));
            Map<String, Object> fields = new HashMap<>();
            fields.put("Name", names[i]);
            // A field's type is fixed by the first value ever written for it.
            fields.put("age", 18);
            batch.point(connection.pointBuilder("meatests", System.currentTimeMillis(), tags, fields));
        }
    }

    /**
     * Demo: assemble a {@link BatchPoints} and write it in one call.
     *
     * @param args unused
     */
    public static void main111(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("root", "000000", "http://localhost:8086", "dbTest", "autogen");
        BatchPoints batchPoints1 = BatchPoints.database("dbTest").retentionPolicy("autogen")
                .consistency(ConsistencyLevel.ALL).build();
        // Sample records tagged ID 10030..10034.
        addSamplePoints(influxDBConnection, batchPoints1, 10030);
        // Write the whole batch to the database in one call.
        influxDBConnection.batchInsert(batchPoints1);
    }

    /**
     * Demo: assemble a {@link BatchPoints}, serialize it to line protocol, and
     * write the records in one call — faster for large batches.
     *
     * @param args unused
     */
    public static void main22(String[] args) {
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("root", "000000", "http://localhost:8086", "dbTest", "autogen");
        BatchPoints batchPoints1 = BatchPoints.database("dbTest").tag("IDS", "10016")
                .retentionPolicy("autogen").consistency(ConsistencyLevel.ALL).build();
        // Sample records tagged ID 10040..10044.
        addSamplePoints(influxDBConnection, batchPoints1, 10040);
        // Serialize the batch to line protocol and write all records at once.
        List<String> records = new ArrayList<>();
        records.add(batchPoints1.lineProtocol());
        influxDBConnection.batchInsert("dbTest", "autogen", ConsistencyLevel.ALL, records);
    }
}