package demo.db.influxdb;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Demo of writing points to InfluxDB (1.x line protocol) via the influxdb-java client.
 *
 * https://github.com/influxdata/influxdb-client-java#writes-and-queries-in-influxdb-2x
 */
public class InfluxdbWriteDemo {

    // NOTE(review): credentials and endpoints are hard-coded in source. Move them to
    // external configuration (environment variables / properties) before real use.
    private static final char[] token = "my-token".toCharArray();
    private static final String org = "my-org";
    private static final String bucket = "my-bucket";

    private static final String majorUrl = "http://192.168.3.66:8086";
    private static final String minorUrl = "http://192.168.3.66:8086";
    private static final String username = "southgate";
    private static final String password = "wmxPpAkoST18";
    private static final String database = "southgate";

    /**
     * Assembles data via {@link BatchPoints}, serializes each batch to line protocol,
     * then inserts everything into the database in a single call.
     * <p>
     * This is the recommended approach: data belonging to the same database can be
     * written in one batch, which gives the best write throughput.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        InfluxDBConnectionDemo influxDBConnection = new InfluxDBConnectionDemo("admin", "admin", "1.1.1.1", "db-test", "hour");
        Map<String, String> tags1 = new HashMap<>();
        tags1.put("tag1", "标签值");
        Map<String, String> tags2 = new HashMap<>();
        tags2.put("tag2", "标签值");
        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("field1", "abc");
        // Numeric value: InfluxDB fixes a field's type from the first value inserted.
        fields1.put("field2", 123456);
        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("field1", "String类型");
        fields2.put("field2", 3.141592657);
        // One record each. (Note: do not use System.currentTimeMillis() in production —
        // at high volume it yields duplicate timestamps and silent data loss; use the
        // data's own timestamp. This is demo code only.)
        Point point1 = influxDBConnection.pointBuilder("表名", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags1, fields1);
        Point point2 = influxDBConnection.pointBuilder("表名", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags2, fields2);
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "标签值1")
                .retentionPolicy("hour").consistency(InfluxDB.ConsistencyLevel.ALL).precision(TimeUnit.MILLISECONDS).build();
        // Add the first record to its batch.
        batchPoints1.point(point1);
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "标签值2")
                .retentionPolicy("hour").consistency(InfluxDB.ConsistencyLevel.ALL).precision(TimeUnit.MILLISECONDS).build();
        // Add the second record to its batch.
        batchPoints2.point(point2);
        // Serialize each batch to line protocol so both can be written in one call,
        // which improves write speed.
        List<String> records = new ArrayList<>();
        records.add(batchPoints1.lineProtocol());
        records.add(batchPoints2.lineProtocol());
        // Batch-insert both records into the database.
        influxDBConnection.batchInsert("db-test", "hour", InfluxDB.ConsistencyLevel.ALL, TimeUnit.MILLISECONDS, records);
    }

    /**
     * Writes a single point directly through the raw client.
     * <p>
     * InfluxDB fixes a field's type from the first value inserted; tags are always
     * String-typed and are indexed, so they speed up queries.
     */
    public void addDemo() {
        String measurement = "table_test";
        String dbName = "db_name_test";

        // Close the connection when done — the original leaked it.
        // (InfluxDB.close() exists since influxdb-java 2.9; verify against the
        //  project's client version.)
        InfluxDB influxDB = InfluxDBFactory.connect(majorUrl, username, password);
        try {
            Map<String, String> tags = new HashMap<>();
            tags.put("tag1", "标签值");
            Map<String, Object> fields = new HashMap<>();
            fields.put("field1", "String类型");
            // Numeric value: InfluxDB fixes a field's type from the first value inserted.
            fields.put("field2", 3.141592657);

            long time = System.currentTimeMillis();
            TimeUnit timeUnit = TimeUnit.MILLISECONDS;

            Point.Builder builder = Point.measurement(measurement);
            builder.tag(tags);
            builder.fields(fields);
            if (time != 0) {
                builder.time(time, timeUnit);
            }
            influxDB.write(dbName, "hour", builder.build());
        } finally {
            influxDB.close();
        }
    }

    /**
     * With this approach, both records must be written to the same database with the
     * same tags; records with different tags must go into separate {@link BatchPoints}
     * objects, otherwise the written data gets mixed up.
     * <p>
     * Approach one: assemble the data into BatchPoints, then insert batch by batch.
     */
    public void batchAdd1() {
        InfluxDBConnectionDemo influxDBConnection = new InfluxDBConnectionDemo("admin", "admin", "1.1.1.1", "db-test", "hour");
        Map<String, String> tags = new HashMap<>();
        tags.put("tag1", "标签值");
        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("field1", "abc");
        // Numeric value: InfluxDB fixes a field's type from the first value inserted.
        fields1.put("field2", 123456);
        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("field1", "String类型");
        fields2.put("field2", 3.141592657);
        // One record each.
        Point point1 = influxDBConnection.pointBuilder("表名", System.currentTimeMillis(), TimeUnit.SECONDS, tags, fields1);
        Point point2 = influxDBConnection.pointBuilder("表名", System.currentTimeMillis(), TimeUnit.SECONDS, tags, fields2);
        // Put the two records into their respective batches.
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "标签值1").retentionPolicy("hour")
                .consistency(InfluxDB.ConsistencyLevel.ALL).build();
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "标签值2").retentionPolicy("hour")
                .consistency(InfluxDB.ConsistencyLevel.ALL).build();
        batchPoints1.point(point1);
        batchPoints2.point(point2);
        // Batch-insert both records into the database.
        influxDBConnection.batchInsert(batchPoints1, TimeUnit.MILLISECONDS);
        influxDBConnection.batchInsert(batchPoints2, TimeUnit.MILLISECONDS);
    }


}
