package influxdb;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;

import influxdb.util.InfluxDBConnection;

/**
 * Demo of batch-inserting points into InfluxDB.
 *
 * <p>Builds two {@link Point}s with different tag sets, wraps each in its own
 * {@link BatchPoints}, serializes both to line protocol, and writes them to the
 * database in a single batch call via {@link InfluxDBConnection#batchInsert}.
 */
public class TestInsertBatch {
	public static void main(String[] args) {
		// NOTE: the URL must use forward slashes with the "//" authority separator;
		// "http:\\host" is not a valid URL and the HTTP client would reject it.
		InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "http://192.168.219.130:8086", "pastoralDogDB", "30day");
		Map<String, String> tags1 = new HashMap<>();
		tags1.put("tag1", "标签值");

		Map<String, String> tags2 = new HashMap<>();
		tags2.put("tag2", "标签值");

		Map<String, Object> fields1 = new HashMap<>();
		fields1.put("field1", "abc");
		// Numeric field. InfluxDB fixes a field's type from the first value
		// ever inserted for that field, so keep types consistent.
		fields1.put("field2", 123456.0);

		Map<String, Object> fields2 = new HashMap<>();
		fields2.put("field1", "String类型");
		fields2.put("field2", 3.141592657);
		// One record (point) per tag/field map pair.
		Point point1 = influxDBConnection.pointBuilder("measurements001", System.currentTimeMillis(), tags1, fields1);
		Point point2 = influxDBConnection.pointBuilder("measurements001", System.currentTimeMillis(), tags2, fields2);
		BatchPoints batchPoints1 = BatchPoints.database("pastoralDogDB").tag("tag1", "标签值1")
				.retentionPolicy("30day").consistency(ConsistencyLevel.ALL).build();
		// Add the first record to its batch.
		batchPoints1.point(point1);
		BatchPoints batchPoints2 = BatchPoints.database("pastoralDogDB").tag("tag2", "标签值2")
				.retentionPolicy("30day").consistency(ConsistencyLevel.ALL).build();
		// Add the second record to its batch.
		batchPoints2.point(point2);

		// Serialize each BatchPoints to line protocol and write them all in one
		// request — batching improves write throughput.
		List<String> records = new ArrayList<>();
		records.add(batchPoints1.lineProtocol());
		records.add(batchPoints2.lineProtocol());
		// Batch-insert both records into the database.
		influxDBConnection.batchInsert("pastoralDogDB", "30day", ConsistencyLevel.ALL, records);
	}
}
