package com.htdata.iiot.kafka2tsdb.kafka.muil;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.log4j.Logger;

import com.htdata.iiot.kafka2tsdb.config.Configs;
import com.htdata.iiot.kafka2tsdb.dataParser.DataParser;
import com.htdata.iiot.kafka2tsdb.dataParser.pojo.StreamingData;
import com.htdata.iiot.kafka2tsdb.kafka.HTKafkaConsumer;
import com.htdata.iiot.kafka2tsdb.tsdb.Post;

/**
 * Per-thread Kafka consumer worker: polls the configured topic, parses each
 * record's value into {@link StreamingData} via {@link DataParser}, and posts
 * the resulting batch to the TSDB endpoint with {@link Post}.
 *
 * <p>Thread model: one instance is expected per consumer thread; {@code threadId}
 * is used only for log output.
 */
public class MuilKafkaClient implements Runnable {
	private static final Logger logger = Logger.getLogger(MuilKafkaClient.class);
	/** Identifier of the owning thread, used only in log messages. */
	private final long threadId;

	public MuilKafkaClient(long threadId) {
		this.threadId = threadId;
	}

	@Override
	public void run() {
		logger.info("Begin...");
		// Downstream pipeline components access HDFS; run them as the "hdfs" user.
		System.setProperty("HADOOP_USER_NAME", "hdfs");

		HTKafkaConsumer htConsumer = new HTKafkaConsumer();
		String topic = Configs.KafkaConfig.TOPIC;

		Consumer<String, String> consumer = htConsumer.getConsumer();
		consumer.subscribe(Arrays.asList(topic));

		DataParser dataParser = DataParser.getDataParser();
		logger.info("线程" + threadId + "开始消费。。。");
		Post post = new Post(Configs.BaseConfig.url);

		// NOTE(review): offsets are never committed explicitly here (the original
		// commitSync() calls were commented out) — presumably the consumer is
		// configured with enable.auto.commit=true; verify in HTKafkaConsumer.
		try {
			while (true) {
				// Start 1 ms in the past so, combined with the "+1" below, every
				// elapsed-time divisor in the rate logging is strictly positive.
				long startTime = System.currentTimeMillis() - 1;
				ConsumerRecords<String, String> records = consumer.poll(100000);
				long pollEndTime = System.currentTimeMillis();
				if (records.isEmpty()) {
					logger.warn("null");
					try {
						Thread.sleep(2000);
					} catch (InterruptedException e) {
						// Restore the interrupt flag and stop this worker instead of
						// swallowing the interruption — the loop is otherwise unstoppable.
						Thread.currentThread().interrupt();
						logger.warn("Consumer thread " + threadId + " interrupted, exiting", e);
						return;
					}
					continue;
				}

				// records.count() is known, so presize the batch list.
				List<StreamingData> list = new ArrayList<StreamingData>(records.count());
				for (ConsumerRecord<String, String> record : records) {
					String value = record.value();
					if (value != null && !"".equals(value)) {
						try {
							list.add(dataParser.toStreamingData(value));
						} catch (Exception e) {
							// Skip only the bad record, keep the rest of the batch;
							// log with the throwable so the stack trace is preserved.
							logger.error("Failed to parse record at offset " + record.offset(), e);
						}
					} else {
						logger.warn("Data is null!!!");
					}
				}

				long analyEndTime = System.currentTimeMillis();
				if (!list.isEmpty()) {
					post.send(list);
				}

				if (DstatusStore.log) {
					long endTime = System.currentTimeMillis();
					// "+1" keeps every divisor >= 1 (see startTime above).
					long pollTime = pollEndTime - startTime + 1;
					long analysisTime = analyEndTime - pollEndTime + 1;
					long writeTime = endTime - analyEndTime + 1;
					long allTime = endTime - startTime + 1;
					logger.info("本次获取数据:" + list.size()
							+ "条，本次获取数据平均速率:" + (list.size() * 1000 / pollTime)
							+ "，本次分析数据平均速率:" + (list.size() * 1000 / analysisTime)
							+ "，本次写数据数据平均速率:" + (list.size() * 1000 / writeTime)
							+ "，本次平均速率:" + (list.size() * 1000 / allTime)
							);
				}
			}
		} finally {
			// Release the consumer's network resources if the loop ever exits.
			consumer.close();
		}
	}
}
