package com.juanpi.order.runner;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.Set;

/**
 * @author xueqing
 *         Created on 2017/2/13.
 */
/**
 * Consumes messages from a single Kafka topic and forwards each record's value
 * to an HTTP endpoint via {@link PushHttpClient}. On a failed push the consumer
 * seeks back so the record is retried on the next poll; offsets are committed
 * only after a successful push (at-least-once delivery).
 *
 * <p>Runs until the executing thread is interrupted; the consumer is closed on exit.
 *
 * @author xueqing
 *         Created on 2017/2/13.
 */
public class Cow implements Runnable {
    /** Back-off between retries and empty polls, in milliseconds. */
    private static final long BACKOFF_MS = 1000L;
    /** Poll timeout, in milliseconds. */
    private static final long POLL_TIMEOUT_MS = 10L;

    private final Properties properties;
    private final UrlTopicMappingBean urlTopicMappingBean;
    private final PushHttpClient pushHttpClient;

    public Cow(Properties properties, UrlTopicMappingBean urlTopicMappingBean, PushHttpClient pushHttpClient) {
        this.properties = properties;
        this.urlTopicMappingBean = urlTopicMappingBean;
        this.pushHttpClient = pushHttpClient;
    }

    @Override
    public void run() {
        // Diamond operator instead of a raw-type constructor call.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        try {
            kafkaConsumer.subscribe(Collections.singletonList(urlTopicMappingBean.getTopic()));
            // Honor interruption so the task can be shut down cleanly.
            while (!Thread.currentThread().isInterrupted()) {
                ConsumerRecords<String, String> polled = kafkaConsumer.poll(POLL_TIMEOUT_MS);
                if (polled.count() == 0) {
                    if (!sleepQuietly(BACKOFF_MS)) {
                        break; // interrupted while backing off — exit the loop
                    }
                    continue;
                }
                pushRecords(kafkaConsumer, polled, new ArrayList<>(polled.partitions()));
            }
        } finally {
            // The original leaked the consumer; always release its resources.
            kafkaConsumer.close();
        }
    }

    /**
     * Pushes every polled record, partition by partition. Only seek when a push
     * fails: the failed partition is rewound to the failed record, and — bug fix
     * relative to the original — all partitions not yet processed in this batch
     * are rewound to their first unprocessed record, since the consumer's
     * position has already advanced past them and they would otherwise be
     * silently skipped without ever being committed.
     */
    private void pushRecords(KafkaConsumer<String, String> kafkaConsumer,
                             ConsumerRecords<String, String> polled,
                             List<TopicPartition> partitions) {
        for (int i = 0; i < partitions.size(); i++) {
            TopicPartition topicPartition = partitions.get(i);
            for (ConsumerRecord<String, String> record : polled.records(topicPartition)) {
                boolean pushed = pushHttpClient.pushBody(
                        urlTopicMappingBean.getUrl(), record.value(), urlTopicMappingBean.getHeader());
                if (!pushed) {
                    // Retry this record on the next poll.
                    kafkaConsumer.seek(topicPartition, record.offset());
                    rewindUnprocessed(kafkaConsumer, polled, partitions, i + 1);
                    sleepQuietly(BACKOFF_MS);
                    return;
                }
                // Commit only after a successful push (at-least-once semantics).
                kafkaConsumer.commitSync();
            }
        }
    }

    /** Rewinds each remaining partition of this batch to its first unprocessed record. */
    private void rewindUnprocessed(KafkaConsumer<String, String> kafkaConsumer,
                                   ConsumerRecords<String, String> polled,
                                   List<TopicPartition> partitions,
                                   int startIndex) {
        for (int j = startIndex; j < partitions.size(); j++) {
            TopicPartition tp = partitions.get(j);
            List<ConsumerRecord<String, String>> records = polled.records(tp);
            if (!records.isEmpty()) {
                kafkaConsumer.seek(tp, records.get(0).offset());
            }
        }
    }

    /**
     * Sleeps for the given number of milliseconds.
     *
     * @return {@code true} if the sleep completed; {@code false} if interrupted,
     *         in which case the thread's interrupt status is restored so the
     *         main loop can observe it and terminate.
     */
    private static boolean sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
            return true;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
    }
}
