#-*- coding: utf-8 -*-
import pykafka
class Pykafka(object):
    """Thin wrapper around pykafka for one topic / one consumer group.

    Builds, from a flat ``conf`` dict, a ZooKeeper-backed Kafka client,
    an async (fire-and-forget) producer and a balanced consumer.

    Required conf keys: ``kafka_zk_url``, ``kafka_host``, ``topic_name``,
    ``group_name``. ``kafka_host`` is stored but only used by external
    callers (see ``__main__``), not by this class itself.
    """

    # Keys the constructor requires. Previously a missing key became the
    # literal string "None" via str(conf.get(...)) and failed much later
    # with an opaque broker/ZooKeeper error.
    _REQUIRED_KEYS = (u'kafka_zk_url', u'kafka_host', u'topic_name', u'group_name')

    def __init__(self, conf):
        """Connect to Kafka and build producer + balanced consumer.

        :param conf: dict with the keys listed in ``_REQUIRED_KEYS``.
        :raises ValueError: if any required key is missing or None.
        """
        missing = [k for k in self._REQUIRED_KEYS if conf.get(k) is None]
        if missing:
            raise ValueError('missing required conf keys: %s' % ', '.join(missing))
        self.kafka_zk_url = str(conf.get(u'kafka_zk_url'))
        self.kafka_host = str(conf.get(u'kafka_host'))
        self.topic_name = str(conf.get(u'topic_name'))
        self.group_name = str(conf.get(u'group_name'))
        self.client = pykafka.KafkaClient(zookeeper_hosts=self.kafka_zk_url)
        self.topic = self.client.topics[self.topic_name]
        self.producer = self.get_asy_produce(self.topic)
        self.blance_consumer = self.get_blance_consumer(self.topic)

    def get_asy_produce(self, topic):
        """Return an asynchronous producer for *topic*.

        delivery_reports is deliberately disabled: with
        delivery_reports=True the producer queues a report per message
        that MUST be drained via get_delivery_report(), which this class
        never does — the queue would grow without bound (see pykafka
        producer docs). Fire-and-forget does not need reports.
        """
        return topic.get_producer(delivery_reports=False)

    def get_sync_produce(self, topic):
        """Return a synchronous producer (blocks until delivery) for *topic*."""
        return topic.get_sync_producer()

    def produce_message(self, message):
        """Asynchronously publish *message* (bytes) to the configured topic."""
        self.producer.produce(message)

    def get_consumer(self, topic):
        """Return a non-balanced simple consumer for *topic*."""
        return topic.get_simple_consumer()

    def get_blance_consumer(self, topic):
        """Return a ZooKeeper-balanced consumer in ``self.group_name``.

        auto_commit_enable=True: offsets are committed periodically in
        the background rather than by explicit commit_offsets() calls.
        """
        return topic.get_balanced_consumer(
            consumer_group=self.group_name,
            auto_commit_enable=True,
            zookeeper_connect=self.kafka_zk_url)

    def consume_message(self):
        """Non-blocking consume: return the next message, or None if none
        is currently available (consume(False) == block=False)."""
        return self.blance_consumer.consume(False)
if __name__ == '__main__':
    # Smoke test: construct the wrapper against a local broker/ZooKeeper,
    # then independently read the topic back with a plain simple consumer
    # connected straight to the broker (not through the wrapper).
    conf = {
        'kafka_zk_url': '127.0.0.1:2181',
        'kafka_host': '127.0.0.1:9092',
        'topic_name': 'datasaver_es',
        'group_name': 'es_gs',
    }
    kafka = Pykafka(conf=conf)
    client = pykafka.KafkaClient(hosts=conf['kafka_host'])
    topic = client.topics['datasaver_es']
    consumer = topic.get_simple_consumer()
    # Blocks forever tailing the topic; single-arg print() is identical
    # under Python 2 and also valid Python 3.
    for message in consumer:
        print(message.value)



