#coding:utf-8
import json, time
from kafka import KafkaProducer,TopicPartition, KafkaConsumer
from kafka.errors import KafkaError

KAFKA_HOSTS = ["10.13.40.138:9092"]
topic = 'test'
group = 'news_search'



class ConsumerHandler(object):
    """Kafka consumer wrapper that reads one topic with explicit partition
    assignment.

    Every partition of *topic* is assigned via ``assign()``, so this
    consumer bypasses consumer-group rebalancing; ``group_id`` is used
    only for committing offsets.
    """

    def __init__(self, topic=topic, group=group, hosts=KAFKA_HOSTS):
        # Connection parameters kept as name-mangled "private" attributes.
        self.__topic = topic
        self.__group = group
        self.__hosts = hosts
        self.__closed = False
        # self.__topic,
        self.__consumer = KafkaConsumer(
                                group_id = self.__group,
                                bootstrap_servers = self.__hosts)
        
        # Listing topics forces a cluster-metadata refresh; without it the
        # partitions_for_topic() call below can fail.
        # (original note, translated: "after adding this line the error
        # stopped occurring? why?")
        self.__consumer.topics()
        rets = self.__consumer.partitions_for_topic(self.__topic)
        #print(rets) 
        ps = []
        for i in rets:
            ps.append(TopicPartition(self.__topic,i))
        self.__partitions = ps
        # Explicit assignment of every partition (no group rebalance).
        self.__consumer.assign(self.__partitions)
        # Message counter; read() commits synchronously every 500 reads.
        self.__cnt = 0

        # Snapshot of per-partition end/beginning offsets taken at startup.
        self.end_offsets = self.__consumer.end_offsets(self.__partitions)
        self.beginning_offsets = self.__consumer.beginning_offsets(self.__partitions)
        '''
        for p in self.__partitions:
            off = self.__consumer.position(p)
            print("position:", p, off)
        '''
        
    def consumer_data(self):
        """Yield messages indefinitely (blocking iteration over the consumer).

        Any exception is printed and swallowed, which silently ends the
        generator.
        """
        try:
            for message in self.__consumer:
                yield message

        except Exception as e:
            print(str(e))

    def read(self):
        """Poll for one message and return its raw value.

        Returns ``''`` (empty str, falsy) when the 1-second poll times out.
        NOTE(review): the success path returns the record's raw value
        (presumably bytes — see the commented-out ``.decode()`` below)
        while the timeout path returns str; callers should rely only on
        truthiness. Offsets are committed synchronously every 500 reads.
        """
        msg = self.__consumer.poll(timeout_ms=1000,max_records=1)
        if not msg: return ''
        self.__cnt += 1
        if self.__cnt % 500 == 0:
            self.commit()
        #print(list(msg.values())[0][0].topic, list(msg.values())[0][0].partition, list(msg.values())[0][0].offset)
        # poll() returns {TopicPartition: [records]}; with max_records=1
        # there is exactly one record in the first (and only) list.
        regmsg = list(msg.values())[0][0]
        
        # Remember the record's origin so callers can log it.
        self.read_topic     = regmsg.topic
        self.read_partition = regmsg.partition
        self.read_offset    = regmsg.offset

        return regmsg.value
        #return regmsg.value.decode()

    def count(self):
        '''
        Return how many messages remain between the current read position
        and the end of every assigned partition.
        (Original docstring translated from Chinese.)
        '''
        ret = 0
        ends = self.__consumer.end_offsets(self.__partitions)
        for p,off in ends.items():
            ret += off - self.__consumer.position(p) 
            #print(p, self.__consumer.position(p))
        return ret

    def seek(self,offset,pos):
        """Reposition every assigned partition, then commit the new offsets.

        ``pos`` works like a file ``whence``:
          0 -- beginning offset + ``offset``
          1 -- current position + ``offset``
          2 -- end offset + ``offset`` (use a negative offset to rewind)
        Raises Exception('not support') for any other value.
        """
        if pos == 2:
            offs = self.__consumer.end_offsets(self.__partitions)
            #print(offs)
            for p,off in offs.items():
                #print(p,off + offset)
                self.__consumer.seek(p,off + offset)
        elif pos == 0:
            offs = self.__consumer.beginning_offsets(self.__partitions)
            #print(offs)
            for p,off in offs.items():
                self.__consumer.seek(p,off + offset)
        elif pos == 1:
            for p in self.__partitions:
                off = self.__consumer.position(p)
                self.__consumer.seek(p,off + offset)
        else:
            raise Exception('not support')

        self.__consumer.commit()
            
    def __del__(self):
        """Destructor: only flips the closed flag.

        NOTE(review): the actual ``close()`` call is deliberately commented
        out, so the underlying consumer connection is never closed here.
        """
        #print(self.__consumer.topics())
        if not self.__closed and self.__consumer: 
            #self.__consumer.close()
            self.__closed = True

    def get_offsets(self):
        """Return {TopicPartition: current position} for every partition."""
        ret = {}
        for p in self.__partitions:
            ret[p] = self.__consumer.position(p)
        return ret

    def commit(self):
        """Synchronously commit the current offsets."""
        self.__consumer.commit()

    def commit_async(self):
        """Asynchronously commit the current offsets."""
        self.__consumer.commit_async()

class ProducerHandler(object):
    """Thin wrapper around KafkaProducer that always publishes to one topic.

    SASL/PLAIN authentication is enabled only when both *username* and
    *password* are supplied.
    """

    def __init__(self,topic,hosts=KAFKA_HOSTS,api_version=(0,8,1),username=None,password=None):
        """Create a producer bound to *topic*.

        :param topic: topic every write targets
        :param hosts: bootstrap broker list
        :param api_version: broker protocol version tuple
        :param username/password: optional SASL/PLAIN credentials
        """
        self.topic = topic
        kafka_args = {"api_version":api_version,"bootstrap_servers":hosts,"request_timeout_ms":7000}
        if username and password:
            kafka_args["security_protocol"] = 'SASL_PLAINTEXT'
            kafka_args["sasl_plain_username"] = username
            kafka_args["sasl_plain_password"] = password
            kafka_args["sasl_mechanism"] = 'PLAIN'
        self.producer = KafkaProducer(**kafka_args)

    def write(self,msg):
        """Send *msg* to the configured topic; returns a FutureRecordMetadata."""
        return self.producer.send(self.topic, msg)

    def write2partititon(self,msg,partition):
        """Send *msg* to a specific partition of the configured topic.

        Bug fix: the original passed the partition number as the message
        *key* (``key=partition``), which does not pin the target partition
        and fails because keys must be bytes unless a key_serializer is
        configured. ``KafkaProducer.send`` selects the partition via the
        ``partition`` keyword.
        """
        return self.producer.send(self.topic, value=msg, partition=partition)

    # Correctly-spelled alias; the historical misspelling keeps working.
    write2partition = write2partititon

    def close(self):
        """Flush pending messages and release the producer's connections."""
        self.producer.close()


class SysProducerHandler(ProducerHandler):
    """ProducerHandler preconfigured with the internal 'sinaRecmd' account.

    NOTE(review): credentials are hard-coded in the signature defaults;
    consider loading them from configuration or the environment instead.
    """

    def __init__(self,topic,hosts=KAFKA_HOSTS,api_version = (0,10,1),username='sinaRecmd',password='0dd844e301d3671cd30aee0e8e0fe732'):
        # Delegate straight to the base class; only the defaults differ.
        ProducerHandler.__init__(
            self,
            topic,
            hosts=hosts,
            api_version=api_version,
            username=username,
            password=password,
        )



