#!/usr/bin/env python3
# encoding: utf-8
# from kafka import KafkaProducer
from kafka import KafkaConsumer
import json
import sys


# --- Logging setup ---
import logging
import logging.handlers
# Handler that prints INFO and above to sys.stderr.
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter(
    '%(asctime)s - %(levelname)s - %(message)s'))

# NOTE: originally named 'producer', but this script is a Kafka *consumer*
# (see the KafkaConsumer below) -- renamed for accuracy. The format string
# does not include %(name)s, so log output is otherwise unchanged.
logger = logging.getLogger('consumer')
logger.setLevel(logging.INFO)
logger.addHandler(console)





# Consumer group id: taken from the first CLI argument when provided,
# otherwise fall back to the default group.
group_id = sys.argv[1] if len(sys.argv) > 1 else 'group1'

logger.info('start consumer {}'.format(group_id))
# docker run --rm --name "test" -v "/Users/ben/Documents/docker/kafka/test:/home/app/www" py3web:1 python /home/app/www/consumer.py
    
def _json_value(raw):
    """Deserialize a raw Kafka payload (UTF-8 JSON bytes) into Python objects."""
    return json.loads(raw.decode('utf-8'))


# Brokers for the 'cache-urls' topic: three listeners on the same host.
_BOOTSTRAP_SERVERS = [
    '192.168.102.239:9093',
    '192.168.102.239:9094',
    '192.168.102.239:9095',
]

# Join the given consumer group and subscribe; message values arrive
# already parsed from JSON via the deserializer above.
consumer = KafkaConsumer('cache-urls',
                         group_id=group_id,
                         bootstrap_servers=_BOOTSTRAP_SERVERS,
                         value_deserializer=_json_value)

# consumer = KafkaConsumer('cache_urls',
#                          group_id=group_id,
#                          bootstrap_servers=[
#                              '192.168.30.78:9092',
#                              '192.168.30.78:9093',
#                              '192.168.30.78:9094'],
#                          value_deserializer=lambda m: json.loads(m.decode('utf-8')))

# consumer = KafkaConsumer('cache_urls', group_id=group_id, bootstrap_servers=[
#                          '127.0.0.1:9092'], value_deserializer=lambda m: json.loads(m.decode('utf-8')))



# producer = KafkaProducer(bootstrap_servers=['192.168.99.100:9092'])
# Main consume loop: blocks forever, logging every message received.
# `msg.value` is already a deserialized Python object here -- the consumer's
# value_deserializer parsed the JSON payload.
for msg in consumer:
    # Pass lazy %-style arguments instead of pre-formatting with the `%`
    # operator: the string is only built if the record is actually emitted.
    logger.info("%s-%d-%d value=%s",
                msg.topic, msg.partition, msg.offset, msg.value)
