#!~/anaconda3/bin/python
# encoding: utf-8
from kafka import KafkaProducer
# from kafka import KafkaConsumer
import json
import time


# --- Logging setup ---
import logging
import logging.handlers

# Handler that prints INFO-and-above records to sys.stderr.
console = logging.StreamHandler()
console.setLevel(logging.INFO)
_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
console.setFormatter(_formatter)

# Named module logger used by the send loop below.
logger = logging.getLogger('producer')
logger.setLevel(logging.INFO)
logger.addHandler(console)




# Broker endpoints of the target Kafka cluster.
# (Other clusters previously used here: kafka-node1..3:9092-9094, 127.0.0.1:9092.)
_BOOTSTRAP_SERVERS = [
    '192.168.102.239:9093',
    '192.168.102.239:9094',
    '192.168.102.239:9095',
]


def _serialize_value(value):
    """Encode a message value as UTF-8 JSON bytes for the wire."""
    return json.dumps(value).encode('utf-8')


# Producer shared by the whole script; every value is JSON-serialized.
producer = KafkaProducer(bootstrap_servers=_BOOTSTRAP_SERVERS,
                         value_serializer=_serialize_value)

# Send 1000 sequential JSON messages ({'count': i}) to partition 0 of the
# 'cache-urls' topic, one per second, confirming delivery of each message
# before sending the next.
logger.info('start producer')
try:
    for i in range(1000):
        # Synchronous send: block (up to 1s) on the ack so a delivery
        # failure raises KafkaError here instead of being silently dropped.
        future = producer.send('cache-urls', value={'count': i}, partition=0)
        result = future.get(timeout=1)
        # Lazy %-style args: the message is only formatted if emitted.
        logger.info('send:%s', i)
        time.sleep(1)
finally:
    # Flush any buffered records and release sockets/threads even when a
    # send fails or the loop is interrupted — the original leaked these.
    producer.close()
    
