# _*_  coding:utf-8 _*_
from confluent_kafka import avro
from confluent_kafka.avro import AvroProducer
import threading
import json
import time
import sys

# Force UTF-8 as the process-wide default encoding.  This is a Python 2-only
# hack: `reload` is a builtin only on Python 2, and `setdefaultencoding` only
# exists after reloading `sys`.  Guard it so the module also imports cleanly
# on Python 3, where UTF-8 is already the default text encoding.
try:
    reload(sys)
    sys.setdefaultencoding('utf-8')
except NameError:
    # Python 3: `reload` is not a builtin; no action needed.
    pass

# Lock guarding access to the shared producer cache below.
c = threading.RLock()
# Kafka cluster/config for the crawler pipeline.  NOTE(review): only
# 'schema_registry_url' is read by store(); 'host'/'port'/'batch_count'
# are currently unused — confirm whether the producer should use them.
kafka_config = {'host': ['prd-pg-crawler-kafka-141.weiboyi.com', 'prd-pg-crawler-kafka-142.weiboyi.com',
                         'prd-pg-crawler-kafka-143.weiboyi.com', 'prd-pg-crawler-kafka-144.weiboyi.com'],
                'schema_registry_url': 'http://pg-schema-registry.weiboyi.com:28081', 'port': 9092, 'batch_count': 100}
# kafka_config = {'host': ['192.168.1.141', '192.168.1.142', '192.168.1.143'], 'port': 9092, 'schema_registry_url': 'http://pg-schema-registry.weiboyi.com:28081', 'batch_count': 100}
item_count = 0
# Cache of AvroProducer instances keyed by topic, so connections and schema
# registrations are reused instead of recreated per message.
avroProducer_dict = {}


def _get_producer(topic_name):
    """Return a cached AvroProducer for *topic_name*, creating it on first use.

    Constructing an AvroProducer opens broker/schema-registry connections and
    parses the schemas, so one instance per topic is cached in the module-level
    ``avroProducer_dict`` (guarded by the ``c`` lock) instead of being rebuilt
    for every message — per-call construction also defeated the
    ``queue.buffering.max.ms`` batching setting.
    """
    with c:
        producer = avroProducer_dict.get(topic_name)
        if producer is not None:
            return producer

        # Avro record schema for the message key (a single 'name' field).
        # NOTE(review): key and value records share the fullname
        # 'regular.daily.test.media_info' with different fields; the registry
        # keeps separate key/value subjects so this works, but distinct
        # record names would be clearer.
        key_schema = {'type': 'record', 'namespace': 'regular.daily.test', 'name': 'media_info',
                      'fields': [{'type': 'string', 'name': 'name'}]}
        # Avro record schema for the message value; 'media_caption' is the
        # only nullable field.
        value_schema = {
            'type': 'record',
            'namespace': 'regular.daily.test',
            'name': 'media_info',
            'fields': [
                {
                    'type': [
                        'null',
                        'string'
                    ],
                    'name': 'media_caption'
                },
                {
                    'type': 'int',
                    'name': 'fetched_time'
                },
                {
                    'type': 'int',
                    'name': 'media_created_at'
                },
                {
                    'type': 'string',
                    'name': 'media_id'
                },
                {
                    'type': 'string',
                    'name': 'media_content'
                }
            ]
        }
        producer = AvroProducer(
            # NOTE(review): bootstrap is hard-coded to localhost while
            # kafka_config['host']/['port'] go unused — confirm which
            # cluster is intended before changing.
            {'bootstrap.servers': "127.0.0.1:9092", 'schema.registry.url': kafka_config['schema_registry_url'],
             'log.connection.close': 'false', 'log_level': '4', 'queue.buffering.max.ms': '1000'},
            default_key_schema=avro.loads(json.dumps(key_schema)),
            default_value_schema=avro.loads(json.dumps(value_schema)))
        avroProducer_dict[topic_name] = producer
        return producer


def store(item, spider_name):
    """Avro-encode *item* and publish it to the Kafka test topic.

    Parameters:
        item: mapping matching the value schema (media_caption, fetched_time,
              media_created_at, media_id, media_content).
        spider_name: name of the producing spider; only printed for tracing.

    The message key is the current epoch second wrapped in the key record.
    flush() is called per message so delivery completes before returning.
    """
    print(item)
    print(spider_name)
    topic_name = 'my_favorite_topic2'
    kafka_key = unicode(int(time.time()))  # Python 2 text type, per file style
    producer = _get_producer(topic_name)
    producer.produce(topic=topic_name, value=dict(item), key={"name": kafka_key})
    producer.flush()


if __name__ == '__main__':
    # Smoke test: publish one hand-built item through store().
    sample_item = {
        "media_id": u"12334",
        "media_content": u"测试",
        "fetched_time": 1545301058,
        "media_created_at": -1,
        "media_caption": None,
    }
    store(sample_item, 'avro_kafka_test_spider')
