1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
# _*_  coding:utf-8 _*_
from confluent_kafka import avro
from confluent_kafka.avro import AvroProducer
import threading
import json
import time
import sys
reload(sys)
sys.setdefaultencoding('utf-8')


# Guards shared module state (intended for the producer cache below);
# NOTE(review): nothing in this file currently acquires it.
c = threading.RLock()
# Broker hosts, schema-registry endpoint, broker port and batch size for the
# production crawler Kafka cluster.
kafka_config = {'host': ['prd-pg-crawler-kafka-141.weiboyi.com', 'prd-pg-crawler-kafka-142.weiboyi.com', 'prd-pg-crawler-kafka-143.weiboyi.com', 'prd-pg-crawler-kafka-144.weiboyi.com'], 'schema_registry_url': 'http://pg-schema-registry.weiboyi.com:28081', 'port': 9092, 'batch_count': 100}
# kafka_config = {'host': ['192.168.1.141', '192.168.1.142', '192.168.1.143'], 'port': 9092, 'schema_registry_url': 'http://pg-schema-registry.weiboyi.com:28081', 'batch_count': 100}
# Running count of produced items; NOTE(review): never incremented in this file.
item_count = 0
# Cache of AvroProducer instances keyed by topic; NOTE(review): declared but
# never populated or read by the visible code.
avroProducer_dict = {}


def store(item, spider_name):
    """Avro-serialize *item* and publish it to the test Kafka topic.

    :param item: mapping matching the value schema below — keys
        ``media_caption`` (str or None), ``fetched_time`` (int),
        ``media_created_at`` (int), ``media_id`` (str), ``media_content`` (str).
    :param spider_name: name of the originating spider (only printed here).
    :raises: propagates any ``confluent_kafka`` serialization/delivery error
        so the caller can see the failure instead of it being swallowed.
    """
    print(item)
    print(spider_name)
    topic_name = 'Test_Media_Info_data'
    # Message key: current epoch second as a string (py2 `unicode`).
    kafka_key = unicode(int(time.time()))
    key_schema = {'type': 'record', 'namespace': 'regular.daily.test', 'name': 'media_info', 'fields': [{'type': 'string', 'name': 'name'}]}
    value_schema = {
                  'type': 'record',
                  'namespace': 'regular.daily.test',
                  'name': 'media_info',
                  'fields': [
                    {
                      'type': [
                        'null',
                        'string'
                      ],
                      'name': 'media_caption'
                    },
                    {
                      'type': 'int',
                      'name': 'fetched_time'
                    },
                    {
                      'type': 'int',
                      'name': 'media_created_at'
                    },
                    {
                      'type': 'string',
                      'name': 'media_id'
                    },
                    {
                      'type': 'string',
                      'name': 'media_content'
                    }
                  ]
                }
    value_schema = avro.loads(json.dumps(value_schema))
    key_schema = avro.loads(json.dumps(key_schema))

    # Constructing an AvroProducer opens broker connections and talks to the
    # schema registry, so build it once per topic and reuse it.  The lock
    # serializes cache population when called from concurrent spider threads.
    with c:
        avroProducer = avroProducer_dict.get(topic_name)
        if avroProducer is None:
            avroProducer = AvroProducer(
                {
                    'bootstrap.servers': ','.join(
                        '{}:{}'.format(host, kafka_config['port'])
                        for host in kafka_config['host']
                    ),
                    'schema.registry.url': kafka_config['schema_registry_url'],
                    'log.connection.close': 'false',
                    'log_level': '4',
                    'queue.buffering.max.ms': '1000',
                },
                default_key_schema=key_schema,
                default_value_schema=value_schema,
            )
            avroProducer_dict[topic_name] = avroProducer

    kafka_key_dict = {"name": kafka_key}
    avroProducer.produce(topic=topic_name, value=dict(item), key=kafka_key_dict)
    # Block until the message is delivered so this test script fails loudly.
    avroProducer.flush()





if __name__ == '__main__':
    # Smoke test: push one hand-built record through the producer.
    sample = {
        "media_id": u"12334",
        "media_content": u"测试内容",
        "fetched_time": 1545301058,
        "media_created_at": -1,
        "media_caption": None,
    }
    store(sample, 'avro_kafka_test_spider')