import json
import datetime
import pika
from scpy.rabbit_asy_producer import AsyPublisher
from scpy.logger import get_logger

from pymongo import MongoClient
# Module-level logger named after this source file.
logger = get_logger(__file__)
# Handle to the collection scanned by StockProducer.publish_message.
# NOTE(review): hard-coded MongoDB host/port — consider moving to config or env vars.
COLL = MongoClient('10.132.23.104', 27017)['stockProj']['preDisclosure']
class StockProducer(AsyPublisher):
    """Publisher that anonymizes stock codes in the ``preDisclosure``
    collection and emits one RabbitMQ message per document with a file key."""

    def publish_message(self):
        """Scan every document in COLL, scrub its stock code, and publish
        a JSON message describing the disclosure file.

        Side effects:
          * rewrites ``stockCode`` to ``'000000'`` in MongoDB for each
            document that carries one;
          * publishes to ``self.QUEUE`` on the default exchange.
        """
        count = 0
        # batch_size(20) keeps per-fetch payloads small; a very long scan
        # may still hit the server-side cursor timeout — TODO confirm.
        for row in COLL.find().batch_size(20):
            count += 1
            if count % 100 == 0:
                # Lazy %-args: the string is only built if the record is emitted.
                logger.info('%d', count)

            # Only touch the DB when there is actually a stock code to scrub.
            # (Previously the whole document — _id included — was re-sent in
            # $set on every iteration, even when nothing changed.)
            if row.get('stockCode'):
                COLL.find_one_and_update(
                    {'_id': row.get('_id')},
                    {'$set': {'stockCode': '000000'}}
                )

            file_key = row.get('fileKey', '')
            if not file_key:
                # Nothing to publish without a file key.
                continue

            message = {
                'fileKey': file_key,
                'announcementTime': row.get('announcementTime'),
                'type': row.get('type'),
                'stockCode': '000000',
                'columnId': row.get('columnId'),
                'title': row.get('title'),
                'orgId': row.get('orgId'),
            }

            # Message content is duplicated into the AMQP headers — presumably
            # so consumers can route without parsing the body; verify with consumers.
            properties = pika.BasicProperties(app_id='example-publisher',
                                              content_type='application/json',
                                              headers=message)
            # '' = default exchange; the routing key is the queue name.
            self._channel.basic_publish(
                '',
                self.QUEUE,
                json.dumps(message),
                properties)


if __name__ == '__main__':
    # NOTE(review): credentials are embedded in the URL — consider loading
    # them from environment variables or a config file instead.
    broker_url = 'amqp://sc-admin:1qaz2wsx@10.51.29.242:5672/%2F'
    producer = StockProducer(amqp_url=broker_url, queue_name='stockProj')
    producer.run()
