# -*- coding: utf-8 -*-

import json

import scpy2.util as util
from scpy2.crawlers.conveyor_driven_crawler import ConveyorDrivenCrawler


class NewsCrawler(ConveyorDrivenCrawler):
    """Crawler that turns queued task messages into news-search crawl jobs
    and packages the parsed results for upload to an S3 bucket."""

    def _parse_task_msg(self, msg):
        """Decode a JSON task message into crawl parameters.

        :param msg: JSON-encoded task payload (string/bytes).
        :return: ``None`` when the decoded payload is empty/null, otherwise
            a dict with the processor name and its crawl parameters.
        """
        tk = json.loads(msg)
        if not tk:
            return None
        return {
            'processor': 'toutiao',
            'crawl_params': {
                'keyword': tk['searchKey'],
                # Truthy status widens the window to -7, otherwise -1.
                # Presumably a day offset into the past — confirm against
                # util.get_date's contract.
                'time_limit': util.get_date(-7 if tk['status'] else -1)
            }
        }

    def _package_task_results(self, task, results):
        """Bundle crawl results into an S3 upload descriptor.

        :param task: task dict produced by ``_parse_task_msg``
            (its ``'processor'`` key is used in the object key).
        :param results: JSON-serializable crawl results.
        :return: dict with ``bucket_name``, ``object_key`` and serialized
            ``data`` ready for upload.
        """
        object_key = 'parsed/%s/%s-%s.json' % (
            util.current_date('%Y/%m/%d'), task['processor'], util.current_ts())
        return {
            'bucket_name': self._cfg['s3_bucket'],
            'object_key': object_key,
            # FIX: dropped encoding='utf-8' — it is not a valid json.dumps
            # keyword on Python 3 (raises TypeError) and was the no-op
            # default on Python 2. ensure_ascii=False already emits
            # unescaped unicode text.
            'data': json.dumps(results, ensure_ascii=False)
        }


if __name__ == '__main__':
    # Script entry point: start the crawler's processing loop.
    NewsCrawler().run()
