# -*- coding: utf-8 -*-

import json

import scpy2.util as util
from scpy2.crawlers.conveyor_driven_crawler import ConveyorDrivenCrawler


class RecruitmentCrawler(ConveyorDrivenCrawler):
    """Crawler worker that consumes recruitment-task messages from a conveyor,
    runs the configured crawl processor for each task, and sends the parsed
    results back onto the conveyor one item at a time.
    """

    def _parse_task_msg(self, msg):
        """Decode one raw conveyor message into a crawl-task dict.

        :param msg: JSON-encoded message body (str/bytes).
        :return: ``{'processor': ..., 'crawl_params': {...}}`` or ``None``
            when the decoded payload is empty/falsy.
        :raises ValueError: on malformed JSON (callers catch this).
        """
        tk = json.loads(msg)

        return None if not tk else {
            'processor': 'lagou',
            'crawl_params': {
                'company_name': tk.get('company_name')
            }
        }

    def _package_task_results(self, task, results):
        """Wrap crawl results into an S3-style envelope (bucket, key, JSON blob).

        :param task: task dict produced by :meth:`_parse_task_msg`.
        :param results: crawl results; must be JSON-serializable.
        """
        return {
            'bucket_name': self._cfg['s3_bucket'],
            # Key is date-partitioned; util.current_ts() keeps keys unique per call.
            'object_key': 'parsed/%s/%s-%s.json' % (
                util.current_date('%Y/%m/%d'), task['processor'], util.current_ts()),
            # BUGFIX: the former encoding='utf-8' kwarg is a TypeError on
            # Python 3; it was the Python 2 default, so dropping it leaves
            # the serialized output unchanged.
            'data': json.dumps(results, ensure_ascii=False)
        }

    def _run(self, params=None):
        """One work cycle: receive messages, crawl each task, ship results.

        Receive/send failures are logged and optionally re-raised when the
        ``raise_conveyor_exception`` config flag is set; per-message parse or
        crawl failures only skip that message.
        """
        msg = []
        try:
            msg = self._components['conveyor'].receive()
        except Exception as e:
            self.logger.error('[conveyor receiving]: %s' % e)
            if self._cfg.get('raise_conveyor_exception', False) is True:
                raise  # bare raise preserves the original traceback

        if not msg:
            return

        for x in msg:
            self.logger.info('[accepted]: %s' % util.ustr(x))

            tk = None
            try:
                tk = self._parse_task_msg(x)
            except Exception as e:
                self.logger.warning('[parsing]: %s' % e)

            if not tk:
                # BUGFIX: was `return`, which silently dropped every message
                # after the first unparseable one; skip just this message,
                # consistent with the "no data" branch below.
                continue

            data = None
            try:
                data = self.__crawl(tk)
                self.logger.debug('[fetched]: %s' % util.ustr(data))
            except Exception as e:
                self.logger.warning('[crawling]: %s' % e)

            if not data:
                self.logger.info('No data was fetched.')
                continue

            try:
                # Round-trip through the packaged JSON so each element is sent
                # as its own conveyor message under a fresh object key.
                data = self._package_task_results(tk, data)
                for item in json.loads(data['data']):
                    send_info = {
                        'bucket_name': self._cfg['s3_bucket'],
                        'object_key': 'parsed/%s/%s-%s.json' % (
                            util.current_date('%Y/%m/%d'), tk['processor'], util.current_ts()),
                        'data': json.dumps(item, ensure_ascii=False, indent=1)
                    }
                    self._components['conveyor'].send(send_info)
                    self.logger.debug('[sent]: %s' % send_info)
            except Exception as e:
                self.logger.error('[conveyor sending]: %s' % e)
                if self._cfg.get('raise_conveyor_exception', False) is True:
                    raise  # bare raise preserves the original traceback

    def __crawl(self, tk):
        """Load the crawl processor named by the task and run it."""
        cp_name = tk.get('processor', None)
        cp = self._load_crawl_processor(cp_name)
        return cp.crawl(tk['crawl_params'])

if __name__ == '__main__':
    # Script entry point: build the worker and hand over control.
    RecruitmentCrawler().run()
