# -*- coding: utf-8 -*-
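"""Enumerate monitored companies from the crawler monitor service and queue
one task per company name on the worker conveyor."""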

import json

import requests

from scpy2.worker import Worker


class ResumeTasker(Worker):
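    """Worker that pages through the monitored company list and sends each
    company name to the conveyor as a JSON-encoded task."""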

    def run(self):
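        """Fetch the paged company list and emit one JSON task per company."""
        # Alternative source kept for reference: read the company names
        # directly from the database via the sqlexecutor component.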
        # sqle = self._components['sqlexecutor']
        # sqle.sql = """
        #             select company_name from recruitment_monitor_company
        #            """
        # result = sqle.execute_dataset()
        # for item in result:
        #     self._components['conveyor'].send(json.dumps({'company_name':item.get('company_name')}))
        # Page through the monitor's company list until the last page is reached.
        index = 1
        while True:
            params = {
                'index': index,
                'size': 20,
            }
            response = requests.get(
                'http://192.168.31.116:6060/api/crawler/monitor/companies/page',
                params=params,
            )
            result = response.json()
            # Each company name on the current page becomes one task on the conveyor.
            for company_name in result['content']:
                self._components['conveyor'].send(json.dumps({'company_name': company_name}))
            if not result['last']:
                index += 1
            else:
                break


if __name__ == '__main__':
    worker = ResumeTasker()
    worker.run()
