import scrapy
from scrapy import Request
import pandas as pd

from Project.DogRecruitment.DogRecruitment.items import FindJobItem


class Dog_Recruitment(scrapy.Spider):
    """Spider that crawls the 5iai.com public job-listing API page by page
    and yields one ``FindJobItem`` per job record."""

    name = "FindJob"

    # One URL per result page: 158 pages of 10 records each.
    # wageList=%255B%255D is a double-URL-encoded empty JSON list ("[]").
    start_urls = [
        "https://www.5iai.com/api/enterprise/job/public/es"
        f"?pageSize=10&pageNumber={i}"
        "&willNature=&function=&wageList=%255B%255D&workplace=&keyword="
        for i in range(1, 159)
    ]

    # Top-level JSON keys copied verbatim from each job record into the item.
    # Kept as a class constant so the mapping is declared once, not eleven times.
    _DIRECT_FIELDS = (
        'id', 'publishTime', 'updateTime', 'willNature', 'positionName',
        'minimumWage', 'maximumWage', 'payMethod', 'exp',
        'educationalRequirements', 'count',
    )

    def start_requests(self):
        """Yield one Request per page URL.

        ``dont_filter=True`` bypasses Scrapy's duplicate-request filter so
        every page is fetched even if URLs look similar to the deduplicator.
        """
        for url in self.start_urls:
            yield Request(url, dont_filter=True, callback=self.parse)

    def parse(self, response, **kwargs):
        """Parse one API page and yield a populated ``FindJobItem`` per record.

        Uses direct key indexing (not ``.get``) deliberately: a missing key
        raises ``KeyError``, surfacing API schema changes loudly rather than
        silently emitting incomplete items.
        """
        for record in response.json()['data']['content']:
            item = FindJobItem()
            for field in self._DIRECT_FIELDS:
                item[field] = record[field]
            # enterpriseId lives nested inside the address sub-object.
            item['enterpriseId'] = record['enterpriseAddress']['enterpriseId']
            yield item