import scrapy
from scrapy import Request
from Project.DogRecruitment.DogRecruitment.items import FindPeopleItem


class Dog_Recruitment(scrapy.Spider):
    """Paginate through 5iai.com's public resume API and yield one
    FindPeopleItem per resume record found on each page."""

    name = "FindPeople"

    # Record keys copied verbatim into the item under the same name;
    # the record's 'id' is handled separately (stored as 'resumeId').
    _FIELDS = (
        "username",
        "gender",
        "jobStatus",
        "exp",
        "expectPosition",
        "willSalaryStart",
        "willSalaryEnd",
        "city",
        "publishTime",
        "updateTime",
    )

    # Pages 1..1092 of the paginated endpoint, 10 records per page.
    start_urls = [
        f"https://www.5iai.com/api/resume/baseInfo/public/es?pageSize=10&pageNumber={i}&function=&skills=&workplace=&keyword="
        for i in range(1, 1093)
    ]

    def start_requests(self):
        """Issue one request per listing page.

        dont_filter=True bypasses Scrapy's duplicate-request filter so
        every page URL is fetched even if it looks like one already seen.
        """
        for url in self.start_urls:
            yield Request(url, dont_filter=True, callback=self.parse)

    def parse(self, response):
        """Turn one JSON page of resume records into FindPeopleItem objects.

        Expects the payload shape {"data": {"content": [record, ...]}};
        a KeyError from a changed payload is logged by Scrapy, which then
        moves on to the next response.
        """
        for record in response.json()["data"]["content"]:
            item = FindPeopleItem()
            item["resumeId"] = record["id"]
            for field in self._FIELDS:
                item[field] = record[field]
            yield item



