import scrapy
from scrapy import Request
import pandas as pd

from Project.DogRecruitment.DogRecruitment.items import FindJobItem, FindJobDetailItem


class Dog_Recruitment(scrapy.Spider):
    """Fetch the detail record for every job id listed in FindJob.csv.

    Each id is interpolated into the public job-detail endpoint and the
    JSON payload is mapped onto a :class:`FindJobDetailItem`.
    """

    name = "FindJobDetail"

    # Path to the CSV produced by the FindJob spider; override on the
    # instance/subclass if the file lives elsewhere.
    csv_path = r"D:\code\Python-Project\Scrapy\Project\DogRecruitment\FindJob.csv"
    # Detail-endpoint template; {} is filled with one job id from the CSV.
    api_url = "https://www.5iai.com/api/enterprise/job/public?id={}"

    # Scalar payload keys copied verbatim into the item.  NOTE: the
    # 'jobRequiredments' spelling matches the item/API schema — do not "fix" it.
    _SCALAR_FIELDS = (
        "id",
        "jobRequiredments",
        "welfare",
        "workplace",
        "deadline",
        "function",
        "publisher",
        "status",
        "publisherName",
        "enterpriseName",
        "messageTemplateId",
        "resumeCount",
    )

    def start_requests(self):
        """Yield one request per job id read from the CSV.

        The CSV is read here — at crawl time — rather than in the class body,
        so importing the module no longer performs file I/O and cannot fail
        merely because the CSV is absent.
        """
        data = pd.read_csv(self.csv_path, encoding="utf-8")
        for job_id in data["jobId"].values:
            yield Request(
                self.api_url.format(job_id),
                dont_filter=True,  # ids may repeat across runs; crawl them all
                callback=self.parse,
            )

    def parse(self, response, **kwargs):
        """Map one job-detail JSON response onto a FindJobDetailItem.

        Raises KeyError if the API payload is missing an expected field —
        a deliberate loud failure that signals a schema change.
        """
        r = response.json()["data"]

        item = FindJobDetailItem()
        for field in self._SCALAR_FIELDS:
            item[field] = r[field]

        # Flatten the label-object lists down to their display names.
        item["keywordList"] = [label["labelName"] for label in r["keywordList"]]
        item["skillsList"] = [label["labelName"] for label in r["skillsList"]]

        yield item
