# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import csv

class DogrecruitmentPipeline:
    """Default no-op pipeline: forwards every item unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; hand the item to the next stage.
        return item



class WriteToCsvPipeline:
    """Write each scraped resume-summary item as one row of FindPeople.csv.

    The output file is opened and the header row written when the pipeline
    is instantiated; close_spider() releases the file when the crawl ends.
    String fields default to "" and numeric fields to -1 when missing.
    """

    def __init__(self):
        # newline='' is required by the csv module so it controls line
        # endings itself (avoids blank lines on Windows).
        self.file = open('FindPeople.csv', 'w', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)
        # writerow (not writerows) — this is a single header row.
        self.writer.writerow(['resumeId', 'username', 'gender', 'jobStatus',
                              'exp', 'expectPosition', 'willSalaryStart',
                              'willSalaryEnd', 'city', 'publishTime',
                              'updateTime'])

    def process_item(self, item, spider):
        """Append one CSV row for *item* and pass the item on unchanged."""
        # Field order must match the header written in __init__.
        data = [item.get('resumeId', ""),
                item.get('username', ""),
                item.get('gender', -1),
                item.get('jobStatus', -1),
                item.get('exp', ""),
                item.get('expectPosition', ""),
                item.get('willSalaryStart', -1),
                item.get('willSalaryEnd', -1),
                item.get('city', ""),
                item.get('publishTime', ""),
                item.get('updateTime', "")]
        self.writer.writerow(data)
        return item

    def close_spider(self, spider):
        # Flush and release the CSV file when the spider closes.
        self.file.close()


class WriteToCsvPipeline2:
    """Write each full resume-detail item as one row of FindPeopleDetail.csv.

    The output file is opened and the header row written when the pipeline
    is instantiated; close_spider() releases the file when the crawl ends.
    String fields default to "" and willNature (numeric) defaults to -1.
    """

    def __init__(self):
        # newline='' is required by the csv module so it controls line
        # endings itself (avoids blank lines on Windows).
        self.file = open('FindPeopleDetail.csv', 'w', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)
        # writerow (not writerows) — this is a single header row.
        self.writer.writerow(['resumeId',
                              'resumeName',
                              'birthday',
                              'address',
                              'arrivalTime',
                              'politicalStatus',
                              'selfEvaluation',
                              'expectIndustry',
                              'willNature',
                              'keywordList',
                              'educationExperienceList',
                              'projectExperienceList',
                              'competitionExperienceList',
                              'trainingExperienceList',
                              'skillList',
                              'languageList',
                              'certList',
                              'workExperienceList',
                              'attachmentList'])

    def process_item(self, item, spider):
        """Append one CSV row for *item* and pass the item on unchanged."""
        # Field order must match the header written in __init__.
        data = [item.get('resumeId', ""),
                item.get('resumeName', ""),
                item.get('birthday', ""),
                item.get('address', ""),
                item.get('arrivalTime', ""),
                item.get('politicalStatus', ""),
                item.get('selfEvaluation', ""),
                item.get('expectIndustry', ""),
                item.get('willNature', -1),
                item.get('keywordList', ""),
                item.get('educationExperienceList', ""),
                item.get('projectExperienceList', ""),
                item.get('competitionExperienceList', ""),
                item.get('trainingExperienceList', ""),
                item.get('skillList', ""),
                item.get('languageList', ""),
                item.get('certList', ""),
                item.get('workExperienceList', ""),
                item.get('attachmentList', "")]
        self.writer.writerow(data)
        return item

    def close_spider(self, spider):
        # Flush and release the CSV file when the spider closes.
        self.file.close()


class WriteToCsvPipeline3:
    """Write each job-listing (FindJob) item as one row of FindJob.csv.

    The output file is opened and the header row written when the pipeline
    is instantiated; close_spider() releases the file when the crawl ends.
    String fields default to "" and numeric fields to -1 when missing.
    NOTE: the job id arrives on the item under the key 'id' but is exported
    under the clearer column name 'jobId'.
    """

    def __init__(self):
        # newline='' is required by the csv module so it controls line
        # endings itself (avoids blank lines on Windows).
        self.file = open('FindJob.csv', 'w', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)
        # writerow (not writerows) — this is a single header row.
        self.writer.writerow(['jobId',
                              'publishTime',
                              'updateTime',
                              'willNature',
                              'positionName',
                              'minimumWage',
                              'maximumWage',
                              'payMethod',
                              'exp',
                              'educationalRequirements',
                              'count',
                              'enterpriseId'])

    def process_item(self, item, spider):
        """Append one CSV row for *item* and pass the item on unchanged."""
        # Field order must match the header written in __init__.
        data = [item.get('id', ""),
                item.get('publishTime', ""),
                item.get('updateTime', ""),
                item.get('willNature', -1),
                item.get('positionName', ""),
                item.get('minimumWage', -1),
                item.get('maximumWage', -1),
                item.get('payMethod', -1),
                item.get('exp', ""),
                item.get('educationalRequirements', -1),
                item.get('count', -1),
                # BUGFIX: original read item.get('enterpriseId'"") — a missing
                # comma concatenated the literals and silently dropped the
                # intended "" default.
                item.get('enterpriseId', "")]
        self.writer.writerow(data)
        return item

    def close_spider(self, spider):
        # Flush and release the CSV file when the spider closes.
        self.file.close()


class WriteToCsvPipeline4:
    """Write each job-detail (FindJobDetail) item as one row of FindJobDetail.csv.

    The output file is opened and the header row written when the pipeline
    is instantiated; close_spider() releases the file when the crawl ends.
    String fields default to "" and numeric fields to -1 when missing.
    NOTE: the job id arrives on the item under the key 'id' but is exported
    under the clearer column name 'jobId'.
    """

    def __init__(self):
        # newline='' is required by the csv module so it controls line
        # endings itself (avoids blank lines on Windows).
        self.file = open('FindJobDetail.csv', 'w', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)
        # writerow (not writerows) — this is a single header row.
        self.writer.writerow(["jobId",
                              "jobRequiredments",
                              "welfare",
                              "workplace",
                              "deadline",
                              "function",
                              "publisher",
                              "status",
                              "publisherName",
                              "enterpriseName",
                              "messageTemplateId",
                              "keywordList",
                              "skillsList",
                              "resumeCount"])

    def process_item(self, item, spider):
        """Append one CSV row for *item* and pass the item on unchanged."""
        # Field order must match the header written in __init__.
        data = [item.get("id", ""),
                item.get("jobRequiredments", ""),
                item.get("welfare", ""),
                item.get("workplace", ""),
                item.get("deadline", ""),
                item.get("function", ""),
                item.get("publisher", ""),
                item.get("status", -1),
                item.get("publisherName", ""),
                item.get("enterpriseName", ""),
                item.get("messageTemplateId", ""),
                item.get("keywordList", ""),
                item.get("skillsList", ""),
                item.get("resumeCount", -1)]
        self.writer.writerow(data)
        return item

    def close_spider(self, spider):
        # Flush and release the CSV file when the spider closes.
        self.file.close()


class WriteToCsvPipeline5:
    """Write each company-detail item as one row of CompanyDetail.csv.

    (The original comment here said "FindJobDetail" — a copy-paste error;
    this pipeline writes company details, not job details.)

    The output file is opened and the header row written when the pipeline
    is instantiated; close_spider() releases the file when the crawl ends.
    String fields default to "" and numeric fields to -1 when missing.
    """

    def __init__(self):
        # newline='' is required by the csv module so it controls line
        # endings itself (avoids blank lines on Windows).
        self.file = open('CompanyDetail.csv', 'w', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)
        # writerow (not writerows) — this is a single header row.
        self.writer.writerow(["id",
                              "enterpriseId",
                              "logo",
                              "shortName",
                              "industry",
                              "econKind",
                              "startDate",
                              "registCapi",
                              "personScope",
                              "website",
                              "email",
                              "phone",
                              "slogan",
                              "introduction",
                              "photo",
                              "label",
                              "postCode",
                              "recruitJobNum",
                              "totalPublicJobNum",
                              "provinceCode",
                              "cityCode",
                              "regionCode",
                              "detailedAddress",
                              "remarks"])

    def process_item(self, item, spider):
        """Append one CSV row for *item* and pass the item on unchanged."""
        # Field order must match the header written in __init__.
        data = [item.get("id", ""),
                item.get("enterpriseId", ""),
                item.get("logo", ""),
                item.get("shortName", ""),
                item.get("industry", ""),
                item.get("econKind", ""),
                item.get("startDate", ""),
                item.get("registCapi", ""),
                item.get("personScope", ""),
                item.get("website", ""),
                item.get("email", ""),
                item.get("phone", ""),
                item.get("slogan", ""),
                item.get("introduction", ""),
                item.get("photo", ""),
                item.get("label", ""),
                item.get("postCode", ""),
                item.get("recruitJobNum", -1),
                item.get("totalPublicJobNum", -1),
                item.get("provinceCode", ""),
                item.get("cityCode", ""),
                item.get("regionCode", ""),
                item.get("detailedAddress", ""),
                item.get("remarks", "")]
        self.writer.writerow(data)
        return item

    def close_spider(self, spider):
        # Flush and release the CSV file when the spider closes.
        self.file.close()
