# -*- coding: utf-8 -*-
import scrapy
import json
from ..items import JobsItem
import re
import pymongo
import time
import requests
from lxml import etree


class LagouSpider(scrapy.Spider):
    """Scrape '数据科学家' (data scientist) job postings from lagou.com.

    Listing data comes from the positionAjax JSON endpoint and is written to
    MongoDB (spider_data.lagou_Jobs).  Each posting's detail page is then
    fetched and its job description merged into the same document, matched
    on ``positionId``.
    """

    name = "lagou"
    allowed_domains = ["lagou.com"]
    # Shared connection for the whole spider; all writes go to spider_data.lagou_Jobs.
    client = pymongo.MongoClient("localhost", 27017)
    db = client.spider_data
    # NOTE(review): retained for backward compatibility only.  It is no longer
    # used — responses arrive asynchronously, so per-request state must travel
    # in request.meta (see Get_positionId), not on the spider instance.
    positionId = None

    def start_requests(self):
        """Yield one listing-page request per page of search results."""
        for page_index in range(0, 30):
            url = 'https://www.lagou.com/jobs/positionAjax.json?px=default&needAddtionalResult=false&first=true&pn={page}&kd=数据科学家'
            url = url.format(page=page_index)
            yield scrapy.Request(url, callback=self.Get_positionId)
            # Debug limiter left in place to preserve behavior: only the first
            # page is requested.  Remove this break to crawl all 30 pages.
            break

    def Get_positionId(self, response):
        """Parse the listing JSON, store each posting, and request its detail page.

        The positionId is handed to Get_worktext via ``request.meta`` rather
        than spider state: Scrapy schedules requests concurrently, so a shared
        attribute would associate descriptions with the wrong posting.
        """
        payload = json.loads(response.text)
        data = payload['content']['positionResult']['result']
        for detail_data in data:
            position_id = detail_data['positionId']
            self.db.lagou_Jobs.insert_one({
                "companyFullName": detail_data['companyFullName'].strip(),
                "city": detail_data['city'].strip(),
                "companySize": detail_data['companySize'].strip(),
                "education": detail_data['education'].strip(),
                "financeStage": detail_data['financeStage'].strip(),
                "positionId": position_id,
                "positionName": detail_data['positionName'].strip(),
                "salary": detail_data['salary'].strip(),
                "secondType": detail_data['secondType'].strip(),
                "workYear": detail_data['workYear'].strip(),
                "firstType": detail_data['firstType'].strip(),
            })
            work_url = 'https://www.lagou.com/jobs/{positionId}.html'.format(positionId=position_id)
            yield scrapy.Request(work_url, callback=self.Get_worktext,
                                 meta={'positionId': position_id})
            # Debug limiter left in place to preserve behavior: only the first
            # posting per page is followed.  Remove this break to follow all.
            break

    def Get_worktext(self, response):
        """Extract the job description and merge it into the listing document."""
        position_id = response.meta['positionId']
        parts = response.xpath('//dd[@class="job_bt"]/div/p/text()').extract()
        job_describe = ' '.join(p.replace('\xa0', '').strip() for p in parts)
        # Fix: Collection.update() is deprecated/removed in modern pymongo, and
        # the original passed {"upsert": "True"} positionally into the boolean
        # `upsert` parameter.  update_one(..., upsert=True) is the correct form.
        self.db.lagou_Jobs.update_one(
            {"positionId": position_id},
            {"$set": {"Job_describe": job_describe}},
            upsert=True,
        )
