# -*- coding: utf-8 -*-
import scrapy

import uuid
import pandas as pd
import json
from json.decoder import JSONDecodeError
from bs4 import BeautifulSoup
from urllib import parse
from ..items import PositionItem



class PositionSpider(scrapy.Spider):
    """Crawl Lagou job postings via the positionAjax JSON endpoint.

    For every (city, page-count) pair listed in ``LagouCityPage.csv`` this
    spider POSTs one request per result page and yields a ``PositionItem``
    for each job entry found in the JSON response.
    """

    name = 'la'
    start_urls = ['http://www.lagou.com/']

    base_url = 'https://www.lagou.com/jobs/positionAjax.json?'

    # Fields copied verbatim from each entry of the Ajax response.
    # NOTE: 'industryLables' is Lagou's own (misspelled) response key — keep as-is.
    RESULT_FIELDS = (
        'businessZones', 'city', 'companyFullName', 'companyShortName',
        'companySize', 'createTime', 'district', 'education',
        'industryField', 'industryLables', 'jobNature', 'positionId',
        'workYear', 'salary',
    )

    custom_settings = {
        'ITEM_PIPELINES': {
            # NOTE(review): 'PositonPipeline' looks misspelled — must match the
            # class name in lagou/pipelines.py; verify before renaming.
            'lagou.pipelines.PositonPipeline': 350,
        }
    }

    def start_requests(self):
        """Generate one POST request per (city, page) combination.

        Reads city names and per-city page counts from ``LagouCityPage.csv``
        (gb18030-encoded), skipping cities whose page count is 0.  HEADERS
        and KEYWORD come from the project settings.
        """
        headers = self.settings.get('HEADERS')
        keyword = self.settings.get('KEYWORD')
        df = pd.read_csv('LagouCityPage.csv', encoding='gb18030')
        df = df[df['page'] > 0]  # filter out cities with zero result pages
        for city, page in zip(df['city'], df['page']):
            params = {
                'px': 'default',
                'city': city,
                'needAddtionalResult': 'false'
            }
            # Lagou rejects the Ajax call unless the Referer matches the
            # corresponding search page for this city.
            headers['Referer'] = ('https://www.lagou.com/jobs/list_%E6%95%B0%E6%8D%AE%E5%88%86%E'
                                  '6%9E%90?px=default&city=' + parse.quote(city))
            # Invariant per city — hoisted out of the page loop.
            url = self.base_url + parse.urlencode(params)
            for pn in range(1, page + 1):
                data = {
                    'first': 'False',
                    'pn': str(pn),
                    'kd': keyword,
                }
                yield scrapy.FormRequest(url=url, method='POST', formdata=data,
                                         headers=headers, callback=self.parse_position)

    def parse_position(self, response):
        """Parse a positionAjax JSON response and yield one PositionItem per job.

        Non-JSON responses (typically an anti-crawler block page) are logged
        and skipped; an unexpectedly-shaped payload is logged with its content.
        """
        try:
            rs = json.loads(response.text)
            results = rs['content']['positionResult']['result']
            for result in results:
                # Copy only the fields the downstream pipeline consumes.
                info = {field: result[field] for field in self.RESULT_FIELDS}
                yield PositionItem(info)
        except JSONDecodeError:
            # Body was not JSON — likely blocked or redirected; skip this page.
            self.logger.warning('Non-JSON response from %s', response.url)
        except KeyError:
            self.logger.warning('Unexpected payload shape: %s %r', type(rs), rs)

    def parse(self, response):
        # Default callback is unused: start_requests routes everything
        # to parse_position.
        pass

