# coding:utf-8
import json
import random
import math
import time

import requests
from fake_useragent import UserAgent
from lxml import etree
from modules import init_db
from pymongo import UpdateOne
from utils.selenuim_login import lg_login

# Pool of desktop browser User-Agent strings; one is picked at random and
# attached to every outgoing request so traffic looks less uniform.
# NOTE(review): fake_useragent.UserAgent is imported above but never used —
# presumably this static pool replaced it; confirm before removing the import.
agent = [
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 QIHU 360EE/13.0.2212.0'

]


class LaGou(object):
    """Scraper for lagou.com job listings for one city/keyword pair.

    Pages are fetched with a random User-Agent plus a login cookie loaded
    from ``../jobs/cookie.json``.  Parsed positions are upserted into the
    module-level ``job_lg`` MongoDB collection (defined by the caller)
    keyed on ``positionId``.
    """

    # Fallback page count used when the real total cannot be determined.
    FALLBACK_PAGES = 30

    def __init__(self, city, kd):
        """Build the paginated search URL and load the saved cookie.

        :param city: city name used in the ``city`` query parameter
        :param kd: search keyword used in the ``kd`` query parameter
        """
        self.url = 'https://www.lagou.com/wn/jobs?city={}&kd={}&pn='.format(city, kd)
        self.cookie_path = '../jobs/cookie.json'
        # Explicit encoding so the JSON cookie file reads identically on
        # every platform (default encoding is locale-dependent).
        with open(self.cookie_path, encoding='utf-8') as f1:
            self.cookie = json.load(f1)['cookie']

    def get_page(self):
        """Fetch page 1 and return the total number of result pages.

        Returns ``FALLBACK_PAGES`` when the request fails or the embedded
        ``__NEXT_DATA__`` JSON cannot be parsed.  (The previous version
        returned ``None`` on a non-200 response, which made ``crawl``
        crash on ``range(None)``.)
        """
        header = {"User-Agent": random.choice(agent)}
        res = requests.get(self.url + str(1), headers=header)
        if res.status_code == 200:
            html = etree.HTML(res.text)
            job_str = html.xpath('//*[@id="__NEXT_DATA__"]/text()')
            try:
                job_json = json.loads(job_str[0])
                position_result = job_json['props']['pageProps']['initData']['content']['positionResult']
                total_count = position_result['totalCount']
                result_size = position_result['resultSize']
                return math.ceil(total_count / result_size)
            except Exception as e:
                # Page layout changed, JSON missing, or resultSize == 0;
                # log the cause instead of discarding it, then fall back.
                print('parse total page failed:', e)
        return self.FALLBACK_PAGES

    @staticmethod
    def get_content(content, page):
        """Parse one result page and upsert its positions into ``job_lg``.

        :param content: raw HTML of a search-result page
        :param page: 1-based page number, used only for error reporting
        """
        html = etree.HTML(content)
        job_str = html.xpath('//*[@id="__NEXT_DATA__"]/text()')
        try:
            job_json = json.loads(job_str[0])
            result = job_json['props']['pageProps']['initData']['content']['positionResult']['result']
            print('正在爬取的数据………………', len(result))
            # Upsert on positionId so re-crawls update rather than duplicate.
            arr = [UpdateOne({'positionId': k['positionId']}, {'$set': k}, upsert=True) for k in result]
            if arr:  # bulk_write raises InvalidOperation on an empty list
                job_lg.bulk_write(arr)
            # Random pause to throttle requests and look less like a bot.
            time.sleep(random.randint(1, 5))
        except Exception as e:
            print('bug出现了:__', e)
            print('第 %s 页' % (str(page)))

    def crawl(self, total_page):
        """Fetch every result page, refreshing the cookie on a login wall.

        Unlike the previous version, a page that hits the login wall is
        retried once after ``get_cookie`` instead of being silently skipped.
        """
        print('总计页数……', total_page)
        for i in range(total_page):
            page_no = i + 1
            url = self.url + str(page_no)
            for _attempt in range(2):  # at most one retry after re-login
                header = {"User-Agent": random.choice(agent), 'cookie': self.cookie}
                res = requests.get(url, headers=header)
                if self.need_login(res.text):
                    self.get_cookie()
                    continue  # retry this page with the fresh cookie
                if res.status_code == 200:
                    self.get_content(res.text, page_no)
                break

    def get_cookie(self):
        """Re-login through the Selenium helper and replace the in-memory cookie."""
        url = 'https://passport.lagou.com/login/login.html?service=https%3a%2f%2fwww.lagou.com%2f'
        self.cookie = lg_login(url)

    @staticmethod
    def need_login(content):
        """Return True when *content* is the login-wall page.

        Detection: the first header list item of the login page contains
        '登录'.  Narrowed from a bare ``except:`` — only the two failure
        modes the lookup can actually produce are swallowed.
        """
        html = etree.HTML(content)
        try:
            # IndexError: XPath matched nothing (normal result page);
            # AttributeError: etree.HTML returned None for empty content.
            login_name = html.xpath('/html/body/section/div[2]/div[1]/div[1]/ul/li[1]/text()')[0]
            return '登录' in login_name
        except (IndexError, AttributeError):
            return False

if __name__ == '__main__':
    # Lagou shows a login wall after roughly 3-5 pages; crawl() refreshes
    # the cookie when that happens.
    city = ['北京', '天津']
    keyword = 'python'
    # Connect to MongoDB once instead of reconnecting per city.
    # ``job_lg`` must be module-level: LaGou.get_content writes into it.
    db = init_db()
    job_lg = db['job_lg']
    for j in city:
        lg = LaGou(j, keyword)
        page = lg.get_page()
        lg.crawl(page)

queue = [] # fetch data from the queue
# NOTE(review): this runs at import time, OUTSIDE the __main__ guard, and the
# loop body is empty — looks like unfinished scaffolding; confirm intent
# before anyone fills it in (it would execute on every import of this module).
for i in queue:
    pass