# coding:utf-8
import json
import random
import math
import time

import requests
from fake_useragent import UserAgent
from lxml import etree
from modules import init_db
from pymongo import UpdateOne
from utils.selenuim_login import boss_login

# Pool of desktop browser User-Agent strings; one is chosen at random per
# request (see Boss.get_page / Boss.crawl) to make traffic look less uniform.
# NOTE(review): module-level constant — would conventionally be AGENT/AGENTS,
# kept lowercase here because other code references this exact name.
agent = [
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 QIHU 360EE/13.0.2212.0'

]


class Boss(object):
    """Scraper for zhipin.com (Boss直聘) job-search result pages.

    For a (city code, keyword) pair it fetches paginated search results,
    parses each job card with XPath and upserts the records into the
    module-level ``job_boss`` MongoDB collection (bound by the caller,
    see ``__main__``).
    """

    def __init__(self, city, kd):
        self.city = city  # zhipin numeric city code, e.g. '101010100'
        self.kd = kd      # search keyword, e.g. 'python'
        self.cookie_path = '../jobs/boss_cookie.json'
        # Placeholders: query, city, page, ka-page (the last two are equal).
        self.url = 'https://www.zhipin.com/job_detail/?query={}&city={}&page={}&ka=page-{}'
        with open(self.cookie_path) as f1:
            self.cookie = json.load(f1)['cookie']

    def get_page(self):
        """Return the total number of result pages.

        Falls back to 30 when the response is not 200 or the embedded
        JSON cannot be parsed (the original returned None on a bad
        status, which would crash range() in callers).
        """
        header = {
            "User-Agent": random.choice(agent),
            "Host": "www.zhipin.com",
            "cookie": self.cookie,
        }
        url = self.url.format(self.kd, self.city, str(1), str(1))
        # BUG FIX: the original called requests.get(url + str(1)), appending
        # an extra '1' to the fully formatted URL ('...ka=page-1' -> 'page-11').
        res = requests.get(url, headers=header)
        if res.status_code != 200:
            return 30
        html = etree.HTML(res.text)
        try:
            job_str = html.xpath('//*[@id="__NEXT_DATA__"]/text()')
            job_json = json.loads(job_str[0])
            position_result = job_json['props']['pageProps']['initData']['content']['positionResult']
            total_count = position_result['totalCount']
            result_size = position_result['resultSize']
            return math.ceil(total_count / result_size)
        except (IndexError, KeyError, TypeError, ValueError, ZeroDivisionError):
            # Page layout changed or embedded JSON missing — use a safe default.
            return 30

    @staticmethod
    def get_content(content, page):
        """Parse one result page and upsert every job card into ``job_boss``.

        ``content`` is the raw HTML of a search-result page, ``page`` its
        1-based page number (page 1 uses a different wrapper div index).
        """
        html = etree.HTML(content)
        # Page 1 renders the list under div[3]; subsequent pages under div[2].
        rule_path = '3' if str(page) == '1' else '2'
        result = []
        for i in range(1, 31):  # a result page holds at most 30 cards
            card = '//*[@id="main"]/div/div[{}]/ul/li[{}]'.format(rule_path, i)
            title_a = card + '/div/div[1]/div[1]/div/div[1]/span[1]/a'
            try:
                companySize = html.xpath(card + '/div/div[1]/div[2]/div/p')
                positionLables = html.xpath(card + '/div/div[2]/div[1]')
                companyLabelList = html.xpath(card + '/div/div[2]/div[2]/text()')
                data = {
                    "securityId": html.xpath(title_a + '/@data-securityid')[0],
                    "lid": html.xpath(title_a + '/@data-lid')[0],
                    "ka": html.xpath(title_a + '/@ka')[0],
                    "positionName": html.xpath(title_a + '/text()')[0],
                    "positionHref": html.xpath(title_a + '/@href')[0],
                    "salary": html.xpath(card + '/div/div[1]/div[1]/div/div[2]/span/text()')[0],
                    "job_area": html.xpath(card + '/div/div[1]/div[1]/div/div[1]/span[2]/span/text()')[0],
                    "companyShortName": html.xpath(card + '/div/div[1]/div[2]/div/h3/a/text()')[0],
                    "industryField": html.xpath(card + '/div/div[1]/div[2]/div/p/a/text()')[0],
                    "companySize": companySize[0].xpath('string(.)').strip(),
                    "positionLables": positionLables[0].xpath('string(.)').strip(),
                    # Full-width comma separates the company benefit tags.
                    "companyLabelList": companyLabelList[0].split('，'),
                }
                # Upsert keyed on securityId so re-crawls refresh, not duplicate.
                result.append(UpdateOne({'securityId': data['securityId']}, {'$set': data}, upsert=True))
            except IndexError as e:
                # A missing card element — log which page/card and keep going.
                print('bug出现了:__', e)
                print('第 %s 页' % (str(page)))
                continue

        # BUG FIX: bulk_write([]) raises InvalidOperation — guard empty batches.
        if result:
            job_boss.bulk_write(result)
        time.sleep(random.randint(1, 5))

    def crawl(self, start_page, total_page):
        """Fetch pages start_page+1 .. total_page and store their job cards.

        A non-200 response is treated as an expired cookie: wait, reload
        the cookie and continue with the next page.
        """
        print('总计页数……', total_page)
        for i in range(start_page, total_page):
            page = i + 1
            header = {
                "User-Agent": random.choice(agent),
                "Host": "www.zhipin.com",
                "cookie": self.cookie,
                # BUG FIX: referer previously hard-coded query=python&city=101010100
                # regardless of self.kd/self.city; build it from the same template.
                "referer": self.url.format(self.kd, self.city, i, i),
            }
            url = self.url.format(self.kd, self.city, str(page), str(page))

            # allow_redirects=False so a login redirect surfaces as non-200.
            # BUG FIX: timeout raised from 1s and wrapped — a single slow
            # response used to abort the whole crawl with an unhandled error.
            try:
                res = requests.get(url, headers=header, allow_redirects=False, timeout=10)
            except requests.RequestException as e:
                print('bug出现了:__', e)
                time.sleep(10)
                continue
            if res.status_code == 200:
                self.get_content(res.text, page)
            else:
                print('~~~~~~需要重新设置cookie！！！！！')
                time.sleep(10)
                self.get_cookie()
            time.sleep(random.randint(1, 4))

    def get_cookie(self):
        """Reload the session cookie from the JSON file on disk.

        An interactive selenium login (boss_login) is available but
        disabled; the cookie file is expected to be refreshed externally.
        """
        with open(self.cookie_path) as f1:
            self.cookie = json.load(f1)['cookie']
        # self.cookie = boss_login()

    @staticmethod
    def need_login(content):
        """Return True when the page appears to be the login screen."""
        html = etree.HTML(content)
        try:
            login_name = html.xpath('/html/body/section/div[2]/div[1]/div[1]/ul/li[1]/text()')[0]
            return '登录' in login_name
        except (IndexError, AttributeError, TypeError):
            # Element absent (or unparseable HTML) — assume no login needed.
            return False


if __name__ == '__main__':
    # City codes understood by zhipin.com: 101010100 = Beijing, 101030100 = Tianjin.
    city = ['101010100', '101030100']
    keyword = 'python'
    # BUG FIX: the DB connection was re-created inside the per-city loop;
    # connect once and reuse. `job_boss` is read globally by Boss.get_content.
    db = init_db()
    job_boss = db['job_boss']
    for j in city:
        boss = Boss(j, keyword)
        # Pages 3..10 for each city (crawl fetches start_page+1 .. total_page);
        # boss.get_page() could supply the real total instead of the fixed 10.
        boss.crawl(2, 10)
