import time

from utils.WriteFile import write_json
from utils.ReadFile import read_json
from utils.Common import remove_repeat
from pyquery.pyquery import PyQuery as pyq
from config.header import boss

# City codes (Beijing, Shanghai, Guangzhou, Shenzhen, Hangzhou, Tianjin, Xi'an,
# Suzhou, Wuhan, Xiamen, Changsha, Chengdu) — used as URL path segments.
city = ['c101010100', 'c101020100', 'c101280100', 'c101280600', 'c101210100', 'c101030100', 'c101110100', 'c101190400',
        'c101200100', 'c101230200', 'c101250100', 'c101270100']
# Work-experience filter codes
experience = ['e_102', 'e_103', 'e_104', 'e_105', 'e_106', 'e_107']
# Education-level filter codes
degree = ['d_202', 'd_203', 'd_204', 'd_205', 'd_206', 'd_207']
# Company-size filter codes (unused by get_url below — kept for reference)
scale = ['s_301', 's_302', 's_303', 's_304', 's_305', 's_306']
# Financing-stage filter codes (unused by get_url below — kept for reference)
financing = ['t_801', 't_802', 't_803', 't_804', 't_805', 't_806', 't_807', 't_808']
# Industry filter codes (unused by get_url below — kept for reference)
domain = ['i501', 'i502', 'i503', 'i504', 'i505', 'i506', 'i507', 'i508', 'i509', 'i510', 'i511', 'i512', 'i513',
          'i514', 'i515', 'i516', 'i517', 'i518', 'i519', 'i520', 'i521', 'i522', 'i523', 'i524', 'i525', 'i526',
          'i527', 'i528', 'i529']
# Salary-range filter codes (unused by get_url below — kept for reference)
salary = ['y_1', 'y_2', 'y_3', 'y_4', 'y_5', 'y_6', 'y_7', 'y_8']
# Output file: accumulated company names (JSON list)
boss_company = '../data/boss_company.json'
# Output file: position titles, parsed from the same pages (JSON list)
boss_position = '../data/boss_position.json'
# Seed-URL status map (JSON dict: url -> 0 unprocessed / 1 processed)
boss_url = '../data/boss_url.json'


def get_url():
    """Build the seed-URL status map and persist it to ``boss_url``.

    One URL is generated per (city, experience, degree) combination, each
    mapped to status 0 (unprocessed); ``get_all`` later flips entries to 1.
    The URLs end in ``?page=`` so a page number can be appended directly.
    """
    results = {
        f'https://www.zhipin.com/{c}/{e}-{d}/?page=': 0
        for c in city
        for e in experience
        for d in degree
    }
    write_json(results, boss_url)


def get(url, page):
    """Fetch one listing page and extract position titles and company names.

    Args:
        url: seed URL ending in ``?page=`` (as produced by ``get_url``).
        page: 1-based page number appended to the URL.

    Returns:
        (positions, companies) — two parallel lists of strings; both empty
        when the page has no results or the fetch/parse fails.
    """
    page_url = url + str(page)
    position = []
    company = []
    try:
        doc = pyq(page_url, headers=boss)
        for item in doc('.job-list .job-primary').items():
            title = item('.info-primary h3 a').text()
            pay = item('.info-primary h3 a span').text()
            # The anchor's text includes the nested salary span; strip it
            # so only the job title remains.
            position.append(title.replace(pay, ''))
            company.append(item('.info-company .company-text h3 a').text())
    except Exception as err:
        # Best-effort: a failed page simply yields no results, but log it
        # instead of swallowing the error silently (the original bare
        # `except: pass` hid even programming bugs).
        print('fetch failed:', page_url, err)
    return position, company


def get_all(max_page):
    """Crawl every unprocessed seed URL and persist positions/companies.

    Loads the accumulated results and the URL status map from disk, fetches
    pages 1..max_page-1 for each URL still marked 0, de-duplicates, and
    periodically writes progress back so an interrupted run can resume.

    Args:
        max_page: exclusive upper bound on the page number per seed URL.
    """
    company = read_json(boss_company)
    position = read_json(boss_position)
    url = read_json(boss_url)
    interval = 1  # write to disk every `interval` processed URLs
    cnt = 0
    for u, status in url.items():
        if status != 0:
            continue  # already processed in a previous (or this) run
        url[u] = 1
        print(u)
        # A JSON parse error from the site usually means no data returned.
        for i in range(1, max_page):
            _position, _company = get(u, i)
            if not _position:
                break  # empty page: no further pages for this URL
            position.extend(_position)
            company.extend(_company)
        position = remove_repeat(position)
        company = remove_repeat(company)
        print('SLEEP 3 second...')
        time.sleep(3)
        # Periodically persist progress and back off a little longer.
        if cnt >= interval:
            write_json(company, boss_company)
            write_json(position, boss_position)
            write_json(url, boss_url)
            cnt = 0
            time.sleep(10)
            print('SLEEP.........')
        cnt += 1
    # Final flush: without this, everything gathered since the last
    # interval-triggered write (always including the last URLs of the
    # run) was silently lost when the loop finished.
    write_json(company, boss_company)
    write_json(position, boss_position)
    write_json(url, boss_url)


# Guard the crawl behind the standard entry-point check so importing this
# module (e.g. to reuse get()/get_url()) does not kick off a full scrape.
if __name__ == '__main__':
    get_all(20)