import requests
import json
import re
import time

from json.decoder import JSONDecodeError
from config.url import URL
from config.header import lagou
from utils.ReadFile import read_json
from utils.WriteFile import write_json
from utils.Common import remove_repeat
from pyquery.pyquery import PyQuery as pyq

# Crawl-state files, relative to this script's working directory.
# lagou_url.json: mapping of list-page URL -> status (0 = pending, 1 = visited).
lagou_url = '../data/lagou_url.json'
# lagou_company.json: accumulated list of company full names scraped so far.
lagou_company = '../data/lagou_company.json'


def get(url, page):
    """Fetch one page of company listings from a Lagou JSON endpoint.

    Args:
        url: a Lagou company-list JSON URL (as produced by ``get_url``).
        page: 1-based page number, sent as the ``pn`` form field.

    Returns:
        A list of company full names (possibly empty), or ``False`` when
        the response body is not valid JSON — Lagou answers with HTML when
        throttled or when there is no more data.
    """
    data = {
        'first': False,
        'pn': page,
        'sortField': 0,
        'havemark': 0,
    }
    results = []
    # timeout keeps a stalled connection from hanging the crawl forever.
    rs = requests.post(url, data, headers=lagou, timeout=30)
    if rs.status_code == 200:
        try:
            # Use json.loads (not rs.json()) so the exception type is
            # guaranteed to be json.decoder.JSONDecodeError.
            payload = json.loads(rs.text)
            results = [r['companyFullName'] for r in payload['result']]
        except JSONDecodeError:
            return False
    return results


def get_all(max_page):
    """Crawl every pending URL in lagou_url.json, accumulating company names.

    Args:
        max_page: upper page bound; pages 1 .. max_page-1 are fetched per URL.

    URLs with status 0 are pending and are marked 1 once attempted.
    Progress is flushed to disk every ``interval`` URLs, and once more after
    the loop so the tail of the run is not lost.
    """
    results = read_json(lagou_company)
    url = read_json(lagou_url)
    interval = 10
    cnt = 0
    for u, status in url.items():
        if status != 0:
            continue  # already visited in a previous run
        url[u] = 1  # mark visited up front so a rerun skips it
        print(u)
        try:
            # A JSON decode error usually means no data was returned.
            for i in range(1, max_page):
                temp_result = get(u, i)
                if temp_result is False:
                    break
                results.extend(temp_result)
        except JSONDecodeError:
            pass
        results = remove_repeat(results)
        print('SLEEP 3 second...')
        time.sleep(3)
        cnt += 1  # count BEFORE the check so we really flush every `interval`
        # Periodically persist progress and back off to dodge rate limiting.
        if cnt >= interval:
            write_json(results, lagou_company)
            write_json(url, lagou_url)
            cnt = 0
            # rest for 10s
            time.sleep(10)
            print('SLEEP.........')
    # BUG FIX: final flush — the original only wrote every `interval` URLs,
    # losing up to `interval - 1` URLs of work at the end of the run.
    write_json(results, lagou_company)
    write_json(url, lagou_url)


def get_url():
    """Build every company-list JSON URL (city x funding stage x domain)
    and persist them to lagou_url.json, each with status 0 (pending).
    """
    # Cities (full list kept for reference):
    # city = ['45', '171', '357', '118', '306', '276', '2', '13', '32', '163', '113', '300', '268', '239', '125', '318',
    #         '285', '244', '47', '41', '77', '252', '5', '198', '87', '16', '82', '207', '15', '204', '120', '34', '55',
    #         '232', '248', '126', '230', '43', '161', '152', '256', '62', '266', '20', '48', '325', '36', '189', '128',
    #         '218', '121', '46', '146', '240', '213', '273', '237', '143', '265', '242', '340', '258', '6', '111', '57',
    #         '224', '249', '31', '102', '11', '90', '185', '193', '201', '164', '18', '227', '304', '209', '116', '114',
    #         '119', '37', '56', '172', '245', '246', '60', '330', '67', '100', '348', '148', '101', '104', '155', '220',
    #         '71', '140', '192', '25', '85', '233', '174', '144', '138', '190', '23', '323', '49', '317', '316', '64',
    #         '282', '81', '168', '343', '17', '169', '315', '160', '236', '89', '162', '308', '255', '108', '294', '124',
    #         '136', '261', '159', '177', '210', '28', '274', '272', '29', '73', '52', '326', '257', '115', '222', '225',
    #         '263', '66', '79', '98', '7', '235', '88', '179', '262', '135', '137', '260', '219', '130', '175', '53',
    #         '170', '139', '149', '132', '10', '229', '105', '241', '58', '324', '283', '65', '158', '3', '80', '44',
    #         '215', '8', '103', '217', '250', '96', '147', '186', '214', '122', '180', '178', '226', '259', '131', '195',
    #         '202', '307', '76', '4', '19', '107', '9', '94', '156', '54', '35', '97', '74', '117', '319', '358', '184',
    #         '84', '99', '154', '342', '112', '157', '302', '238', '40', '339', '33', '129', '298', '86', '356', '173',
    #         '301', '12', '176', '329', '200', '197', '191', '181', '194', '27', '188', '127', '141', '39', '211', '38',
    #         '153', '92', '337', '91', '187', '203', '145', '26', '50', '206', '228', '264', '243', '21', '208', '303',
    #         '284', '305', '234', '142', '267', '167', '216', '231', '70', '150', '93', '223', '221', '199', '134', '83',
    #         '275', '151', '106', '22', '14', '182', '183', '253', '269', '205', '286', '321', '165']
    city = ['2', '3', '215', '213', '6', '252', '79', '184', '298', '129', '198', '80', '4']
    # Funding stage
    status = [1, 2, 3, 4, 5, 6, 7, 8]
    # Industry / domain
    domain = [24, 25, 33, 27, 29, 45, 31, 28, 47, 34, 35, 43, 32, 41, 26, 48, 38, 49, 10594]
    # One URL per (city, stage, domain) combination, all marked pending (0).
    results = {
        f'https://www.lagou.com/gongsi/{c}-{s}-{d}.json': 0
        for c in city
        for s in status
        for d in domain
    }
    write_json(results, lagou_url)


def get_city():
    """Scrape the Lagou all-city page and return the list of city ids.

    Returns:
        A list of numeric city-id strings parsed from each city link's
        href (the digits preceding the first '-').
    """
    url = 'https://www.lagou.com/gongsi/allCity.html?option=1-0-0'
    doc = pyq(url)
    content = doc('.word_list tr')
    # Raw string: r'\d+(?=-)' matches digits immediately followed by '-'
    # (the original non-raw '\-' escape triggers a SyntaxWarning).
    p = re.compile(r'\d+(?=-)')
    city = []
    for row in content.items():
        # Second <td> of each row holds the city anchor links.
        td = row('td').eq(1)
        for link in td('a').items():
            href = link.attr('href')
            m = p.search(href)
            if m is not None:
                city.append(m.group())
    # BUG FIX: the original built `city` and then discarded it,
    # always returning None.
    return city


# Run the crawler only when executed as a script, not when imported.
if __name__ == '__main__':
    get_all(20)
