#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author  : hu_cl
# @Email   : 760730895@qq.com
# @Date    : 2021/4/23 14:13
# @File    : arq_OID.py
import random
from multiprocessing import Pool
import requests
from bs4 import BeautifulSoup
import time

# Target endpoint.  NOTE(review): the original comment called this the
# "novel main address" (小说主地址), but the URL points at a device-OID admin
# action on a LAN host — confirm which site this script actually scrapes.
req_url = 'http://192.168.20.237:8088/manager/GlobalConfig/DevOidAction.htm'  # novel main address

# Headers sent with every HTTP request.  The Cookie (JSESSIONID + ExtJS grid
# state) is hard-coded and will expire; 'User-Agent' is injected at runtime
# from user_agent_list below.
req_header = {
    'Host': '192.168.20.237:8088',
    'Connection': 'keep-alive',
    'Content-Length': '129',
    'X-Requested-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Accept': '*/*',
    'Origin': 'http://192.168.20.237:8088',
    'Referer': 'http://192.168.20.237:8088/manager/loginController.htm?act=login',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Cookie': 'JSESSIONID=F2CBB88C3A1BE2F6FC9C2AA908B00CC4; ys-leag_oid_grid=o%3Acolumns%3Da%253Ao%25253Aid%25253Ds%2525253Anumberer%25255Ewidth%25253Dn%2525253A30%255Eo%25253Aid%25253Dn%2525253A1%25255Ewidth%25253Dn%2525253A285%255Eo%25253Aid%25253Dn%2525253A2%25255Ewidth%25253Dn%2525253A180%255Eo%25253Aid%25253Dn%2525253A3%25255Ewidth%25253Dn%2525253A180%255Eo%25253Aid%25253Dn%2525253A4%25255Ewidth%25253Dn%2525253A45%255Eo%25253Aid%25253Dn%2525253A5%25255Ewidth%25253Dn%2525253A55%5Esort%3Do%253Afield%253Ds%25253Astrdevoid%255Edirection%253Ds%25253AASC', }

# Pool of desktop browser User-Agent strings; one is chosen at random per
# request so the traffic looks less uniform.
user_agent_list = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
]


def down_chart(key, value, i, max_retries=5):
    """Download one chapter page and hand its paragraphs to mycallback.

    Args:
        key:   chapter title (used as the section header in the output file).
        value: chapter URL to fetch.
        i:     1-based chapter index, used only for progress/error messages.
        max_retries: how many attempts before giving up on this chapter.
            The original retried forever via unbounded recursion, which
            grows the stack and never terminates on a permanently dead link.
    """
    print(f"{key}开始下载，请稍等。。。。{i}")
    req_header['User-Agent'] = random.choice(user_agent_list)
    name, link = key, value
    for _attempt in range(max_retries):
        try:
            # BUG FIX: the original passed the headers as `params=`, which
            # serializes them into the query string instead of sending them
            # as HTTP headers.  Also add a timeout so a hung server cannot
            # block the worker forever.
            r_chart = requests.get(link, headers=req_header, timeout=30)
            r_chart.encoding = 'utf-8'
            soup_chart = BeautifulSoup(r_chart.text, "html.parser")
            section_text = soup_chart.select('.entry-text p')
            # Drop the first <p> — presumably a header/ad paragraph on the
            # chapter page; TODO(review) confirm against the live markup.
            section_text.pop(0)
            mycallback(key, section_text, i)
            return
        except Exception as e:
            print(f'错误信息为{e},{name},{link},{i}')
            time.sleep(8)  # back off before retrying
    print(f'放弃下载,{name},{link},{i}')


def mycallback(key, section_text, i):
    """Append one downloaded chapter to the output text file.

    Args:
        key:          chapter title, written as a section header.
        section_text: iterable of paragraph elements; each must support
                      len() (bs4 Tags report their child count) and
                      .get_text().
        i:            chapter index — accepted for interface compatibility,
                      not used in the body.
    """
    # NOTE(review): backslashes in this literal are not escaped; '\小' is not
    # a recognized escape so it survives literally, but a raw string would be
    # safer.  Left byte-identical to preserve the output path.
    title = 'E:\小说\侯卫东官场笔记(官路风流).txt'
    # BUG FIX: use a context manager so the handle is closed even if a write
    # raises; the original open()/close() pair leaked the handle on error.
    with open(title, 'a+', encoding='utf8') as f:
        # write(), not writelines(): these are single strings, and
        # writelines() on a str only works by iterating its characters.
        f.write("\n" + key + "\n\n")
        for t in section_text:
            if len(t) > 0:  # skip empty paragraph tags
                f.write(t.get_text() + "\n")
    print(f"{key}下载完毕，正在继续下载下一章")
    time.sleep(1)  # small pause between chapters to avoid hammering the site
    print('------------------------------------')


if __name__ == '__main__':
    # One worker: chapters are appended sequentially to a single output file,
    # so more workers would interleave chapter text.
    pool = Pool(1)
    req_header['User-Agent'] = random.choice(user_agent_list)
    payload = {"": "23nq2IcX8CnaixWxkyfLr86HMfaZeQ3OqrjggrrzQlDtnWW%2BrXSiwB1NtAwyidpGo5HHXhBHc2UVNcO556Ae47AGzdaY6IarLoX9wAgxX0c%3D"}
    # BUG FIX: the original sent the headers as `params=` (query string) and
    # never sent the prepared POST body at all; pass both correctly.
    r = requests.post(req_url, headers=req_header, data=payload)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    print(soup)  # debug: dump the index page that was fetched
    section_links = soup.select('.main .entry-text.clearfix .xsbox a[href]')
    # Map chapter title -> chapter URL.  (The original reused the name `data`
    # here, shadowing the POST payload; renamed for clarity.)
    chart = {}
    for link in section_links:
        chart[link.get('title')] = link.get('href')
    # BUG FIX: pop with a default so a missing key (e.g. the site changed its
    # index) no longer raises KeyError and kills the whole run.
    chart.pop('侯卫东官场笔记 1223章 1222章以后 1224 1225', None)  # known duplicate entry
    chart.pop(None, None)  # links without a title attribute
    i = 0
    for key, value in chart.items():
        i = i + 1
        if i >= 1003:  # resume point: skip chapters already downloaded
            pool.apply_async(down_chart, (key, value, i))
    pool.close()
    pool.join()
    print("小说下载完毕")
