﻿# -*- coding:utf-8 -*-
import json

from redis import Redis
from pymongo import MongoClient
import requests
import random
from hashlib import md5
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import time
import re
from lxml import etree

count = 0
client = MongoClient('mongodb://root:MgXwM3Toe5@192.168.7.53:30228')
baiduwenku_url_ = client["webpage"]["baiduwenku_url_"]
proxies = client["proxy"]["proxies_ip_dongtai"]


# 返回随机ua
def get_ua():
    first_num = random.randint(55, 62)
    third_num = random.randint(0, 3200)
    fourth_num = random.randint(0, 140)
    os_type = [
        '(Windows NT 6.1; WOW64)', '(Windows NT 10.0; WOW64)', '(X11; Linux x86_64)',
        '(Macintosh; Intel Mac OS X 10_12_6)'
    ]
    chrome_version = 'Chrome/{}.0.{}.{}'.format(first_num, third_num, fourth_num)

    ua = ' '.join(['Mozilla/5.0', random.choice(os_type), 'AppleWebKit/537.36',
                   '(KHTML, like Gecko)', chrome_version, 'Safari/537.36']
                  )
    return ua


headers = {
    "Cookie": "__yjs_duid=1_d81926bbdf4cc5fdd154097e5a8654ea1615442955251; BAIDUID_BFESS=8CBF49D388137BF837BD67EEFE053448:FG=1; BAIDUID=84F0F7DCDAC3AA6D062BDFA50848EF98:FG=1; BIDUPSID=84F0F7DCDAC3AA6D062BDFA50848EF98; PSTM=1615771772; _click_param_reader_query_ab=-1; _click_param_pc_rec_doc_2017_testid=6; BDUSS=FtUmJVM1JWc2p4TDRuSFh3TWpKekdpQTlIMThBNXhaZThGalhXdWZubWhYNHBnRVFBQUFBJCQAAAAAAAAAAAEAAADhUWJ12Lywos3Y2LwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKHSYmCh0mJgQk; BDUSS_BFESS=FtUmJVM1JWc2p4TDRuSFh3TWpKekdpQTlIMThBNXhaZThGalhXdWZubWhYNHBnRVFBQUFBJCQAAAAAAAAAAAEAAADhUWJ12Lywos3Y2LwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKHSYmCh0mJgQk; kunlunFlag=1; BCLID_BFESS=6550554706366521910; BDSFRCVID_BFESS=VRFOJeC626Ch_WQejK9dMRS5TMTpEHTTH6-Jfn6RbfyywV3sZ8KoEG0P8f8g0Ku-KVl7ogKKLgOTHULF_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF_BFESS=tbAf_K0MJCvbfP0kD5KbMb8t52T22-us3Dtj2hcH0KLKDMONKfoxbf0OjN7y2lRltCTiah6gaMb1M66_bT565Mobbqj7B4nO0DQUQl5TtUtWeCnTDMRhqJ_lQxjyKMniBIv9-pnGBpQrh459XP68bTkA5bjZKxtq3mkjbPbDfn02eCKujTL5D5JXjGRf-b-X-TT8LRj85-5ojRIk-PnVepKwQ4nZKxtqtJcmBU76LR58OITgXq56jq0I3h34Bp3nWnFLaJQ8JfoDe4Tw5pD5KpTbyUn405OTfIFO0KJcb66vSR46hPJvyUAsXnO7Lx7lXbrtXp7_2J0WStbKy4oTjxL1Db3JKjvMtgDtVJO-KKC-MKPljM5; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; delPer=0; PSINO=6; Hm_lvt_f06186a102b11eb5f7bcfeeab8d86b34=1617097541,1617155862,1617166647,1617238111; BDRCVFR[C0p6oIjvx-c]=mbxnW11j9Dfmh7GuZR8mvqV; H_PS_PSSID=; session_name=; Hm_lvt_d8bfb560f8d03bbefc9bdecafc4a4bf6=1617151626,1617166647,1617237919,1617243353; isJiaoyuVip=1; userFirstTime=true; session_id=1617342119530; ___wk_scode_token=HOgrleuvlXJTYmXsMz2qbXwIMEREXDruNh%2BlBwT1xww%3D; Hm_lpvt_f06186a102b11eb5f7bcfeeab8d86b34=1617345205; LoseUserAllPage=%7B%22type%22%3A0%2C%22status%22%3A0%2C%22expire_time%22%3A0%2C%22create_time%22%3A1617345164%2C%22cookie_time%22%3A1617431564%7D; Hm_lpvt_d8bfb560f8d03bbefc9bdecafc4a4bf6=1617345324; ab_sr=1.0.0_ZmQ5NTEzOGFjZjRkZGYyN2NjMDU5MjgwYTBmNzVmMDQ1M2JjZjY2NGM0YTcwODE3OTY2OTdhODc5N2MzNzcxNjc1NmM5ZmFkMjVjMzRjNTAxYTJjYzIyZmY1ZDFkMTM2; bcat=6bc4acfc582093130d284bebe64dba2ceac7863f03574d982ec152edf0ad3e3161e522383677c411eaa9373fcbabcd2752cdca5d0c512d3483402be0384d65977066b234f8a33af4ec0620873490ed9a33834182c28b8295b7c08c01ed2ff8db473f845dfcc2a6dc90db9fd24e7af285ee6059e0193359b066723194efcc9639",
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': get_ua(),

}


# 使用动态代理
def get_ip_dt():
    items = list(proxies.find({}))
    item = random.choice(items)
    ip = item.get("ip")
    new_proxy = {
        "http": ip,
    }
    print(f"已设置代理 {ip}")
    return new_proxy


# 静态代理列表
proxies_list = [
    "http://pc0308wd:pc0308wd@140.246.91.53:888",
    "http://pc0308wd:pc0308wd@113.125.125.2:888",
    "http://pc0308wd:pc0308wd@203.57.232.249:888",
    "http://pc0308wd:pc0308wd@113.125.9.8:888",
    "http://pc0308wd:pc0308wd@42.123.125.53:888",
]


# 使用静态代理
def get_ip_jt():
    ip = random.choice(proxies_list)
    new_proxy = {
        "http": ip,
    }
    print(f"已设置代理 {ip}")
    return new_proxy


# 使用代理的信号
ip = "dt"


# 选择使用 静态代理 或者 动态代理
def get_ip():
    if ip == "jt":
        return get_ip_jt()
    elif ip == "dt":
        return get_ip_dt()


# 解析函数
def parse(url):
    try:
        resp = requests.get(url, headers=headers, timeout=5, proxies=get_ip())
        return resp
    except Exception as e:
        print(e)
        return None


# gs redis数据库
def get_db():
    db = Redis(
        host="192.168.7.52",
        port=30114,
        db=15,
        decode_responses=True,
        password="3oSama4la]uLjzO1"
    )
    return db


def post_end(get_overs):
    try:
        headers = {'Content-Type': 'application/json;charset=UTF-8',
                   'Connection': 'close'}
        resp = requests.post(url='http://data-platform-api.k8s.laibokeji.com/api/data/fulltext',
                             data=get_overs,
                             headers=headers,
                             timeout=(20, 20))
        return resp.text
    except:
        return ''


comp = re.compile('"typeName":"(.*?)"')


def change_status():
    pass


def main():
    db = get_db()
    while True:
        if db.llen("urls"):
            url = db.lpop("urls")
            print(f"拿到了 {url}")
            resp = parse(url)
            if resp:
                text = resp.text
                file_type = "".join(comp.findall(text))
                print(file_type)
                if file_type == "txt":
                    html = etree.HTML(text)
                    file_name = html.xpath('//h3/text()')[0]
                    datas = html.xpath('//p[@class="p-txt"]/text()')
                    datas = str(datas)
                    result = re.sub('\[|\]', '', datas)
                    get_overs = {
                        'url': url,
                        'title': file_name,
                        'dataType': 'webpage',
                        'source': 'baiduwenku',
                        'content': result,
                        'batchNo': time.strftime('%Y%m%d', time.localtime(time.time()))
                    }
                    try:
                        # 提交五次
                        for pan_num in range(5):
                            response_end = post_end(json.dumps(get_overs))
                            print("响应", response_end)

                            # 失败
                            if 'system' in response_end or '"status":404' in response_end:
                                print("提交失败, 正在重试")
                                continue
                            # 成功, 有一次成功就返回
                            else:
                                if "this url is already exist" in response_end:
                                    print("提交成功, 但是已存在")
                                    baiduwenku_url_.update_one(
                                        {"url": url},
                                        {"$set": {"msg": "url exist", "status": 1}},
                                    )
                                else:
                                    print("提交成功")
                                    baiduwenku_url_.update_one(
                                        {"url": url},
                                        {"$set": {"status": 2}}, )
                                break
                        # 提交5次失败才会执行下方代码
                        baiduwenku_url_.update_one(
                            {"url": url},
                            {"$set": {"status": 0}},
                        )
                    except:
                        pass
                else:
                    print("不是txt")
            else:
                print(url, "请求失败")
                db.lpush("urls", url)
                baiduwenku_url_.update_one(
                    {"url": url},
                    {"$set": {"status": -1, "is_get": 2}},
                )
        else:
            print("正在等待")
            time.sleep(5)


if __name__ == '__main__':
    main()
