import threading

import mysql.connector
import mysql.connector.pooling
import requests

import article
import home
import next

from c67149_image import C67149ImageRequest


def crawl():
    """Collect article ids from the home page and the paginated "next" pages,
    then fetch the image list for each collected id.

    Side effects: prints the total count and delegates per-id work to
    ``article.image_list``.
    """
    # Renamed from ``list`` — the original shadowed the builtin.
    article_ids = []
    home_ids = home.home_data()
    # next.next_data presumably continues paging after the home-page items,
    # offset by how many the home page returned — TODO confirm against next.py.
    next_ids = next.next_data(len(home_ids))
    article_ids.extend(home_ids)
    article_ids.extend(next_ids)
    print(len(article_ids))
    for article_id in article_ids:
        article.image_list(article_id)


def crawl67149():
    """Crawl a fixed set of category ids (文章id) concurrently.

    Creates a MySQL connection pool, spawns one worker thread per category id
    (all sharing the pool and a single lock), and waits for every worker to
    finish before printing a completion message.
    """
    try:
        kwargs = {
            'host': 'localhost',
            'port': 3306,
            'user': 'root',
            'passwd': '123321',
            'auth_plugin': 'mysql_native_password',
            'database': 'python_db'
        }
        # Pool size 10 comfortably covers the 8 worker threads below.
        mysql_pool = mysql.connector.pooling.MySQLConnectionPool(
            pool_name="mysql_pool", pool_size=10, **kwargs)

        cids = [21, 22, 23, 24, 25, 26, 27, 28]

        # BUG FIX: the original created a fresh Lock inside the loop, handing
        # each thread its own private lock — so the lock never actually
        # serialized anything.  Create ONE lock and share it with all workers.
        lock = threading.Lock()

        ts = []
        for cid in cids:
            t = threading.Thread(target=loop, args=(cid, mysql_pool, lock))
            ts.append(t)
            t.start()

        for t in ts:
            t.join()

        print("所有任务执行完毕")
    except Exception as ex:
        # Narrowed from BaseException so KeyboardInterrupt / SystemExit are
        # not swallowed by this best-effort handler.
        print("启动线程失败：" + str(ex))


def loop(cid, mysql_pool, lock):
    """Worker-thread entry point: crawl one category id.

    Prints the worker thread's name, then hands the category id, the shared
    connection pool, and the shared lock to ``C67149ImageRequest`` and runs
    its ``next_data`` crawl loop.
    """
    # current_thread() replaces currentThread(), a deprecated alias
    # (DeprecationWarning since Python 3.10).
    print(threading.current_thread().name)
    C67149ImageRequest(cid, mysql_pool, lock).next_data()


if __name__ == '__main__':
    # Suppress urllib3's InsecureRequestWarning spam (presumably the crawl
    # code makes requests with verify=False — confirm in the request modules).
    requests.packages.urllib3.disable_warnings()
    # Raise the module-wide default retry count for connections.
    requests.adapters.DEFAULT_RETRIES = 5
    # requests sits on urllib3, whose HTTP connections are keep-alive by
    # default; keep_alive = False is meant to close surplus connections.
    # NOTE(review): this session `s` is never passed to crawl67149() or the
    # request modules, so the setting only takes effect if `s` is used
    # elsewhere — verify, otherwise these two lines are dead code.
    s = requests.session()
    s.keep_alive = False
    crawl67149()
