from utils.Py_Mysql import Py_Mysql
from utils.Movie_requests import Movie_type,Movie_vod
from utils.get_ini import movieurl_select_sql,movievod_create_sql,movieurl_update_sql1,movieurl_update_sql2,movieurl_update_sql3
import configparser
from utils.log import logger
from queue import Queue
from threading import Thread
import time
# Module-level bootstrap: load the SQL config file, define the shared HTTP
# request header, open the project DB helper, and pre-load the crawl sources.
# NOTE(review): the file read and the DB query below execute on import —
# module-level side effects; consider moving them under the __main__ guard.
config = configparser.RawConfigParser()
config.read('./config/config_sql.ini')
# Desktop-browser User-Agent sent with every crawl request.
header = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36"
}
# Project DB wrapper; presumably reads its connection settings from the
# config loaded above — TODO confirm against utils.Py_Mysql.
mysql = Py_Mysql()
# One row per crawl source; the main block indexes columns positionally
# (i[1]=API url, i[2]=row key, i[3]=table suffix, i[8]=crawl mode) — TODO confirm schema.
url_data=mysql.Select_All(movieurl_select_sql)

if __name__ == '__main__':
    task_queue = Queue()
    start_time = time.time()
    for row in url_data:
        # Create (if needed) the per-source table that stores this source's movies.
        vod_table = 'movievod_%s' % (row[3])
        mysql.Create(movievod_create_sql % vod_table, vod_table)

        # Fetch the source's record/page totals and persist them; this must be
        # refreshed on every crawl run.
        movie_type = Movie_type(row[1], header)
        mysql.Update(movieurl_update_sql1 % (movie_type[0], movie_type[1], row[2]))

        if row[8] == 0:
            # Full crawl: walk every page of the video list.
            page_url = row[1].split('?')[0] + '?ac=videolist&pg='
            mysql.Update(movieurl_update_sql2 % (1, row[2]))
        else:
            # Incremental crawl: only items updated within the last 12 hours.
            page_url = row[1].split('?')[0] + '?ac=videolist&h=12&pg='
            # Re-read the totals restricted to the 12-hour window and persist them.
            movie_type = Movie_type(row[1]+'&h=12', header)
            mysql.Update(movieurl_update_sql3 % (movie_type[0], movie_type[1], row[2]))

        # Enqueue one work item per page for the worker threads.
        # NOTE(review): range(1, movie_type[0]) never yields movie_type[0] itself —
        # confirm whether the last page is intentionally excluded.
        for page in range(1, movie_type[0]):
            task_queue.put([page_url + str(page), row[2], vod_table, header, page, 2])
    logger.info('queue队列 开始大小 %d' % task_queue.qsize())

    # Drain the queue with a fixed pool of 20 daemon worker threads.
    for _ in range(20):
        worker = Thread(target=Movie_vod, args=(task_queue, ))
        worker.daemon = True  # workers die with the main thread
        worker.start()
    task_queue.join()  # blocks until every queued page has been processed
    end_time = time.time()
    logger.info('queue队列 结束大小 %d' % task_queue.qsize())
    logger.info(f'电影数据爬取完成')
    logger.info('总耗时：%s' % (end_time - start_time))