import requests
from utils.log import logger
from utils.get_ini import movietype_insert_sql, update_sql, select_all_sql, movievod_insert_sql, movievod_select_sql, \
    movievod_delete_sql
from utils.Py_Mysql import Py_Mysql
import datetime
# 消除request警告
requests.packages.urllib3.disable_warnings()


# def Movie_type(url, mysql, header):
#     requests_get = requests.get(url, headers=header, verify=False)
#     if requests_get.status_code == 200:
#         requests_get_json = requests_get.json()
#         requests_get_json_class = requests_get_json['class']
#         for i in requests_get_json_class:
#             insert = movietype_insert_sql % (i['type_name'], i['type_id'], i['type_id'])
#             Insert_OneData = mysql.Insert_OneData(insert)
#             if '1062' in str(Insert_OneData):
#                 data = mysql.Select_All(select_all_sql % i["type_name"])[0]
#                 if str(i['type_id']) not in str(data[2]):
#                     spaer1 = str(data[2]) + ',' + str(i['type_id'])
#                     mysql.Update(update_sql % ('spaer1', spaer1, i["type_name"]))
#     else:
#         logger.warning('连接失败:' + requests_get.status_code)


def Movie_type(url, header):
    """Fetch the vod API index and return its pagination info.

    Args:
        url: API endpoint expected to return JSON containing
            'pagecount' and 'total' fields.
        header: dict of HTTP headers (browser-mimicking request).

    Returns:
        (pagecount, total) tuple on HTTP 200, otherwise None
        (a warning is logged for non-200 responses).
    """
    requests_get = requests.get(url, headers=header, verify=False)
    if requests_get.status_code == 200:
        requests_get_json = requests_get.json()
        pagecount = requests_get_json['pagecount']
        total = requests_get_json['total']
        return pagecount, total
    else:
        # BUG FIX: the original concatenated str + int (status_code is an
        # int), raising TypeError on the error path; format it instead.
        logger.warning(f'连接失败:{requests_get.status_code}')

'''
Queue item layout — each entry pulled from the work queue is a 6-element sequence:
0 page URL to request
1 label of the source site the movies come from
2 destination table name
3 HTTP request headers (mimic a browser request)
4 page number
5 request timeout (seconds)
'''
def Movie_vod(queue):
    """Drain *queue* of crawl tasks, fetching movie pages and syncing rows to MySQL.

    Each queue item is a 6-element sequence:
        0: page URL, 1: source-site label, 2: destination table name,
        3: request headers, 4: page number, 5: request timeout (seconds).

    New movies are inserted; existing movies whose stored timestamp differs
    from the upstream ``vod_time`` are deleted and re-inserted. Any request
    or DB failure is caught: the URL is appended to a dated CSV so it can
    be re-crawled, and the error is logged — the worker keeps going.
    """
    # One connection per worker: sharing a single Py_Mysql instance across
    # threads caused DB errors (per the original author's note).
    mysql = Py_Mysql()
    # NOTE(review): empty()/get() is not atomic across threads — this assumes
    # the queue is fully populated before workers start; confirm with caller.
    while not queue.empty():
        url, movie_type, movievod_table, header, page, timeout = queue.get()
        try:
            requests_get = requests.get(url, headers=header, verify=False, timeout=timeout)
            if requests_get.status_code == 200:
                json = requests_get.json()
                for i in json['list']:
                    insert = movievod_insert_sql % (
                        movievod_table, i['vod_id'], i['type_id'], i['type_id_1'], i['group_id'], i['vod_name'], i['vod_sub'],
                        i['vod_en'], i['vod_status'], i['vod_letter'], i['vod_color'], i['vod_tag'], i['vod_class'],
                        i['vod_pic'], i['vod_pic_thumb'], i['vod_pic_slide'], i['vod_pic_screenshot'], i['vod_actor'],
                        i['vod_director'], i['vod_writer'], i['vod_behind'], i['vod_blurb'], i['vod_remarks'],
                        i['vod_pubdate'], i['vod_total'], i['vod_serial'], i['vod_tv'], i['vod_weekday'],
                        i['vod_area'], i['vod_lang'], i['vod_year'], i['vod_version'], i['vod_state'],
                        i['vod_author'], i['vod_jumpurl'], i['vod_tpl'], i['vod_tpl_play'], i['vod_tpl_down'],
                        i['vod_isend'], i['vod_lock'], i['vod_level'], i['vod_copyright'], i['vod_points'],
                        i['vod_points_play'], i['vod_points_down'], i['vod_hits'], i['vod_hits_day'],
                        i['vod_hits_week'], i['vod_hits_month'], i['vod_duration'], i['vod_up'], i['vod_down'],
                        i['vod_score'], i['vod_score_all'], i['vod_score_num'], i['vod_time'], i['vod_time_add'],
                        i['vod_time_hits'], i['vod_time_make'], i['vod_trysee'], i['vod_douban_id'],
                        i['vod_douban_score'], i['vod_reurl'], i['vod_rel_vod'], i['vod_rel_art'], i['vod_pwd'],
                        i['vod_pwd_url'], i['vod_pwd_play'], i['vod_pwd_play_url'], i['vod_pwd_down'],
                        i['vod_pwd_down_url'], i['vod_content'], i['vod_play_from'], i['vod_play_server'],
                        i['vod_play_note'], i['vod_play_url'], i['vod_down_from'], i['vod_down_server'],
                        i['vod_down_note'], i['vod_down_url'], i['vod_plot'], i['vod_plot_name'],
                        i['vod_plot_detail'], i['type_name'], movie_type
                    )
                    # Look up the movie by name to decide insert vs. refresh.
                    data_movie = mysql.Select_All(movievod_select_sql % (movievod_table, i['vod_name']))
                    if not data_movie:
                        mysql.Insert_OneData(insert)
                    else:
                        data = data_movie[0]
                        # data[1] holds the stored vod_time; data[0] the row id.
                        if str(data[1]) != i['vod_time']:
                            # Refresh stale row: delete first, then re-insert.
                            mysql.Delete(movievod_delete_sql % (movievod_table, data[0]))
                            mysql.Insert_OneData(insert)
                            logger.info(f'{movievod_table}表中，{i["vod_name"]}已更新完毕')
                logger.info(f'{movie_type}:第{page}页电影数据爬取完成')
            else:
                # BUG FIX: the original concatenated str + int (status_code
                # is an int), raising TypeError; format it instead.
                logger.warning(f'连接失败:{requests_get.status_code}')
        except Exception as e:
            # Best-effort: record the failed URL (one dated CSV per day)
            # so it can be re-crawled, then keep processing the queue.
            with open(f'失败{datetime.datetime.now().date()}.csv', 'a', encoding='utf-8') as f:
                f.write(f'{movie_type},{movievod_table},{url}\n')
            logger.warning(str(e))
        queue.task_done()  # tell the queue this item has been processed