import os
import re
import threading
import time
from datetime import datetime
import requests
from setting.redis_config import redis_test_url
from utils.msg_queue import connect_message_queue
from unsplash_spider_image.user_agent_kuaidaili_proxy import get_random_ua
import os
from setting.mysql_test import mysql45_config
import pymysql

# Redis auth password; None means no password.  NOTE(review): not referenced
# in this file — presumably consumed by setting/redis_config; confirm.
REDIS_PASSWORD = None
# Root directory for downloaded videos; dated sub-folders are created under it.
VIDEO_PATH = r'F:\video\mixkit'
# Kuaidaili HTTP/HTTPS proxy with embedded credentials.
# NOTE(review): hard-coded credentials in source, and `proxy` is not passed to
# any request in this file — confirm whether it is dead and move secrets to config.
proxy = {'http': 'http://t10635913038651:09ht71vf@tps152.kdlapi.com:15818/', 'https': 'http://t10635913038651:09ht71vf@tps152.kdlapi.com:15818/'}


class MixkitVideoSpiderConsume(object):
    """Consume video-download messages from a Redis queue.

    Each message is a dict carrying ``video_url``, ``video_name`` and
    ``md5_url``.  The consumer downloads the video into a dated folder under
    ``VIDEO_PATH`` and records the download status plus the relative local
    path in the ``video_info`` MySQL table.  Failed messages are pushed back
    onto the queue for a later retry.
    """

    def __init__(self, query):
        # `query` doubles as the name of the Redis queue to consume from.
        self.redis_url = redis_test_url
        self.query = query
        self.mysql_conn = pymysql.connect(**mysql45_config)
        self.mysql_course = self.mysql_conn.cursor()

    def run(self):
        # Single-threaded: the multi-thread fan-out that used to live here
        # was disabled (range(0)), so consume inline.
        self.consume_message()

    def update_status_local_path(self, video_load_path, md5_url):
        """Mark the video as downloaded and persist its relative path.

        Only the last two path components (dated folder + file name) of
        `video_load_path` are stored.
        """
        path_list = video_load_path.split('\\')
        path = path_list[-2] + "\\" + path_list[-1]
        print('path', path)
        # Parameterized query: the original interpolated values with %, which
        # is SQL-injection prone and forced manual doubling of backslashes.
        # pymysql escapes the bound values itself.
        sql = ("update video_info set download_status=1, local_path=%s, "
               "update_on=NOW() where md5_url=%s")
        print('更新下载状态和存储路径', md5_url)
        self.mysql_course.execute(sql, (path, md5_url))
        self.mysql_conn.commit()

    def redis_new_queue(self, json_msg):
        """Push a failed message back onto the queue for retry."""
        q = connect_message_queue(self.query, url=self.redis_url, maxsize=10000, lazy_limit=True)
        q.put(json_msg)
        print('请求失败重新回归队列')

    def consume_message(self):
        """Drain the queue: download each video and record the result."""
        thread_id = threading.current_thread().ident
        p = connect_message_queue(self.query, url=self.redis_url, maxsize=10000, lazy_limit=True)
        while p.qsize() > 0:
            json_msg = p.get()
            print('json_msg', json_msg)
            if not json_msg:
                continue

            print("Thread {} start download video".format(thread_id))
            time.sleep(1)  # gentle rate limit between downloads
            result, md5_url, video_download_path = self.save_url_video(json_msg)
            if result:
                self.update_status_local_path(video_download_path, md5_url)
            else:
                # Sole requeue point for failures (see get_video_requests).
                self.redis_new_queue(json_msg)

            print("Thread {} finish download video".format(thread_id))

    def get_video_requests(self, json_msg):
        """Fetch the video bytes; return None on any request failure.

        Requeueing on failure is the caller's job (consume_message ->
        redis_new_queue).  The original ALSO pushed the message back onto the
        queue here, so every failed message was enqueued twice; that
        duplicate requeue is removed.
        """
        video_url = json_msg.get('video_url')
        new_user_agent = get_random_ua()
        headers = {
            'Connection': 'close',
            'Cookie': 'CookieConsent={stamp:%27-1%27%2Cnecessary:true%2Cpreferences:true%2Cstatistics:true%2Cmarketing:true%2Cmethod:%27implied%27%2Cver:1%2Cutc:1701682560290%2Cregion:%27KR%27}; _gcl_au=1.1.22244747.1701682563; _gid=GA1.2.1569933861.1701682563; _fbp=fb.1.1701682564796.948669068; _clck=1551ykf%7C2%7Cfha%7C1%7C1433; algolia-user-token=2ce69c086843928b5c2e0c773854d54f; __cf_bm=pGKIWkiYIZJvZbKzboCMe.tNTYwYbNXTqVzHBkze95w-1701761957-0-AbgVKfcUePEQCivPGIld59oiV9cWTanskty0K36/47ZovaWJ46JKQAwGhNCeKKVsL1da0W9iqhns4/QenD/68hs=; _clsk=qy7geb%7C1701761957835%7C6%7C1%7Cw.clarity.ms%2Fcollect; _ga=GA1.1.1620032917.1701682563; _gat_gtag_UA_11834194_84=1; _ga_HD6V8WBY2G=GS1.1.1701761986.4.1.1701762071.0.0.0; _ga_VXF53CMVLJ=GS1.1.1701761986.4.1.1701762071.0.0.0',
            'User-Agent': new_user_agent
        }
        try:
            response = requests.get(video_url, headers=headers, timeout=120)
            print('请求成功: response_status_code', response.status_code)
            video_content = response.content
            response.close()
            return video_content
        except Exception as e:
            print('e', e)
            return None

    def get_folder_name(self, image_name):
        """Return (creating it if needed) the dated folder for this video.

        The folder name is the first run of 8 digits in `image_name`
        (a YYYYMMDD-style stamp), falling back to today's date.
        """
        folder_match = re.search(r'\d{8}', image_name)
        if folder_match:
            folder_path = folder_match.group()
        else:
            folder_path = datetime.now().strftime('%Y%m%d')
        video_folder_path = os.path.join(VIDEO_PATH, folder_path)
        # exist_ok=True collapses the original check-then-create branches and
        # is also race-free.
        os.makedirs(video_folder_path, exist_ok=True)
        print('video_folder_path', video_folder_path)
        return video_folder_path

    def save_url_video(self, json_msg):
        """Download one video to disk.

        Returns (True, md5_url, download_path) on success and
        (False, md5_url, None) on failure (the original returned video_url
        in the failure tuple, inconsistent with the success tuple).
        """
        video_url = json_msg.get('video_url')
        video_name = json_msg.get('video_name')
        md5_url = json_msg.get('md5_url')
        # Bail out before touching the filesystem: the original created the
        # dated folder even when the url was missing.
        if not video_url:
            print('图片url不存在，跳过')
            return False, md5_url, None

        video_folder_path = self.get_folder_name(video_name)
        video_content = self.get_video_requests(json_msg)
        if not video_content:
            print('请求图片失败')
            return False, md5_url, None
        video_download_path = os.path.join(video_folder_path, video_name)
        with open(video_download_path, 'wb') as fp:
            fp.write(video_content)
            print('图片下载成功')
        return True, md5_url, video_download_path

    def __del__(self):
        # Best-effort cleanup: __init__ may have raised before the cursor or
        # connection existed, and __del__ must never propagate exceptions.
        try:
            self.mysql_course.close()
            self.mysql_conn.close()
        except Exception:
            pass


if __name__ == '__main__':
    # Entry-point guard: importing this module must not open a MySQL
    # connection or start draining the queue.
    query = 'life'
    consume = MixkitVideoSpiderConsume(query)
    consume.run()












