import os
import re
import threading
# # Get the absolute paths of the files
# current_dir = os.path.dirname(os.path.abspath(__file__))
# msg_queue_path = os.path.join(current_dir, 'utils', 'msg_queue.py')
# redis_config_path = os.path.join(current_dir, 'setting', 'redis_config.py')
# mysql_config_path = os.path.join(current_dir, 'setting', 'mysql_test.py')
# user_agent_path = os.path.join(current_dir, 'unsplash_spider_image', 'user_agent_kuaidaili_proxy.py')
# md5_path = os.path.join(current_dir, 'utils', 'md5.py')
# import threading
import time
from datetime import datetime
import requests
from setting.redis_config import redis_test_url
from utils.msg_queue import connect_message_queue
from unsplash_spider_image.user_agent_kuaidaili_proxy import get_random_ua
import os
import urllib.request
from setting.mysql_test import mysql45_config
import pymysql

REDIS_PASSWORD = None
IMAGE_PATH = r'F:\image\new_unsplash'
proxy = {'http': 'http://t10635913038651:09ht71vf@tps152.kdlapi.com:15818/', 'https': 'http://t10635913038651:09ht71vf@tps152.kdlapi.com:15818/'}


class UnsplashImageSpiderConsume(object):
    """Multi-threaded consumer that pulls image-download messages from a
    Redis-backed queue, downloads each image over HTTP (through a proxy),
    writes it to disk under a date-named folder, and records the result
    in the ``image_info`` MySQL table.

    NOTE(review): a single pymysql connection/cursor is shared by all
    consumer threads; DB access is therefore serialized with ``db_lock``.
    """

    def __init__(self, query):
        """
        :param query: name of the Redis queue to consume from.
        """
        self.redis_url = redis_test_url
        self.query = query
        self.mysql_conn = pymysql.connect(**mysql45_config)
        self.mysql_course = self.mysql_conn.cursor()
        # pymysql connections are not thread-safe; run() starts many
        # threads, so every execute/commit must hold this lock.
        self.db_lock = threading.Lock()

    def run(self):
        """Spawn the consumer threads (non-blocking; threads are kept in
        ``self.consumers_threads`` but not joined here)."""
        self.consumers_threads = []
        for i in range(35):
            t = threading.Thread(target=self.consume_message)
            t.start()
            self.consumers_threads.append(t)

    def update_status_local_path(self, image_load_path, md5_url):
        """Mark the row matching ``md5_url`` as downloaded and store the
        image's relative path (last two path components, e.g.
        ``20231109\\name.jpg``).

        Uses a parameterized query: pymysql escapes the backslashes in
        ``path`` itself, so no manual ``\\`` doubling is needed, and the
        statement is safe against SQL injection.
        """
        path_list = image_load_path.split('\\')
        path = path_list[-2] + "\\" + path_list[-1]
        print('path', path)
        sql = ("update image_info set download_status=1, local_path=%s, "
               "update_on=NOW() where md5_url=%s")
        print('更新下载状态和存储路径', md5_url)
        with self.db_lock:
            self.mysql_course.execute(sql, (path, md5_url))
            self.mysql_conn.commit()

    def consume_message(self):
        """Worker loop: drain the queue, downloading each message's image
        and updating its DB row on success.

        NOTE(review): the loop exits as soon as ``qsize()`` reports 0,
        so a momentarily-empty queue ends the thread — presumably
        acceptable for this batch-style job.
        """
        thread_id = threading.current_thread().ident
        q = connect_message_queue(self.query, url=self.redis_url, maxsize=10000, lazy_limit=True)
        while q.qsize() > 0:
            json_msg = q.get()
            print('json_msg', json_msg)
            if not json_msg:
                continue

            # Before downloading the image.
            print("Thread {} start download image".format(thread_id))
            time.sleep(1)  # throttle requests a little per thread
            result, md5_url, image_load_path = self.save_url_image(json_msg)
            if result:
                self.update_status_local_path(image_load_path, md5_url)

            print("Thread {} finish download image".format(thread_id))

    def load_image(self, json_msg):
        """Download the image straight from its URL with urllib.

        Slower than the requests-based path, so it is kept only as a
        fallback and is not called by consume_message().
        """
        img_url = json_msg.get('oss_image_url')
        image_name = json_msg.get('oss_image_name')
        if img_url:
            image_load_path = os.path.join(IMAGE_PATH, image_name)
            print('image_load_path', image_load_path)
            urllib.request.urlretrieve(url=img_url, filename=image_load_path)

    def get_image_requests(self, json_msg):
        """Fetch the image bytes with requests through the proxy.

        On any failure the message is re-queued (currently onto the SAME
        queue, so failures are retried indefinitely — a dedicated
        ``_retry`` queue was considered, see the commented name) and
        ``None`` is returned.

        :returns: raw image bytes, or ``None`` on failure.
        """
        img_url = json_msg.get('oss_image_url')
        new_user_agent = get_random_ua()
        print('new_user_agent', new_user_agent)
        headers = {
            'Connection': 'close',
            'User-Agent': new_user_agent
        }
        try:
            response = requests.get(img_url, headers=headers, proxies=proxy, timeout=120)
            print('请求成功: response_status_code', response.status_code)
            image_content = response.content
            response.close()
            return image_content
        except Exception as e:
            print('e', e)
            # retry_queue_name = self.query + '_retry'
            retry_queue_name = self.query
            p = connect_message_queue(retry_queue_name, url=self.redis_url, maxsize=10000, lazy_limit=True)
            p.put(json_msg)
            return None

    def get_folder_name(self, image_name):
        """Return (and create if needed) the destination folder for an image.

        The folder is named by the 8-digit date embedded in the image
        name (e.g. ``20231109``); if none is present, today's date is
        used. ``exist_ok=True`` makes the create race-free across the
        many concurrent consumer threads.
        """
        folder_match = re.search(r'\d{8}', image_name)
        if folder_match:
            folder_name = folder_match.group()
        else:
            # No embedded date: fall back to today's date (YYYYMMDD).
            folder_name = datetime.now().strftime('%Y%m%d')
        image_folder_path = os.path.join(IMAGE_PATH, folder_name)
        os.makedirs(image_folder_path, exist_ok=True)
        print('image_folder_path', image_folder_path)
        return image_folder_path

    def save_url_image(self, json_msg):
        """Download one message's image and write its bytes to disk.

        :param json_msg: dict with keys ``oss_image_url``,
            ``oss_image_name`` and ``md5_url``.
        :returns: tuple ``(success, md5_url, saved_path_or_None)``.
        """
        img_url = json_msg.get('oss_image_url')
        image_name = json_msg.get('oss_image_name')
        md5_url = json_msg.get('md5_url')
        # Destination folder: 8 consecutive digits in the name, e.g. '20231109'.
        image_folder_path = self.get_folder_name(image_name)
        if not img_url:
            print('图片url不存在，跳过')
            return False, md5_url, None

        image_content = self.get_image_requests(json_msg)
        if not image_content:
            print('请求图片失败')
            return False, md5_url, None
        image_load_path = os.path.join(image_folder_path, image_name)
        with open(image_load_path, 'wb') as fp:
            fp.write(image_content)
            print('图片下载成功')
        return True, md5_url, image_load_path

    def __del__(self):
        # Best-effort cleanup: the attributes may not exist if __init__
        # failed, and __del__ must never raise.
        try:
            self.mysql_course.close()
            self.mysql_conn.close()
        except Exception:
            pass

if __name__ == '__main__':
    # Guard the entry point so importing this module does not open a
    # MySQL connection and spawn 35 consumer threads as a side effect.
    query = 'service'
    consume = UnsplashImageSpiderConsume(query)
    consume.run()

