#!/usr/bin/env python
# -*- coding: UTF-8 -*-

import os
import sys
import requests
from io import BytesIO
from PIL import Image
import time
import random
import uuid

# Add the project root to sys.path so `backend.*` imports resolve
project_path = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(project_path)

# Import the shared MinIO client (project-local module)
from backend.tools.minio_client import minio_client

class RealImageCrawler:
    """Crawl stock images from public image services, resize them, and
    upload them to MinIO object storage.

    Images are grouped into four categories (banners, icons, avatars,
    services); each category maps item names to search keywords and is
    uploaded under a folder of the same name.
    """

    # Per-category upload spec driving crawl_and_upload_all_images():
    # (category/folder key, console label, section header, (max_w, max_h), extension)
    _CATEGORY_SPECS = (
        ('banners',  '轮播图',   '爬取轮播图',   (600, 300), '.jpg'),
        ('icons',    '图标',     '爬取分类图标', (100, 100), '.png'),
        ('avatars',  '头像',     '爬取技师头像', (200, 200), '.jpg'),
        ('services', '服务图片', '爬取服务图片', (200, 200), '.jpg'),
    )

    def __init__(self):
        """Initialize request headers and the category -> keyword tables."""
        # Browser-like User-Agent so the image services don't reject us
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
        }
        # Search keywords per category; keys double as upload file names
        self.image_keywords = {
            'banners': {
                'banner1': 'massage therapy',
                'banner2': 'spa treatment'
            },
            'icons': {
                'massage': 'massage icon',
                'spa': 'spa icon',
                'footcare': 'foot massage icon',
                'bodycare': 'body care icon'
            },
            'avatars': {
                'technician1': 'massage therapist',
                'technician2': 'spa therapist',
                'technician3': 'beauty therapist'
            },
            'services': {
                'service1': 'full body massage',
                'service2': 'foot massage'
            }
        }

    def crawl_image(self, keyword, image_type="photo"):
        """Fetch one image for *keyword*, trying several sources in order.

        :param keyword: search keyword appended to the source URL
        :param image_type: kept for interface compatibility; the current
            sources do not use it
        :return: raw image bytes, or None if every source failed
        """
        print(f"正在搜索图片: {keyword}")

        # Candidate sources, tried in order until one returns an image
        search_engines = [
            f"https://source.unsplash.com/featured/?{keyword}",
            f"https://picsum.photos/600/300",  # random-image fallback (keyword ignored)
        ]

        for attempt, url in enumerate(search_engines):
            try:
                print(f"尝试从 {url} 获取图片...")
                response = requests.get(url, headers=self.headers, timeout=10)
                if response.status_code == 200:
                    # Some endpoints answer 200 with an HTML error page;
                    # accept only actual image payloads.
                    content_type = response.headers.get('content-type', '')
                    if 'image' in content_type:
                        print(f"成功获取图片: {keyword}")
                        return response.content
                    print(f"URL返回的不是图片: {content_type}")
                else:
                    print(f"请求失败，状态码: {response.status_code}")
            except Exception as e:
                print(f"获取图片失败: {e}")

            # Throttle between attempts to avoid being rate-limited.
            # Fix: the original `continue` in the except branch skipped this
            # delay exactly when the request had just errored out.
            if attempt < len(search_engines) - 1:
                time.sleep(random.uniform(1, 3))

        print(f"无法获取图片: {keyword}")
        return None

    def resize_image(self, image_data, max_width=600, max_height=600):
        """Downscale an image to fit within a bounding box and re-encode as JPEG.

        :param image_data: raw image bytes (any format Pillow can open)
        :param max_width: maximum output width in pixels
        :param max_height: maximum output height in pixels
        :return: JPEG bytes, or the original *image_data* unchanged if
            decoding/re-encoding fails (best effort)
        """
        try:
            image = Image.open(BytesIO(image_data))

            # Shrink only; thumbnail() preserves aspect ratio and never enlarges
            if image.width > max_width or image.height > max_height:
                image.thumbnail((max_width, max_height), Image.Resampling.LANCZOS)

            # JPEG has no alpha channel: composite transparent images onto
            # a white background before saving.
            if image.mode in ('RGBA', 'LA', 'P'):
                background = Image.new('RGB', image.size, (255, 255, 255))
                if image.mode == 'P':
                    # Palette images may carry transparency; go via RGBA
                    image = image.convert('RGBA')
                background.paste(image, mask=image.split()[-1] if image.mode in ('RGBA', 'LA') else None)
                image = background

            output = BytesIO()
            image.save(output, format='JPEG', quality=85)
            return output.getvalue()
        except Exception as e:
            # Best effort: fall back to the unmodified bytes rather than fail
            print(f"调整图片大小失败: {e}")
            return image_data

    @staticmethod
    def _object_name(folder, filename):
        """Build the MinIO object key for *filename* inside *folder*."""
        return f"{folder}/{filename}"

    def upload_to_minio(self, image_data, folder, filename):
        """Upload image bytes to MinIO and return a presigned GET URL.

        :param image_data: image bytes to store
        :param folder: destination folder (object-key prefix)
        :param filename: object file name within the folder
        :return: presigned URL string, or None on failure
        """
        try:
            # Fix: the object key previously hard-coded "(unknown)" instead
            # of using *filename*, so every upload within a folder
            # overwrote the same object.
            object_name = self._object_name(folder, filename)
            minio_client.upload_bytes(object_name, image_data, content_type="image/jpeg")
            return minio_client.presigned_get_url(object_name)
        except Exception as e:
            print(f"上传图片到MinIO失败: {e}")
            return None

    def _crawl_category(self, category, label, header, size, ext):
        """Crawl, resize, and upload every keyword of one category.

        :param category: key into self.image_keywords; also the MinIO folder
        :param label: console label used in per-item progress messages
        :param header: section header printed before the category
        :param size: (max_width, max_height) bounding box for resizing
        :param ext: file extension for the uploaded object name
        :return: dict mapping item name -> presigned URL (failed items omitted)
        """
        print(f"\n=== {header} ===")
        urls = {}
        max_width, max_height = size
        for name, keyword in self.image_keywords[category].items():
            image_data = self.crawl_image(keyword)
            if image_data:
                resized_data = self.resize_image(image_data, max_width, max_height)
                # NOTE(review): resize_image always re-encodes as JPEG, so
                # the ".png" icons actually contain JPEG bytes — kept for
                # backward compatibility with existing object keys.
                url = self.upload_to_minio(resized_data, category, f"{name}{ext}")
                if url:
                    urls[name] = url
                    print(f"{label} {name} 上传成功: {url}")
                else:
                    print(f"{label} {name} 上传失败")
            else:
                print(f"{label} {name} 爬取失败")

            # Pause between items to stay under rate limits
            time.sleep(random.uniform(1, 2))
        return urls

    def crawl_and_upload_all_images(self):
        """Crawl and upload every configured image, category by category.

        :return: nested dict {category: {item name: presigned URL}}
        """
        print("开始爬取并上传所有图片...")

        image_urls = {}
        for category, label, header, size, ext in self._CATEGORY_SPECS:
            image_urls[category] = self._crawl_category(category, label, header, size, ext)

        print("\n所有图片爬取和上传完成!")
        return image_urls

def main():
    """Entry point: crawl every configured image and print the uploaded URLs."""
    crawler = RealImageCrawler()
    uploaded = crawler.crawl_and_upload_all_images()

    # Summarize all uploaded URLs grouped by category
    print("\n=== 图片URL汇总 ===")
    for category in uploaded:
        print(f"\n{category}:")
        for item, link in uploaded[category].items():
            print(f"  {item}: {link}")

if __name__ == "__main__":
    main()