import requests
from bs4 import BeautifulSoup
import json
import os
import re
import sys


def download_bing_images(num_images=4, search_query="jayjay电脑壁纸"):
    """
    Download specified number of wallpapers from Bing images search.

    Scrapes the Bing image-search result page, extracts the full-size image
    URL from each result's ``m`` JSON attribute, and saves the files to a
    fixed local directory.

    Args:
        num_images (int): Number of images to download, default is 4.
        search_query (str): Search query for Bing images, default is "jayjay电脑壁纸".

    Returns:
        bool: True if every requested image downloaded successfully,
        False on any failure (bad HTTP status, no results, or any
        per-image error).
    """
    # 1. Send request to Bing images search
    encoded_query = requests.utils.quote(search_query)
    url = f"https://cn.bing.com/images/search?q={encoded_query}&form=ANNTH1&refig=68d667ded62a4ad0bcd4a4f536fb730d&pc=WSEDDB&adppc=EDGEXST&pq={encoded_query}&pqlth=8&assgl=10&sgcn={encoded_query}&qs=RQ&sgtpv=RQ&smvpcn=0&swbcn=9&sctcn=0&sc=9-8&sp=2&ghc=0&cvid=68d667ded62a4ad0bcd4a4f536fb730d&clckatsg=1&hsmssg=0"

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }

    # timeout prevents the script from hanging forever on a stalled connection
    # (the per-image downloads below already use one — keep them consistent)
    response = requests.get(url, headers=headers, timeout=30)

    # 2. Check if request was successful
    if response.status_code != 200:
        print(f"Failed to fetch webpage, status code: {response.status_code}")
        return False

    # 3. Parse HTML content
    page = BeautifulSoup(response.text, "html.parser")

    # 4. Find all image elements
    # Each result anchor with class "iusc" carries a JSON blob in its "m"
    # attribute containing the full-size image URL ("murl") and title ("t").
    image_elements = page.find_all('a', class_="iusc")
    if not image_elements:
        print("No image elements found")
        return False

    print(f"Found {len(image_elements)} images, preparing to download first {num_images}")

    # 5. Create download directory (exist_ok avoids a check-then-create race)
    download_dir = r"D:\Python\JayJay_Wallpapers"
    if not os.path.exists(download_dir):
        print(f"Created directory: {download_dir}")
    os.makedirs(download_dir, exist_ok=True)

    # 6. Track download results
    success_count = 0
    failed_count = 0
    failed_list = []

    # 7. Download specified number of images
    for i, element in enumerate(image_elements[:num_images]):
        try:
            # Get m attribute JSON data
            m_data = element.get("m")
            if not m_data:
                print(f"Image {i + 1} has no m data, skipping")
                failed_count += 1
                failed_list.append(f"Image {i + 1}: No m data")
                continue

            # Parse JSON data
            image_info = json.loads(m_data)
            image_url = image_info.get("murl")
            image_title = image_info.get("t", f"image_{i}")

            if not image_url:
                print(f"Image {i + 1} has no valid URL, skipping")
                failed_count += 1
                failed_list.append(f"Image {i + 1}: No valid URL")
                continue

            # Clean filename (remove illegal Windows filename characters)
            clean_title = re.sub(r'[<>:"/\\|?*]', '', image_title)
            file_extension = os.path.splitext(image_url)[1]
            # Fall back to .jpg when the URL has no extension or a bogus one
            # (e.g. query-string residue longer than a real extension)
            if not file_extension or len(file_extension) > 5:
                file_extension = '.jpg'

            # Build filename and full path; the index suffix avoids collisions
            filename = f"{clean_title}_{i}{file_extension}"
            filepath = os.path.join(download_dir, filename)

            # Download image
            print(f"Downloading ({i + 1}/{num_images}): {image_title}")
            # "with" ensures the streamed connection is released even on error
            with requests.get(image_url, headers=headers, stream=True, timeout=30) as img_response:
                if img_response.status_code == 200:
                    # Save image file in chunks to bound memory usage
                    with open(filepath, 'wb') as f:
                        for chunk in img_response.iter_content(chunk_size=8192):
                            if chunk:
                                f.write(chunk)

                    # Verify file was downloaded successfully
                    if os.path.exists(filepath) and os.path.getsize(filepath) > 0:
                        print(f"✅ Successfully saved: {filename}")
                        success_count += 1
                    else:
                        print(f"❌ Failed to save file: {filename}")
                        failed_count += 1
                        failed_list.append(f"Image {i + 1}: Failed to save file")
                else:
                    print(f"❌ Download failed, HTTP status code: {img_response.status_code}")
                    failed_count += 1
                    failed_list.append(f"Image {i + 1}: HTTP error {img_response.status_code}")

        except Exception as e:
            # Best-effort per-image handling: record the failure and move on
            print(f"❌ Error processing image {i + 1}: {str(e)}")
            failed_count += 1
            failed_list.append(f"Image {i + 1}: Exception {str(e)}")

    # 8. Final report
    print("\n" + "=" * 50)
    print("Download Summary:")
    print(f"✅ Successful downloads: {success_count}")
    print(f"❌ Failed downloads: {failed_count}")

    if failed_list:
        print("\nFailure details:")
        for failure in failed_list:
            print(f"  - {failure}")

    # Check downloaded files in target directory
    downloaded_files = [f for f in os.listdir(download_dir) if
                        f.lower().endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp'))]
    print(f"\nActual images in target directory: {len(downloaded_files)}")

    if downloaded_files:
        print("File list:")
        for file in downloaded_files:
            file_size = os.path.getsize(os.path.join(download_dir, file))
            print(f"  - {file} ({file_size} bytes)")

    # Return result based on failures
    if failed_count > 0:
        print(f"\n❌ Warning: {failed_count} images failed to download!")
        return False
    else:
        print(f"\n✅ All images downloaded successfully!")
        return True


# Run the program
# Run the program
if __name__ == "__main__":
    # Get command line argument (if any); fall back to the default of 4
    # when the argument is missing or not a valid integer.
    if len(sys.argv) > 1:
        try:
            num_images = int(sys.argv[1])
            print(f"Will download {num_images} images from command line argument")
        except ValueError:
            print("⚠️ Invalid argument, using default value of 4 images")
            num_images = 4
    else:
        num_images = 4  # Default to 4 images

    success = download_bing_images(num_images)
    if not success:
        # sys.exit is the proper way to terminate a program (the bare exit()
        # builtin is a site-module convenience meant for the interactive shell)
        sys.exit(1)  # Non-zero exit code indicates error