import cv2
import numpy as np
import os
from imutils import paths
import pickle
import zipfile
import tempfile
import shutil
import glob
from tqdm import tqdm
import logging

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
from PIL import UnidentifiedImageError


def get_size(file):
    """Return the size of *file* on disk, expressed in megabytes."""
    return os.path.getsize(file) / 1024 / 1024


def _cleanup_tmp(tmp_dir):
    """Best-effort removal of a temporary extraction directory (no-op on None)."""
    try:
        if tmp_dir and os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
            logging.info(f"清理临时解压目录: {tmp_dir}")
    except Exception:
        # Cleanup must never mask the real result or error.
        pass


def _resolve_train_folder(train_folder):
    """Resolve *train_folder* to a readable directory of images.

    Lookup order (same as before the refactor):
    1. If it is a .zip file, extract it into a fresh temp directory.
    2. If the path does not exist, retry it relative to the CWD.
    3. As a last resort, extract any *.zip in the CWD that contains images.

    Returns (resolved_folder, extracted_tmp); *extracted_tmp* is a temp
    directory the caller must remove via _cleanup_tmp, or None.
    """
    extracted_tmp = None
    try:
        if os.path.isfile(train_folder) and train_folder.lower().endswith('.zip'):
            logging.info(f"传入的 train_folder 是 zip 文件，解压到临时目录后使用: {train_folder}")
            extracted_tmp = tempfile.mkdtemp(prefix='createXY_')
            try:
                with zipfile.ZipFile(train_folder, 'r') as zf:
                    zf.extractall(extracted_tmp)
                train_folder = extracted_tmp
            except Exception as e:
                # Extraction failed: keep the original path; the temp dir is
                # still recorded so it gets cleaned up later.
                logging.exception(f"解压传入 zip 失败: {e}")
    except Exception:
        pass

    if not os.path.exists(train_folder):
        alt = os.path.abspath(os.path.join(os.getcwd(), train_folder))
        if os.path.exists(alt):
            logging.info(f"train_folder 未直接找到，使用解析后路径: {alt}")
            train_folder = alt
        else:
            # Scan the CWD for any zip that actually contains images.
            for z in glob.glob('*.zip'):
                tmp = tempfile.mkdtemp(prefix='createXY_')
                try:
                    with zipfile.ZipFile(z, 'r') as zf:
                        zf.extractall(tmp)
                    if any(paths.list_images(tmp)):
                        logging.info(f"在当前目录发现 zip 并解压可用数据: {z} -> {tmp}")
                        extracted_tmp = tmp
                        train_folder = tmp
                        break
                    shutil.rmtree(tmp)
                except Exception:
                    shutil.rmtree(tmp, ignore_errors=True)
    return train_folder, extracted_tmp


def _load_cache_pair(x_path, y_path, found_msg, legacy=False):
    """Load a pickled (X, y) cache pair whose files are known to exist.

    Returns the (X, y) tuple, or None when the cache is empty or unreadable.
    Zero-byte files and empty-array caches are deleted so the caller
    regenerates them.  *legacy* only selects the matching log messages.

    NOTE(review): pickle.load on these files assumes dest_folder is trusted;
    never point it at data from an untrusted source.
    """
    empty_file_msg = ("检测到空旧版缓存文件，忽略并删除" if legacy
                      else "检测到空缓存文件，忽略并删除")
    empty_array_msg = ("旧版缓存包含空数组，忽略并删除缓存文件" if legacy
                       else "缓存文件包含空数组，忽略并删除缓存文件")
    fail_prefix = "读取旧版缓存失败" if legacy else "读取缓存失败"
    try:
        x_size = os.path.getsize(x_path)
        y_size = os.path.getsize(y_path)
        logging.info(found_msg)
        logging.info(f"X文件大小:{get_size(x_path):.2f}MB")
        logging.info(f"y文件大小:{get_size(y_path):.2f}MB")
        if x_size == 0 or y_size == 0:
            logging.warning(empty_file_msg)
            try:
                if x_size == 0:
                    os.remove(x_path)
                if y_size == 0:
                    os.remove(y_path)
            except Exception:
                pass
            return None
        with open(x_path, 'rb') as fx:
            X = pickle.load(fx)
        with open(y_path, 'rb') as fy:
            y = pickle.load(fy)
        try:
            if np.array(X).size == 0 or np.array(y).size == 0:
                logging.warning(empty_array_msg)
                try:
                    os.remove(x_path)
                    os.remove(y_path)
                except Exception:
                    pass
                return None
        except Exception:
            # Could not sanity-check the arrays; keep the original
            # best-effort behavior and hand back whatever was unpickled.
            pass
        return X, y
    except Exception as e:
        logging.exception(f"{fail_prefix}，忽略缓存并重新生成: {e}")
        return None


def _extract_features(image_paths, train_folder, method, batch_size):
    """Read images in batches and return (features, labels) as numpy arrays.

    method='vgg': images loaded at 224x224 RGB and run through VGG16
    (imagenet weights, no top, max pooling).  Any other method: 32x32
    grayscale pixels read with cv2 and flattened.  Unreadable or corrupt
    images are skipped with a warning.  Labels come from the image's parent
    directory name, or from the filename prefix (before the first '.') when
    the image sits directly in *train_folder*.
    """
    if method == 'vgg':
        # Imported once here (not per-image) — TF import is expensive.
        from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
        from tensorflow.keras.utils import load_img, img_to_array
        model = VGG16(weights='imagenet', include_top=False, pooling="max")
        logging.info("完成构建 VGG16 模型")
    else:
        model = None

    train_base = os.path.basename(os.path.normpath(train_folder))
    X, y = [], []
    num_batches = (len(image_paths) + batch_size - 1) // batch_size

    for idx in tqdm(range(num_batches), desc="读取图像"):
        batch_images, batch_labels = [], []
        for image_path in image_paths[idx * batch_size:(idx + 1) * batch_size]:
            # Defensive loading: skip and log unreadable/corrupt files.
            if method == 'vgg':
                try:
                    img = img_to_array(load_img(image_path, target_size=(224, 224)))
                except (UnidentifiedImageError, OSError, ValueError) as e:
                    logging.warning(f"跳过无法识别/打开的图像: {image_path} -> {e}")
                    continue
                except Exception as e:
                    logging.exception(f"加载图像时发生未知错误，跳过: {image_path} -> {e}")
                    continue
            else:
                img = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
                if img is None:
                    logging.warning(f"cv2 无法读取图像，跳过: {image_path}")
                    continue
                try:
                    img = cv2.resize(img, (32, 32))
                except Exception as e:
                    logging.warning(f"调整图像大小失败，跳过: {image_path} -> {e}")
                    continue

            batch_images.append(img)
            parent = os.path.basename(os.path.dirname(image_path))
            batch_labels.append(parent if parent and parent != train_base
                                else os.path.basename(image_path).split('.')[0])

        # Whole batch unreadable (e.g. all corrupt) -> skip it entirely.
        if not batch_images:
            logging.info(f"批次 {idx} 中没有有效图像，已跳过")
            continue

        batch_images = np.array(batch_images)
        if method == 'vgg':
            batch_images = preprocess_input(batch_images)
            feats = model.predict(batch_images, verbose=0)
        else:
            feats = batch_images.reshape((batch_images.shape[0], -1))

        # Accumulate inside the loop so every batch is kept, not just the last.
        X.extend(feats)
        y.extend(batch_labels)

    return np.array(X), np.array(y)


def createXY(train_folder, dest_folder, method='vgg', batch_size=64, max_images=None):
    """Build feature matrix X and label vector y from a training folder or zip.

    Parameters
    ----------
    train_folder : str
        Directory of images (optionally grouped in per-class subfolders,
        e.g. data/train/dog/*.jpg), or a .zip file containing one.
    dest_folder : str
        Directory for the pickle caches (X_{method}.pkl / y_{method}.pkl);
        created on demand when writing.
    method : str
        'vgg' -> VGG16 imagenet features; anything else -> flattened
        32x32 grayscale pixels.
    batch_size : int
        Images processed per batch.
    max_images : int or None
        If a positive int, only the first *max_images* images are used
        (quick-debug aid).

    Returns
    -------
    (X, y) : numpy arrays; a pair of empty arrays when no images are found.
    String class labels are mapped to sorted integer ids.
    """
    x_file_path = os.path.join(dest_folder, f"X_{method}.pkl")
    y_file_path = os.path.join(dest_folder, f"y_{method}.pkl")
    legacy_x_path = os.path.join(dest_folder, "X.pkl")
    legacy_y_path = os.path.join(dest_folder, "y.pkl")

    train_folder, extracted_tmp = _resolve_train_folder(train_folder)

    try:
        # Prefer the method-specific cache; fall back to the legacy pair only
        # when the method-specific files are entirely absent (as before).
        cached = None
        if os.path.exists(x_file_path) and os.path.exists(y_file_path):
            cached = _load_cache_pair(
                x_file_path, y_file_path,
                f"发现缓存（method={method}）：直接读取 {os.path.basename(x_file_path)}, {os.path.basename(y_file_path)}")
        elif os.path.exists(legacy_x_path) and os.path.exists(legacy_y_path):
            cached = _load_cache_pair(
                legacy_x_path, legacy_y_path,
                f"发现旧版缓存：直接读取 {os.path.basename(legacy_x_path)}, {os.path.basename(legacy_y_path)}",
                legacy=True)
        if cached is not None:
            return cached

        logging.info(f"读取所有图像，生成X和y（method={method}）")

        # Final fallback: try the folder's basename relative to the CWD.
        if not os.path.exists(train_folder):
            alt2 = os.path.abspath(os.path.join(os.getcwd(), os.path.basename(train_folder)))
            if os.path.exists(alt2):
                logging.info(f"train_folder 未直接找到，使用候选路径: {alt2}")
                train_folder = alt2

        image_paths = list(paths.list_images(train_folder))
        # Allow processing only a slice of the data for quick debugging.
        if max_images and isinstance(max_images, int) and max_images > 0:
            image_paths = image_paths[:max_images]

        if not image_paths:
            logging.error(f"在路径 {train_folder} 中未找到任何图片文件。请检查路径或数据组织（例如 data/train/dog/*.jpg）。")
            return np.array([]), np.array([])

        X, y = _extract_features(image_paths, train_folder, method, batch_size)

        # Map string class names to integer ids; sorted for determinism.
        if y.size > 0 and (y.dtype.type is np.str_ or y.dtype == object):
            unique_classes = sorted({str(v) for v in y})
            class_to_idx = {c: i for i, c in enumerate(unique_classes)}
            logging.info(f"检测到类别名称，类别映射: {class_to_idx}")
            y = np.array([class_to_idx[str(v)] for v in y], dtype=np.int64)

        logging.info(f"X.shape: {X.shape}")
        logging.info(f"y.shape: {y.shape}")

        if X.size > 0 and y.size > 0:
            try:
                # Fix: cache writing previously failed when dest_folder did
                # not exist yet.
                os.makedirs(dest_folder, exist_ok=True)
                with open(x_file_path, 'wb') as fx:
                    pickle.dump(X, fx)
                with open(y_file_path, 'wb') as fy:
                    pickle.dump(y, fy)
            except Exception as e:
                logging.exception(f"写入缓存文件失败: {e}")
        else:
            logging.warning("X 或 y 为空，跳过写入缓存文件。")

        return X, y
    finally:
        # Fix: always remove the temp extraction dir, even when an exception
        # escapes (the original leaked it on any error path).
        _cleanup_tmp(extracted_tmp)
