/**
 * 模型缓存管理器
 * 提供模型文件的缓存控制和管理功能
 */

export interface ModelCacheManager {
  /**
   * Preload a model into the cache (fetch + store if not already present).
   */
  preloadModel(url: string): Promise<void>;
  
  /**
   * Remove a single model's cache entry.
   */
  clearModelCache(url: string): Promise<void>;
  
  /**
   * Remove the entire model cache.
   */
  clearAllModelCache(): Promise<void>;
  
  /**
   * List the URLs of all cached models.
   */
  getCachedModels(): Promise<string[]>;
  
  /**
   * Check whether a model URL is present in the cache.
   */
  isModelCached(url: string): Promise<boolean>;
  
  /**
   * Total size of the model cache, in bytes.
   */
  getCacheSize(): Promise<number>;
}

/**
 * Cache API backed implementation of {@link ModelCacheManager}.
 *
 * All methods are best-effort: when the Cache API is unavailable or an
 * operation fails, they log and return a harmless default instead of
 * throwing, so callers never need try/catch around cache maintenance.
 */
class ServiceWorkerModelCacheManager implements ModelCacheManager {
  /** Name of the CacheStorage bucket that holds model files. */
  private readonly cacheName = 'ai-human-models-cache';
  
  /**
   * Whether the Cache API exists in the current global scope.
   *
   * Uses `typeof caches` rather than `'caches' in window`: `window` is
   * undefined inside Worker/Service Worker scopes, so the `in window`
   * form would throw a ReferenceError exactly where this class is most
   * likely to run.
   */
  private supportsCaches(): boolean {
    return typeof caches !== 'undefined';
  }
  
  async preloadModel(url: string): Promise<void> {
    if (!this.supportsCaches()) {
      console.warn('Cache API not supported');
      return;
    }
    
    try {
      const cache = await caches.open(this.cacheName);
      const cached = await cache.match(url);
      
      if (!cached) {
        const response = await fetch(url);
        
        if (response.ok) {
          // Re-check before writing to avoid a racing concurrent preload
          // overwriting an entry that appeared while we were fetching.
          const doubleCheck = await cache.match(url);
          if (!doubleCheck) {
            await cache.put(url, response.clone());
            console.log('Model preloaded to cache:', url);
          }
        }
      } else {
        console.log('Model already cached, skipping preload:', url);
      }
    } catch (error) {
      console.error('Failed to preload model:', url, error);
    }
  }
  
  async clearModelCache(url: string): Promise<void> {
    if (!this.supportsCaches()) return;
    
    try {
      const cache = await caches.open(this.cacheName);
      await cache.delete(url);
      console.log('Model cache cleared:', url);
    } catch (error) {
      console.error('Failed to clear model cache:', url, error);
    }
  }
  
  async clearAllModelCache(): Promise<void> {
    if (!this.supportsCaches()) return;
    
    try {
      // Dropping the whole named cache is cheaper than deleting entries
      // one by one; a later open() recreates it on demand.
      await caches.delete(this.cacheName);
      console.log('All model cache cleared');
    } catch (error) {
      console.error('Failed to clear all model cache:', error);
    }
  }
  
  async getCachedModels(): Promise<string[]> {
    if (!this.supportsCaches()) return [];
    
    try {
      const cache = await caches.open(this.cacheName);
      const keys = await cache.keys();
      return keys.map(request => request.url);
    } catch (error) {
      console.error('Failed to get cached models:', error);
      return [];
    }
  }
  
  async isModelCached(url: string): Promise<boolean> {
    if (!this.supportsCaches()) return false;
    
    try {
      const cache = await caches.open(this.cacheName);
      const response = await cache.match(url);
      return !!response;
    } catch (error) {
      console.error('Failed to check model cache:', url, error);
      return false;
    }
  }
  
  async getCacheSize(): Promise<number> {
    if (!this.supportsCaches()) return 0;
    
    try {
      const cache = await caches.open(this.cacheName);
      const keys = await cache.keys();
      
      // Measure entries in parallel. Prefer the Content-Length header so
      // we do not have to materialize whole model blobs in memory just to
      // read their size; fall back to the blob when the header is absent.
      const sizes = await Promise.all(
        keys.map(async request => {
          const response = await cache.match(request);
          if (!response) return 0;
          
          const header = response.headers.get('content-length');
          const declared = header === null ? NaN : Number(header);
          if (Number.isFinite(declared) && declared >= 0) {
            return declared;
          }
          
          const blob = await response.blob();
          return blob.size;
        })
      );
      
      return sizes.reduce((total, size) => total + size, 0);
    } catch (error) {
      console.error('Failed to get cache size:', error);
      return 0;
    }
  }
}

// Shared global instance used throughout the app
export const modelCacheManager = new ServiceWorkerModelCacheManager();

// Composable-style helper exposing the cache manager to Vue components.
// Each method delegates to the shared singleton, so `this` is always
// correct without callers needing to worry about binding.
export const useModelCache = () => {
  const manager = modelCacheManager;
  return {
    preloadModel: (url: string) => manager.preloadModel(url),
    clearModelCache: (url: string) => manager.clearModelCache(url),
    clearAllModelCache: () => manager.clearAllModelCache(),
    getCachedModels: () => manager.getCachedModels(),
    isModelCached: (url: string) => manager.isModelCached(url),
    getCacheSize: () => manager.getCacheSize()
  };
};

// Kick off preloading for a list of commonly used models. All preloads
// run concurrently; failures are tolerated (allSettled never rejects).
export const preloadCommonModels = async (modelUrls: string[]): Promise<void> => {
  const tasks: Promise<void>[] = [];
  for (const url of modelUrls) {
    tasks.push(modelCacheManager.preloadModel(url));
  }
  await Promise.allSettled(tasks);
};