"""
Storage backend for Mini-LMCache
"""

import asyncio
from collections import OrderedDict
from typing import TYPE_CHECKING, Optional

import torch

from configs import LMCacheEngineConfig, LMCacheEngineMetadata
from memory_management import MemoryAllocatorInterface
from storage_backend.abstract_backend import StorageBackendInterface
from storage_backend.local_cpu_backend import LocalCPUBackend
from storage_backend.local_disk_backend import LocalDiskBackend


def CreateStorageBackends(
    config: LMCacheEngineConfig,
    metadata: LMCacheEngineMetadata,
    loop: asyncio.AbstractEventLoop,
    allocator: MemoryAllocatorInterface,
    dst_device: str = "cuda",
    layerwise: bool = False,
) -> OrderedDict[str, StorageBackendInterface]:
    """Assemble the ordered chain of storage backends for the cache engine.

    A local CPU backend is always created first; a local disk backend is
    appended behind it when the config enables a disk cache with a positive
    size budget. The returned OrderedDict maps each backend's string
    representation to the backend instance, preserving creation order.

    Args:
        config: Engine configuration (read for disk-cache settings).
        metadata: Engine metadata. NOTE(review): not used in this body —
            presumably kept for interface symmetry with other factories.
        loop: Event loop handed to the async disk backend.
        allocator: Memory allocator shared with the CPU backend.
        dst_device: Target device; a bare "cuda" is resolved to the
            currently selected GPU (e.g. "cuda:0").
        layerwise: Whether the CPU backend operates layer by layer.

    Returns:
        OrderedDict of backend-name -> backend, in lookup order.
    """
    # Pin the generic "cuda" string to the process's current GPU index so
    # downstream backends see an explicit device.
    if dst_device == "cuda":
        dst_device = f"cuda:{torch.cuda.current_device()}"

    created: OrderedDict[str, StorageBackendInterface] = OrderedDict()

    # The CPU backend is unconditional and always first in the chain.
    cpu_backend = LocalCPUBackend(config, allocator, layerwise)
    created[str(cpu_backend)] = cpu_backend

    # The disk backend is optional; it layers behind the CPU backend,
    # which it uses as its staging tier.
    if config.local_disk and config.max_local_disk_size > 0:
        disk_backend = LocalDiskBackend(config, loop, cpu_backend, dst_device)
        created[str(disk_backend)] = disk_backend

    return created
