import heapq
import os
import sys
from concurrent.futures import ThreadPoolExecutor


def get_file_size(file_path):
    """Return ``(file_path, size_in_bytes)``, or ``(file_path, 0)`` on failure.

    A size of 0 is used as the error sentinel so callers can filter out
    unreadable/vanished files without handling exceptions themselves.
    """
    try:
        return (file_path, os.path.getsize(file_path))
    except OSError:
        # OSError covers PermissionError, FileNotFoundError, etc. — the file
        # may disappear or be unreadable between the walk and the stat call.
        return (file_path, 0)


def find_largest_files_fast(directory, top_n=10):
    """Print and return the *top_n* largest files under *directory*.

    Sizes are probed concurrently with a thread pool (stat calls release the
    GIL, so threads overlap the I/O waits). Files that cannot be stat'ed are
    treated as size 0 and excluded from the result.

    Returns a list of ``(file_path, size_in_bytes)`` tuples, largest first.
    """

    def _probe_size(path):
        # Size probe tolerant of races: a file may vanish or be unreadable
        # between the directory walk and the stat call.
        try:
            return (path, os.path.getsize(path))
        except OSError:
            return (path, 0)

    # Lazily yield every file path under the tree; executor.map consumes it,
    # keeping ALL stat calls in flight across directories. (The original
    # joined futures per directory, which blocked the walk on each directory's
    # batch and serialized cross-directory parallelism.)
    all_paths = (
        os.path.join(root, name)
        for root, _dirs, names in os.walk(directory)
        for name in names
    )

    with ThreadPoolExecutor() as executor:
        sized = executor.map(_probe_size, all_paths)
        # nlargest is O(n log k) versus sorting everything: we only need the
        # top_n entries. size == 0 marks errors (and empty files), skip them.
        largest = heapq.nlargest(
            top_n,
            (entry for entry in sized if entry[1] > 0),
            key=lambda entry: entry[1],
        )

    for rank, (file_path, size) in enumerate(largest, 1):
        print(f"{rank}. {file_path} - {size / (1024 ** 2):.2f} MB")

    return largest


if __name__ == "__main__":
    # Scan the directory given on the command line; keep the original
    # hard-coded Windows root as the default when no argument is supplied.
    target_dir = sys.argv[1] if len(sys.argv) > 1 else "C:\\"
    find_largest_files_fast(target_dir, top_n=10)