import itertools
import os
from random import choice
from typing import Callable

import pytest

from storage.blobstore.fileblob import FileBlobStore
from storage.dedup import DedupFile
from storage.hash_algorithms import farm128, md5, murmur2_64, farm64, t1ha0_64
from storage.kvstore.dbm import DbmKVStore
from storage.kvstore.pickle import PickleKVStore
from storage.mutex import Mutex

# Deliberately (re-)bound last: storage.hash_algorithms also exports a name
# `md5` (a one-shot digest function), which would otherwise shadow hashlib's
# incremental md5 constructor (`.update()` / `.hexdigest()`) used below.
from hashlib import md5


@pytest.fixture
def secret() -> bytes:
    """Return a fresh 16-byte random secret handed to ``FileBlobStore``."""
    key_length = 16
    return os.urandom(key_length)


@pytest.fixture
def kv_path():
    """Yield the KV-store test file path; remove the file on teardown."""
    location = './testdata.kv.bin'
    yield location
    # EAFP cleanup: a missing file simply means the test never created it.
    try:
        os.remove(location)
    except FileNotFoundError:
        pass


@pytest.fixture
def blob_path():
    """Yield the blob-store test file path; remove the file on teardown."""
    location = './testdata.blob.bin'
    yield location
    # EAFP cleanup: ignore the case where the test never wrote the file.
    try:
        os.remove(location)
    except FileNotFoundError:
        pass


@pytest.fixture
def dedup_path():
    """Yield the dedup-file test path; remove the file on teardown."""
    location = './testdata.dedup.bin'
    yield location
    # EAFP cleanup: ignore the case where the test never wrote the file.
    try:
        os.remove(location)
    except FileNotFoundError:
        pass


def test_open_dedup_file(kv_path, blob_path, dedup_path, secret):
    """Smoke test: a DedupFile can be opened for writing and closed cleanly."""
    store = PickleKVStore(kv_path)
    blobs = FileBlobStore(blob_path, secret, store)
    dedup = DedupFile(dedup_path, 'wb', store, blobs)
    with dedup:
        pass


@pytest.mark.parametrize('write_length', [8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768])
def test_write_read_dedup_file(kv_path, blob_path, dedup_path, secret, write_length):
    """Round-trip: bytes written through DedupFile read back identically.

    Writes 200 blocks chosen at random from just two distinct patterns of
    ``write_length`` bytes each (so the dedup layer sees many duplicates),
    then reopens the file and verifies the full contents.
    """
    blobs = [b'1' * write_length, b'2' * write_length]
    kvs = PickleKVStore(kv_path)
    bs = FileBlobStore(blob_path, secret, kvs)

    written = b''
    with DedupFile(dedup_path, 'wb', kvs, bs) as f:
        for _ in range(200):
            block = choice(blobs)
            written += block
            f.write(block)

    # Compare the bytes directly.  The original md5-hexdigest comparison was
    # both redundant (both buffers are already in memory) and broken: the
    # module-level name `md5` is shadowed by the storage.hash_algorithms
    # import, which is a one-shot digest function, not hashlib's constructor.
    with DedupFile(dedup_path, 'rb', kvs, bs) as f:
        assert f.read() == written


@pytest.mark.benchmark
@pytest.mark.parametrize('chunk_size,sync,alg,store,lock_type', [
    pytest.param(*t, id=f'chunk:{t[0]},sync:{t[1]},alg:{t[2].__name__},kv:{t[3].__name__},lock:{t[4].__name__}')
    for t in
    itertools.product(
        [-1, *[2 ** i for i in range(14, 15)]],
        [True, False],
        [farm128, murmur2_64, farm64, t1ha0_64],
        [PickleKVStore, DbmKVStore],
        [Mutex],
    )
])
def test_write_speed(benchmark, kv_path, blob_path, dedup_path, secret, chunk_size: int, sync, alg: Callable[[bytes], bytes], store, lock_type):
    """Benchmark DedupFile write throughput.

    Parametrized over write chunk size, sync mode, hash algorithm,
    KV-store backend and lock type.
    """
    blobs = [os.urandom(8192), os.urandom(8192)]
    kvs = store(kv_path, lock_type=lock_type)
    bs = FileBlobStore(blob_path, secret, kvs, sync=sync)

    # Test dataset built from two distinct 8 KiB blocks so the dedup layer
    # sees many repeats.  NOTE: despite the inner function's name,
    # 12800 * 8 KiB is 100 MiB, not 10 MiB.
    data = b''.join(choice(blobs) for _ in range(12800))

    with DedupFile(dedup_path, 'wb', kvs, bs, hash_alg=alg) as f:
        def dedup_write_chunked_10MiB():
            # chunk_size == -1 means "write the whole buffer in one call".
            if chunk_size == -1:
                f.write(data)
                return
            for offset in range(0, len(data), chunk_size):
                f.write(data[offset:offset + chunk_size])

        benchmark(dedup_write_chunked_10MiB)


@pytest.mark.benchmark
def test_write_speed_baseline(benchmark):
    """Plain-file write baseline to compare against DedupFile throughput.

    Fix: the original left ``testdata.baseline.bin`` behind after the run —
    every other test in this module cleans up its files via fixtures — so
    the baseline file is now removed in a ``finally`` block.
    """
    blobs = [os.urandom(8192), os.urandom(8192)]
    # 100 MiB dataset built from two distinct 8 KiB blocks (matches the
    # dataset used by test_write_speed).
    data = b''.join(choice(blobs) for _ in range(12800))

    path = 'testdata.baseline.bin'
    try:
        with open(path, 'wb') as f:
            def benchmark_write_baseline():
                f.write(data)

            benchmark(benchmark_write_baseline)
    finally:
        if os.path.exists(path):
            os.remove(path)


def test_profile_write_speed(kv_path, blob_path, dedup_path, secret):
    """One large DedupFile write, intended for manual profiling runs."""
    blobs = [os.urandom(8192), os.urandom(8192)]
    kvs = PickleKVStore(kv_path)
    bs = FileBlobStore(blob_path, secret, kvs, sync=False)

    # 100 MiB test dataset: 12800 blocks of 8 KiB, randomly drawn from just
    # two distinct patterns so most writes deduplicate.
    payload = b''.join(choice(blobs) for _ in range(12800))
    with DedupFile(dedup_path, 'wb', kvs, bs) as f:
        f.write(payload)
