import os
import pickle

include "fileio.pyx"
def check_callables(funcs):
    """Validate that every pair in *funcs* holds two callables.

    Raises ValueError on the first pair whose first or second element
    is not callable.
    """
    for pair in funcs:
        if not (callable(pair[0]) and callable(pair[1])):
            raise ValueError

def check_strs(strs):
    """Raise ValueError unless every element of *strs* is a str."""
    for item in strs:
        if not isinstance(item, str):
            raise ValueError

cpdef dict update_fields(tuple fix_values, tuple fix_fields, dict var_values, values):
    """Apply every (field, value) pair from *values* via update_field().

    Returns the (possibly newly created) dict of variable-field values;
    see update_field() for how fixed vs. variable fields are resolved.
    """
    # Removed an unused local (`cdef u64 i`) left over from an earlier draft.
    for field, value in values:
        var_values = update_field(fix_values, fix_fields, var_values, field, value)
    return var_values

cpdef dict update_field(tuple fix_values, tuple fix_fields, dict var_values, field, value):
    """Store *value* for *field* in the fixed-slot tuple or the var dict.

    Returns the (possibly newly created) var_values dict.  A value of
    None deletes the field from var_values.
    """
    if isinstance(field,str):
        pass
    else:
        raise ValueError
    #
    # NOTE(review): `i` is untyped here, and the argument order looks
    # suspicious — other call sites bisect a collection for a key
    # (e.g. bisect_obj_in(self.block_files, key)); presumably this should
    # search fix_fields for `field` rather than pass (field, value). Confirm.
    i = bisect_str_in(field, value)
    if i != UINT64_MAX:
        # NOTE(review): PyTuple_SetItem steals a reference and is only safe
        # on a tuple with refcount 1; calling it on the shared, caller-owned
        # fix_values tuple can corrupt refcounts — verify fix_values is
        # always freshly allocated by the caller.
        PyTuple_SetItem(fix_values, i, value)
    else:
        if var_values is not None:
            if value is not None:
                var_values[field] = value
            else:
                # Deleting an absent field is deliberately a no-op.
                try:
                    del var_values[field]
                except KeyError:
                    pass
        else:
            var_values = {field: value}
    return var_values

cdef class NamedTuple:
    """Read-mostly mapping: fixed fields in a tuple, overflow fields in a dict."""
    cdef public fix_values
    cdef public dict var_fields
    # Maps fixed field name -> U64 slot index.
    # NOTE(review): fix_map_num is never assigned anywhere in this file, and
    # `fix_fields` (assigned in __init__) has no cdef declaration — in a cdef
    # class that assignment raises AttributeError at runtime; presumably a
    # `cdef public tuple fix_fields` declaration is missing. Confirm against
    # the .pxd, if one exists.
    cdef dict fix_map_num
    def __init__(self,tuple fix_fields, tuple fix_values, dict vars):
        self.fix_fields, self.fix_values, self.var_fields = fix_fields, fix_values, vars
    cpdef keys(self):
        return IterNamedTupleKeys(self.fix_fields, self.var_fields)
    cpdef values(self):
        return IterNamedTupleValues(self.fix_values, self.var_fields)
    cpdef items(self):
        return IterNamedTupleItems(self.fix_fields, self.fix_values, self.var_fields)
    def __iter__(self):
        # Iterating a NamedTuple yields its field names, like a dict.
        return self.keys()
    def __getitem__(self,str field):
        # Fixed fields resolve through fix_map_num to a tuple slot; unknown
        # fields return None instead of raising, mirroring dict.get().
        cdef U64 i
        try:
            i = self.fix_map_num[field]
        except KeyError:
            try:
                return self.var_fields[field]
            except KeyError:
                return None
        return self.fix_values[i.val]
    def __setitem__(self, field, value):
        self.var_fields = update_field(self.fix_values, self.fix_fields, self.var_fields, field, value)

cdef class IterNamedTupleItems:
    """Iterator over (field, value) pairs: fixed fields first, then var fields."""
    cdef readonly :
        tuple fix_fields, fix_values
        u64 i
        object iter_vars
    def __init__(self, tuple fix_fields, tuple fix_values, dict var_fields):
        self.fix_fields=fix_fields
        self.fix_values=fix_values
        self.i=0
        # var_fields may be None (no variable fields); then only fixed pairs
        # are yielded.
        self.iter_vars = None if var_fields is None else iter(var_fields.items())
    def __iter__(self):
        # Fix: an iterator must be iterable itself so that e.g.
        # dict(nt.items()) and `for k, v in nt.items()` work.
        return self
    def __next__(self):
        cdef u64 i
        if self.i < len(self.fix_fields):
            i = self.i
            self.i += 1
            return self.fix_fields[i], self.fix_values[i]
        elif self.iter_vars is not None:
            return next(self.iter_vars)
        else:
            raise StopIteration

cdef class IterNamedTupleKeys:
    """Iterator over field names: fixed fields first, then variable fields."""
    # (Removed an unused private `cdef dict var_fields` declaration that was
    # never assigned or read.)
    cdef readonly :
        tuple fix_fields, fix_values
        u64 i
        object iter_vars
    def __init__(self, tuple fix_fields, dict var_fields):
        self.fix_fields=fix_fields
        self.i=0
        # var_fields may be None, in which case only fixed names are yielded.
        self.iter_vars = None if var_fields is None else iter(var_fields.keys())
    def __iter__(self):
        # Fix: an iterator must be iterable itself so list(nt.keys()) works.
        return self
    def __next__(self):
        cdef u64 i
        if self.i < len(self.fix_fields):
            i = self.i
            self.i += 1
            return self.fix_fields[i]
        elif self.iter_vars is not None:
            return next(self.iter_vars)
        else:
            raise StopIteration

cdef class IterNamedTupleValues:
    """Iterator over values: fixed-slot values first, then var-field values."""
    cdef readonly :
        tuple fix_fields, fix_values
        u64 i
        object iter_vars
    def __init__(self, tuple fix_values, dict var_fields):
        self.fix_values=fix_values
        self.i=0
        # var_fields may be None, in which case only fixed values are yielded.
        self.iter_vars = None if var_fields is None else iter(var_fields.values())
    def __iter__(self):
        # Fix: an iterator must be iterable itself so list(nt.values()) works.
        return self
    def __next__(self):
        cdef u64 i
        # Bug fix: this compared against len(self.fix_fields), which is never
        # assigned in __init__ (stays None), so the first __next__ raised
        # TypeError; the loop bound is the values tuple.
        if self.i < len(self.fix_values):
            i = self.i
            self.i += 1
            return self.fix_values[i]
        elif self.iter_vars is not None:
            return next(self.iter_vars)
        else:
            raise StopIteration

# NOTE(review): dead code — an early `DB` draft kept alive inside a
# module-level string literal. It is never executed; consider deleting it
# (version control preserves the history) rather than shipping it.
'''
cdef class DB:
    cdef readonly :
        str name
        u64 cur_num
        tuple disks_paths
    cdef :
        dict path_map_disks
        set will_used_kvs, cached_kvs, kvs
    cdef get_next_filename(self):
        cdef u64 num = self.cur_meta_num
        suffix = num_to_char37s(num)
        return f'{self.name}.{suffix}'
    cpdef set_will_used_kvs(self, kv_names):
        self.will_used_kvs.clear()
        self.will_used_kvs.union(kv_names)
    cpdef pop_caches(self):
        cdef Kvs kvs
        for name in self.cached_kvs:
            if name not in self.cached_kvs:
                kvs = self.kvs[name]
                kvs.clear_cache()

    cpdef flush(self):
        filename = f'{self.name}.db.{self.cur_num}'
        data = pickle.dumps(self)
        cdef array rs
        cdef list files
        filename = f'{self.name}.db.{self.cur_num}'
        rs = write_file_sync(filename, data, self.disks_paths)
        self.writing_files[filename] = set(self.disks_paths)

    cpdef get_disks_paths(self, paths):
        cdef set disks = set()
        cdef list disks_paths=[ ]
        for path in paths:
            try:
                disk = self.path_map_disks[path]
            except KeyError:
                pass
            #
            disks.add(disk)
            disks_paths.append((disk,path))
        if len(disks)!=len(disks_paths):
            raise ValueError
        return disks_paths
    def create_kvs(self,str name, dict type_and_decode, dict type_and_encode, dict type_and_decode,
                   fix_fields, paths, u64 blocksize):
        if name in self.kvs:
            raise ValueError
        disks_paths = self.check_paths(paths)
        check_strs(fix_fields)
        check_strs(type_and_decode.keys())
        check_callables([type_and_decode.values()])
        check_callables([type_and_encode.values()])
    def read_kvs(self, kv_names_and_keys):
        for name, keys in kvs_and_keys:
            keys'''

cdef class ValuesCache:
    """Cached decoded values for one key: fixed-slot tuple + optional var dict."""
    cdef readonly :
        tuple fix_values
        dict var_fields
    def __init__(self,tuple fix_values, dict var_fields):
        self.fix_values=fix_values
        self.var_fields = var_fields
    cpdef update(self, values, dict field_map_indexes):
        """Apply (field, value) pairs: fixed fields go into fix_values by
        slot index, everything else into var_fields (created on demand)."""
        cdef U64 i
        for field, value in values:
            try:
                i = field_map_indexes[field]
                # NOTE(review): fix_values is declared `tuple`; item
                # assignment on a true tuple raises TypeError — confirm this
                # is actually a freshly built, mutable buffer.
                self.fix_values[i.val]=value
            except KeyError:
                if self.var_fields is not None:
                    self.var_fields[field] = value
                else:
                    self.var_fields = {field: value}
    cpdef to_namedtuple(self, tuple fix_fields):
        """Materialize a NamedTuple, unwrapping each stored Obj2 to its
        decoded object (obj0)."""
        # NOTE(review): len(self.fix_values) crashes if fix_values is None —
        # confirm callers never pass None for the fixed slots.
        cdef u64 i, l=len(self.fix_values)
        cdef Obj2 vs
        cdef tuple fixs
        cdef dict vars
        if self.fix_values:
            fixs=PyTuple_New(l)
            for i in range(l):
                vs=self.fix_values[i]
                PyTuple_SET_ITEM(fixs, i, vs.obj0)
        else:
            fixs=None
        if self.var_fields:
            vars={}
            for key, vs in self.var_fields.items():
                vars[key]=vs.obj0
        else:
            vars=None
        # Bug fix: previously returned NamedTuple(fix_fields, f) — `f` was an
        # undefined name and the unwrapped values were dropped entirely.
        return NamedTuple(fix_fields, fixs, vars)

cpdef check_values_encode_decode(values):
    """Wrap each (value, encoded_value) pair in an Obj2.

    The Obj2 construction is the whole point: it validates/pairs the raw
    value with its encoded form, raising from its constructor on failure.
    Nothing is returned.
    """
    cdef Obj2 vs
    for _field, pair in values:
        raw, encoded = pair
        vs = Obj2(raw, encoded)

cdef u64 _decode_item_len(u8* data, u8* end):
    """Decode the varint length prefix at *data*; raise RuntimeError if the
    cursor is already at or past *end*."""
    cdef u64 l
    if data<end:
        # Bug fix: previously called _decode_num() with no arguments and
        # returned `l` uninitialized. Decode the length the way every other
        # caller does (advancing the local cursor); also dropped an unused
        # `item_end` local.  TODO(review): confirm the intended cursor
        # out-parameter against _decode_num's definition in fileio.pyx.
        l = _decode_num(data, &data)
        return l
    else:
        raise RuntimeError

cdef _decode_item(u8* data, u8* end, tuple decodes, Buffer buf):
    """Decode one tagged item starting at *data*.

    Wire layout: 1 tag byte, then a varint length, then the payload.
    Tag < len(decodes) selects a registered decoder; any other tag below
    255 falls back to pickle.loads; tag 255 encodes None (tag byte only).
    On return, buf spans the payload so the caller can advance its cursor
    via buf.ptr + buf.size.
    """
    num = data[0]
    if num < len(decodes):
        decode = decodes[num]
    elif num < 255:
        decode = pickle.loads
    else:
        # None marker: expose just the tag byte through buf and return None.
        buf.ptr = data
        buf.size = 1
        return None
    #
    data += 1
    if data < end:
        pass
    else:
        raise RuntimeError
    # NOTE(review): there is no check that data + item_len stays within *end*
    # before handing the span to the decoder — confirm the caller guarantees
    # a complete record.
    item_len = _decode_num(data, &data)
    #
    buf.set(data, item_len)
    try:
        obj = decode(buf)
        return obj
    except Exception as e:
        # Any decoder failure is surfaced uniformly as corrupt data.
        raise RuntimeError from e

cdef _decode_raw_kv(u8 * data, u8 * end, u64 fix_count, tuple decodes, Buffer buf):
    """Decode one raw key/value record: the key, then fix_count fixed values,
    then (field, value) pairs for variable fields.

    Returns (key, fixed-values tuple, var dict or None) and leaves buf.ptr
    at the end of the record for the caller's cursor.
    """
    cdef u8 num, bytecount
    cdef u64 kv_len, var_count
    cdef u8 * kv_end
    cdef u8* cur=data
    var_count = _decode_num(cur, &cur)
    if cur<end:pass
    else:
        raise RuntimeError
    #
    # NOTE(review): kv_end is used below but never assigned (and var_count /
    # kv_len are never used after this point) — this cannot be correct as
    # written; verify the record-length handling against the encoder.
    key = _decode_item(data, kv_end, decodes, buf)
    data = buf.ptr + buf.size
    cdef uint i
    cdef u64 item_len
    cdef u8 * item_end
    cdef tuple tp = PyTuple_New(fix_count)
    for i in range(fix_count):
        value = _decode_item(data, kv_end, decodes, buf)
        PyTuple_SET_ITEM(tp, i, value)
        data = buf.ptr + buf.size
    cdef dict vars=None
    if data<kv_end:
        vars = {}
        # NOTE(review): the loop bound uses `end` while the surrounding checks
        # use kv_end (mixed bounds), and `key` is reassigned inside the loop,
        # so the function returns the LAST var-field name instead of the
        # record key — both look like bugs; confirm.
        while(data<end):
            key = _decode_item(data, kv_end, decodes, buf)
            data = buf.ptr + buf.size
            value = _decode_item(data, kv_end, decodes, buf)
            data = buf.ptr + buf.size
            vars[key]=value
    if data == kv_end:
        buf.ptr = kv_end
    else:
        raise RuntimeError
    return key, tp, vars

cpdef dict decode_kvs(const u8[:] raw, u64 fix_count, tuple decodes):
    """Decode a whole raw block into {key: ValuesCache}.

    Walks consecutive records via _decode_raw_kv, which leaves buf.ptr at
    the end of each record it consumed.  Raises RuntimeError if the cursor
    overshoots the buffer (corrupt/truncated block).
    """
    # NOTE(review): type_map_count is declared but never used here.
    cdef dict d={}, type_map_count={}
    cdef u8* data=&raw[0]
    cdef u64 l=raw.nbytes
    cdef u8* end=data+l
    cdef Buffer buf=Buffer()
    while(data<end):
        key, fixs, vars = _decode_raw_kv(data, end, fix_count, decodes, buf)
        d[key] = ValuesCache(fixs, vars)
        # Advance to the end of the record just consumed.
        data = buf.ptr
    if data!=end:
        raise RuntimeError
    return d

cpdef tuple get_cache_block_type_info(kvs, dict type_counter, dict var_field_counter):
    """Tally value types for one cached block.

    *kvs* iterates (key, ValuesCache) pairs.  Key and value types are
    counted into type_counter (None fixed slots are counted under the
    sentinel key 0); variable-field value types are additionally counted
    into var_field_counter.

    Returns the (type_counter, var_field_counter) pair.
    """
    # Bug fix: the function returns a 2-tuple but was declared `cpdef dict`,
    # which makes the cpdef wrapper raise TypeError on every return.
    cdef ValuesCache values
    for key, values in kvs:
        counter_add_1(type_counter, type(key))
        for v in values.fix_values:
            if v is not None:
                counter_add_1(type_counter, type(v))
            else:
                # 0 is the sentinel bucket for empty (None) fixed slots.
                counter_add_1(type_counter, 0)
        if values.var_fields:
            for field, v in values.var_fields.items():
                tv = type(v)
                counter_add_1(type_counter, tv)
                counter_add_1(var_field_counter, tv)
    return type_counter, var_field_counter

def read_write_block_callback(Kvs kvs, U64 block_i):
    """Build a read-completion callback that decodes a raw block and caches
    it in *kvs* at slot block_i."""
    def _on_read(const u8[:] data):
        cdef dict decoded = kvs.decode_block(data)
        kvs.cache_block(decoded, block_i.val)
    return _on_read

# Encoding name reused when encoding variable-field names in Kvs.encode_kv().
utf8_='utf-8'

cdef class Kvs:
    """Block-structured key/value store.

    Keys are range-partitioned (split_keys) into encoded blocks of roughly
    `blocksize` bytes; each block is cached in memory as a dict and flushed
    to numbered files replicated across several disks.  Per-block change
    flags live in `changed_blocks` (0 = unchanged, 1 = changed and loaded,
    -1 = changed but never read from disk).
    """
    cdef readonly:
        str name
        tuple split_keys, block_caches
        u64 cur_data_num, cur_meta_num, blocksize
        dict disks_paths
        bint changed
    cdef:
        dict type_map_decode, type_map_encode, type_map_num, type_map_count
        array changed_blocks, filenums
        #dict old_block_type_infos, storage_type_infos
        dict fix_fields  # {name:str, FixField }
        dict var_field_infos  # {name:str, count:U64 }

    def __init__(self, str name, type_and_encode_decodes, fix_fields, disks_paths, u64 blocksize):
        self.disks_paths = disks_paths
        self.cur_data_num = 0
        self.blocksize = blocksize
        self.block_caches=tuple()
        self.split_keys = tuple()
        self.changed_blocks=array('b')
        #
        cdef u64 i=0
        #
        cdef dict field_map_num={}
        # NOTE(review): `i` is never incremented, so every fixed field maps to
        # slot 0, and field_map_num is built but never stored on self —
        # confirm intent.
        for field in fix_fields:
            field_map_num[field]=get_U64(i)
        # NOTE(review): fix_attrs / var_fields are not declared in the cdef
        # class body above; assigning undeclared attributes on a cdef class
        # instance raises AttributeError at runtime — confirm.
        self.fix_attrs = {}
        self.var_fields = {}
        #
        # One byte tags the type in the on-disk encoding; 254 (pickle
        # fallback) and 255 (None marker) are reserved, hence the 254 cap.
        if len(type_and_encode_decodes)>254:
            raise ValueError
        i=0
        self.type_map_num = {}
        # NOTE(review): the attribute names in this section (type_map_decodes,
        # type_and_decodes, type_and_encodes, type_map_encodes) do not match
        # the declared type_map_decode / type_map_encode, `i` is never
        # incremented, and type_map_encodes is reset AFTER the loop — this
        # whole registration block looks unfinished; verify before use.
        self.type_map_decodes={}
        for t, encode, decode in type_and_encode_decodes:
            self.type_map_num[t]=get_U64(i)
            self.type_and_decodes[t]=decode
            self.type_and_encodes[t]=encode
        self.type_map_encodes={}
    def __getstate__(self):
        # Pickled state deliberately omits in-memory caches (block_caches,
        # changed_blocks); init_mem() rebuilds them after unpickling.
        # NOTE(review): `self. storage_type_infos` is only declared in the
        # commented-out line in the class body — confirm it exists.
        return (self.type_map_decode, self.type_map_encode, self.type_map_num, self.type_map_count,
               self. storage_type_infos, self.fix_fields, self.var_field_infos, self.cur_data_num,
                self.cur_meta_num, self.blocksize, self.name)
    def __setstate__(self, state):
        self.type_map_decode, self.type_map_encode, self.type_map_num, self.type_map_count,\
        self.storage_type_infos, self.fix_fields, self.var_field_infos, self.cur_data_num,\
        self.cur_meta_num, self.blocksize, self.name = state
    cdef init_mem(self):
        # Rebuild per-block change flags and cache slots after load.
        # NOTE(review): block_caches is declared `tuple` but assigned a list
        # here; `B` is presumably a typecode constant from fileio.pyx.
        self.changed_blocks = array(B)
        resize(self.changed_blocks, len(self.split_keys))
        self.changed=False
        self.block_caches = [None]*len(self.split_keys)
    cpdef clear_cache(self):
        """Flush pending changes, then drop every cached block dict."""
        if self.changed:
            self.flush()
        cdef u64 i
        cdef dict d
        for i in range(len(self.block_caches)):
            d = self.block_caches[i]
            if d is not None: d.clear()
            # NOTE(review): PyTuple_SET_ITEM on an already-populated tuple
            # slot leaks the old item's reference and mutates a shared tuple —
            # verify this is safe here.
            PyTuple_SET_ITEM(self.block_caches, i, None)
    cpdef get_block_filename(self, u64 i):
        # Data file for block i: "<name>.<file number>".
        return f'{self.name}.{self.filenums.data.as_ulonglongs[i]}'
    cpdef type_get_ecode(self, value):
        """Return the decoder registered for type(value); pickle.loads otherwise."""
        cdef U64 i
        try:
            i=self.type_map_num[type(value)]
            # NOTE(review): self.decodes is not declared on this class —
            # presumably type_map_decode was meant; also the method name
            # ("ecode") looks like a typo. Confirm.
            return self.decodes[i.val]
        except KeyError:
            return pickle.loads
    cpdef encode_multi_field(self, value):
        # NOTE(review): passes the whole container `value` instead of the
        # element `v` to encode_sigal_field — looks like a bug.
        for v in value:
            self.encode_sigal_field(value, type(value))
    cpdef encode_sigal_field(self, value, type_value):
        """Encode one field value; returns (encoded bytes, U64 type number).

        Unregistered types fall back to pickle with type number 254.  The
        round-trip decode(encode(value)) == value is verified; ValueError is
        raised when it does not hold.
        """
        cdef U64 i
        cdef bytes encoded
        try:
            i = self.type_map_num[type_value]
            # NOTE(review): self.decodes / self.encodes are not declared on
            # this class — confirm the intended attributes.
            decode = self.decodes[i.val]
            encode = self.encodes[i.val]
        except KeyError:
            i=get_U64(254)
            decode = pickle.loads
            encode = pickle.dumps
        # NOTE(review): `encoded` is re-declared here (already cdef'd above);
        # Cython rejects duplicate declarations — confirm this compiles.
        cdef bytes encoded = encode(value)
        if decode(encoded)==value:
            return encoded, i
        else:
            raise ValueError

    cpdef dict decode_block(self, const u8[:] view):
        """Decode one raw block into {key: ValuesCache}."""
        cdef u64 fix_count=len(self.fix_fields)
        # NOTE(review): self.decodes is not declared on this class — confirm.
        return decode_kvs(view, fix_count, self.decodes)

    cpdef cache_block(self, dict kvs, u64 bi):
        """Install a decoded block dict into cache slot bi."""
        cdef u64 i=bi
        # NOTE(review): `changes` is computed but never used here, and
        # PyTuple_SetItem steals a reference / requires refcount 1 — verify.
        cdef char * changes = self.changed_blocks.data.as_chars
        PyTuple_SetItem(self.block_caches, i, kvs)

    cpdef get_kv(self, key):
        """Return the NamedTuple for *key*, None if absent, or the U64 block
        index when the owning block has not been loaded yet."""
        cdef dict block
        cdef ValuesCache values
        # NOTE(review): self.block_files is not declared on this class, and
        # indexing changed_blocks (a char array) as the cache dict below looks
        # like it should be block_caches — confirm both.
        cdef u64 i=bisect_obj_in(self.block_files, key ), ii
        block=self.changed_blocks[i]
        if block is not None:
            try:
                values = block[key]
                return values.to_namedtuple(self.fix_fields)
            except KeyError:
                return None
        else:
            return get_U64(i)
    cpdef insert_or_update_kv(self, key,  values):
        """Insert or update *key* in its block cache and mark the block dirty.

        Change-flag convention per block: 0 = unchanged, 1 = changed and
        loaded from disk, -1 = changed but never read from disk.
        """
        cdef u64 i
        cdef dict kvs, vars
        cdef ValuesCache values0
        cdef tuple tp
        cdef Obj2 vs
        cdef char * changes = self.changed_blocks.data.as_chars
        if self.split_keys:
            i = bisect_obj_in(self.block_files, key)
            kvs = PyTuple_GET_ITEM(self.block_caches, i)
            if changes[i]==0:  # unchanged
                if kvs is None:  # not yet read from disk
                    PyTuple_SET_ITEM(self.block_caches, i, {key: values})
                    changes[i] = -1
                else: # already read
                    try:
                        values1 = kvs[key]
                        values1.update(values)
                        kvs[key] = values1
                    except KeyError:
                        kvs[key] = values
                    changes[i] = 1
            else: # already changed
                assert kvs
                if kvs:# already read
                    self.update_cache_kv(key, values, kvs)
                else:
                    PyTuple_SET_ITEM(self.block_caches, i, {key: values})
        else:
            # No split keys yet: everything lives in a single block.
            if self.block_caches:
                kvs = self.block_caches[0]
                try:
                    values1 = kvs[key]
                    values1.update(values)
                    kvs[key] = values1
                except KeyError:
                    kvs[key] = values
            else:
                # NOTE(review): block_caches is declared tuple; .append only
                # works if init_mem left it as a list — confirm.
                kvs = {key:values}
                self.block_caches.append(kvs)
        self.changed = True
    cpdef update_cache_kv(self, key, values, dict kvs):
        """Apply *values* to the cached entry for *key*; values=None deletes it."""
        cdef dict  vars
        cdef ValuesCache values0
        cdef tuple tp
        cdef Obj2 vs
        if values is not None:  # update
            # NOTE(review): this broad try also catches a KeyError raised
            # inside the update loop (not just the kvs[key] lookup), which
            # would silently restart the entry — confirm intent.
            try:
                values0 = kvs[key]
                for field, value in values:
                    self.encode_field(value)
                    vs = value
                    values0.var_fields = update_field(values0.fix_values, self.fix_fields, values0.var_fields, field, value)
            except KeyError:
                # New key: start from an all-None fixed-slot tuple.
                tp = get_all_none_tuple(len(self.fix_fields))
                vars = {}
                for field, value in values:
                    self.encode_field(value)
                    vars = update_field(tp, self.fix_fields, vars, field, value)
                kvs[key] = ValuesCache(tp, vars)
        else:  # delete
            try:
                del kvs[key]
            except KeyError:
                pass
    cdef get_next_data_filename(self):
        # NOTE(review): neither "next filename" helper advances its counter;
        # callers appear to manage cur_data_num/cur_meta_num themselves.
        cdef u64 num=self.cur_data_num
        suffix=num_to_char37s(num)
        return f'{self.name}.{suffix}'
    cdef get_next_meta_filename(self):
        cdef u64 num = self.cur_meta_num
        suffix = num_to_char37s(num)
        return f'{self.name}.{suffix}'
    cpdef split_block_ranges(self):
        """Scan the change flags and group dirty blocks into contiguous
        [start, end) ranges; blocks flagged -1 (never read) get read
        callbacks registered so they can be loaded before flushing."""
        cdef u64 i = 0, load_start, s, e
        cdef char* changes = self.changed_blocks.data.as_chars
        # NOTE(review): `self.changes` is not declared on this class (likely
        # self.filenums was meant); the local is unused anyway.  Also note
        # `blocks` is returned but never filled, while `changed_ranges` is
        # filled but NOT returned — flush() expects the ranges first; this
        # return value looks wrong. Confirm.
        cdef u64* filenums = self.changes.data.as_ulonglongs
        cdef array nums
        cdef U64 bi, encoded_size, max_itemsize, numi_start, numi_end,
        cdef list blocks=[], changed_ranges=[]
        cdef dict filename_map_callbacks={}
        while (i < len(self.block_caches)):
            # skip the run of blocks that need no change
            while (i < len(self.block_caches) and changes[i] == 0):
                i += 1
            # collect the contiguous changed run
            s = i
            while (i < len(self.block_caches) and changes[i] != 0):
                if changes[i]==-1:
                    fn = self.get_block_filename(i)
                    # NOTE(review): get_block_callbacks is not defined in this
                    # file — possibly read_write_block_callback was meant.
                    filename_map_callbacks[fn] = self.get_block_callbacks(i)
                i += 1
            e=i
            changed_ranges.append((get_U64(s), get_U64(e)))
        return blocks, filename_map_callbacks

    cpdef list sorted_range_blocks(self, u64 start, u64 end):
        """Concatenate the sorted contents of block caches [start, end)."""
        cdef list changed_block_ranges=[]
        cdef u64 i
        # NOTE(review): sorted(dict) yields only the KEYS, but split_kvs
        # downstream unpacks (key, values) pairs — sorted(...items()) was
        # probably intended. Confirm.
        for i in range(start, end):
            changed_block_ranges += sorted(self.block_caches[i])
        return changed_block_ranges

    cpdef flush_ranges(self,list changed_ranges, dict filename_map_callbacks):
        """Rewrite every dirty block range: load missing blocks, re-split the
        merged kvs into new blocks, and write them out; undersized ("little")
        trailing blocks are merged with the following on-disk block.

        NOTE(review): several results computed here are never committed back
        to self (cur_num -> self.cur_data_num, nums/new_nums -> self.filenums,
        new_block_caches -> self.block_caches), and the LAST entry of
        changed_ranges is never flushed because the loop iterates
        len(changed_ranges)-1 transitions — verify against the intended
        flush protocol.
        """
        cdef U64 s, e, ns, ne, bi
        cdef list load_blocks, new_block_caches=[], blocks, little_block_indexes=[], little_blocks=[], write_blocks=[],
        cdef u64 i, count, pre_end=0, cur_num=self.cur_data_num, meta_num=self.cur_meta_num
        cdef array nums=array(B), new_nums=array(B)
        cdef u64* old_nums = self.filenums.data.as_ulonglongs
        cdef dict little_filename_map_callbacks={}
        pre_little=False
        # read every block that must change but has not been loaded yet
        disks = self.disks_paths.keys()
        disks_and_filenames = [( disks, filename_map_callbacks.keys() )]
        read_files_raise(disks_and_filenames, filename_map_callbacks)
        #
        s,e = changed_ranges[0]
        array_extend_buffer(nums, old_nums + pre_end, 8 * (s.val))
        new_block_caches += self.block_caches[0:s.val]
        for i in range(len(changed_ranges)-1):
            ns, ne = changed_ranges[i+1]
            #
            changed_kvs = self.sorted_range_blocks( s.val, e.val )
            blocks, caches, little = self.split_kvs(changed_kvs)
            if len(blocks)>1 or not little:
                # append the blocks rewritten by this change
                new_block_caches += caches
                for block in blocks:
                    filename = f'{self.name}.{num_to_char37s(cur_num)}'
                    write_blocks.append((filename, block))
                    array_extend_buffer(new_nums, &cur_num, 8)
                    cur_num += 1
                # append the unchanged blocks between this change and the next
                new_block_caches += self.block_caches[e.val:ns.val]
                array_extend_buffer(new_nums, old_nums+e.val, 8 * (ns.val - e.val))
            elif len(blocks) == 1 :
                # Undersized result: merge it with the next on-disk block.
                little_block_indexes.append( len(new_block_caches)-1 )
                new_block_caches.append(None)
                write_filename = f'{self.name}.{num_to_char37s(old_nums[e.val])}'
                read_filename = self.get_block_filename(e.val)
                little_filename_map_callbacks[write_filename] = read_write_block_callback(self, e)
                array_extend_buffer(new_nums, &cur_num, 8)
                cur_num+=1
                little_blocks.append((read_filename, write_filename, blocks[0], e))
                # append the unchanged blocks between this change and the next
                new_block_caches += self.block_caches[e.val+1:ns.val]
                array_extend_buffer(new_nums, old_nums + e.val, 8 * (ns.val -1 -e.val))
            else:
                raise RuntimeError
            s, e = ns, ne
        #
        disks_and_filenames = [ (disks, little_filename_map_callbacks.keys()) ]
        read_files_raise(disks_and_filenames, little_filename_map_callbacks)
        for read_filename, write_filename, little_block, e in little_blocks:
            block = self.block_caches[e.val]
            # NOTE(review): sorted(block) yields only the dict keys — see
            # sorted_range_blocks.
            little_block += sorted(block)
            write_blocks.append((write_filename, little_block))
        write_files_sync(write_blocks, self.disks_paths.items())
    cpdef flush_meta(self):
        """Serialize this Kvs and write it as the current meta file."""
        #
        meta_filename = f'{self.name}.{num_to_char37s(self.cur_meta_num)}'
        # NOTE(review): pickle.dump requires a file object — pickle.dumps was
        # almost certainly intended; also meta_num is computed below but
        # self.cur_meta_num is never advanced. Confirm.
        meta = pickle.dump(self)
        meta_num = self.cur_meta_num + 1
        write_file_sync(meta_filename, meta, self.disks_paths.items())

    cpdef flush(self):
        # Flush dirty data blocks first, then persist the metadata snapshot.
        changed_block_ranges, filename_map_callbacks = self.split_block_ranges()
        self.flush_ranges(changed_block_ranges, filename_map_callbacks)
        self.flush_meta()
    cpdef commit(self):
        # NOTE(review): bare `NotImplemented` is a no-op expression; raising
        # NotImplementedError was probably intended.
        NotImplemented
    cpdef array encode_kv(self, key, fix_values, var_values):
        """Encode one key/value record: key, fixed values, then
        (field-name, value) pairs for variable fields."""
        cdef bytes encoded
        cdef U64 num
        # NOTE(review): itemdata is declared but never initialized with
        # array(...), and every itemdata_append_field call below is missing
        # its first (itemdata) argument per that function's signature; also
        # encode_field is not defined on this class (encode_sigal_field?).
        # This method looks unfinished — confirm.
        cdef array itemdata
        cdef u64 i
        encoded, num = self.encode_field(key)
        itemdata_append_field(encoded, num)
        for i in range(len(fix_values)):
            encoded, num = self.encode_field(fix_values[i])
            itemdata_append_field(encoded, num)
        for field, value in var_values:
            encoded = field.encode(utf8_)
            array_extend_buffer(itemdata, encoded)
            encoded, num = self.encode_field(value)
            itemdata_append_field(encoded, num)
        return itemdata
    cpdef split_kvs(self, list kvs):
        """Encode sorted (key, ValuesCache) pairs and split them into blocks
        of roughly self.blocksize bytes.

        Returns (encoded blocks, per-block cache dicts, little) where
        `little` indicates the trailing data was small enough to merge.
        """
        cdef u64 blocksize= self.blocksize, i, bytecount
        cdef array data=array(B), itemdata
        cdef ValuesCache values
        cdef u8[9] encoded_num
        cdef list blocks=[], caches=[]
        cdef dict block_cache={}
        for i in range(len(kvs)):
            key, values = kvs[i]
            itemdata = self.encode_kv(key, values.fix_values, values.var_fields)
            bytecount = _encode_num(len(itemdata), encoded_num)
            # NOTE(review): only the length prefix is appended — the encoded
            # itemdata payload itself is never copied into `data`; and
            # block_cache is never reset to {} after being appended, so every
            # block shares one cache dict. Both look like bugs; confirm.
            array_extend_buffer(data, encoded_num, bytecount)
            if Py_SIZE(data)<blocksize:
                block_cache[key]=values
            else:
                blocks.append(data)
                caches.append(block_cache)
                data = array(B)
        little=True
        if data:
            # Leftover bytes: merge them into the last emitted block.
            itemdata = blocks[len(blocks)-1]
            array_extend_buffer(itemdata, data.data.as_uchars, Py_SIZE(data))
            blocks[len(blocks)-1] = itemdata
            little=False
        return blocks, caches, little

cpdef itemdata_append_field(array itemdata, bytes encoded, U64 num):
    """Append one encoded field to *itemdata*.

    Wire layout: 1 type-tag byte, then (for tags other than 255) a varint
    length followed by the payload.  Tag 255 marks None: the tag byte alone
    is written and *encoded* is ignored.
    """
    cdef u8[9] encoded_num
    cdef u8 bytecount
    # Writes the low byte of num.val as the tag (assumes little-endian u64 —
    # TODO confirm portability).
    array_extend_buffer(itemdata, &num.val, 1)
    if num.val!=255:
        bytecount = _encode_num(num.val, encoded_num)
        array_extend_buffer(itemdata, encoded_num, bytecount)
        array_extend_buffer(itemdata, encoded, len(encoded))
cpdef array encode_kvs( list kvs, array nums, u64 num_start, u64 num_end, u8 len_size,
                        u64 total_itemsize, u64 fix_count, list var_counts):
    """Pack pre-encoded field payloads from *kvs* into one fixed-width block.

    Layout per record: the var-field count, then fix_count tagged fixed
    values, then (field, value) pairs; all lengths use len_size bytes
    (written as the block's first byte).

    NOTE(review): this function looks unfinished and cannot work as written:
    `data` is referenced (ptr init and the return) but never defined —
    `merge_data` was presumably meant; the outer loop iterates
    `range(len(var_count))` instead of `var_counts`; every
    `memcpy(ptr, l, len_size)` passes the u64 VALUE where a source pointer
    (&l) is required; and `numi` is never advanced, so every item reuses the
    tag at num_start.  Verify all of these against the decoder before use.
    """
    cdef array merge_data = array(B)
    resize(merge_data, total_itemsize)
    cdef bytes encoded
    cdef u8* ptr=data.data.as_uchars
    cdef u8* num_arr = nums.data.as_uchars
    cdef u8 num
    cdef u64 i, ii=0, numi=num_start, iii, l
    cdef U64 var_count
    # First byte of the block records the length-field width.
    ptr[0] = len_size
    ptr += 1
    for i in range(len(var_count)):
        var_count = var_counts[i]
        l=var_count.val
        memcpy(ptr, l, len_size)
        ptr += len_size
        for iii in range(fix_count):
            encoded=kvs[ii]
            if num_arr[numi]!=255:
                # Tagged value: tag byte, length, payload.
                ptr[0]=num_arr[numi]
                ptr+=1
                l=len(encoded)
                memcpy(ptr, l, len_size)
                ptr+=len_size
                memcpy(ptr, <char *> encoded, l)
                ptr+=l
            else:
                # Tag 255 encodes None: tag byte only, no payload.
                assert encoded is None
                ptr[0] = num_arr[numi]
                ptr += 1
            ii += 1
        for iii in range(var_count.val):
            # field name (length-prefixed, untagged)
            encoded = kvs[ii]
            l = len(encoded)
            memcpy(ptr, l, len_size)
            ptr += len_size
            memcpy(ptr, <char *> encoded, l)
            ptr += l
            # value
            # NOTE(review): `ii` is not advanced between the field name and
            # its value, so the same bytes are written twice — confirm.
            if num_arr[numi] != 255:
                ptr[0] = num_arr[numi]
                ptr += 1
                l = len(encoded)
                memcpy(ptr, l, len_size)
                ptr += len_size
                memcpy(ptr, <char *> encoded, l)
                ptr += l
            else:
                assert encoded is None
                ptr[0] = num_arr[numi]
                ptr += 1
    return data

#-----------------------------------------------------------------------------------------------------------------------






