from cache_entity import *
from index_base import *
from threading import Thread
from com.struct_format import *
# from multiprocessing import Process, Manager
import time


class ConditionCollect(FieldEntity):
    """Accumulates query condition fields, grouped by name key, and
    evaluates candidate values against every registered condition."""

    def __init__(self, encode="utf-8"):
        super().__init__()
        self.encode = encode
        # name_key -> list of condition fields sharing that key
        self.field_dict = dict()

    def add(self, field_name, field_value, symbol=SymbolEnum.eq):
        """Register a condition field and index it under its name_key."""
        field = super().add(field_name, field_value, symbol)
        self.field_dict.setdefault(field.name_key, list()).append(field)
        return field

    def calc_val(self, name_key, name, val):
        """Return False as soon as any condition with a matching name
        rejects *val*; True when every matching condition accepts it
        (or no condition is registered for *name_key*)."""
        for condition in self.field_dict.get(name_key, ()):
            if name == condition.name and not self.__calc_value__(condition, val):
                return False
        return True

    def __calc_value__(self, condition, value):
        """Evaluate one condition against *value* using its symbol mask."""
        left = convert_num(condition.is_num, value)
        right = convert_num(condition.is_num, condition.value)
        symbol_val = condition.symbol.value
        result = False
        # Each test fires only when the symbol's bits are contained in the
        # respective SymbolEnum mask; later matches overwrite earlier ones.
        if symbol_val == (SymbolEnum.eq.value & symbol_val):
            result = left == right
        if symbol_val == (SymbolEnum.gt.value & symbol_val):
            result = left > right
        if symbol_val == (SymbolEnum.lt.value & symbol_val):
            result = left < right
        return result


class CacheBase:
    """Table-level cache facade: maintains one CacheIndex per indexed
    column under '<path>/table/<table_name>/<column>'."""

    def __init__(self, table_name, path=None):
        if path is None:
            path = cfg.index_path
        self.table_name = table_name
        self.cache_dir = "%s/table/%s" % (path, table_name)
        self.encode = cfg.index_encode
        # column name -> CacheIndex
        self.cache = dict()
        self.__init__file__()

    def __init__file__(self):
        """Load existing per-column index directories, or create the table dir."""
        if os.path.exists(self.cache_dir):
            dirs = os.listdir(self.cache_dir)
            for d in dirs:
                if os.path.isdir(os.path.join(self.cache_dir, d)):
                    self.__get_cache__(d)
        else:
            os.makedirs(self.cache_dir)

    def __get_cache__(self, col_name):
        """Return (lazily creating) the CacheIndex for *col_name*."""
        if col_name not in self.cache:
            path = "%s/%s" % (self.cache_dir, col_name)
            self.cache[col_name] = CacheIndex(index_dir=path)
        return self.cache[col_name]

    def add(self, keys, data):
        """Index *data* under every field in *keys*."""
        for item in keys.field_list:
            cache_index = self.__get_cache__(item.name)
            cache_index.add(item=item, keys=keys, data=data)
            # NOTE(review): 0.1s sleep per field looks like a debugging /
            # throttling leftover and makes add() very slow — confirm whether
            # it papers over a write race before removing it.
            time.sleep(0.1)

    def find(self, keys, call_back=None):
        """Find data matching *keys*, dispatching on the chosen key's symbol."""
        cache_item, key_item = self.analyse(keys)
        if SymbolEnum.eq.value == key_item.symbol.value:
            return cache_item.find(keys, call_back)
        else:
            return cache_item.find_col(keys, call_back)

    def delete(self, keys, call_back=None):
        """Delete every indexed entry matching *keys*."""
        find_data = self.find(keys, call_back)
        while find_data.has_next():
            ib = find_data.next().index
            MemoryFileOpt.delete_group_item(ib.group_name, ib.pos)

    # def find_async(self, keys, call_back=None):
    #     cache_item = self.analyse(keys)
    #     return cache_item.find_sync(keys, call_back)

    def analyse(self, condition_list):
        """Pick the most selective condition field as the lookup key.

        Equality fields are ranked by their index entry count (fewest wins);
        a non-equality field is only chosen when no key is set yet.
        Returns (CacheIndex for the key field, key field).
        """
        # BUG FIX: pre_count was initialised to 0, so "pre_count > count_val"
        # was never true and an equality field could never become the key.
        pre_count = None
        for field in condition_list.field_list:
            catch_index_item = self.__get_cache__(field.name)
            if SymbolEnum.eq.value == field.symbol.value:
                max_val, min_val, top_val, count_val = catch_index_item.get_index_head(field.field_key)
                if pre_count is None or count_val < pre_count:
                    pre_count = count_val
                    condition_list.set_key(field)
            else:
                if condition_list.key is None:
                    condition_list.set_key(field)

        # NOTE(review): if condition_list.field_list is empty, get_key()
        # presumably returns None and callers will fail — confirm upstream
        # guarantees at least one field.
        item = condition_list.get_key()
        return self.__get_cache__(item.name), item


class ColumnSet(IndexBase):
    """Column-level index over bucketed CacheIndexCollect groups.

    Keys are partitioned into directory buckets derived from
    cfg.collect.base_max_index / base_min_index; each bucket owns a
    CacheIndexCollect stored under '<base_path>/list<bucket path>'.
    """

    def __init__(self, path):
        super().__init__(path="%s/col" % path, single_file=cfg.index.index_file_length, fmt=col_set_fmt)
        self.base_path = path
        self.cache_data = CacheData(cache_dir=self.index_dir)
        # collect path -> CacheIndexCollect
        self.index_collect_dict = dict()
        self.base_max_index = cfg.collect.base_max_index
        self.base_min_index = cfg.collect.base_min_index

    def __add_index__(self, key_val, data):
        """Append a column-set index record pointing at *data* (the bucket path)."""
        data_start, data_end = self.cache_data.append(data.encode(self.encode))
        index_val = struct.pack(self.fmt, 1, 0, 0, key_val, 0, 0, data_start, data_end, 0)
        start, end = self.append(index_val)
        self.cache_data.close()
        return start, end

    def __symbol__(self, symbol):
        """Widen a comparison symbol so range scans also match equal keys."""
        rtn = symbol.value
        if SymbolEnum.eq.value != (symbol.value & SymbolEnum.eq.value):
            rtn = symbol.value | SymbolEnum.eq.value
        return rtn

    def __find_data__(self, condition, item, callback):
        """Scan the column-set chain for buckets matching *item* and collect
        matching rows from each bucket's CacheIndexCollect."""
        collect_num, index_val = self.__create_path__(item.field_key)
        # print("index value: %d" % index_val)
        symbol = self.__symbol__(item.symbol)
        col_collect = FindIndexCollectEntity(self.group_name.encode(self.encode), index_val, symbol)
        data_collect = ResultListEntity()
        # MemoryFileOpt.print_group(self.group_name.encode(self.encode))
        with self.cache_data.create_fs() as read_collect:
            while col_collect.status > 0:
                fs = read_collect.add_fs(col_collect.d_start)
                bytes_str = self.cache_data.read(col_collect.d_start, col_collect.d_end-col_collect.d_start, fs)
                collect_item = self.__find_col__(str(bytes_str, self.encode))
                data_collect.add(collect_item.get_col_list(condition, item.field_key, item.symbol.value, callback))
                if not col_collect.has_next():
                    break
                col_collect.next()
        return data_collect

    def __create_collect__(self, path, key_index):
        """Record the bucket in the column set (first time only) and open it."""
        if path not in self.index_collect_dict:
            self.__add_index__(key_index, path)
        return CacheIndexCollect(base_path=path)

    def __create_path__(self, key_index):
        """Derive (bucket directory path, bucket base value) for *key_index*.

        NOTE(review): each segment is `key_index // index` without a modulo,
        so with more than one level between base_max_index and base_min_index
        the segments are cumulative quotients, not digits — confirm the
        configured range yields the intended layout.
        """
        index = self.base_max_index
        path = empty
        index_val = 0
        while index >= self.base_min_index:
            dir_item = key_index // index
            index_val += dir_item * index
            path = "%s/%d" % (path, dir_item)
            index = index // 10
        return path, index_val

    def __find_col__(self, group_name):
        """Return a cached bucket by its path, loading it from disk if needed."""
        if group_name in self.index_collect_dict:
            return self.index_collect_dict[group_name]
        return self.__load_col__(group_name)

    def __load_col__(self, group_name):
        """Open an existing bucket directory; None if it does not exist."""
        if os.path.exists(group_name):
            self.index_collect_dict[group_name] = CacheIndexCollect(base_path=group_name)
            return self.index_collect_dict[group_name]
        return None

    def create_index_collect(self, key_index):
        """Return the bucket for *key_index*, creating and caching it on miss."""
        collect_num, index_val = self.__create_path__(key_index)
        path = "%s/list%s" % (self.base_path, collect_num)
        # BUG FIX: membership was tested with collect_num while the dict is
        # keyed by the full path, so the cache never hit, a new
        # CacheIndexCollect was built on every call, and `path` would have
        # been unbound had the (impossible) hit occurred.
        if path not in self.index_collect_dict:
            self.index_collect_dict[path] = self.__create_collect__(path, index_val)
        return self.index_collect_dict[path]

    def get_index_collect(self, key_index):
        """Return the already-cached bucket for *key_index* (KeyError on miss)."""
        collect_num, index_val = self.__create_path__(key_index)
        path = "%s/list%s" % (self.base_path, collect_num)
        return self.index_collect_dict[path]

    def data_close(self):
        """Close the column-set data file handles."""
        self.cache_data.close()


class CacheIndex:
    """Per-column facade over a ColumnSet: adds entries and runs
    equality, range, and threaded lookups."""

    def __init__(self, index_dir):
        self.index_dir = index_dir
        self.col_index = ColumnSet(index_dir)

    def add(self, item, keys, data):
        """Store *data* under *item*'s field key, then release file handles."""
        collect = self.col_index.create_index_collect(item.field_key)
        collect.add(item.field_key, keys, data)
        collect.data_close()

    def find_col(self, condition_list, call_back=None):
        """Range lookup across buckets via the column set."""
        key_field = condition_list.get_key()
        return self.col_index.__find_data__(condition_list, key_field, call_back)

    def find_sync(self, condition_list, call_back=None):
        """Equality lookup using the threaded (async check) path."""
        return self.__find_with__(condition_list, call_back, sync=True)

    def find(self, condition_list, call_back=None):
        """Equality lookup on the key field's bucket."""
        return self.__find_with__(condition_list, call_back, sync=False)

    def __find_with__(self, condition_list, call_back, sync):
        """Shared body of find/find_sync; *sync* selects the threaded scan."""
        result = ResultListEntity()
        key_field = condition_list.get_key()
        collect = self.col_index.get_index_collect(key_field.field_key)
        max_val, min_val, top_val, count_val = collect.get_group_tag()
        if sync:
            rows = collect.get_data_list_sync(condition_list, top_val, call_back)
        else:
            rows = collect.get_data_list(condition_list, top_val, call_back)
        result.add(rows)
        return result

    def get_index_head(self, field_key):
        """Return (max, min, top, count) stats for *field_key*'s bucket."""
        collect = self.col_index.get_index_collect(field_key)
        max_val, min_val, top_val, count_val = collect.get_group_tag()
        return max_val, min_val, top_val, count_val


class CacheIndexCollect(IndexBase):
    """One index bucket: packed index records chained through their
    s_back pointers, plus the raw data and per-field key stores."""

    def __init__(self, base_path):
        super().__init__(path="%s/collect" % base_path, single_file=cfg.collect.index_collect_length, fmt=collect_fmt)
        self.base_min_index = cfg.collect.base_min_index
        self.max_thread = cfg.collect.max_thread
        self.cache_data = CacheData(cache_dir=base_path, group_name=self.group_name)
        self.cache_index_data = CacheIndexData(cache_dir=base_path)

    def add(self, key_val, keys, data):
        """Append key metadata and *data*, then the packed index record."""
        index_start, index_end = self.cache_index_data.add(keys)
        stat_item = stat.start("append index data")
        data_start, data_end = self.cache_data.append(data)
        stat_item.end()
        index_val = struct.pack(self.fmt, 1, 0, 0, key_val, index_start, index_end, data_start, data_end, 0)
        stat_item = stat.start("append index collect")
        start, end = self.append(index_val)
        stat_item.end()
        return start, end

    def get_data_list_sync(self, condition, position, call_back=None):
        """Threaded scan of the index chain starting at *position*."""
        s_back = position
        rtn = self.cache_data.get_collect(call_back)
        with self.cache_index_data.get_check(rtn, self.max_thread) as check_item:
            while s_back > 0:
                ib = IndexBaseEntity(self.group_name, self.get_index(s_back))
                # status, s_front, s_back, str_val, i_start, i_end, d_start, d_end, pos = self.get_index(s_back)
                # add_check only enqueues work for the worker threads and
                # returns None, so the old "if not add_check(...): break"
                # aborted after the first entry; enqueue unconditionally.
                check_item.add_check(ib, ib.i_start, ib.i_end, ib.d_start, ib.d_end, condition)
                # BUG FIX: follow the chain — s_back was never advanced,
                # leaving the loop stuck on the first record.
                s_back = ib.s_back
        return rtn

    def get_data_list(self, condition, position, call_back=None):
        """Synchronous scan of the index chain starting at *position*."""
        s_back = position
        rtn = self.cache_data.get_collect(call_back)
        with self.cache_index_data.get_check(rtn) as check_item:
            while s_back > 0:
                ib = IndexBaseEntity(self.group_name, self.get_index(s_back))
                # status, s_front, s_back, str_val, i_start, i_end, d_start, d_end, pos = self.get_index(s_back)
                if not check_item.check(ib, ib.i_start, ib.i_end, ib.d_start, ib.d_end, condition):
                    break
                # BUG FIX: advance along the chain (was an effective
                # single-iteration loop because s_back never changed).
                s_back = ib.s_back
        return rtn

    def get_col_list(self, condition, key_value, tp, call_back=None):
        """Scan entries matching *key_value* under comparison mask *tp*."""
        # print("index value: %d" % key_value)
        rtn = self.cache_data.get_collect(call_back)
        # MemoryFileOpt.print_group(self.group_name.encode(self.encode))
        find_collect = FindIndexCollectEntity(self.group_name.encode(self.encode), key_value, tp)
        with self.cache_index_data.get_check(rtn) as check_item:
            while find_collect.status > 0:
                ib = find_collect.get_data()
                if not check_item.check(ib, ib.i_start, ib.i_end, ib.d_start, ib.d_end, condition):
                    break
                if not find_collect.has_next():
                    break
                find_collect.next()
        return rtn

    def get_index_value(self, start, size):
        """Read *size* bytes of key metadata starting at *start*."""
        return self.cache_index_data.read(start, size)

    def data_close(self):
        """Close both the data and key-metadata stores."""
        self.cache_data.close()
        self.cache_index_data.data_close()


class CacheIndexDataCheckItem:
    # Context-managed filter that checks index entries against a condition
    # and feeds matches into data_collect. With count > 0 it spawns that
    # many worker threads which drain a queue fed by add_check(); with
    # count == 0 callers invoke check() directly. final() (via __exit__)
    # blocks on __read_lock__ until the workers report completion.
    def __init__(self, cache_index_data, data_collect, count=0):
        """Create the checker.

        cache_index_data -- owning CacheIndexData (provides __check__ and timers)
        data_collect     -- result sink; its add() return value drives check()
        count            -- number of worker threads (0 = synchronous use only)
        """
        self.__data_lock__ = Lock()   # guards data_queue access
        self.__count_lock__ = Lock()  # guards data_count and the acquire/final flags
        self.__read_lock__ = Lock()   # held while async work is outstanding; final() waits on it
        self.__acquire__ = True       # True until the first add_check acquires __read_lock__
        self.__add_final__ = False    # set by final(): no more work will be enqueued
        self.__final__ = False        # set once the workers release __read_lock__
        self.data_collect = data_collect
        self.data_count = 0           # number of queued-but-unfinished checks
        self.exec = True              # worker loop run flag
        self.count = count
        self.cache_index_data = cache_index_data
        # self.read_index_fs = self.cache_index_data.create_fs()
        self.read_real_fs = self.cache_index_data.real_data.create_fs()
        self.thread_list = list()
        self.data_queue = queue.Queue()
        self.sync_time = 0
        self.async_time = 0
        # self.data_queue = dict()
        self.__init_thread__()

    def __init_thread__(self):
        # Start `count` worker threads running __inner_run__.
        for i in range(0, self.count):
            trd = Thread(target=self.__inner_run__)
            trd.start()
            self.thread_list.append(trd)

    def __inner_run__(self):
        # Worker loop: drain queued check requests until exec is cleared.
        # NOTE(review): when the queue is empty this busy-spins (non-blocking
        # get, no sleep) — consider a blocking get with timeout; confirm the
        # spin is not load-bearing before changing.
        while self.exec:
            # read_data = self.__get_data__()
            read_data = self.__get_data__()
            if read_data is not None:
                t = time.time()
                index, i_start, i_end, d_start, d_end, condition = read_data
                self.check(index, i_start, i_end, d_start, d_end, condition)
                with self.__count_lock__:
                    self.data_count -= 1
                self.sync_time += time.time() - t
                self.__is_final__()

    def __get_data__(self):
        # Pop one queued request, or None when the queue is empty.
        with self.__data_lock__:
            if self.data_queue.empty():
                return None
            return self.data_queue.get(block=False)

    def __is_final__(self):
        # Once final() was announced and all queued work is done, release
        # __read_lock__ exactly once so final() can proceed.
        # NOTE(review): data_count is read before taking __count_lock__ —
        # a benign-looking race, but confirm.
        if self.__add_final__ and self.data_count <= 0:
            with self.__count_lock__:
                if not self.__final__:
                    self.__final__ = True
                    self.__read_lock__.release()

    def add_check(self, index, i_start, i_end, d_start, d_end, condition):
        # Enqueue one check request for the worker threads. The first call
        # acquires __read_lock__ (held until the workers finish) and resets
        # the timing counters.
        # NOTE(review): this returns None — callers treating a falsy return
        # as "stop" will bail out after the first enqueue.
        with self.__count_lock__:
            if self.__acquire__:
                self.__acquire__ = False
                self.__read_lock__.acquire()
                self.async_time = 0
                self.cache_index_data.run_time = 0
                self.cache_index_data.find_time = 0
                self.cache_index_data.read_time = 0

            self.data_count += 1
            self.data_queue.put((index, i_start, i_end, d_start, d_end, condition), False)
        # self.data_queue.put((i_start, i_end, d_start, d_end, condition))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.final()

    def check(self, index, i_start, i_end, d_start, d_end, condition):
        # Evaluate one entry: run the key-bytes condition check and, on a
        # match, hand (index, data offset, length) to data_collect. Returns
        # data_collect.add's result (falsy = caller should stop scanning),
        # or True when the entry simply did not match.
        t = time.time()
        rtn = True
        check_status = self.cache_index_data.__check__(i_start, i_end, condition, self.read_real_fs)
        if check_status and self.data_collect is not None:
            rtn = self.data_collect.add(index, d_start, d_end - d_start)
        self.async_time += time.time() - t
        return rtn

    def final(self):
        # Announce shutdown, wait for outstanding async work (blocks on
        # __read_lock__ if it was acquired by add_check and not yet released
        # by __is_final__), then stop the workers and close file handles.
        self.__add_final__ = True
        self.__read_lock__.acquire()
        self.exec = False
        self.read_real_fs.close_all()
        self.__read_lock__.release()
        # print("async: %f" % self.async_time)
        # print("sync: %f" % self.sync_time)
        # print("check: %f" % self.cache_index_data.run_time)
        # print("time: %f" % self.cache_index_data.find_time)
        # print("read: %f" % self.cache_index_data.read_time)


class CacheIndexData(IndexBase):
    """Per-entry key metadata store: a hash-slotted byte table mapping
    name_key -> (field_key, offsets into the real 'name=value' text)."""

    def __init__(self, cache_dir):
        super().__init__(path="%s/collect/data" % cache_dir, single_file=cfg.collect.index_data_length, fmt=i_data_fmt)
        self.index_base_length = cfg.collect.value_length  # slots per hash table row
        self.real_data = CacheData(cache_dir="%s/value" % self.index_dir)
        # timing accumulators (seconds), reset by CacheIndexDataCheckItem
        self.run_time = 0
        self.find_time = 0
        self.read_time = 0

    def add(self, keys):
        """Write one slotted key table for *keys* and return its (start, end)."""
        bytes_list = bytearray(self.index_len) * self.index_base_length
        pos = MemoryFileOpt.group_tell(self.group_name_bytes)
        for item in keys.field_list:
            bytes_list = self.__create_bytes__(bytes_list, item, pos)
        last_pos = pos+len(bytes_list)
        MemoryFileOpt.group_seek(self.group_name_bytes, last_pos)
        return pos, last_pos

    def get_check(self, data_collect, count=0):
        """Create a check context (threaded when count > 0)."""
        return CacheIndexDataCheckItem(self, data_collect, count)

    def __check__(self, i_start, i_end, condition, rfc):
        """Return True when the key table in [i_start, i_end) satisfies
        every condition field; False on the first rejection."""
        # BUG FIX: t was only assigned inside the loop, so an empty
        # field_dict raised UnboundLocalError at the final accumulation.
        t = time.time()
        for k, v in condition.field_dict.items():
            for item in v:
                # key_num = (item.field_key % self.index_base_length) * self.index_len
                t = time.time()
                if not self.__find_key_bytes__(i_start, i_end, condition, item, rfc):
                    self.run_time += time.time() - t
                    return False
        self.run_time += time.time() - t
        return True

    def __find_key_bytes__(self, i_start, i_end, condition, field, rfc):
        """Locate *field*'s slot in the key table and evaluate its value."""
        t = time.time()
        key_num = self.__calc_index_num_key__(field.name_key)
        next_pos = i_start + key_num
        group_name_bytes = self.group_name.encode(self.encode)
        key_val_bytes = MemoryFileOpt.find_value_index(
            group_name_bytes, next_pos, i_end, field.name_key, self.index_len, self.index_base_length)
        self.read_time += time.time() - t
        status, name_key, field_key, start_val, end_val = self.unpack(key_val_bytes)
        t = time.time()
        if 0 == status:
            # empty slot: the entry has no value for this key
            return False
        name, val = self.__is_find_item__(start_val, end_val, rfc)
        self.find_time += time.time() - t
        return condition.calc_val(name_key, name, val)

    def __is_find_item__(self, start, end, rfc):
        """Read the stored text at [start, end) and split it into (name, value).

        NOTE(review): assumes the stored text always contains '=' —
        a malformed record would raise IndexError; confirm the writer
        (field_text) guarantees the format.
        """
        fs = rfc.add_fs(start)
        real_str = self.real_data.read_str(start, end-start, fs)
        real_val = real_str.split("=", 1)
        return real_val[0], real_val[1]

    def __calc_index_num_key__(self, field_key):
        """Byte offset of *field_key*'s primary slot within one table row."""
        return field_key % self.index_base_length * self.index_len

    def __create_bytes__(self, byte_array, key_item, pos):
        """Append the key's text to real_data and place its packed record
        into the first free slot (open addressing by whole rows)."""
        stat_item = stat.start("add real index data")
        start, end = self.real_data.append(key_item.field_text.encode(self.encode))
        stat_item.end()
        bytes_val = struct.pack(self.fmt, 1, key_item.name_key, key_item.field_key, start, end)
        stat_item = stat.start("add index data")
        key_num = self.__calc_index_num_key__(key_item.name_key)
        # on collision, jump one full row ahead, growing the table as needed
        while byte_array[key_num] > 0:
            key_num += self.index_base_length * self.index_len
            if key_num >= len(byte_array):
                byte_array += bytearray(self.index_len) * self.index_base_length
        byte_array[key_num:key_num+self.index_len] = bytes_val
        self.edit(pos+key_num, bytes_val)
        stat_item.end()
        return byte_array

    def data_close(self):
        """Close the real-value data store."""
        self.real_data.close()


class CacheData(DataBase):
    """Raw data store rooted at '<cache_dir>/data', sized and limited by
    the cfg.data settings."""

    def __init__(self, cache_dir, group_name=None, prefix=""):
        super().__init__(
            path="%s/data" % cache_dir,
            group_name=group_name,
            max_length=cfg.data.max_file_length,
            max_open_file=cfg.data.max_open_file,
            prefix=prefix,
        )
        self.cache_dir = self.data_dir

    def get_collect(self, call_back=None, data_cache=None):
        """Build a DataEntityCollect over this store (or *data_cache* if given)."""
        if data_cache is None:
            source = self
        else:
            source = data_cache
        return DataEntityCollect(source, call_back)


