import functools
import math
import timeit
from typing import List, Tuple

import networkx as nx
import numpy as np
from networkx.algorithms.dag import dag_longest_path


def clock(func):
    """Decorator that reports the wall-clock execution time of each call.

    Prints ``search>>><name> cost>>><seconds>s`` after every invocation and
    returns the wrapped function's result unchanged.
    """

    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped function
    def clocked(*args, **kwargs):
        start = timeit.default_timer()
        res = func(*args, **kwargs)
        run_time = timeit.default_timer() - start
        # Keep the exact original log format: downstream tooling may parse it.
        # (Removed the unused per-call repr() of all arguments — wasted work.)
        print('search>>>%s cost>>>%0.8fs' % (func.__name__, run_time))
        return res

    return clocked

def seconds_to_hms(seconds_num: int):
    """Convert a number of seconds into an ``HH:MM:SS`` string.

    Parameters
    ----------
    seconds_num: total seconds, e.g. 666

    Returns
    -------
    str: zero-padded "HH:MM:SS", e.g. "00:11:06"
    """
    minutes, seconds = divmod(seconds_num, 60)
    hours, minutes = divmod(minutes, 60)
    return "%02d:%02d:%02d" % (hours, minutes, seconds)

def iou(bbox: np.ndarray, gt: np.ndarray) -> np.ndarray:
    """
    Pairwise IoU between two sets of boxes, used for next-step filtering.

    Parameters
    ----------
    bbox: bounding box array (n, 4), each row [x1, y1, x2, y2]
    gt: bounding box array (m, 4)

    Returns
    -------
    IoU matrix of shape (n, m); a 0-d zero array if either input is empty.
    """
    if len(bbox) == 0 or len(gt) == 0:
        return np.array(0)
    # Intersection rectangle per (bbox, gt) pair via broadcasting.
    top_left = np.maximum(bbox[:, None, :2], gt[:, :2])
    bottom_right = np.minimum(bbox[:, None, 2:], gt[:, 2:])
    # +1: boxes use inclusive integer coordinates.
    inter_wh = np.maximum(bottom_right - top_left + 1, 0)
    inter_areas = inter_wh[..., 0] * inter_wh[..., 1]  # shape (n, m)
    areas_a = (bbox[:, 2] - bbox[:, 0] + 1) * (bbox[:, 3] - bbox[:, 1] + 1)
    areas_b = (gt[:, 2] - gt[:, 0] + 1) * (gt[:, 3] - gt[:, 1] + 1)
    union = areas_a[:, None] + areas_b - inter_areas
    return np.array(inter_areas / union)


def generate_slices(original_list, pivot, whole_list):
    """Generate candidate windows of ``whole_list`` around ``original_list``.

    The window length is ``len(original_list) + pivot`` (capped at
    ``len(whole_list)``). The window start is shifted left of
    ``original_list[0]`` by ``pivot`` down to 1 positions (clamped at 0),
    skipping duplicate starts and windows that would run past the end.

    Parameters
    ----------
    original_list: index sequence anchoring the windows (only [0] is read).
    pivot: maximum left shift; also the amount the window is widened by.
    whole_list: full index sequence to slice windows from.

    Returns
    -------
    list of slices of ``whole_list``; for a negative ``pivot`` an error
    string is returned (legacy behavior preserved for existing callers —
    NOTE(review): consider raising ValueError instead).
    """
    if pivot < 0:
        return "Error: Pivot number out of range."

    result = []
    window_len = min(len(original_list) + pivot, len(whole_list))
    seen_starts = set()  # O(1) duplicate detection (was an O(n) list scan)
    # Decrease the left shift step by step: pivot, pivot-1, ..., 1.
    for offset in range(pivot, 0, -1):
        start = max(0, original_list[0] - offset)  # clamp to avoid negative index
        if start in seen_starts:
            continue
        end = start + window_len
        if end > len(whole_list):
            continue  # window would overrun the sequence
        result.append(whole_list[start:end])
        seen_starts.add(start)

    return result

def sub_tn(sims: np.ndarray,
       tn_max_step: int = 10, tn_top_k: int = 5, max_path: int = 10,
       min_sim: float = 0.2, min_length: int = 5, max_iou: float = 0.3) -> Tuple[List[List[int]], List[float]]:
    """
    TN method for video temporal alignment.
    Reimplemented paper:
    {Tan H K, Ngo C W, Hong R, et al. Scalable detection of partial near-duplicate videos by visual-temporal consistency
     [C]//Proceedings of the 17th ACM international conference on Multimedia. 2009: 145-154.}

    NOTE(review): functionally identical to `tn` below — presumably kept as a
    separate entry point; consider consolidating.

    Parameters
    ----------
    sims: input similarity map computed from a copied video pair.
    tn_max_step: max step range in TN.
    tn_top_k: Top k frame similarity selection in TN.
    max_path: max loop for multiply segments detection.
    min_sim: min average similarity score for each aligned segment.
    min_length: min segment length.
    max_iou: max iou for filtering overlap segments (bbox).
    Returns
    -------
    (boxes, scores): boxes is a list of temporal aligned copied segments,
    [query_min, ref_min, query_max, ref_max] for each segment; scores holds
    the average similarity (path score / mean segment length) of each box.
    """
    infringe_box_list = []
    infringe_score_list = []
    path = 0
    # Node 0 is a synthetic source; every other node is a (query_frame, ref_frame) pair.
    node_pair2id = {}
    node_pair2id[(-1, -1)] = 0

    node_id2pair = {}
    node_id2pair[0] = (-1, -1)  # source

    node_num = 1

    DG = nx.DiGraph()
    DG.add_node(0)

    # get top-k values and indices, shape (Q_LEN, top_k)
    top = min(tn_top_k, sims.shape[1])

    topk_indices = np.argsort(-sims)[:, :top]
    topk_sims = np.take_along_axis(sims, topk_indices, axis=-1)

    # add nodes: one per (query frame, top-k reference frame) candidate match
    for qf_idx in range(sims.shape[0]):
        for k in range(top):
            rf_idx = topk_indices[qf_idx][k]

            node_id2pair[node_num] = (qf_idx, rf_idx)
            node_pair2id[(qf_idx, rf_idx)] = node_num

            DG.add_node(node_num)
            node_num += 1

    # create graph by adding edges (temporal-consistency constraints C1-C4 from the paper)
    for q_i in range(sims.shape[0]):
        r_i = topk_indices[q_i]

        intermediate_rs = np.empty((0,), dtype=np.int32)
        # implements Constraints C1 by limiting range end
        for q_j in range(q_i + 1, min(sims.shape[0], q_i + tn_max_step)):
            r_j = topk_indices[q_j]  # shape (top_k, )
            r_diff = r_j[:, None] - r_i  # dst - src, shape (top_k, top_k)

            # Constraints C2: reference index must advance, but within max step
            C2 = (r_diff > 0) & (r_diff < tn_max_step)

            # Constraints C3: no already-linked reference frame strictly between src and dst
            if len(intermediate_rs) == 0:
                C3 = np.ones(C2.shape, dtype=np.bool_)
            else:
                # "the equal sign" in C3 in paper is wrong because it's contradictory to C2
                cond1 = intermediate_rs[None, :] > r_i[:, None]
                cond2 = intermediate_rs[None, :] < r_j[:, None]
                C3 = np.sum(cond2[:, None, :] & cond1, axis=-1) == 0

            # Constraints C4: destination similarity must be at least min_sim
            s_j = topk_sims[q_j]  # shape (top_k, )
            s_j = np.repeat(s_j.reshape(-1, 1), r_diff.shape[1], axis=1)  # shape (top_k, top_k)
            C4 = s_j >= min_sim

            val_rows, val_cols = np.where(C2 & C3 & C4)
            val_sims = s_j[val_rows, val_cols]
            # update intermediate_rs
            valid_r_j = r_j[val_rows]
            intermediate_rs = np.unique(np.concatenate([intermediate_rs, valid_r_j]))

            # Edge weight is the destination frame-pair similarity.
            edges = [(node_pair2id[(q_i, r_i[c])], node_pair2id[(q_j, r_j[r])], dict(weight=s))
                     for c, r, s in zip(val_cols, val_rows, val_sims)]

            DG.add_edges_from(edges)

    # link sink node (the last-added node acts as sink) with zero-weight edges
    for i in range(0, node_num - 1):
        j = node_num - 1

        pair_i = node_id2pair[i]
        pair_j = node_id2pair[j]

        if (pair_j[0] > pair_i[0] and pair_j[1] > pair_i[1] and
                pair_j[0] - pair_i[0] <= tn_max_step and pair_j[1] - pair_i[1] <= tn_max_step):
            DG.add_edge(i, j, weight=0)

    # Repeatedly extract the max-weight path, zero out its edges, and keep the
    # segment if it is long, similar, and not overlapping a previous detection.
    while True:
        if path > max_path:
            break
        longest_path = dag_longest_path(DG)
        # Zero the used edges so the next iteration finds a different segment.
        for i in range(1, len(longest_path)):
            DG.add_edge(longest_path[i - 1], longest_path[i], weight=0.0)
        if 0 in longest_path:
            longest_path.remove(0)  # remove source node
        if node_num - 1 in longest_path:
            longest_path.remove(node_num - 1)  # remove sink node
        path_query = [node_id2pair[node_id][0] for node_id in longest_path]
        path_refer = [node_id2pair[node_id][1] for node_id in longest_path]

        if len(path_query) == 0:
            break

        # Total similarity accumulated along the matched frame pairs.
        score = 0.0
        for (qf_idx, rf_idx) in zip(path_query, path_refer):
            score += sims[qf_idx][rf_idx]

        if score > 0:
            query_min, query_max = min(path_query), max(path_query)
            refer_min, refer_max = min(path_refer), max(path_refer)
        else:
            query_min, query_max = 0, 0
            refer_min, refer_max = 0, 0

        ave_length = (refer_max - refer_min + query_max - query_min) / 2
        # Overlap with already-accepted segments (iou returns 0-d zero when list is empty).
        ious = iou(np.expand_dims(np.array([query_min, refer_min, query_max, refer_max]), axis=0),
                   np.array(infringe_box_list))

        if ave_length > 0 and score / ave_length > min_sim and min(refer_max - refer_min,
                                                                   query_max - query_min) >= min_length and ious.max() < max_iou:
            infringe_box_list.append([int(query_min), int(refer_min), int(query_max), int(refer_max)])
            infringe_score_list.append(score / ave_length)
        path += 1
    return infringe_box_list, infringe_score_list


def find_slide(data_array, label_array):
    """Return the (left, right) offsets between two index sequences.

    left is ``label_array[0] - data_array[0]``; right is the difference of
    the last elements.
    """
    return (label_array[0] - data_array[0],
            label_array[-1] - data_array[-1])

def tn_acc(sims: np.ndarray,
       tn_max_step: int = 10, tn_top_k: int = 5, max_path: int = 10,
       min_sim: float = 0.2, min_length: int = 5, max_iou: float = 0.3, acc=True) -> Tuple[List[List[int]], List[float]]:
    """
    TN method for video temporal alignment, with an optional ``acc`` refinement
    that snaps the matched path onto contiguous frame ranges and slides the
    shorter side to maximize average similarity.
    Reimplemented paper:
    {Tan H K, Ngo C W, Hong R, et al. Scalable detection of partial near-duplicate videos by visual-temporal consistency
     [C]//Proceedings of the 17th ACM international conference on Multimedia. 2009: 145-154.}
    Parameters
    ----------
    sims: input similarity map computed from a copied video pair.
    tn_max_step: max step range in TN.
    tn_top_k: Top k frame similarity selection in TN.
    max_path: max loop for multiply segments detection.
    min_sim: min average similarity score for each aligned segment.
    min_length: min segment length.
    max_iou: max iou for filtering overlap segments (bbox).
    acc: when True, densify the aligned path to contiguous ranges and refine
        window placement before scoring.
    Returns
    -------
    (boxes, scores): boxes is a list of temporal aligned copied segments,
    [query_min, ref_min, query_max, ref_max] for each segment; scores holds
    the average similarity of each kept segment.
    """
    infringe_box_list = []
    infringe_score_list = []
    path = 0
    # Node 0 is a synthetic source; every other node is a (query_frame, ref_frame) pair.
    node_pair2id = {}
    node_pair2id[(-1, -1)] = 0

    node_id2pair = {}
    node_id2pair[0] = (-1, -1)  # source

    node_num = 1

    DG = nx.DiGraph()
    DG.add_node(0)

    # get top-k values and indices, shape (Q_LEN, top_k)
    top = min(tn_top_k, sims.shape[1])

    topk_indices = np.argsort(-sims)[:, :top]
    topk_sims = np.take_along_axis(sims, topk_indices, axis=-1)

    # add nodes: one per (query frame, top-k reference frame) candidate match
    for qf_idx in range(sims.shape[0]):
        for k in range(top):
            rf_idx = topk_indices[qf_idx][k]

            node_id2pair[node_num] = (qf_idx, rf_idx)
            node_pair2id[(qf_idx, rf_idx)] = node_num

            DG.add_node(node_num)
            node_num += 1

    # create graph by adding edges (temporal-consistency constraints C1-C4)
    for q_i in range(sims.shape[0]):
        r_i = topk_indices[q_i]

        intermediate_rs = np.empty((0,), dtype=np.int32)
        # implements Constraints C1 by limiting range end
        for q_j in range(q_i + 1, min(sims.shape[0], q_i + tn_max_step)):
            r_j = topk_indices[q_j]  # shape (top_k, )
            r_diff = r_j[:, None] - r_i  # dst - src, shape (top_k, top_k)

            # Constraints C2
            C2 = (r_diff > 0) & (r_diff < tn_max_step)

            # Constraints C3
            if len(intermediate_rs) == 0:
                C3 = np.ones(C2.shape, dtype=np.bool_)
            else:
                # "the equal sign" in C3 in paper is wrong because it's contradictory to C2
                cond1 = intermediate_rs[None, :] > r_i[:, None]
                cond2 = intermediate_rs[None, :] < r_j[:, None]
                C3 = np.sum(cond2[:, None, :] & cond1, axis=-1) == 0

            # Constraints C4
            s_j = topk_sims[q_j]  # shape (top_k, )
            s_j = np.repeat(s_j.reshape(-1, 1), r_diff.shape[1], axis=1)  # shape (top_k, top_k)
            # NOTE(review): hardcoded 0.8 here, whereas tn/sub_tn use min_sim —
            # presumably a deliberately stricter edge threshold for the acc
            # variant; confirm before unifying with min_sim.
            C4 = s_j >= 0.8

            val_rows, val_cols = np.where(C2 & C3 & C4)
            val_sims = s_j[val_rows, val_cols]
            # update intermediate_rs
            valid_r_j = r_j[val_rows]
            intermediate_rs = np.unique(np.concatenate([intermediate_rs, valid_r_j]))

            edges = [(node_pair2id[(q_i, r_i[c])], node_pair2id[(q_j, r_j[r])], dict(weight=s))
                     for c, r, s in zip(val_cols, val_rows, val_sims)]

            DG.add_edges_from(edges)

    # link sink node (the last-added node acts as sink) with zero-weight edges
    for i in range(0, node_num - 1):
        j = node_num - 1

        pair_i = node_id2pair[i]
        pair_j = node_id2pair[j]

        if (pair_j[0] > pair_i[0] and pair_j[1] > pair_i[1] and
                pair_j[0] - pair_i[0] <= tn_max_step and pair_j[1] - pair_i[1] <= tn_max_step):
            DG.add_edge(i, j, weight=0)

    while True:
        if path > max_path:
            break
        longest_path = dag_longest_path(DG)
        # Zero the used edges so the next iteration finds a different segment.
        for i in range(1, len(longest_path)):
            DG.add_edge(longest_path[i - 1], longest_path[i], weight=0.0)
        if 0 in longest_path:
            longest_path.remove(0)  # remove source node
        if node_num - 1 in longest_path:
            longest_path.remove(node_num - 1)  # remove sink node
        path_query = [node_id2pair[node_id][0] for node_id in longest_path]
        path_refer = [node_id2pair[node_id][1] for node_id in longest_path]

        # BUGFIX: check for an empty path BEFORE the acc refinement below.
        # Previously this guard sat after the acc block, so an empty longest
        # path crashed on path_refer[0] when acc=True.
        if len(path_query) == 0:
            break

        if acc:
            # Densify: snap the (possibly sparse) path onto contiguous frame ranges.
            path_refer = np.arange(path_refer[0], path_refer[-1] + 1, dtype=np.int32)
            path_query = np.arange(path_query[0], path_query[-1] + 1, dtype=np.int32)
            if len(path_refer) - sims.shape[0] > 3:
                # Reference span far exceeds the query length; implausible match.
                path += 1
                continue
            len_diff = len(path_query) - len(path_refer)
            max_score = 0.0
            max_step_idx = 0
            if len_diff > 0:
                # Query side is longer: slide a widened reference window, keep the best.
                window_steps = generate_slices(path_refer, len_diff, np.arange(sims.shape[1]))
                for step_id, step in enumerate(window_steps):
                    tmp_score = 0.0
                    for (qf_idx, rf_idx) in zip(path_query, step):
                        sim = sims[qf_idx][rf_idx]
                        tmp_score += sim
                    avg_score = tmp_score / len(step)
                    if avg_score > max_score:
                        max_score = avg_score
                        max_step_idx = step_id
                if max_score > min_sim:
                    path_refer = window_steps[max_step_idx]

            elif len_diff < 0:
                # Reference side is longer: slide a widened query window instead.
                window_steps = generate_slices(path_query, abs(len_diff), np.arange(sims.shape[0]))
                for step_id, step in enumerate(window_steps):
                    tmp_score = 0.0
                    for (qf_idx, rf_idx) in zip(step, path_refer):
                        sim = sims[qf_idx][rf_idx]
                        tmp_score += sim
                    avg_score = tmp_score / len(step)
                    if avg_score > max_score:
                        max_score = avg_score
                        max_step_idx = step_id
                if max_score > min_sim:
                    path_query = window_steps[max_step_idx]

        score = 0.0
        for (qf_idx, rf_idx) in zip(path_query, path_refer):
            score += sims[qf_idx][rf_idx]

        if score > 0:
            query_min, query_max = min(path_query), max(path_query)
            refer_min, refer_max = min(path_refer), max(path_refer)
        else:
            query_min, query_max = 0, 0
            refer_min, refer_max = 0, 0

        ave_length = (refer_max - refer_min + query_max - query_min) / 2
        ious = iou(np.expand_dims(np.array([query_min, refer_min, query_max, refer_max]), axis=0),
                   np.array(infringe_box_list))
        if ave_length > 0 and score / ave_length > min_sim and min(refer_max - refer_min,
                                                                   query_max - query_min) >= min_length and ious.max() < max_iou:
            if acc:
                # Stretch the reference start when the detected reference span is
                # shorter than the full query video.
                duration_diff = (sims.shape[0]) - (refer_max - refer_min) - 1
                if duration_diff > 0:
                    refer_min = max(refer_min - duration_diff, 0)
            infringe_box_list.append([int(query_min), int(refer_min), int(query_max), int(refer_max)])
            infringe_score_list.append(score / ave_length)
        path += 1
    return infringe_box_list, infringe_score_list

def segment_map_to_square(similarity_map: np.ndarray, segment_choice: bool=True, ratio_thrsh: int=3, slice_ratio: int=3) -> Tuple[List[np.ndarray], List[List[int]]]:
    """
    Segment similarity map for SPD inference.

    Similarity maps with a high aspect ratio between height and width
    (e.g. 2000 * 150) are not suitable for pattern detection, so a
    high-aspect-ratio map is cut into several near-square patches.

    Parameters
    ----------
    similarity_map: input similarity map, dimension VideoLength_q * VideoLength_r
    segment_choice: whether to segment the similarity image at all
    ratio_thrsh: minimum aspect ratio (h/w or w/h) that triggers segmentation
    slice_ratio: height-width ratio of the segmented patches

    Returns
    -------
    List of segmented similarity images
    List of starting positions in the original similarity map
    """
    h, w = similarity_map.shape[:2]
    aspect = h / w
    nonzero_thresh = 1  # min nonzero-per-pixel-of-short-side to keep a patch
    if 1 / ratio_thrsh < aspect < ratio_thrsh or not segment_choice:
        # Near-square already (or segmentation disabled): return as-is.
        return [similarity_map], [[0, 0]]

    patches: List[np.ndarray] = []
    origins: List[List[int]] = []
    if aspect > 1:
        # Tall map: zero-pad height to a multiple of slice_ratio * w, split vertically.
        padded_ratio = math.ceil(aspect / slice_ratio) * slice_ratio
        canvas = np.zeros((padded_ratio * w, w))
        canvas[:h, :w] = similarity_map
        pieces = np.vsplit(canvas, int(padded_ratio / slice_ratio))
        offsets = range(0, padded_ratio * w, w * slice_ratio)
        for piece, off in zip(pieces, offsets):
            # Skip patches that are (almost) entirely zero padding.
            if np.count_nonzero(piece) / w > nonzero_thresh:
                patches.append(piece)
                origins.append([0, off])
    else:
        # Wide map: zero-pad width to a multiple of slice_ratio * h, split horizontally.
        padded_ratio = math.ceil(1 / aspect / slice_ratio) * slice_ratio
        canvas = np.zeros((h, padded_ratio * h))
        canvas[:h, :w] = similarity_map
        pieces = np.hsplit(canvas, int(padded_ratio / slice_ratio))
        offsets = range(0, padded_ratio * h, h * slice_ratio)
        for piece, off in zip(pieces, offsets):
            if np.count_nonzero(piece) / h > nonzero_thresh:
                patches.append(piece)
                origins.append([off, 0])
    return patches, origins
def tn(sims: np.ndarray,
       tn_max_step: int = 10, tn_top_k: int = 5, max_path: int = 10,
       min_sim: float = 0.2, min_length: int = 5, max_iou: float = 0.3) -> Tuple[List[List[int]], List[float]]:
    """
    TN method for video temporal alignment.
    Reimplemented paper:
    {Tan H K, Ngo C W, Hong R, et al. Scalable detection of partial near-duplicate videos by visual-temporal consistency
     [C]//Proceedings of the 17th ACM international conference on Multimedia. 2009: 145-154.}
    Parameters
    ----------
    sims: input similarity map computed from a copied video pair.
    tn_max_step: max step range in TN.
    tn_top_k: Top k frame similarity selection in TN.
    max_path: max loop for multiply segments detection.
    min_sim: min average similarity score for each aligned segment.
    min_length: min segment length.
    max_iou: max iou for filtering overlap segments (bbox).
    Returns
    -------
    (boxes, scores): boxes is a list of temporal aligned copied segments,
    [query_min, ref_min, query_max, ref_max] for each segment; scores holds
    the average similarity (path score / mean segment length) of each box.
    """
    infringe_box_list = []
    infringe_score_list = []
    path = 0
    # Node 0 is a synthetic source; every other node is a (query_frame, ref_frame) pair.
    node_pair2id = {}
    node_pair2id[(-1, -1)] = 0

    node_id2pair = {}
    node_id2pair[0] = (-1, -1)  # source

    node_num = 1

    DG = nx.DiGraph()
    DG.add_node(0)

    # get top-k values and indices, shape (Q_LEN, top_k)
    top = min(tn_top_k, sims.shape[1])

    topk_indices = np.argsort(-sims)[:, :top]
    topk_sims = np.take_along_axis(sims, topk_indices, axis=-1)

    # add nodes: one per (query frame, top-k reference frame) candidate match
    for qf_idx in range(sims.shape[0]):
        for k in range(top):
            rf_idx = topk_indices[qf_idx][k]

            node_id2pair[node_num] = (qf_idx, rf_idx)
            node_pair2id[(qf_idx, rf_idx)] = node_num

            DG.add_node(node_num)
            node_num += 1

    # create graph by adding edges (temporal-consistency constraints C1-C4)
    for q_i in range(sims.shape[0]):
        r_i = topk_indices[q_i]

        intermediate_rs = np.empty((0,), dtype=np.int32)
        # implements Constraints C1 by limiting range end
        for q_j in range(q_i + 1, min(sims.shape[0], q_i + tn_max_step)):
            r_j = topk_indices[q_j]  # shape (top_k, )

            r_diff = r_j[:, None] - r_i  # dst - src, shape (top_k, top_k)

            # Constraints C2
            C2 = (r_diff > 0) & (r_diff < tn_max_step)

            # Constraints C3
            if len(intermediate_rs) == 0:
                C3 = np.ones(C2.shape, dtype=np.bool_)
            else:
                # "the equal sign" in C3 in paper is wrong because it's contradictory to C2
                cond1 = intermediate_rs[None, :] > r_i[:, None]
                cond2 = intermediate_rs[None, :] < r_j[:, None]
                C3 = np.sum(cond2[:, None, :] & cond1, axis=-1) == 0

            # Constraints C4
            s_j = topk_sims[q_j]  # shape (top_k, )
            s_j = np.repeat(s_j.reshape(-1, 1), r_diff.shape[1], axis=1)  # shape (top_k, top_k)
            C4 = s_j >= min_sim

            val_rows, val_cols = np.where(C2 & C3 & C4)
            val_sims = s_j[val_rows, val_cols]
            # update intermediate_rs
            valid_r_j = r_j[val_rows]
            intermediate_rs = np.unique(np.concatenate([intermediate_rs, valid_r_j]))

            # Edge weight is the destination frame-pair similarity.
            edges = [(node_pair2id[(q_i, r_i[c])], node_pair2id[(q_j, r_j[r])], dict(weight=s))
                     for c, r, s in zip(val_cols, val_rows, val_sims)]

            DG.add_edges_from(edges)

    # link sink node (the last-added node acts as sink) with zero-weight edges
    for i in range(0, node_num - 1):
        j = node_num - 1

        pair_i = node_id2pair[i]
        pair_j = node_id2pair[j]

        if (pair_j[0] > pair_i[0] and pair_j[1] > pair_i[1] and
                pair_j[0] - pair_i[0] <= tn_max_step and pair_j[1] - pair_i[1] <= tn_max_step):
            DG.add_edge(i, j, weight=0)

    # Repeatedly extract the max-weight path, zero out its edges, and keep the
    # segment if it is long, similar, and not overlapping a previous detection.
    while True:
        if path > max_path:
            break
        longest_path = dag_longest_path(DG)
        # Zero the used edges so the next iteration finds a different segment.
        for i in range(1, len(longest_path)):
            DG.add_edge(longest_path[i - 1], longest_path[i], weight=0.0)
        if 0 in longest_path:
            longest_path.remove(0)  # remove source node
        if node_num - 1 in longest_path:
            longest_path.remove(node_num - 1)  # remove sink node
        path_query = [node_id2pair[node_id][0] for node_id in longest_path]
        path_refer = [node_id2pair[node_id][1] for node_id in longest_path]

        if len(path_query) == 0:
            break
        # Total similarity accumulated along the matched frame pairs.
        score = 0.0
        for (qf_idx, rf_idx) in zip(path_query, path_refer):
            score += sims[qf_idx][rf_idx]
        if score > 0:
            query_min, query_max = min(path_query), max(path_query)
            refer_min, refer_max = min(path_refer), max(path_refer)
        else:
            query_min, query_max = 0, 0
            refer_min, refer_max = 0, 0

        ave_length = (refer_max - refer_min + query_max - query_min) / 2
        # Overlap with already-accepted segments (iou returns 0-d zero when list is empty).
        ious = iou(np.expand_dims(np.array([query_min, refer_min, query_max, refer_max]), axis=0),
                   np.array(infringe_box_list))
        if ave_length > 0 and score / ave_length > min_sim and min(refer_max - refer_min,
                                                                   query_max - query_min) >= min_length and ious.max() < max_iou:
            infringe_box_list.append([int(query_min), int(refer_min), int(query_max), int(refer_max)])
            infringe_score_list.append(score / ave_length)
        path += 1
    return infringe_box_list, infringe_score_list

class TemporalNetwork:
    """
    Callable wrapper around the TN temporal-alignment algorithm (`tn`).

    Computes a similarity map between query and reference frame features
    (cosine similarity when ``normalize_input`` is True), then extracts
    temporally aligned copied segments from it.
    """

    def __init__(self,
                 tn_max_step: int = 10, tn_top_k: int = 20, max_path: int = 10,
                 min_sim: float = 0.9, min_length: int = 5, max_iou: float = 0.3):
        # See `tn` for the meaning of each threshold; defaults here are stricter
        # (tn_top_k=20, min_sim=0.9) than `tn`'s own defaults.
        super().__init__()
        self._tn_max_step = tn_max_step
        self._tn_top_k = tn_top_k
        self._max_path = max_path
        self._min_sim = min_sim
        self._min_length = min_length
        self._max_iou = max_iou

    def __call__(self, q: np.ndarray, r: np.ndarray, normalize_input=True, acc=False) -> Tuple[np.ndarray, np.ndarray]:
        """
        Align query features ``q`` (n, d) against reference features ``r`` (m, d).

        normalize_input: L2-normalize rows so the dot product becomes cosine
            similarity. NOTE(review): a zero-norm row would produce NaNs here —
            confirm inputs are non-degenerate.
        acc: currently unused — only consumed by the commented-out `tn_acc`
            call below.

        Returns (ranges, scores): ranges is an array of
        [query_min, ref_min, query_max, ref_max] segments, scores the matching
        average-similarity score per segment.
        """
        if normalize_input:
            q = q / np.linalg.norm(q, axis=1, keepdims=True)
            r = r / np.linalg.norm(r, axis=1, keepdims=True)
        sim_map = np.dot(q, r.T)
        ranges, scores = tn(sim_map, self._tn_max_step, self._tn_top_k, self._max_path, self._min_sim, self._min_length,
                            self._max_iou)
        # Alternative entry point using the acc-refined variant:
        # ranges, scores = tn_acc(sim_map, self._tn_max_step, self._tn_top_k, self._max_path, self._min_sim, self._min_length,
        #                     self._max_iou, acc)
        return np.array(ranges), np.array(scores)
