import difflib
import re
from typing import Iterable, List, Set, Tuple


def tokenize(s: str) -> List[str]:
    """Split *s* into a lossless sequence of tokens.

    Tokens are runs of word characters, runs of newlines/spaces/tabs, or
    single remaining characters (punctuation, and any other whitespace such
    as ``\r``). The final class is ``[^\\w \\t\\n]`` rather than ``[^\\w\\s]``
    so that characters like ``\r`` are captured too — this guarantees
    ``"".join(tokenize(s)) == s``, which callers rely on when re-joining
    tokens back into text.
    """
    return re.findall(r"\w+|\n+| +|\t+|[^\w \t\n]", s)


def matched_blocks(blocksA: Iterable[str], blocksB: Iterable[str]):
    """Greedily pair each block of *blocksA* with its most similar block of *blocksB*.

    Every (A, B) candidate pair is scored with difflib's similarity ratio,
    then pairs are accepted in descending-ratio order, each block text being
    used at most once per side. Returns a list of ``(a_text, b_text)``
    tuples, where the texts are re-joined from their tokens.
    """
    tokens_a = [tokenize(block) for block in blocksA]
    tokens_b = [tokenize(block) for block in blocksB]

    scored = [
        (ta, tb, difflib.SequenceMatcher(None, ta, tb).ratio())
        for ta in tokens_a
        for tb in tokens_b
    ]
    scored.sort(key=lambda item: item[2], reverse=True)

    used_a: Set[str] = set()
    used_b: Set[str] = set()
    pairs = []
    for ta, tb, _score in scored:
        text_a = "".join(ta)
        text_b = "".join(tb)
        if text_a in used_a or text_b in used_b:
            continue
        used_a.add(text_a)
        used_b.add(text_b)
        pairs.append((text_a, text_b))
    return pairs

def index_matched_blocks(blocksA: Iterable[str], blocksB: Iterable[str], threshold: float = 0.6) -> List[Tuple[int, int]]:
    """Greedily pair blocks of *blocksA* with blocks of *blocksB* by similarity.

    All ``(a_i, b_i)`` candidate pairs are scored with difflib's similarity
    ratio and accepted best-first; each index on either side is used at most
    once, and only pairs whose ratio is strictly greater than *threshold*
    are kept.

    Returns a list of ``(index_in_A, index_in_B)`` tuples.
    """
    A_tokens = list(map(tokenize, blocksA))
    B_tokens = list(map(tokenize, blocksB))
    ratios: List[Tuple[int, int, float]] = []
    for a_i, a in enumerate(A_tokens):
        for b_i, b in enumerate(B_tokens):
            ratios.append((a_i, b_i, difflib.SequenceMatcher(None, a, b).ratio()))
    # Match on indices, not on joined text: with text-based bookkeeping two
    # blocks that happen to have identical content would conflate, and the
    # second one could never receive a match of its own.
    matched_A: Set[int] = set()
    matched_B: Set[int] = set()
    match: List[Tuple[int, int]] = []
    for a_i, b_i, ratio in sorted(ratios, key=lambda x: x[-1], reverse=True):
        if ratio <= threshold:
            # Ratios are sorted descending; nothing later can pass.
            break
        if a_i in matched_A or b_i in matched_B:
            continue
        matched_A.add(a_i)
        matched_B.add(b_i)
        match.append((a_i, b_i))
    return match


def matched_block(to_match: str, candidates: Iterable[str]):
    """Return the candidate most similar to *to_match*, or ``None``.

    The target and every candidate are tokenized before comparison;
    ``difflib.get_close_matches`` (with its default 0.6 cutoff) picks the
    best candidate, which is returned re-joined from its tokens. ``None``
    is returned when no candidate clears the cutoff.
    """
    target_tokens = tokenize(to_match)
    candidate_tokens = (tokenize(candidate) for candidate in candidates)
    best = difflib.get_close_matches(target_tokens, candidate_tokens, n=1)
    return "".join(best[0]) if best else None


def matched_text(original: str, replacement: str, possible_lines: Set[int]):
    """Locate the span of *original* that *replacement* most closely matches.

    For every candidate line number a window of surrounding lines — sized
    proportionally to the replacement — is cut out of *original*. The window
    closest to *replacement* (per ``matched_block``) is then trimmed at the
    token level so the returned text starts and ends where the replacement's
    content aligns. Returns the trimmed text, or ``None`` when no window is
    similar enough.
    """
    source_lines = original.splitlines()
    total_lines = len(source_lines)
    # Window radius: a little wider than the replacement itself.
    radius = int(len(replacement.splitlines()) * 1.2) - 1

    windows = []
    for line_no in possible_lines:
        lo = max(0, line_no - radius)
        hi = min(total_lines, line_no + radius)
        windows.append("\n".join(source_lines[lo:hi]))

    best = matched_block(replacement, windows)
    if best is None:
        return None

    best_tokens = tokenize(best)
    repl_tokens = tokenize(replacement)
    opcodes = difflib.SequenceMatcher(None, repl_tokens, best_tokens).get_opcodes()

    # Trim leading/trailing window tokens that correspond to no replacement
    # content: take the first and last opcodes that consume at least one
    # replacement token (i1 < i2) and slice the window between them.
    # NOTE(review): `end` defaults to -1 (drops the final token) when the
    # replacement has no tokens at all — preserved from the original code.
    start, end = 0, -1
    for _tag, i1, i2, j1, _j2 in opcodes:
        if i1 < i2:
            start = j1
            break
    for _tag, i1, i2, _j1, j2 in reversed(opcodes):
        if i1 < i2:
            end = j2
            break

    return "".join(best_tokens[start:end])
