import enum
from typing import Mapping

import rdflib
from transformers import PreTrainedTokenizer


class NodesVocab:
    """Bidirectional mapping between RDF identified nodes and dense indices.

    Indices are assigned in insertion order, starting at 0; adding the same
    node twice returns the original index.
    """

    def __init__(self):
        self._nodes_dict = {}  # node -> index
        self._nodes_list = []  # index -> node (insertion order)

    def add(self, node: rdflib.IdentifiedNode) -> int:
        """Register *node* if unseen and return its index (idempotent)."""
        try:
            return self._nodes_dict[node]
        except KeyError:
            new_index = len(self._nodes_list)
            self._nodes_dict[node] = new_index
            self._nodes_list.append(node)
            return new_index

    def get_index(self, node: rdflib.IdentifiedNode) -> int:
        """Return the index of a previously added node; KeyError if absent."""
        return self._nodes_dict[node]

    def get_node(self, index: int) -> rdflib.IdentifiedNode:
        """Return the node stored at *index*; IndexError if out of range."""
        return self._nodes_list[index]

    def __len__(self):
        # Dict and list always hold the same entries, so either length works.
        return len(self._nodes_dict)


class NodesTokenizer:
    """Encode RDF triplets into a single combined token-id space.

    The id space is laid out as three consecutive ranges:
      [0, len(text_tokenizer))                      -- plain text tokens
      [len(text_tokenizer), +len(extended specials)) -- specials not reused
      [above, +len(nodes_vocab))                     -- identified nodes

    Specials listed in ``reuse_specials`` are mapped onto existing
    text-tokenizer ids instead of getting new ones.
    """

    class SpecialToken(enum.Enum):
        BEGIN_OF_TRIPLET = enum.auto()
        END_OF_TRIPLET = enum.auto()
        BEGIN_OF_LITERAL = enum.auto()
        END_OF_LITERAL = enum.auto()

    def __init__(self,
        nodes_vocab: NodesVocab,
        text_tokenizer: PreTrainedTokenizer, *,
        reuse_specials: Mapping[SpecialToken, int] | None = None,
    ):
        self._nodes_vocab = nodes_vocab
        self._text_tokenizer = text_tokenizer

        # Specials explicitly mapped onto existing text-tokenizer ids.
        self._reused_specials_dict = {} if reuse_specials is None else dict(reuse_specials)
        # Remaining specials get fresh ids right after the text-token range,
        # in SpecialToken declaration order.
        self._extended_specials_list = [
            st for st in NodesTokenizer.SpecialToken
            if st not in self._reused_specials_dict
        ]
        self._extended_specials_dict = {
            st: i for i, st in enumerate(self._extended_specials_list)
        }

    def __len__(self):
        """Total size of the combined id space."""
        return (len(self._text_tokenizer)
                + len(self._extended_specials_list)
                + len(self._nodes_vocab))

    def encode_special(self, special: SpecialToken) -> list[int]:
        """Encode one special token as a single-element id list."""
        reused = self._reused_specials_dict.get(special)
        if reused is not None:
            return [reused]
        return [len(self._text_tokenizer) + self._extended_specials_dict[special]]

    def encode_node(self, node: rdflib.term.Node) -> list[int]:
        """Encode an identified node as one id, or a literal as a
        BEGIN_OF_LITERAL / text tokens / END_OF_LITERAL span.

        Raises TypeError for any other node kind.
        """
        if isinstance(node, rdflib.IdentifiedNode):
            offset = len(self._text_tokenizer) + len(self._extended_specials_list)
            return [offset + self._nodes_vocab.get_index(node)]
        if isinstance(node, rdflib.Literal):
            # NOTE(review): Literal.value is the Python-mapped value and is
            # None for datatypes without a mapping, which would encode the
            # string "None" -- confirm str(node) (lexical form) was not meant.
            begin = self.encode_special(NodesTokenizer.SpecialToken.BEGIN_OF_LITERAL)
            inner = self._text_tokenizer.encode(str(node.value))
            end = self.encode_special(NodesTokenizer.SpecialToken.END_OF_LITERAL)
            return begin + inner + end
        raise TypeError(f"Unexpected node: {node}")

    def encode_triplet(self, triplet: tuple[rdflib.term.Node, rdflib.term.Node, rdflib.term.Node]) -> list[int]:
        """Encode a (subject, predicate, object) triplet, wrapped in
        BEGIN_OF_TRIPLET / END_OF_TRIPLET delimiters."""
        body = [tok for item in triplet for tok in self.encode_node(item)]
        return (self.encode_special(NodesTokenizer.SpecialToken.BEGIN_OF_TRIPLET)
                + body
                + self.encode_special(NodesTokenizer.SpecialToken.END_OF_TRIPLET))
