"""
Authors:    Jingjing WU (吴京京) <https://github.com/wj-Mcat>

2020-now @ Jingjing Wu
Licensed under the Apache License, Version 2.0 (the 'License');
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an 'AS IS' BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import annotations
from torch import nn, Tensor
from transformers.modeling_outputs import BaseModelOutputWithPoolingAndCrossAttentions
from transformers.models.bert import BertModel, BertConfig
import pytorch_lightning as pl

from src.config import Config
from src.distance import compute_distance


class ProtoNet(pl.LightningModule):
    """Prototypical network for few-shot classification on top of a BERT encoder.

    Query sentences are embedded with BERT, and either classified directly
    (``forward``) or scored against class prototypes built from a support
    set (``compute_prototypes`` + ``inference``).
    """

    def __init__(self, config: Config) -> None:
        """Build the BERT encoder and the linear classification head.

        Args:
            config: project configuration; must provide ``sentence_embedding``
                ('cls' or 'pooling'), ``bert_name``, ``n_labels``, ``n_way``,
                ``k_shot`` and ``epsilon``.

        Raises:
            ValueError: if ``config.sentence_embedding`` is not supported.
        """
        super().__init__()

        sentence_embedding_methods = ['cls', 'pooling']
        if config.sentence_embedding not in sentence_embedding_methods:
            # BUG FIX: was `"".join(...)`, which rendered the message as
            # '<clspooling>' — join with a readable separator.
            raise ValueError(
                f'sentence embedding only support methods: <{", ".join(sentence_embedding_methods)}>'
            )

        bert_config = BertConfig.from_pretrained(config.bert_name)

        self.config: Config = config
        # NOTE(review): BertModel(bert_config) builds a randomly initialised
        # encoder — pretrained weights are NOT loaded here.  Confirm that is
        # intended; otherwise use BertModel.from_pretrained(config.bert_name).
        self.bert: BertModel = BertModel(bert_config)
        self.classifier = nn.Linear(bert_config.hidden_size, config.n_labels)

    def get_sentence_embedding(self, bert_output: BaseModelOutputWithPoolingAndCrossAttentions) -> Tensor:
        """Reduce a BERT output to one embedding vector per sentence.

        'cls'     -> last hidden state of the first ([CLS]) token
        'pooling' -> BERT's pooler output

        Returns:
            tensor of shape ``(batch, hidden_size)``.

        Raises:
            ValueError: if the configured method is not recognised.
        """
        if self.config.sentence_embedding == 'cls':
            last_hidden_states: Tensor = bert_output.last_hidden_state[:, 0, :]
            return last_hidden_states
        if self.config.sentence_embedding == 'pooling':
            pooling: Tensor = bert_output.pooler_output
            return pooling
        # __init__ validates the setting, but fail loudly instead of silently
        # returning None if the config was mutated after construction.
        raise ValueError(f'unsupported sentence embedding method: {self.config.sentence_embedding}')

    def compute_prototypes(self, support_set: Tensor) -> Tensor:
        """Average the k_shot support embeddings of each class into a prototype.

        Args:
            support_set: support-set embeddings, reshaped to
                ``(n_way, k_shot, -1)`` before averaging.

        Returns:
            class prototypes of shape ``(n_way, hidden_size)``.
        """
        support_set = support_set.reshape(self.config.n_way, self.config.k_shot, -1)
        return support_set.mean(dim=1)

    def forward(self, input_ids: Tensor, token_type_ids: Tensor, attention_mask: Tensor) -> Tensor:
        """Encode a batch of sentences and project them to label logits."""
        # BUG FIX: BertModel.forward's positional order is
        # (input_ids, attention_mask, token_type_ids); the original positional
        # call silently swapped token_type_ids and attention_mask.  Pass by
        # keyword so each tensor lands on the right parameter.
        bert_output = self.bert(
            input_ids=input_ids,
            token_type_ids=token_type_ids,
            attention_mask=attention_mask,
        )
        sentence_embedding = self.get_sentence_embedding(bert_output)

        # sanity check: one embedding vector per sentence
        assert len(sentence_embedding.shape) == 2

        output = self.classifier(sentence_embedding)
        return output

    def inference(self, input_ids: Tensor, token_type_ids: Tensor, attention_mask: Tensor, support_set_embedding: Tensor) -> Tensor:
        """
        inference with support set
        Args:
            input_ids: the input ids
            token_type_ids: token segment id
            attention_mask: token attention mask id
            support_set_embedding: support set embedding id

        Returns: the probability of n-way label_strings

        """
        # 1. compute distance by query & support set embedding
        # BUG FIX: same positional-argument swap as in `forward` — pass the
        # encoder inputs by keyword.
        bert_output: BaseModelOutputWithPoolingAndCrossAttentions = self.bert(
            input_ids=input_ids,
            token_type_ids=token_type_ids,
            attention_mask=attention_mask,
        )

        sentence_embedding = self.get_sentence_embedding(bert_output)

        distance: Tensor = compute_distance(sentence_embedding, support_set_embedding, self.config.epsilon)
        return distance

    def training_step(self, batch, batch_index: int) -> Tensor:
        """Lightning training hook — not implemented yet (returns None)."""
        # TODO: build prototypes from the support part of `batch`, score the
        # query part with `inference`, and return the episode loss.
        pass


