from ..modules import *
from .base_model import BaseModel
import torch.nn as nn
from ..loss import CRF, CrossEntropy


class TokenClassification(BaseModel):
    """Token-level classifier: transformer encoder + linear head, trained
    with masked cross-entropy.

    Expects ``args`` to provide ``n_class`` (label count) and ``m_drop``
    (dropout rate for the linear head); the transformer is built by
    ``BaseModel`` in 'seq' mode.
    """

    def __init__(self, args):
        super().__init__(args, 'seq')
        # Project the encoder's hidden states down to per-token class scores.
        self.linear = Linear(self.transformer.config.hidden_size,
                             args.n_class,
                             args.m_drop)
        self.loss = CrossEntropy()

    def forward(self, input_ids, attention_mask, **kwargs):
        """Return per-token logits — presumably (batch, seq_len, n_class); verify against Linear."""
        hidden = self.forward_transformer(input_ids, attention_mask)
        return self.linear(hidden)

    def compute_loss(self, forward_out, batch):
        """Masked cross-entropy over the tokens flagged by the attention mask."""
        return self.loss(forward_out, batch['labels'], batch['attention_mask'])


class CRFTokenClassification(BaseModel):
    """Token-level classifier identical to ``TokenClassification`` except the
    training objective is a CRF over the label sequence instead of plain
    cross-entropy.

    ``args`` must supply ``n_class`` and ``m_drop``; the encoder is set up by
    ``BaseModel`` in 'seq' mode.
    """

    def __init__(self, args):
        super().__init__(args, 'seq')
        # Per-token emission scores feeding the CRF.
        self.linear = Linear(self.transformer.config.hidden_size,
                             args.n_class,
                             args.m_drop)
        self.loss = CRF(args.n_class)

    def forward(self, input_ids, attention_mask, **kwargs):
        """Return per-token emission logits for the CRF layer."""
        hidden = self.forward_transformer(input_ids, attention_mask)
        return self.linear(hidden)

    def compute_loss(self, forward_out, batch):
        """CRF negative log-likelihood, masked by the attention mask."""
        return self.loss(forward_out, batch['labels'], batch['attention_mask'])


class FlatCRFTokenClassification(nn.Module):
    """FLAT-style (character + lexicon word) token classifier with a CRF
    objective.

    Pipeline: ``FlatEmbedding`` fuses character- and word-level embeddings,
    ``FLatTransformer`` mixes them with relative-position attention, a linear
    head produces per-token emission scores, and ``CRF`` provides the loss.

    Args:
        args: config carrying ``model_path``, ``w2v_file``, ``model_class``,
            ``n_class`` and ``m_drop``.
        max_len: maximum sequence length for the FLAT transformer.
        hidden_size: width of the fused embedding / transformer
            (default 160 — NOTE(review): must match FlatEmbedding's output
            size; confirm).
        num_heads: attention head count of the FLAT transformer.
    """

    def __init__(self, args, max_len, hidden_size=160, num_heads=8):
        super().__init__()
        self.embedding = FlatEmbedding(args.model_path, args.w2v_file, args.model_class)
        self.flat = FLatTransformer(max_len=max_len, input_size=hidden_size, num_heads=num_heads)
        self.linear = Linear(hidden_size, args.n_class, args.m_drop)
        self.loss = CRF(args.n_class)

    def forward(self, input_ids, word_ids, attention_mask, word_mask, char_word_mask, start, end):
        """Return per-token emission logits over the fused char+word sequence."""
        char_word_vec = self.embedding(input_ids, word_ids, attention_mask, word_mask)
        logits = self.flat(char_word_vec, char_word_mask, start, end, attention_mask)
        logits = self.linear(logits)
        return logits

    def compute_loss(self, forward_out, batch):
        """CRF negative log-likelihood, masked by the attention mask.

        Bug fix: the original called ``self.head.crf(...)``, but no ``head``
        attribute is ever defined on this class — it would raise
        ``AttributeError``. The CRF loss is stored as ``self.loss`` in
        ``__init__`` (matching the sibling classes), so call that instead.
        """
        return self.loss(forward_out, batch['labels'], batch['attention_mask'])