import attention
import config
from var_cnn_rnn_attention import CRAttentionModel


class CRAttentionCNNModel(CRAttentionModel):
    """CNN -> RNN -> CNN-style attention variant of :class:`CRAttentionModel`.

    Reuses the parent's ``cnn``, ``rnn`` and ``fc`` sub-networks but swaps the
    attention layer for ``attention.attention_cnn``.
    """

    def model(self, inputs, seq_lens, k=5):
        """Run the full forward pass.

        Args:
            inputs: Batch of input sequences (batch-major; exact shape is
                determined by the parent class's ``cnn`` — TODO confirm).
            seq_lens: Per-example sequence lengths; updated by each stage as
                the temporal resolution changes.
            k: Window size passed to ``attention.attention_cnn``. Defaults to
                5, the value previously hard-coded here.

        Returns:
            Tuple of ``(logits, alphas)`` where ``logits`` is the output of
            the fully-connected head and ``alphas`` are the attention weights.
        """
        # Convolutional front-end may downsample time, so it returns
        # adjusted sequence lengths alongside its features.
        h_cnn, seq_lens = self.cnn(inputs, seq_lens)
        h_rnn, seq_lens = self.rnn(h_cnn, seq_lens)
        # time_major=False: h_rnn is batch-major, matching the CNN output.
        h_attention, alphas = attention.attention_cnn(h_rnn, config.attention_hidden_size, seq_lens,
                                                      k=k,
                                                      time_major=False)
        logits = self.fc(h_attention)
        return logits, alphas
