#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) Coder.AN. All rights reserved.

import torch
import torch.nn as nn

from .darknet import CSPDarknet
from .network_blocks import BaseConv, CSPLayer, DWConv


class GRC(nn.Module):
    """
    Gate Recurrent Convolution.

    A convolutional GRU cell: each forward() call blends the current input
    with a hidden state carried over from the previous call, using 1x1
    convolutions for the reset (r), update (z), and candidate (h) gates.

    NOTE(review): the hidden state lives on the module (``self.last_h``) and
    is detached after every step, so gradients never flow across time steps
    (truncated BPTT of length 1). The state is re-zeroed whenever the batch
    size or spatial size changes.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.out_channels = out_channels

        # Reset gate. The x-branch convs carry a bias; the h-branch convs do
        # not — one bias per gate is sufficient since the branches are summed.
        self.r_conv_x = BaseConv(in_channels, out_channels, 1, 1, act='lrelu', bias=True)
        # BUGFIX: the hidden state has out_channels channels, so the h-branch
        # convs must consume out_channels (the original used in_channels here
        # and for z_conv_h, which crashes whenever in_channels != out_channels;
        # h_conv_h below already used the correct value).
        self.r_conv_h = BaseConv(out_channels, out_channels, 1, 1, act='lrelu')
        self.r_sigmoid = nn.Sigmoid()

        # Update gate.
        self.z_conv_x = BaseConv(in_channels, out_channels, 1, 1, act='lrelu', bias=True)
        self.z_conv_h = BaseConv(out_channels, out_channels, 1, 1, act='lrelu')
        self.z_sigmoid = nn.Sigmoid()

        # Candidate hidden state.
        self.h_conv_x = BaseConv(in_channels, out_channels, 1, 1, act='lrelu', bias=True)
        self.h_conv_h = BaseConv(out_channels, out_channels, 1, 1, act='lrelu')
        self.h_tanh = nn.Tanh()

        # Hidden state carried across forward() calls; lazily (re)initialized.
        self.last_h = None

    def forward(self, input):
        """Run one recurrent step.

        Args:
            input: tensor of shape (B, in_channels, H, W).

        Returns:
            Tensor of shape (B, out_channels, H, W): the new hidden state.
        """
        # (Re)initialize the hidden state when it is missing or its batch /
        # spatial dims no longer match the input (e.g. the last, smaller batch
        # of an epoch, or a resolution change).
        expected = (input.shape[0], self.out_channels, input.shape[2], input.shape[3])
        if self.last_h is None or tuple(self.last_h.shape) != expected:
            # Match the input's dtype (the original hard-coded float32, which
            # breaks under mixed-precision / half-precision inputs).
            self.last_h = torch.zeros(expected, dtype=input.dtype, device=input.device)

        r_t = self.r_sigmoid(self.r_conv_x(input) + self.r_conv_h(self.last_h))     # reset gate
        z_t = self.z_sigmoid(self.z_conv_x(input) + self.z_conv_h(self.last_h))     # update gate
        h_t = self.h_tanh(self.h_conv_x(input) + self.h_conv_h(r_t * self.last_h))  # candidate state
        # Convex combination of previous state and candidate (GRU update rule).
        output = z_t * self.last_h + (1 - z_t) * h_t
        # Detach so the stored state does not extend the autograd graph.
        self.last_h = output.detach()

        return output
