import torch.nn as nn
from torch.nn import functional as F

from models.inception_resnet import InceptionResnetV1


class FaceNet(nn.Module):
    """FaceNet-style embedding network.

    Pipeline: InceptionResnetV1 backbone -> flatten -> linear bottleneck
    -> BatchNorm1d -> L2 normalization, producing unit-length embeddings.
    In "train" mode an additional linear classifier head is attached for
    softmax-based training.

    Args:
        flat_shape: Flattened feature size coming out of the backbone
            (default 1792, matching InceptionResnetV1's final channels).
        embedding_size: Dimensionality of the output embedding.
        num_classes: Number of identity classes; required when
            ``mode == "train"``.
        mode: ``"train"`` to attach the classifier head, anything else
            for embedding-only inference.

    Raises:
        ValueError: If ``mode == "train"`` but ``num_classes`` is None.
    """

    def __init__(self, flat_shape=1792, embedding_size=128, num_classes=None, mode="train"):
        super(FaceNet, self).__init__()
        if mode == "train" and num_classes is None:
            # Fail fast with a clear message instead of an opaque
            # TypeError from nn.Linear below.
            raise ValueError("num_classes must be provided when mode='train'")
        self.backbone = InceptionResnetV1()
        self.bottleneck = nn.Linear(flat_shape, embedding_size, bias=False)
        self.last_bn = nn.BatchNorm1d(embedding_size, eps=0.001, momentum=0.1, affine=True)
        if mode == "train":
            self.classifier = nn.Linear(embedding_size, num_classes)

    def forward(self, x):
        """Return the L2-normalized embedding for input batch ``x``."""
        # Delegate to forward_feature so the two paths cannot drift apart;
        # only the normalized embedding is returned here.
        _, embedding = self.forward_feature(x)
        return embedding

    def forward_feature(self, x):
        """Return ``(pre_norm, embedding)``.

        ``pre_norm`` is the BatchNorm output before L2 normalization
        (useful for losses computed on unnormalized features);
        ``embedding`` is the unit-length L2-normalized version.
        """
        x = self.backbone(x)
        x = x.view(x.size(0), -1)
        x = self.bottleneck(x)
        before_normalize = self.last_bn(x)
        x = F.normalize(before_normalize, p=2, dim=1)
        return before_normalize, x

    def forward_classifier(self, x):
        """Apply the training-mode classifier head to embeddings ``x``.

        Only valid when the model was constructed with ``mode="train"``.
        """
        x = self.classifier(x)
        return x

    def weights_init(self, init_type='normal', init_gain=0.02):
        """Initialize conv and BatchNorm2d weights in-place.

        Args:
            init_type: One of 'normal', 'xavier', 'kaiming', 'orthogonal'.
            init_gain: Scale factor for 'normal', 'xavier' and 'orthogonal'.

        Raises:
            NotImplementedError: For an unrecognized ``init_type``.
        """
        def init_func(m):
            class_name = m.__class__.__name__
            if hasattr(m, 'weight') and class_name.find('Conv') != -1:
                if init_type == 'normal':
                    nn.init.normal_(m.weight.data, 0.0, init_gain)
                elif init_type == 'xavier':
                    nn.init.xavier_normal_(m.weight.data, gain=init_gain)
                elif init_type == 'kaiming':
                    nn.init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
                elif init_type == 'orthogonal':
                    nn.init.orthogonal_(m.weight.data, gain=init_gain)
                else:
                    raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            # NOTE(review): this matches only 'BatchNorm2d', so the model's
            # own BatchNorm1d (last_bn) keeps PyTorch's default init.
            # Possibly intentional (backbone-only) — confirm before widening
            # the match to all BatchNorm variants.
            elif class_name.find('BatchNorm2d') != -1:
                nn.init.normal_(m.weight.data, 1.0, 0.02)
                nn.init.constant_(m.bias.data, 0.0)

        print(f"initialize network with {init_type} type")
        self.apply(init_func)
