import tensorflow as tf
from tensorflow.python.keras.layers import Layer


class BPRLossWithinSample(Layer):
    """BPR (Bayesian Personalized Ranking) loss over a fixed user-item graph.

    One negative item is sampled per positive interaction at construction
    time (not per batch), so the positive/negative index pairs are constant
    across training.

    Args:
        graph_ui: user-item interaction matrix (users x items) with 0/1
            entries; a ``tf.Tensor`` or anything convertible to one.
        lamb: L2 regularization coefficient applied to the user/item
            embedding outputs.
    """

    def __init__(self, graph_ui, lamb=0.001, **kwargs):
        # Keras convention: accept `input_dim` as an alias for a 1-D `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(BPRLossWithinSample, self).__init__(**kwargs)
        self.graph = graph_ui if isinstance(graph_ui, tf.Tensor) else tf.constant(graph_ui, dtype='float32')
        # Negatives are sampled once here; the sampled graph is frozen as a constant.
        self.neg_graph = tf.constant(self.sample_neg(graph_ui=self.graph), dtype='float32')
        self.lamb = lamb

    @staticmethod
    def sample_neg(graph_ui):
        """For each user, sample as many negative (non-interacted) items as
        the user has positives.

        Args:
            graph_ui: array-like 0/1 user-item interaction matrix.

        Returns:
            A 0/1 int ndarray of the same shape marking the sampled negatives.

        Raises:
            ValueError: if a user has more positives than available negatives
                (``random.sample`` cannot draw that many without replacement).
        """
        import numpy as np
        import random
        # BUG FIX: `np.int` was removed in NumPy 1.24 — use the builtin `int`.
        graph_ui = np.array(graph_ui, dtype=int)
        neg_graph_ui = np.ones(graph_ui.shape, dtype=int) - graph_ui
        count = np.sum(graph_ui, axis=1)
        new_neg_graph_ui = np.zeros(graph_ui.shape, dtype=int)
        for i, u in enumerate(neg_graph_ui):
            # ravel() so we sample plain column indices rather than the
            # (1,)-shaped rows that np.argwhere returns.
            candidates = np.argwhere(u > 0).ravel()
            # BUG FIX: cast the NumPy scalar to a builtin int for random.sample's k.
            sampled = random.sample(list(candidates), int(count[i]))
            new_neg_graph_ui[i, sampled] = 1
        return new_neg_graph_ui

    def call(self, inputs, **kwargs):
        """Compute the scalar BPR loss for a batch.

        Args:
            inputs: tuple ``(user_out, item_out, rating)`` — user/item
                embedding outputs (used only for L2 regularization; assumed
                rank 3, batch-first — TODO confirm against caller) and the
                per-batch user-item rating matrix that is indexed by the
                positive/negative graphs.

        Returns:
            Scalar mean loss over the batch.
        """
        user_out, item_out, rating = inputs

        batch = tf.shape(user_out)[0]

        regularize = self.lamb * (tf.square(tf.norm(user_out, axis=(1, 2))) +
                                  tf.square(tf.norm(item_out, axis=(1, 2))))
        regularize = tf.expand_dims(regularize, 1)
        # Indices of the (fixed) positive and sampled negative interactions,
        # tiled across the batch so gather_nd can use batch_dims=1.
        pos_index = tf.where(self.graph > 0)
        pos_index = tf.expand_dims(pos_index, 0)
        pos_index = tf.repeat(pos_index, batch, axis=0)
        neg_index = tf.where(self.neg_graph > 0)
        neg_index = tf.expand_dims(neg_index, 0)
        neg_index = tf.repeat(neg_index, batch, axis=0)
        pos = tf.gather_nd(rating, pos_index, batch_dims=1)
        neg = tf.gather_nd(rating, neg_index, batch_dims=1)
        # BUG FIX: -log(sigmoid(x)) overflows to inf when x is very negative;
        # softplus(-x) is the same quantity computed stably.
        loss = tf.math.softplus(neg - pos)
        loss = tf.reduce_sum(loss, axis=1) + regularize
        loss = tf.reduce_mean(loss)
        return loss

    def get_config(self):
        """Return the layer config for serialization."""
        config = {
            # BUG FIX: the ctor stores the graph as `self.graph`;
            # `self.graph_ui` never existed and raised AttributeError here.
            'graph_ui': self.graph.numpy(),
            'lamb': self.lamb
        }
        base_config = super(BPRLossWithinSample, self).get_config()
        config.update(base_config)
        return config


class BPRLossWithoutSample(Layer):
    """BPR loss layer that takes pre-sampled positive/negative item ids.

    Unlike ``BPRLossWithinSample``, negatives are supplied by the caller per
    call rather than sampled once at construction.

    Args:
        lamb: L2 regularization coefficient.
    """

    def __init__(self, lamb=0.001, **kwargs):
        # Keras convention: accept `input_dim` as an alias for a 1-D `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(BPRLossWithoutSample, self).__init__(**kwargs)
        self.lamb = lamb

    def call(self, inputs, **kwargs):
        """Compute the scalar BPR loss.

        Args:
            inputs: tuple ``(user_out, item_out, pos_items, neg_items)``.
                Each carries a leading singleton/batch axis that is stripped
                with ``[0]`` — presumably batch size 1; verify against caller.
                ``pos_items``/``neg_items`` are item-id index vectors aligned
                with the rows of ``user_out``.

        Returns:
            Scalar loss: sum of pairwise BPR terms plus L2 regularization.
        """
        user_out, item_out, pos_items, neg_items = inputs
        user_out = user_out[0]
        item_out = item_out[0]
        pos_items = pos_items[0]
        neg_items = neg_items[0]

        pos_item_embeddings = tf.nn.embedding_lookup(item_out, pos_items)
        neg_item_embeddings = tf.nn.embedding_lookup(item_out, neg_items)

        regularize = tf.nn.l2_loss(user_out) + tf.nn.l2_loss(pos_item_embeddings) + tf.nn.l2_loss(neg_item_embeddings)

        # Per-pair preference scores: dot product of user and item embeddings.
        pos = tf.reduce_sum(tf.multiply(user_out, pos_item_embeddings), axis=1)
        neg = tf.reduce_sum(tf.multiply(user_out, neg_item_embeddings), axis=1)
        # BUG FIX: -log(sigmoid(x)) overflows to inf when x is very negative;
        # softplus(-x) is the same quantity computed stably.
        loss = tf.math.softplus(neg - pos)
        # l2_loss already includes the 1/2 factor, hence the explicit 2 *.
        loss = tf.reduce_sum(loss) + 2 * self.lamb * regularize
        return loss

    def get_config(self):
        """Return the layer config for serialization."""
        config = {
            'lamb': self.lamb
        }
        base_config = super(BPRLossWithoutSample, self).get_config()
        config.update(base_config)
        return config
