# -*- coding: utf-8 -*-


import tensorflow as tf
from tensorflow.python.ops import partitioned_variables
from tensorflow.core.framework.embedding import config_pb2


# Valid per-slot pooling strategies. "mean"/"sqrtn"/"sum" are combiners for
# sparse embedding lookups; "not" means: skip pooling and return the raw
# per-key embeddings instead.
POOLING_STRATEGY = ["mean", "sqrtn", "sum", "not"]

class VEmbeddingVariable(object):
    """Lightweight pair bundling an embedding tensor with the key tensor it was looked up from."""

    def __init__(self, embedding, key=None) -> None:
        # Keep both around so downstream code can map embeddings back to keys.
        self.key = key
        self.embedding = embedding


class EmbeddingLayer(object):
    """Embedding layer backed by a single shared embedding table.

    All slots share one DeepRec embedding variable (``total_table``). For each
    slot, keys are looked up in the shared table and either pooled with a
    combiner ("mean"/"sqrtn"/"sum") or returned unpooled ("not").
    """

    def __init__(self, slot_ids, partitioner=None, embedding_size=16, step_evict=100, filter_freq=3, pooling_strategy=None, name="EmbeddingLayer") -> None:
        """Build the shared embedding table and the per-slot strategy map.

        Args:
            slot_ids: iterable of slot identifiers; every slot gets an entry
                in the pooling-strategy map.
            partitioner: optional variable partitioner, applied to both the
                variable scope and the embedding table.
            embedding_size: dimensionality of each embedding vector.
            step_evict: if truthy, a GlobalStepEvict option is built with this
                steps-to-live value (currently NOT applied -- see NOTE below).
            filter_freq: if truthy, a CounterFilter option is built with this
                frequency threshold (currently NOT applied -- see NOTE below).
            pooling_strategy: optional dict mapping slot id -> a value from
                POOLING_STRATEGY; slots not listed default to "mean".
            name: variable-scope name for this layer.

        Raises:
            ValueError: if any supplied pooling strategy is not a member of
                POOLING_STRATEGY.
        """
        self._layer_name = name
        self._partitioner = partitioner
        if pooling_strategy is None:
            pooling_strategy = {}
        # Validate eagerly so a bad strategy fails at construction time.
        # (Was an `assert`, which is silently stripped under `python -O`.)
        for v in pooling_strategy.values():
            if v not in POOLING_STRATEGY:
                raise ValueError("{} not in POOLING_STRATEGY".format(v))
        self._slot_ids = slot_ids
        self._table_name = "total_table"
        self._emb_size = embedding_size
        # Every slot gets an explicit strategy; unspecified slots pool by "mean".
        self._pooling_strategy = {
            slotid: pooling_strategy.get(slotid, "mean") for slotid in self._slot_ids
        }

        with self.getVarScope():
            # NOTE(review): the three options below are constructed but never
            # passed to get_embedding_variable -- the `ev_option=ev_opt` kwarg
            # is commented out (and `ev_opt` was never defined), so the
            # eviction / counter-filter / default-value settings are silently
            # ignored. Wire them through tf.EmbeddingVariableOption once the
            # intended behavior is confirmed.
            evict_opt = tf.GlobalStepEvict(steps_to_live=step_evict) if step_evict else None
            filter_option = tf.CounterFilter(filter_freq=filter_freq) if filter_freq else None
            init_opt = tf.InitializerOption(initializer=tf.glorot_uniform_initializer, default_value_dim=10000)
            self._table = tf.get_embedding_variable(self._table_name,
                                                    key_dtype=tf.dtypes.int64,
                                                    embedding_dim=self._emb_size,
                                                    initializer=tf.glorot_uniform_initializer,
                                                    # ev_option=ev_opt,
                                                    partitioner=self._partitioner
                                                    )

    def getVarScope(self):
        """Return the layer's reusable (AUTO_REUSE) variable scope."""
        return tf.variable_scope(self._layer_name, partitioner=self._partitioner, reuse=tf.AUTO_REUSE)

    def getEmbs(self, feature_keys):
        """Return (pooled_embs, unpooled_embs), both dicts keyed by slot id."""
        with self.getVarScope():
            return self.getPoolingEmbs(feature_keys), self.getNotPoolingEmbs(feature_keys)

    def getNotPoolingEmbs(self, feature_keys):
        """Look up raw (unpooled) embeddings for slots whose strategy is "not".

        Args:
            feature_keys: dict mapping slot id -> key tensor. Assumed to be a
                SparseTensor since `.values` is read below -- verify against
                the caller.

        Returns:
            dict of slot id -> VEmbeddingVariable for the "not" slots only.
        """
        raw_embs = {}
        for slotid in self._slot_ids:
            if self._pooling_strategy[slotid] == "not":
                key = tf.cast(feature_keys[slotid], tf.int64)
                emb = tf.nn.embedding_lookup(self._table, key.values)
                raw_embs[slotid] = VEmbeddingVariable(embedding=emb, key=key)
        return raw_embs

    def getPoolingEmbs(self, feature_keys):
        """Look up pooled embeddings for slots whose strategy is a combiner.

        Args:
            feature_keys: dict mapping slot id -> sparse key tensor.

        Returns:
            dict of slot id -> VEmbeddingVariable for every slot whose
            strategy is "mean", "sqrtn" or "sum".
        """
        pooled_embs = {}
        for slotid in self._slot_ids:
            # Strategies are validated at construction, so anything that is
            # not "not" is a valid combiner ("mean"/"sqrtn"/"sum").
            if self._pooling_strategy[slotid] != "not":
                key = tf.cast(feature_keys[slotid], tf.int64)
                emb = tf.nn.safe_embedding_lookup_sparse(self._table, key, sparse_weights=None, partition_strategy="mod", combiner=self._pooling_strategy[slotid])
                pooled_embs[slotid] = VEmbeddingVariable(embedding=emb, key=key)
        return pooled_embs



