import tensorflow as tf
import t3f
import numpy as np

def tt_t3f(inp,
       inp_modes,
       out_modes,
       mat_ranks,
       cores_initializer=tf.contrib.layers.xavier_initializer(uniform=False),
       cores_regularizer=None,
       biases_initializer=tf.zeros_initializer,
       biases_regularizer=None,
       trainable=True,
       cpu_variables=False,
       scope=None):
    """TT-layer (TT-matrix multiplication using t3f).

    Computes out = inp @ W (+ biases), where W is a trainable TT-matrix of
    shape [prod(inp_modes), prod(out_modes)] created through t3f.

    Args:
        inp: input tensor, float - [batch_size, prod(inp_modes)]
        inp_modes: input tensor modes (numpy int array)
        out_modes: output tensor modes (numpy int array)
        mat_ranks: TT-matrix ranks (numpy int array)
        cores_initializer: NOTE(review): currently unused — the TT-cores are
            always initialized with t3f's glorot initializer below. Kept in
            the signature for interface compatibility.
        cores_regularizer: regularizer function applied to the TT-cores
            variable. Per-core lists are not supported by t3f.get_variable
            and are ignored (matching the previous behavior of dropping them).
        biases_initializer: biases init function (if None then no biases
            will be used)
        biases_regularizer: biases regularizer function
        trainable: trainable variables flag, bool
        cpu_variables: if True, pin the layer's variables to /cpu:0
        scope: layer variable scope name, string

    Returns:
        out: output tensor, float - [batch_size, prod(out_modes)]
    """
    with tf.variable_scope(scope):
        # BUG FIX: `cpu_variables` was previously accepted but ignored.
        # tf.device(None) is a no-op, so behavior is unchanged by default.
        device = '/cpu:0' if cpu_variables else None
        with tf.device(device):
            # Glorot-scaled random TT-matrix with the requested TT-ranks.
            initializer = t3f.glorot_initializer(
                [inp_modes.tolist(), out_modes.tolist()],
                tt_rank=mat_ranks.tolist())

            # Only a single callable regularizer can be forwarded to t3f;
            # a list (allowed by the docstring) is dropped, as it was before.
            cores_reg = cores_regularizer if callable(cores_regularizer) else None

            # BUG FIX: forward `trainable` and the cores regularizer —
            # both were previously ignored for the TT-cores variable.
            tt_weights = t3f.get_variable('tt_weights',
                                          initializer=initializer,
                                          regularizer=cores_reg,
                                          trainable=trainable)

            # Renormalize the TT-cores for numerical stability before the
            # batched matrix multiply.
            out = t3f.matmul(inp, t3f.renormalize_tt_cores(tt_weights))

            if biases_initializer is not None:
                # BUG FIX: the original hard-coded trainable=True and
                # silently dropped `biases_regularizer`.
                biases = tf.get_variable('biases',
                                         [np.prod(out_modes)],
                                         initializer=biases_initializer,
                                         regularizer=biases_regularizer,
                                         trainable=trainable)
                out = tf.nn.bias_add(out, biases, name='out')

    return out
