import tensorflow as tf
import time


class PsliteOptimizer(object):
    """Optimizer wrapper that synchronizes gradients/weights through a
    ps-lite parameter server using a custom TensorFlow op library.

    The library ``ps_push_pull_op.so`` must be present in the working
    directory and provide two kernels: ``ps_op`` (with ``op`` one of
    ``'Push'``, ``'Pull'``, ``'PushPull'``) and ``ps_my_rank``.
    """

    def __init__(self, optimizer):
        """Wrap ``optimizer``, which is used only to compute gradients.

        Args:
            optimizer: a ``tf.train.Optimizer`` instance.
        """
        self._optimizer = optimizer
        # Custom PS kernels; loading fails fast if the .so is missing.
        self._ops = tf.load_op_library('./ps_push_pull_op.so')

    def compute_gradients(self, *args, **kwargs):
        """Delegate gradient computation to the wrapped optimizer."""
        return self._optimizer.compute_gradients(*args, **kwargs)

    def apply_gradients(self, grads_and_vars, global_step=None, name=None):
        """Push each gradient to the PS and assign the aggregated result
        back into its variable.

        NOTE(review): ``global_step`` and ``name`` are accepted for
        Optimizer-API compatibility but are currently ignored — confirm
        whether the step counter should be incremented here.

        Args:
            grads_and_vars: iterable of (gradient, variable) pairs.

        Returns:
            A grouped op that performs all PushPull updates.
        """
        update_ops = []
        for grad, var in grads_and_vars:
            # 'PushPull' sends the local gradient and returns the
            # server-aggregated value, which replaces the variable.
            update_ops.append(tf.assign(var, self._ops.ps_op(var.name, grad, op='PushPull')))
        return tf.group(update_ops)

    def minimize(self, loss, global_step=None, name=None):
        """Compute gradients of ``loss`` and apply them via the PS."""
        grads_and_vars = self.compute_gradients(loss)
        return self.apply_gradients(grads_and_vars, global_step=global_step, name=name)

    def declare_all_trainable_variables(self):
        """Push the initial value of every trainable variable to the PS.

        Intended to be run by rank 0 to seed the server with weights.
        """
        declare_ops = [self._ops.ps_op(v.name, v, op='Push')
                       for v in tf.trainable_variables()]
        return tf.group(declare_ops)

    def pull_all_traiable_variable(self):
        """Pull every trainable variable from the PS and assign it locally.

        (The misspelled name is kept for backward compatibility with
        existing callers.)
        """
        # BUG FIX: the variable list was previously reset to [] right
        # after being fetched, so no pull ops were ever created and
        # non-rank-0 workers never received the initial weights.
        pull_ops = [tf.assign(v, self._ops.ps_op(v.name, op='Pull'))
                    for v in tf.trainable_variables()]
        return tf.group(pull_ops)

    def ps_init(self, sess):
        """Synchronize initial weights with the PS, retrying until ready.

        Rank 0 pushes its initial weights; every other rank pulls them.
        Retries once per second while the server raises ``DataLossError``.

        Args:
            sess: the ``tf.Session`` used to run the init op.
        """
        # Build the cond graph ONCE, outside the retry loop: only
        # sess.run() can raise DataLossError, and rebuilding the ops on
        # every retry would grow the graph without bound.
        init_op = tf.cond(tf.equal(self._ops.ps_my_rank(), 0),
                          self.declare_all_trainable_variables,
                          self.pull_all_traiable_variable)
        while True:
            try:
                sess.run(init_op)
            except tf.errors.DataLossError:
                tf.logging.log(tf.logging.INFO, "wait for ready.")
                time.sleep(1)
                continue
            break
