import tensorflow as tf
# NOTE(review): TF1-only API — tf.Session was removed in TF2
# (use tf.compat.v1.Session / compat mode to run this there).
sess = tf.Session()
print('################ sharing variables')
# 1. Explicitly passing tf.Variable objects around
# 2. Implicitly sharing via tf.variable_scope + tf.get_variable (plain tf.Variable objects are never shared)

# def conv_relu(input, kernel_shape, bias_shape):
#     weights = tf.get_variable('weights', kernel_shape, initializer=tf.random_normal_initializer())
#     biases = tf.get_variable('biases', bias_shape, initializer=tf.constant_initializer(0.0))
#     conv = tf.nn.conv2d(input, weights, strides=[1,1,1,1], padding='SAME')
#     return tf.nn.relu(conv + biases)
#
# input1 = tf.random_normal([1,10,10,32])
# input2 = tf.random_normal([1,20,20,32])
# x = conv_relu(input1, kernel_shape=[5,5,32,32], bias_shape=[32])
# x = conv_relu(x, kernel_shape=[5,5,32,32], bias_shape=[32])

def a():
    """Build a tiny two-scope graph and dump it to ./logs for TensorBoard.

    Creates two variables under separate ``tf.name_scope``s plus an add op,
    initializes them in the module-level ``sess``, and writes the default
    graph to an event file under ``./logs``.

    NOTE: ``tf.name_scope`` only prefixes op names for graph visualization;
    it does NOT enable variable sharing (in TF1 that requires
    ``tf.variable_scope`` together with ``tf.get_variable``).
    """
    with tf.name_scope(name='scope_1'):
        v = tf.Variable(initial_value=1)
    with tf.name_scope(name='scope_2'):
        v_1 = tf.Variable(initial_value=2)
        v_2 = v_1 + v  # add op is named under 'scope_2/'
    # Uses the module-level session created at import time.
    sess.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter('./logs')
    try:
        writer.add_graph(tf.get_default_graph())
        writer.flush()
    finally:
        # Fix: the original never closed the writer, leaking the event-file
        # handle; close() also flushes any pending events.
        writer.close()

a()

