import tensorflow as tf
import numpy as np

def data_iterator(batch_size, data_path="F:\\pycharm1\\data.npy"):
    """Yield endless shuffled mini-batches of normalized 32x32 grayscale images.

    Args:
        batch_size: number of images per yielded batch.
        data_path: path to a ``.npy`` file holding an array of shape
            (n, 32, 32, C); only channel 0 is used. Defaults to the
            original hard-coded location for backward compatibility.

    Yields:
        float32 arrays of shape (batch_size, 32, 32, 1), scaled to [0, 1].
    """
    data = np.load(data_path)
    n_data = len(data)
    idxs = np.arange(n_data)
    while True:
        # Reshuffle once per epoch; a trailing partial batch is dropped.
        np.random.shuffle(idxs)
        shuf_data = data[idxs]
        for i_batch in range(0, n_data - batch_size + 1, batch_size):
            data_batch = shuf_data[i_batch:i_batch + batch_size, :, :, 0]
            data_batch = data_batch.astype(np.float32) / 255.0
            data_batch = np.reshape(data_batch, [-1, 32, 32, 1])
            yield data_batch

def fullyConnect(input, initialize_value):
    """Apply a dense (bias-free) layer: ``output = input @ weight``.

    Args:
        input: 2-D tensor of shape (batch, n_input).
        initialize_value: numpy array of shape (n_input, n_output) used as
            the initial value of the weight matrix.

    Returns:
        Tensor of shape (batch, n_output).
    """
    input_length = input.get_shape().as_list()[-1]
    # tf.matmul(input, weight) requires weight.shape[0] == input_length, so
    # axis 0 of the initializer is the input dimension. (The original code
    # read the axes swapped, which made the assert admit exactly the
    # non-square matrices that break the matmul below.)
    n_input = initialize_value.shape[0]
    assert input_length == n_input, \
        "the input length does not match the initializer"
    weight = tf.Variable(initialize_value, name="weight", dtype=tf.float32)
    output = tf.matmul(input, weight)
    return output

def xavier_initializer(n_input, n_output):
    """Glorot/Xavier uniform initializer.

    Samples from U(-limit, limit) with limit = sqrt(6 / (n_input + n_output)).
    """
    # Renamed from `range`, which shadowed the builtin.
    limit = (6.0 / (n_input + n_output)) ** 0.5
    return tf.random_uniform_initializer(minval=-limit, maxval=limit)

# NOTE: superseded by the active biasfullyConnect defined later in this file,
# which uses truncated-normal / constant initialization instead of Xavier.
# def biasfullyConnect(input,shape):
#     n_input=input.get_shape().as_list()[-1]
#     weight=tf.get_variable('weight',shape=shape,initializer=xavier_initializer(shape[0],shape[1]))
#     bia=tf.get_variable("biases", shape=shape[1], initializer=xavier_initializer(shape[0],shape[1]))
#     output=tf.matmul(input,weight)+bia
#     return output

def conv2d(input, kernel_size, kernel_number, collection=None):
    """SAME-padded, stride-1 2-D convolution with a bias term.

    Args:
        input: 4-D tensor (batch, height, width, channels).
        kernel_size: spatial size of the square kernel.
        kernel_number: number of output channels.
        collection: optional graph-collection name; when given, the kernel
            and bias variables are also added to that collection.

    Returns:
        Tensor of shape (batch, height, width, kernel_number).
    """
    n_input = input.get_shape().as_list()[3]
    n_output = kernel_number
    kernel_shape = [kernel_size, kernel_size, n_input, n_output]
    kernel = tf.get_variable(
        "kernel", shape=kernel_shape, initializer=xavier_initializer(n_input, n_output)
    )
    biases = tf.get_variable(
        "biases", shape=[n_output], initializer=tf.constant_initializer(0.0)
    )
    output = tf.nn.conv2d(input, kernel, [1, 1, 1, 1], "SAME") + biases
    # `is not None` (identity) rather than `!= None` (equality) for the singleton.
    if collection is not None:
        tf.add_to_collection(collection, kernel)
        tf.add_to_collection(collection, biases)
    return output

def biasfullyConnect(input, shape):
    """Dense layer with bias: ``input @ W + b``.

    Weights are initialized from a truncated normal (stddev 0.1) of the
    given ``shape``; the bias row is initialized to the constant 0.1.
    """
    w_init = tf.truncated_normal(shape, stddev=0.1)
    w = tf.Variable(w_init)
    b_init = tf.constant(0.1, shape=[1, shape[1]])
    b = tf.Variable(b_init)
    return tf.matmul(input, w) + b

def round_smooth(input):
    """Straight-through rounding.

    Forward pass returns ``tf.round(input)``; the ``stop_gradient`` trick
    makes the backward pass treat the op as the identity, so gradients
    flow through the (otherwise non-differentiable) rounding.
    """
    rounded = tf.round(input)
    return input + tf.stop_gradient(rounded - input)

