import os
import gzip
import pickle
import matplotlib.pyplot as plt

def load_data(dataset):
    """Load the MNIST dataset from ``dataset``.

    If the file is a bare name and cannot be found locally, the
    directory containing this script is tried next; if the file is
    still missing and its name looks like an MNIST pickle archive
    (``*.pkl.gz``), it is downloaded from the LISA lab mirror.

    Parameters
    ----------
    dataset : str
        Path (or bare filename) of the gzipped pickle archive.

    Returns
    -------
    tuple
        ``(train_set, valid_set, test_set)``.  Each element is a
        ``(input, target)`` pair: ``input`` is a 2-D array with one
        example per row, ``target`` a 1-D array of labels whose length
        matches the number of rows in ``input``.
    """
    #############
    # LOAD DATA #
    #############

    data_dir, data_file = os.path.split(dataset)
    if data_dir == "" and not os.path.isfile(dataset):
        # Bare filename not present in the CWD: look for it next to
        # this script instead.
        new_path = os.path.join(os.path.split(__file__)[0], dataset)
        # BUG FIX: the original compared against the literal
        # 'mnist.pkl.gz', so the 'mnist_py3k.pkl.gz' name actually used
        # by this script never took this branch.  Accept any gzipped
        # pickle archive name (still matches 'mnist.pkl.gz').
        if os.path.isfile(new_path) or data_file.endswith('.pkl.gz'):
            dataset = new_path

    if (not os.path.isfile(dataset)) and data_file.endswith('.pkl.gz'):
        # BUG FIX: ``urllib.urlretrieve`` is the Python 2 API; in
        # Python 3 the function lives in ``urllib.request``.
        from urllib.request import urlretrieve
        origin = (
            'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist_py3k.pkl.gz'
        )
        print('Downloading data from %s' % origin)
        urlretrieve(origin, dataset)

    print('... loading data')

    # The archive holds a single pickled 3-tuple:
    # (train_set, valid_set, test_set).
    # NOTE(review): 'latin1' is the conventional encoding for the
    # Python-2-era mnist.pkl.gz; the py3k archive contains no Python 2
    # str objects, so the encoding argument is effectively inert here.
    with gzip.open(dataset, 'rb') as f:
        train_set, valid_set, test_set = pickle.load(f, encoding='UTF-8')

    # NOTE(review): a Theano-specific ``shared_dataset`` helper used to
    # be defined here but was never called and referenced ``theano``,
    # ``numpy`` and ``T``, none of which are imported — removed as dead
    # code.
    return (train_set, valid_set, test_set)
                
if __name__ == '__main__':
    (train_set, valid_set, test_set) = load_data('mnist_py3k.pkl.gz')

    # Scan the training images for samples with out-of-range pixels.
    # BUG FIX: the original tested ``(s < 0).all() or (s > 1).all()``,
    # which only fires when *every* pixel of a sample is out of range —
    # that never matches real MNIST data (values lie in [0, 1]), so
    # nothing was ever printed.  ``.any()`` flags a sample as soon as a
    # single pixel falls outside [0, 1], which matches the evident
    # intent of a sanity scan.
    for s in train_set[0]:
        if (s < 0).any() or (s > 1).any():
            a = s.reshape(28, 28)
            print(a)
    # plt.imshow(a, cmap='gray')
    # plt.show()
