# -*- coding: utf-8 -*-
"""
Created on Fri Jan 25 16:31:46 2019

@author: Administrator
"""

import torch
import torch.utils.data as Data
import time

torch.manual_seed(1)    # reproducible

BATCH_SIZE = 5  # number of samples per mini-batch
# BATCH_SIZE = 8
a1 = time.time()
# 10 evenly spaced points from 1 to 10
x = torch.linspace(1, 10, 10)       # this is x data (torch tensor)
# 10 evenly spaced points from 10 down to 1
y = torch.linspace(10, 1, 10)       # this is y data (torch tensor)

# Wrap the tensors in a Dataset that torch can consume.
# torch_dataset = Data.TensorDataset(data_tensor=x, target_tensor=y)  # keyword form breaks on newer torch versions
torch_dataset = Data.TensorDataset(x, y)  # define a dataset
loader = Data.DataLoader(
    dataset=torch_dataset,      # torch TensorDataset format
    batch_size=BATCH_SIZE,      # mini batch size
    shuffle=True,               # random shuffle for training
    # NOTE: worker subprocesses add startup overhead; for a tiny in-memory
    # dataset like this, enabling them was measured to be far slower.
    num_workers=0,              # 0 disables worker subprocesses
)


def show_batch():
    """Run three passes over the module-level ``loader``, printing each mini-batch."""
    n_epochs = 3  # train over the entire dataset this many times
    for epoch in range(n_epochs):
        step = 0
        # The loader yields one shuffled mini-batch of (x, y) per iteration.
        for batch_x, batch_y in loader:
            # train your data...
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())
            step += 1


if __name__ == '__main__':
    show_batch()
    # Report wall-clock seconds elapsed since the timer started at import.
    finished_at = time.time()
    print(finished_at - a1)
