import torch
import config
class Batchify(object):
    """Namespace for reshaping a flat dataset tensor into BPTT-style batches."""

    # Declared @staticmethod: the original definition had no `self`, so calling
    # it on an instance would misbind the instance as `data`. Class-level calls
    # (Batchify.batchify(data, bsz)) are unchanged.
    @staticmethod
    def batchify(data, bsz):
        """Split `data` into `bsz` parallel sequences for truncated BPTT.

        Args:
            data: tensor whose trailing dimension flattens into pairs of 2
                values (the `view(bsz, -1, 2)` below requires data.numel()
                to be a multiple of 2; presumably (N, 2) rows — TODO confirm).
            bsz: number of parallel batch columns.

        Returns:
            Tensor of shape (nbatch, bsz, 2), moved to CUDA when available,
            else CPU. Trailing rows that don't fill a whole batch are dropped.
        """
        # Work out how cleanly we can divide the dataset into bsz parts.
        nbatch = data.size(0) // bsz
        # Trim off any extra elements that wouldn't cleanly fit (remainders).
        data = data.narrow(0, 0, nbatch * bsz)
        # Evenly divide the data across the bsz batches; permute so the time
        # dimension leads and each of the bsz sequences becomes a column.
        data = data.view(bsz, -1, 2).permute(1, 0, 2)
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        return data.to(device)
        
def get_batch(source, i):
    """Slice one training pair out of a time-major batched tensor.

    Args:
        source: tensor of shape (time, bsz, 2), as produced by
            Batchify-style reshaping — TODO confirm with callers.
        i: starting time index of the window.

    Returns:
        (data, target): `data` is the window source[i:i+seq_len] of shape
        (seq_len, bsz, 2); `target` is the last feature of the window shifted
        one step ahead, flattened to 1-D (length seq_len * bsz).
    """
    # Clamp the window so it never runs past the end of the sequence.
    window = min(config.BPTT, source.size(0) - 1 - i)
    inputs = source[i:i + window, :, :]
    # Targets are the next-step values of the final feature column only.
    labels = source[i + 1:i + 1 + window, :, -1:].contiguous().view(-1)
    return inputs, labels