from torch.utils.data import Dataset, DataLoader
import os
import csv
from torch import Tensor

def data_encode(row):
    """One-hot encode a single lottery draw row.

    Args:
        row: mapping with keys 'r1'..'r6' (red balls) and 'b' (blue ball),
            each a 1-based ball number as a string or int.

    Returns:
        A list of 7 one-hot lists, each of length 33, in the order
        r1, r2, r3, r4, r5, r6, b.
    """
    def _one_hot(value):
        # 33 slots covers the red-ball range; the blue ball reuses the
        # same width so all vectors are shaped uniformly.
        vec = [0] * 33
        vec[int(value) - 1] = 1
        return vec

    return [_one_hot(row[key]) for key in ('r1', 'r2', 'r3', 'r4', 'r5', 'r6', 'b')]

class SSQDataset(Dataset):
    """Sliding-window dataset over one-hot encoded lottery ball vectors.

    Reads `data.csv` (next to this module), one-hot encodes every draw via
    `data_encode`, then flattens all draws into a single sequence of
    33-dim ball vectors. Each sample is a window of `input_sample`
    consecutive vectors (x) and the next vector (y).
    """

    def __init__(self, input_sample=3):
        """Load and encode the CSV.

        Args:
            input_sample: number of consecutive ball vectors per input window.
        """
        # os.path.join is the portable way to build the sibling-file path.
        data_path = os.path.join(os.path.dirname(__file__), 'data.csv')
        self.sample_data = []
        with open(data_path, 'r') as fp:
            for row in csv.DictReader(fp):
                self.sample_data.append(data_encode(row))
        # Flatten: one entry per ball vector, draws concatenated in file order.
        self.extra_data = [vec for draw in self.sample_data for vec in draw]
        self.input_sample = input_sample

    def __len__(self):
        # Last usable start index must leave one vector as the target.
        return len(self.extra_data) - self.input_sample

    def __getitem__(self, idx):
        """Return (window, target) as float tensors: (input_sample, 33) and (33,)."""
        window = self.extra_data[idx : idx + self.input_sample]
        target = self.extra_data[idx + self.input_sample]
        return Tensor(window), Tensor(target)

def iter_data(batch_size=1):
    """Yield (input, target) batches from the full SSQ dataset.

    Args:
        batch_size: batch size passed straight to the DataLoader.
    """
    loader = DataLoader(SSQDataset(), batch_size=batch_size)
    for batch_x, batch_y in loader:
        yield batch_x, batch_y

def show_dataset():
    """Print a single (x, y) batch as a quick sanity check."""
    for sample_x, sample_y in iter_data(1):
        print('x:', sample_x)
        print('y:', sample_y)
        print('(batch_size=1)')
        # Only the first batch is needed for the demo.
        return

if __name__ == '__main__':
    # Manual smoke test: print one encoded (x, y) batch from data.csv.
    show_dataset()
