import torch
from torch import nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader
from tqdm import tqdm
import pandas as pd
import numpy as np


class RawDanmuContainer(Dataset):
    """Map-style dataset over an in-memory sequence of raw danmu strings.

    Tokenization is deferred to ``__getitem__``: each access runs the
    tokenizer on the stored string and wraps the result in ``torch.tensor``.
    """

    def __init__(self, tokenizer, txt):
        super().__init__()
        # Callable mapping one string to a sequence of token ids.
        self.tokenizer = tokenizer
        # Sequence of raw text items served by this dataset.
        self.txt = txt

    def __len__(self):
        return len(self.txt)

    def __getitem__(self, idx):
        tokens = self.tokenizer(self.txt[idx])
        return torch.tensor(tokens)


class RawDanmuDataset(Dataset):
    """Map-style dataset that loads raw danmu lines from a text file.

    The whole file is read into memory at construction time, one stripped
    line per sample; tokenization is deferred to ``__getitem__``.
    """

    def __init__(self, tokenizer, path='./raw.txt'):
        super().__init__()
        # Callable mapping one string to a sequence of token ids.
        self.tokenizer = tokenizer
        self.path = path
        # Eagerly read every line; tqdm shows progress for large files.
        with open(path, 'r', encoding='utf-8') as f:
            self.txt = [line.strip() for line in tqdm(f)]

    def __len__(self):
        return len(self.txt)

    def __getitem__(self, idx):
        tokens = self.tokenizer(self.txt[idx])
        return torch.tensor(tokens)


class LabeledDanmuDataset(Dataset):
    """Map-style dataset over a tab-separated ``<name>.txt`` label file.

    The file is expected to hold one ``text<TAB>label`` pair per line with
    no header row. Each sample is ``(torch.tensor(token_ids), np.int64(label))``,
    where the tokenizer is invoked with ``max_len=seq_len``.
    """

    def __init__(self, name, tokenizer, seq_len=16):
        super().__init__()
        self.name = name
        # Callable with signature tokenizer(text, max_len=...) -> token ids.
        self.tokenizer = tokenizer
        self.seq_len = seq_len
        # Two unnamed columns: the raw text and its integer label.
        frame = pd.read_csv('{}.txt'.format(name), sep='\t',
                            names=['txt', 'label'])
        self.txt = frame['txt'].tolist()
        self.label = frame['label'].tolist()

    def __len__(self):
        return len(self.label)

    def __getitem__(self, idx):
        tokens = self.tokenizer(self.txt[idx], max_len=self.seq_len)
        label = np.int64(self.label[idx])
        return torch.tensor(tokens), label



