from utils.data import BaseTxtDataset
from utils.jsonl import JsonLReader
from pytorch_lightning import LightningDataModule
from typing import List, Any
from torchtext.data.field import Field
from argparse import ArgumentParser
import torch


class SBTNLDataset(BaseTxtDataset):
    """Paired SBT / natural-language dataset.

    Holds two parallel token-sequence lists and numericalizes each side
    with its own torchtext ``Field`` at collation time.
    """

    def __init__(self, sbt: List[Any], nl: List[Any], sbt_field: Field, nl_field: Field, raw_nl=False):
        # Sort weights [0, 1]: ordering is driven by the NL side
        # (semantics defined by BaseTxtDataset — confirm there).
        super().__init__([sbt, nl], [0, 1])
        self.sbt_field = sbt_field
        self.nl_field = nl_field
        # When True, collate_fn also exposes the untokenized NL sequences.
        self.raw_nl = raw_nl

    def collate_fn(self, batch):
        """Collate (sbt, nl) samples into a dict of processed batches.

        Returns a dict with keys "sbt", "sbt_len", "nl", and — when
        ``raw_nl`` is set — "raw_nl" holding the unprocessed NL lists.
        """
        sbt_seqs = [sample[0] for sample in batch]
        nl_seqs = [sample[1] for sample in batch]
        sbt_batch = self.sbt_field.process(sbt_seqs)
        nl_batch = self.nl_field.process(nl_seqs)
        # Pre-padding lengths of the SBT side, one entry per sample.
        lengths = torch.tensor([len(seq) for seq in sbt_seqs], dtype=torch.long)
        out = {"sbt": sbt_batch, "sbt_len": lengths, "nl": nl_batch}
        if self.raw_nl:
            out["raw_nl"] = nl_seqs
        return out


class DeepComDataModule(LightningDataModule):
    """LightningDataModule serving SBT/NL pairs loaded from JSONL files.

    Each JSONL record is expected to contain 'sbt' and 'nl' keys
    (see ``_load_dataset``). Validation and test datasets keep the raw
    NL sequences (``raw_nl=True``), presumably for evaluation against
    references — confirm with the consuming model code.
    """

    def __init__(self,
                 train_path,
                 test_path,
                 val_path,
                 sbt_field,
                 nl_field,
                 batch_size):
        """Store paths, torchtext fields, and batch size.

        Datasets are not built here; ``setup`` creates them lazily per stage.
        """
        super(DeepComDataModule, self).__init__()
        self.train_path = train_path
        self.val_path = val_path
        self.test_path = test_path

        self.sbt_field = sbt_field
        # Fix: this attribute was misspelled ``nl_filed``. The correct name is
        # used internally; the old name is kept as an alias so any external
        # reader of the misspelled attribute keeps working.
        self.nl_field = nl_field
        self.nl_filed = nl_field

        self.batch_size = batch_size

        self.train_dataset: SBTNLDataset = None
        self.test_dataset: SBTNLDataset = None
        self.val_dataset: SBTNLDataset = None

    @staticmethod
    def add_data_args(parser: ArgumentParser):
        """Register the dataset-path and batch-size CLI arguments on *parser*."""
        parser.add_argument("--train", type=str, default=None)
        parser.add_argument("--val", type=str, default=None)
        parser.add_argument("--test", type=str, default=None)
        parser.add_argument("--batch_size", type=int)

    def _load_dataset(self, path, raw_nl=False) -> "SBTNLDataset":
        """Read a JSONL file of {'sbt': ..., 'nl': ...} records into a dataset."""
        data = JsonLReader(path).read()
        sbt = [each['sbt'] for each in data]
        nl = [each['nl'] for each in data]
        return SBTNLDataset(sbt, nl, self.sbt_field, self.nl_field, raw_nl=raw_nl)

    def setup(self, stage=None) -> None:
        """Build the datasets needed for *stage* (``None`` builds all of them)."""
        # train_dataset, val_dataset
        if stage == "fit" or stage is None:
            self.train_dataset = self._load_dataset(self.train_path)
            self.val_dataset = self._load_dataset(self.val_path, raw_nl=True)

        # test_dataset
        if stage == "test" or stage is None:
            self.test_dataset = self._load_dataset(self.test_path, raw_nl=True)

    def train_dataloader(self):
        """DataLoader over the training dataset (requires ``setup('fit')``)."""
        return self.train_dataset.gen_data_loader(batch_size=self.batch_size)

    def test_dataloader(self):
        """DataLoader over the test dataset (requires ``setup('test')``)."""
        return self.test_dataset.gen_data_loader(batch_size=self.batch_size)

    def val_dataloader(self):
        """DataLoader over the validation dataset (requires ``setup('fit')``)."""
        return self.val_dataset.gen_data_loader(batch_size=self.batch_size)

    def predict_dataloader(self):
        """No prediction dataloader is provided by this module."""
        return None
