import os
import sys
import math
import time
from typing import *
from collections import deque

import mindspore as ms
import mindspore.nn as msnn
import mindspore.ops as msops
from mindspore.dataset import Dataset, GeneratorDataset, DistributedSampler, RandomSampler
from mindspore.train import RunContext, Callback


# Global MindSpore config: graph (compiled) mode on CPU.
# Must run before any Cell/ops are instantiated below.
ms.set_context(mode=ms.GRAPH_MODE, device_target="CPU")
import numpy as np
import sty
import tqdm

class Model1(msnn.Cell):
    """Small fully-convolutional net: 1 -> 15 -> 31 -> 1 channels.

    Each stage is Conv2d -> BatchNorm2d -> ReLU; a final Sigmoid squashes
    the single-channel output. Spatial size is preserved (3x3 convs with
    padding 1, 1x1 conv with padding 0, stride 1 throughout).
    """

    def __init__(self):
        super().__init__()
        self.conv1 = msnn.Conv2d(1, 15, kernel_size=3, stride=1, pad_mode="pad", padding=1, has_bias=True)
        self.bn1 = msnn.BatchNorm2d(num_features=15)
        self.conv2 = msnn.Conv2d(15, 31, kernel_size=3, stride=1, pad_mode="pad", padding=1, has_bias=True)
        self.bn2 = msnn.BatchNorm2d(num_features=31)
        self.conv3 = msnn.Conv2d(31, 1, kernel_size=1, stride=1, pad_mode="pad", padding=0, has_bias=True)
        self.bn3 = msnn.BatchNorm2d(num_features=1)
        self.relu = msnn.ReLU()
        self.sigmoid = msops.Sigmoid()

    def construct(self, x: ms.Tensor, *args, **kwargs):
        """Forward pass. Extra positional/keyword args are accepted but unused."""
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.relu(self.bn3(self.conv3(out)))
        # NOTE(review): Sigmoid applied after ReLU restricts outputs to
        # [0.5, 1) since its input is non-negative — confirm this is intended.
        return self.sigmoid(out)


from mindspore.ops.primitive import prim_attr_register, Primitive, PrimitiveWithInfer


class PrintCC(Primitive):
    """Host-side print primitive with a configurable line ending.

    Prints its arguments via Python's ``print`` so it can be used from
    callbacks (e.g. with ``end="\\r"`` for an in-place progress line).
    """

    @prim_attr_register
    def __init__(self, end: str = "\n"):
        """Initialize the primitive with the given line terminator."""
        super().__init__("PrintCC")
        self.end = end
        # NOTE(review): presumably marks this op as having I/O side effects so
        # the graph compiler will not eliminate/reorder it — confirm.
        self.add_prim_attr("side_effect_io", True)

    def __call__(self, *args):
        """Print every argument on the host, each terminated by ``self.end``."""
        for value in args:
            # Parameter check first: Parameter subclasses Tensor, so the
            # order of these isinstance tests matters.
            if isinstance(value, ms.common.parameter.Parameter):
                text = ms.Tensor.__repr__(value)
            elif isinstance(value, ms.Tensor):
                text = repr(value)
            else:
                text = value
            print(text, end=self.end)


class MockDataset:
    """In-memory random dataset of ``(data, target)`` ndarray pairs.

    Supports both random access (``__len__``/``__getitem__``) and plain
    iteration, the two access styles GeneratorDataset can consume.

    Args:
        shape: spatial size ``(H, W)`` of every sample.
        in_channels: channel count of the input arrays.
        target_channels: channel count of the target arrays.
        num: number of samples.
    """

    def __init__(self, shape: Tuple[int, int], in_channels: int, target_channels: int, num: int):
        self.num = num
        self.shape = shape
        self.in_channels = in_channels
        self.target_channels = target_channels

        # Standard-normal samples, laid out (num, C, H, W).
        self.data = np.random.randn(self.num, self.in_channels, *self.shape)
        self.target = np.random.randn(self.num, self.target_channels, *self.shape)

    def __len__(self) -> int:
        return self.num

    def __getitem__(self, item) -> Tuple[np.ndarray, np.ndarray]:
        return self.data[item], self.target[item]

    def __iter__(self) -> Iterator[Tuple[np.ndarray, np.ndarray]]:
        # Fixed: the original defined a generator function named __next__ and
        # had __iter__ call it. That broke the iterator protocol — calling
        # next(instance) returned a brand-new generator instead of the next
        # element (and its Coroutine annotation was wrong). Yielding directly
        # from __iter__ preserves iteration behavior and removes the broken
        # pseudo-iterator.
        yield from zip(self.data, self.target)


class CustomCallback(Callback):
    """Training callback that redraws a tqdm-style progress line each step.

    Collects per-step losses for the current epoch and prints a progress
    meter in place via PrintCC (carriage-return ending) so each step
    overwrites the previous line.
    """

    def __init__(self):
        super().__init__()  # fixed: base Callback initializer was skipped
        self.timer = None        # epoch start (ns); set in on_train_epoch_begin
        self.loss_list = list()  # per-epoch losses; last 10 averaged for display
        self.print1 = PrintCC(end="\r")
        self.print = msops.Print()

    def on_train_step_end(self, run_context):
        """Record the step loss and redraw the progress meter."""
        original_args = run_context.original_args()
        cur_epoch_num = original_args['cur_epoch_num']
        cur_step_num = original_args['cur_step_num']
        batch_num = original_args['batch_num']

        loss = original_args['net_outputs'].asnumpy()
        self.loss_list.append(loss)

        # Guard against a step ending before on_train_epoch_begin has run
        # (self.timer would still be None and the subtraction would raise).
        elapsed = 0.0 if self.timer is None else (time.perf_counter_ns() - self.timer) * 1e-9
        s = tqdm.tqdm.format_meter(n=cur_step_num % batch_num, total=batch_num,
                                   elapsed=elapsed,
                                   prefix=f"epoch={cur_epoch_num}",
                                   postfix=f"loss={np.mean(self.loss_list[-10:]):.5e}",
                                   ncols=80)
        self.print1(s)

    def on_train_epoch_begin(self, run_context):
        """Reset the timer and the loss window at the start of each epoch."""
        self.timer = time.perf_counter_ns()
        self.loss_list.clear()

    def on_train_epoch_end(self, run_context):
        # Emit a newline so the next output starts on a fresh line after the
        # "\r"-terminated meter.
        self.print("")  # this is triggered at a strange point


if __name__ == "__main__":
    print(__file__)


    m = Model1()
    # Synthetic data: 1600 random single-channel 64x64 inputs with
    # matching random single-channel targets (pure smoke-test data).
    source = MockDataset(shape=(64, 64), in_channels=1, target_channels=1, num=1600)
    ds = GeneratorDataset(
        source,
        column_names=["data", "target"],
    )
    ds = ds.batch(9)
    optimizer = msnn.Adam(m.trainable_params(), learning_rate=1e-4)

    loss_fn = msnn.MSELoss()

    callbacks = [
        CustomCallback()
    ]

    # NOTE(review): amp_level="O3" is a full-FP16 mixed-precision setting that
    # typically targets Ascend/GPU; the context above is CPU — confirm this is
    # honored and not silently ignored.
    model = ms.train.Model(
        network=m,
        loss_fn=loss_fn,
        optimizer=optimizer,
        metrics=None,
        eval_network=None,
        amp_level="O3",
    )

    # NOTE(review): dataset sink mode (sink_size=200) is generally a device
    # (Ascend/GPU) feature — verify it takes effect on a CPU target. It also
    # affects how often the step-end callback fires.
    model.fit(
        epoch=10,
        train_dataset=ds,
        callbacks=callbacks,
        dataset_sink_mode=True,
        sink_size=200,
    )


