import sys

import torch
from torch import nn
from lib.base.base_net import BaseNet
from torch.nn import functional as F
import copy


class Baseline(BaseNet):
    """Transfer-learning baseline for few-shot classification.

    During TRAIN/VALIDATE a standard linear classifier is applied on top of
    backbone features. During TEST, a separate ``n_way['TEST']``-way linear
    head is fine-tuned on each episode's support set (via the inherited
    ``finetune``) and its query logits are returned.
    """

    def __init__(self, cfg):
        super().__init__(cfg)

        # Flattened feature dimension produced by the backbone.
        self.dim = self.get_backbone_last_dim()

        # Head used for standard supervised training/validation.
        self.train_linear = nn.Linear(self.dim, self.class_num)
        self.train_linear.bias.data.fill_(0)

        # Episode head fine-tuned at test time.
        self.test_linear = nn.Linear(self.dim, self.n_way['TEST'])
        self.test_linear.bias.data.fill_(0)

    # Fine-tuning is required at test time.

    def forward(self, data, type):
        # NOTE: `type` shadows the builtin, but the name is part of the
        # public call signature (keyword callers), so it is kept.
        #
        # TRAIN/VALIDATE: `data` is a batch of images,
        # e.g. (batch, 1, 84, 84) -- TODO confirm against caller.
        if type in ('TRAIN', 'VALIDATE'):
            feature = self.backbone(data)
            feature = feature.flatten(1, -1)
            return self.train_linear(feature)

        # TEST: `data` is a batch of episodes; fine-tune the test head on
        # each episode's support set and collect the query logits.
        logits_q = []

        # Support labels [0,...,0, 1,...,1, ...] with n_shot repeats per
        # class, built directly on the target device as int64 (the default
        # dtype of arange). Equivalent to the nested-comprehension form but
        # without the Python-level loop and double .to() conversion.
        labels = torch.arange(
            self.n_way['TEST'], device=data.device
        ).repeat_interleave(self.n_shot['TEST'])

        for episode in data:
            # NOTE(review): self.test_linear is fine-tuned in place, so each
            # episode starts from the previous episode's weights. The unused
            # `copy` import at file top suggests a per-episode deepcopy may
            # have been intended -- confirm `finetune` resets the head.
            logits_q.append(self.finetune(self.test_linear, episode, labels, 'TEST'))

        return torch.cat(logits_q, dim=0)
