import torch
from torch import optim

from DAP.models.AttrMatrixModel import AttrEmbedModel
from DAP.models.AttrModel import AttrModel

# Select the compute device once at import time: prefer the GPU, fall back to CPU.
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'


def config_model(args, feat_dim, attr_dim):
    """Instantiate the model named by ``args.model`` and a matching Adam optimizer.

    Args:
        args: config namespace; reads ``args.model`` ('AttrMatrix' or 'Attr'),
            ``args.lr`` (learning rate) and ``args.wd`` (weight decay).
        feat_dim: feature dimension forwarded to the model constructor.
        attr_dim: attribute dimension forwarded to the model constructor.

    Returns:
        Tuple ``(model, optimizer)`` with the model moved to the module-level
        ``device``.

    Raises:
        NotImplementedError: if ``args.model`` is not a known model name.
    """
    # Single dispatch on args.model: the original code branched twice on the
    # same value, which made the second chain's 'DAPModel' case unreachable
    # (the first chain had already raised for any name other than
    # 'AttrMatrix'/'Attr').
    if args.model == 'AttrMatrix':
        model = AttrEmbedModel(args=args, feat_dim=feat_dim, attr_dim=attr_dim)
        optim_params = [{'params': model.parameters(), 'lr': args.lr}]
    elif args.model == 'Attr':
        model = AttrModel(args=args, feat_dim=feat_dim, attr_dim=attr_dim)
        # One param group per per-attribute classifier, plus a group for the
        # whole model.
        # NOTE(review): if attr_classifier_list is an nn.ModuleList, these
        # parameters also appear in model.parameters(), and optim.Adam raises
        # ValueError for parameters shared across groups — confirm it is a
        # plain Python list of modules.
        optim_params = [{'params': clf.parameters(), 'lr': args.lr}
                        for clf in model.attr_classifier_list]
        optim_params.append({'params': model.parameters(), 'lr': args.lr})
    else:
        # Bug fix: the original did `raise NotImplemented`, which raises
        # TypeError (NotImplemented is a comparison sentinel, not an
        # exception class). NotImplementedError is the intended exception.
        raise NotImplementedError(f"unknown model: {args.model!r}")
    # Moving the module after building the param groups is safe: Module.to()
    # moves parameter storage in place, so the optimizer sees the same
    # Parameter objects.
    model = model.to(device)
    optimizer = optim.Adam(optim_params, lr=args.lr, weight_decay=args.wd)
    return model, optimizer
