import torch 
import torch.nn as nn 
from flagai.auto_model.auto_loader import AutoLoader
import matplotlib.pyplot as plt 

# Build an image-classification model through FlagAI's AutoLoader.
# NOTE(review): 1000 classes matches ImageNet-1k; swap model_name to
# "swinv2-base-patch4-window8-256" to experiment with Swin-V2 instead.
loader_kwargs = {
    "task_name": "classification",
    "model_name": "vit-large-p32-224",  # alt: "swinv2-base-patch4-window8-256"
    "num_classes": 1000,
    # "only_download_config": True,     # uncomment to skip weight download
}
loader = AutoLoader(**loader_kwargs)

# Instantiate the model (weights are loaded/downloaded by the loader).
model = loader.get_model()

# import numpy as np 


# sd = torch.load("./checkpoints/vit-large-p16-224/pytorch_model.bin")  # pytorch_model.bin is a torch checkpoint, not a numpy file

# print(type(sd))

# print(sd)

# for k, v in sd.items():
#     print(k)



# loader2 = AutoLoader(task_name="classification",
#                         # model_name="swinv2-base-patch4-window8-256",
#                         model_name="vit-base-p16-224",
#                         only_download_config=False,
#                         num_classes=1000,
#                         model_dir="./checkpoints_2")

# model2 = loader2.get_model()

# k1 = []
# v1 = []

# k2 = []
# v2 = []
# for k, v in model.named_parameters():
#     # print(k, v.sum())
#     k1.append(k)
#     v1.append(v.sum())

# for k, v in model2.named_parameters():
#     k2.append(k)
#     v2.append(v.sum())


# index = 0

# for i in range(len(k1)):
#     print(f"{k1[i]}: {v1[i]}, {k2[i]}: {v2[i]}")





# optimizer = torch.optim.AdamW(nn.Linear(1,2).parameters(), lr=0.0001, eps=1e-8, betas=(0.9, 0.999), weight_decay=1e-5)
# scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 10000, last_epoch=-1)

# lrs = []
# x = [i for i in range(10000)]
# for i in range(10):
#     for j in range(1000):
#         print(optimizer.state_dict()['param_groups'][0]['lr'])
#         lrs.append(optimizer.state_dict()['param_groups'][0]['lr'])
#         scheduler.step()

# plt.plot(x, lrs)
# plt.show()


