import pickle

import torch
from pypots.optim import Adam
from sklearn.model_selection import train_test_split
from torch.utils.data import DataLoader

from module.polygon_embedding import POLYGON_EMBEDDING_MODEL
from utils.utils import get_extent, make_args_parser, PEDataset
import geopandas

if __name__ == '__main__':
    # Entry point: load a pickled GeoDataFrame of polygon triples, compute the
    # spatial extent, and build the polygon-embedding model.
    parser = make_args_parser()
    args = parser.parse_args()

    # Raw string: the original literal used bare backslashes (`\d`, `\p`, `\o`),
    # which are invalid escape sequences and a SyntaxWarning in modern Python.
    path = r"D:\dataset\polygon_encoder-main\polygon_encoder-main\data_processing\dbtopo\output\pgon_triples_geom_300_norm_df.pkl"

    # Open read-only ("rb"), not "rb+": we never write back to this file.
    # NOTE(review): pickle.load is unsafe on untrusted input — this assumes the
    # pickle file is locally produced and trusted.
    with open(path, "rb") as f:
        pgon_gdf = pickle.load(f)

    # Spatial extent of the geometries, keyed by geometry type — passed to the
    # model so coordinates can be normalized.
    extent = get_extent(pgon_gdf, args.geom_type_list)

    # geom_type="norm" matches the normalized geometry column in the pickle
    # (pgon_triples_geom_300_norm_df). spa_enc=None defers spatial-encoder
    # choice to the model's defaults — TODO confirm intended encoder.
    model = POLYGON_EMBEDDING_MODEL(args, spa_enc=None, geom_type="norm", extent=extent)

    # TODO(review): training/evaluation loop is not implemented yet. The plan
    # (per removed scaffolding) was: train_test_split the data, wrap in
    # PEDataset + DataLoader, then iterate args.cla_epoches with an Adam
    # optimizer calling optimizer.zero_grad() per batch.