import h5py
import numpy as np
import json
import os


def h5_2_npy_data(data_dir, map_name='2s3z', h5file_suffix='expert'):
    """Convert an HDF5 offline-RL dataset into per-key .npy files.

    Reads ``<data_dir>/<map_name>_<h5file_suffix>.h5`` and writes one
    ``.npy`` file per top-level HDF5 key (e.g. state, obs, actions,
    reward, avail_actions, filled, terminated) into the directory
    ``<data_dir>/<map_name>_<h5file_suffix>/``.  Additionally writes a
    derived ``actions_onehot.npy`` and a ``meta.json``, both required
    only to satisfy AlberDICE's offline-dataset layout.

    Args:
        data_dir: directory containing the ``.h5`` file; the output
            subdirectory is created inside it.
        map_name: SMAC map name used in the input/output file names.
        h5file_suffix: dataset quality tag used in the file names.
    """
    # Build paths with os.path.join instead of string concatenation,
    # which previously produced doubled '/' separators.
    base_name = map_name + '_' + h5file_suffix
    dataset_path = os.path.join(data_dir, base_name + '.h5')
    save_dir = os.path.join(data_dir, base_name)
    os.makedirs(save_dir, exist_ok=True)

    with h5py.File(dataset_path, 'r') as dataset:
        # 'actions' is assumed to be (episode_num, episode_len, num_agents, 1)
        # — TODO confirm against the dataset writer.
        episode_num, episode_len, num_agents = dataset['actions'].shape[:3]

        # Dump every stored key verbatim as its own .npy file.
        for key in dataset.keys():
            item = np.array(dataset[key])
            np.save(os.path.join(save_dir, key + '.npy'), item)

        # Derived array required only by AlberDICE's offline-dataset layout:
        # one-hot actions, shape (episode_num, episode_len, num_agents, num_actions)
        # — same shape/dtype as 'avail_actions'.
        actions_onehot = np.zeros_like(dataset['avail_actions'])
        # Drop the trailing singleton dim and force an integer dtype so the
        # fancy-index assignment below works even if actions were stored
        # as a float type in the HDF5 file.
        actions = np.array(dataset['actions']).squeeze(-1).astype(np.int64)
        actions_onehot[np.arange(episode_num)[:, None, None],
                       np.arange(episode_len)[None, :, None],
                       np.arange(num_agents)[None, None, :],
                       actions] = 1
        np.save(os.path.join(save_dir, 'actions_onehot.npy'), actions_onehot)

    # Buffer bookkeeping expected by AlberDICE.
    meta = {'buffer_index': 0,
            'episodes_in_buffer': episode_num,
            'buffer_size': episode_num}
    with open(os.path.join(save_dir, 'meta.json'), 'w') as fp:
        json.dump(meta, fp)


if __name__ == '__main__':
    # Convert every (map, quality) dataset combination found under buffer/sc2.
    root_dir = 'buffer/sc2'
    maps = ['2s3z', '3s_vs_5z', '5m_vs_6m', '6h_vs_8z']
    qualities = ['expert', 'medium', 'medium_replay', 'mixed']
    for current_map in maps:
        for quality in qualities:
            h5_2_npy_data(root_dir, current_map, quality)
