# _*_ coding: utf-8 _*_
# Codes from Baidu AI, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
# pretraining of compound
# Author: MiqroEra Shibo

import os
import tarfile
import zipfile

import wget

# Full chem dataset: a zip archive hosted by Stanford SNAP (GNN pre-training release).
full_dataset_url = "http://snap.stanford.edu/gnn-pretrain/data/chem_dataset.zip"
# Small chem dataset: a gzipped tarball (.tgz) hosted on Baidu BCE.
small_dataset_url = "https://baidu-nlp.bj.bcebos.com/PaddleHelix%2Fdatasets%2Fcompound_datasets%2Fchem_dataset_small.tgz"
# Default download/extraction directory — a machine-specific absolute path;
# both functions below accept a savepath argument to override it.
save_path = "/home/shibo/helix/helixwrapper/compound_property/dataset"


def download_dataset(full_small, savepath=save_path):
    """Download the compound dataset archive into *savepath*.

    Args:
        full_small: which dataset to fetch — 'full' (zip from SNAP) or
            'small' (.tgz from Baidu BCE).
        savepath: directory the archive is written to.

    Raises:
        ValueError: if full_small is neither 'full' nor 'small'.
    """
    if full_small == 'full':
        # wget.download() returns the path of the file it just wrote, so we
        # rename that directly. (The old code passed the *list* returned by
        # os.listdir() into os.path.join, which raises TypeError.)
        downloaded = wget.download(full_dataset_url, savepath)
        print('Download full dataset successfully!')
        os.rename(downloaded, os.path.join(savepath, 'chem_dataset_full.zip'))
    elif full_small == 'small':
        downloaded = wget.download(small_dataset_url, savepath)
        print('Download small dataset successfully!')
        # Keep the .tgz extension: the small archive is a gzipped tarball,
        # and untar_dataset() opens 'chem_dataset_small.tgz' — renaming it
        # to '.zip' (as the old code did) broke extraction.
        os.rename(downloaded, os.path.join(savepath, 'chem_dataset_small.tgz'))
    else:
        raise ValueError(full_small + ' is not a valid dataset type. Please choose from full or small.')



def untar_dataset(full_small, savepath=save_path):
    """Extract a previously downloaded dataset archive inside *savepath*.

    Args:
        full_small: which archive to extract — 'full' (zip) or 'small' (.tgz).
        savepath: directory containing the archive; files are extracted there.

    Raises:
        ValueError: if full_small is neither 'full' nor 'small'
            (matches download_dataset's behavior; the old code silently
            did nothing for unknown values).
    """
    if full_small == 'full':
        # The full dataset ships as a plain zip archive; the context manager
        # guarantees the file handle is closed even if extraction fails.
        with zipfile.ZipFile(os.path.join(savepath, 'chem_dataset_full.zip')) as zip_file:
            zip_file.extractall(savepath)
    elif full_small == 'small':
        # The small dataset ships as a gzipped tarball.
        with tarfile.open(os.path.join(savepath, 'chem_dataset_small.tgz')) as tar:
            tar.extractall(savepath)
    else:
        raise ValueError(full_small + ' is not a valid dataset type. Please choose from full or small.')

if __name__ == "__main__":
    # Only fetch and extract when run as a script — importing this module
    # should not trigger a network download.
    download_dataset('small', save_path)
    untar_dataset('small', save_path)
