# coding=utf-8
"""
    @project: wlou
    @file: install_model.py
    @date: 2023/12/18 14:02
    @desc: Pre-download GPT-2 family tokenizers into a shared local cache.
"""
import json
import os.path
from transformers import GPT2TokenizerFast
from huggingface_hub import snapshot_download
 
prefix_dir = "/opt/wlou/model"

# Model identifiers on the Hugging Face Hub whose tokenizers should be
# pre-downloaded. All five entries share the same cache dir and download
# function, so the config list is generated rather than hand-written.
_MODEL_NAMES = [
    'gpt2',
    'gpt2-medium',
    'gpt2-large',
    'gpt2-xl',
    'distilgpt2',
]

# One entry per model: the keyword arguments for the download call plus the
# callable that performs it. Kept as a list of dicts so `install()` can look
# entries up by index.
models = [
    {
        'download_params': {
            'cache_dir': os.path.join(prefix_dir, 'base/hub'),
            'pretrained_model_name_or_path': name,
        },
        'download_function': GPT2TokenizerFast.from_pretrained,
    }
    for name in _MODEL_NAMES
]


def install(index):
    """Download the tokenizer(s) described in ``models``.

    Args:
        index: Position of a single entry in ``models`` to install, or
            ``None`` to install every entry.

    Raises:
        IndexError: If ``index`` is out of range for ``models``.
        KeyError: If a model entry is missing an expected key.
    """
    # Normalize both cases to a list so single/all share one code path.
    targets = models if index is None else [models[index]]
    for model in targets:
        # Direct subscription (not .get) so a malformed entry fails with a
        # clear KeyError instead of "NoneType is not callable".
        params = model['download_params']
        # Log the exact parameters so a failed download is reproducible.
        print(json.dumps(params))
        model['download_function'](**params)


if __name__ == '__main__':
    # When run as a script, download every configured model (index=None).
    install(None)
