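"""Streamlit app that downloads HuggingFace models and saves them to a local directory.

Usage note (a sketch; the script's actual filename is not given in the source,
so `app.py` below is a placeholder):

    streamlit run app.py
"""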
import os
import streamlit as st
from transformers import AutoModel, AutoTokenizer

st.title("HuggingFace Model Loader & Saver")
st.write("Load one or more models (and their tokenizers) from the HuggingFace Hub and save them locally. Edit the parameters below:")

# Editable parameters
model_name = st.text_input("Model Name", value="openai-gpt", help="Enter the HuggingFace model name (e.g., openai-gpt)")
save_dir = st.text_input("Save Directory", value="./hugging", help="Local directory to save the model")
additional_models = st.multiselect(
    "Additional Models",
    options=["bert-base-uncased", "gpt2", "roberta-base"],
    help="Select additional models to load and save"
)

if st.button("Load and Save Model"):
    st.write("### Processing Primary Model")
    try:
        st.write(f"Loading **{model_name}** ...")
        model = AutoModel.from_pretrained(model_name)
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        # Ensure a safe folder name (replace / if necessary)
        model_save_path = os.path.join(save_dir, model_name.replace("/", "_"))
        os.makedirs(model_save_path, exist_ok=True)
        model.save_pretrained(model_save_path)
        # Save the tokenizer alongside the model so the checkpoint can be reloaded locally
        tokenizer.save_pretrained(model_save_path)
        st.success(f"Model **{model_name}** and its tokenizer saved to `{model_save_path}`")
    except Exception as e:
        st.error(f"Error loading/saving model **{model_name}**: {e}")

    if additional_models:
        st.write("### Processing Additional Models")
        for m in additional_models:
            try:
                st.write(f"Loading **{m}** ...")
                model = AutoModel.from_pretrained(m)
                tokenizer = AutoTokenizer.from_pretrained(m)
                model_save_path = os.path.join(save_dir, m.replace("/", "_"))
                os.makedirs(model_save_path, exist_ok=True)
                model.save_pretrained(model_save_path)
                # Save the tokenizer alongside the model so the checkpoint can be reloaded locally
                tokenizer.save_pretrained(model_save_path)
                st.success(f"Model **{m}** and its tokenizer saved to `{model_save_path}`")
            except Exception as e:
                st.error(f"Error loading/saving model **{m}**: {e}")