#!/bin/bash
#
# This script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Check that the required tools (python, pip, and curl) are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file so the new model directory is a proper Python package.
#
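# Usage: run the script with bash, e.g.
#   bash ./patch_transformers_echo.sh   # substitute whatever filename you saved this script under
#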
set -e # Exit immediately if a command exits with a non-zero status.
# --- Helper Functions ---
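# command_exists NAME — returns 0 (success) if NAME is found on PATH; any output is discarded.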
command_exists() {
    command -v "$1" >/dev/null 2>&1
}
# --- Dependency Checks ---
echo "▶ Checking for required tools (python, pip, curl)..."
if ! command_exists python3 && ! command_exists python; then
echo "❌ Error: Python is not installed. Please install Python 3 and try again."
exit 1
fi
# Use python3 if available, otherwise fall back to python
PYTHON_CMD=$(command_exists python3 && echo "python3" || echo "python")
if ! command_exists pip3 && ! command_exists pip; then
echo "❌ Error: pip is not installed. Please install pip for Python 3 and try again."
exit 1
fi
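# Prefer pip3 when it exists; otherwise fall back to plain pip (mirrors the Python check above).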
PIP_CMD=$(command_exists pip3 && echo "pip3" || echo "pip")
if ! command_exists curl; then
echo "❌ Error: curl is not installed. Please install curl and try again."
exit 1
fi
echo "✅ All tools are available."
# --- Installation ---
echo -e "\n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."
$PIP_CMD install transformers torch accelerate --quiet
echo "✅ Libraries installed successfully."
# --- Patching ---
echo -e "\n▶ Locating transformers installation..."
TRANSFORMERS_PATH=$($PYTHON_CMD -c "import transformers, os; print(os.path.dirname(transformers.__file__))")
if [ -z "$TRANSFORMERS_PATH" ]; then
echo "❌ Error: Could not find the transformers library installation path."
exit 1
fi
echo "✅ Found transformers at: $TRANSFORMERS_PATH"
MODEL_PATH="$TRANSFORMERS_PATH/models/echo"
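# __init__.py is created last (see below), so its presence means a previous run completed fully;
# checking for it rather than just the directory avoids skipping a half-finished patch.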
if [ -f "$MODEL_PATH/__init__.py" ]; then
    echo "✅ Patch files already present in '$MODEL_PATH'. No action needed."
    echo -e "\n🎉 Patch already applied! You can use 'Echo' models right away."
    exit 0
fi
echo -e "\n▶ Applying patch: Creating 'echo' model directory..."
mkdir -p "$MODEL_PATH"
echo "▶ Downloading model architecture files..."
CONFIG_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py"
MODELING_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py"
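# curl -f makes HTTP errors fatal (so `set -e` aborts on a failed download) and -L follows redirects.
# The Hub's raw/main path serves the plain file contents from the repository's main branch.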
curl -fL "$CONFIG_URL" -o "$MODEL_PATH/configuration_echo.py"
curl -fL "$MODELING_URL" -o "$MODEL_PATH/modeling_echo.py"
echo "✅ Model files downloaded."
echo "▶ Finalizing module structure..."
# Create an __init__.py so Python treats `echo` as a package.
# Without it, the relative imports inside the model files cannot be resolved.
touch "$MODEL_PATH/__init__.py"
echo "✅ Module created."
# --- Completion ---
echo -e "\n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models."
echo " You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'."
echo -e "\n🧪 To test the installation, run the following Python code:"
# Print the snippet via a quoted heredoc so none of the inner quotes need escaping.
cat <<'EOF'

from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = 'MythWorxAI/Echo-mini'
print(f"Loading model: {model_id}")

# This now works without trust_remote_code=True
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

print('✅ Model and tokenizer loaded successfully!')
print(model.config)
EOF