#!/bin/bash
#
# This script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Check that Python, pip, and curl are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file so the new directory is importable as a Python package.
#
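# Usage (the filename below comes from this repository's file listing and is
# only illustrative; adjust it to wherever you saved the script):
#
#   bash transformers+echo.sh
#
# Run it inside the Python environment (system, venv, or conda) whose copy of
# transformers you want to patch; only that installation is modified.
#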
set -e # Exit immediately if a command exits with a non-zero status.

# --- Helper Functions ---
command_exists() {
    command -v "$1" >/dev/null 2>&1
}
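# Example: `command_exists curl` exits with status 0 if curl is on PATH.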

# --- Dependency Checks ---
echo "▶ Checking for required tools (python, pip, curl)..."
if ! command_exists python3 && ! command_exists python; then
    echo "❌ Error: Python is not installed. Please install Python 3 and try again."
    exit 1
fi
# Use python3 if available, otherwise fall back to python
PYTHON_CMD=$(command_exists python3 && echo "python3" || echo "python")
# Prefer "python -m pip" so packages are installed for the same interpreter
# that is queried for the transformers path below.
if $PYTHON_CMD -m pip --version >/dev/null 2>&1; then
    PIP_CMD="$PYTHON_CMD -m pip"
elif command_exists pip3 || command_exists pip; then
    PIP_CMD=$(command_exists pip3 && echo "pip3" || echo "pip")
else
    echo "❌ Error: pip is not installed. Please install pip for Python 3 and try again."
    exit 1
fi
if ! command_exists curl; then
    echo "❌ Error: curl is not installed. Please install curl and try again."
    exit 1
fi
echo "✅ All tools are available."

# --- Installation ---
echo -e "\n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."
$PIP_CMD install transformers torch accelerate --quiet
echo "✅ Libraries installed successfully."

# --- Patching ---
echo -e "\n▶ Locating transformers installation..."
TRANSFORMERS_PATH=$($PYTHON_CMD -c "import transformers, os; print(os.path.dirname(transformers.__file__))")
if [ -z "$TRANSFORMERS_PATH" ]; then
    echo "❌ Error: Could not find the transformers library installation path."
    exit 1
fi
echo "✅ Found transformers at: $TRANSFORMERS_PATH"
MODEL_PATH="$TRANSFORMERS_PATH/models/echo"
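# For illustration only, this typically resolves to something like:
#   .../site-packages/transformers/models/echo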
if [ -d "$MODEL_PATH" ]; then
    echo "✅ Patch directory '$MODEL_PATH' already exists. No action needed."
    echo -e "\n🎉 Patching complete! You can now use 'Echo' models."
    exit 0
fi
echo -e "\n▶ Applying patch: Creating 'echo' model directory..."
mkdir -p "$MODEL_PATH"
echo "β–Ά Downloading model architecture files..."
CONFIG_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py"
MODELING_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py"
curl -fL "$CONFIG_URL" -o "$MODEL_PATH/configuration_echo.py"
curl -fL "$MODELING_URL" -o "$MODEL_PATH/modeling_echo.py"
echo "βœ… Model files downloaded."
echo "β–Ά Finalizing module structure..."
# Create an __init__.py to make `echo` a recognizable Python module
# This is crucial for the relative imports inside the model files to work.
touch "$MODEL_PATH/__init__.py"
echo "βœ… Module created."

# --- Completion ---
echo -e "\n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models."
echo " You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'."
echo -e "\n🧪 To test the installation, run the following Python code:"
echo
echo "from transformers import AutoTokenizer, AutoModelForCausalLM"
echo
echo "model_id = 'MythWorxAI/Echo-mini'"
echo "print(f\"Loading model: {model_id}\")"
echo
echo "# This now works without trust_remote_code=True"
echo "tokenizer = AutoTokenizer.from_pretrained(model_id)"
echo "model = AutoModelForCausalLM.from_pretrained(model_id)"
echo
echo "print('βœ… Model and tokenizer loaded successfully!')"
echo "print(model.config)"