zetavg committed on
Commit
5167103
1 Parent(s): 98dba8d
Files changed (2) hide show
  1. README.md +8 -2
  2. app.py +3 -0
README.md CHANGED
@@ -61,31 +61,37 @@ file_mounts:
61
  setup: |
62
  conda create -q python=3.8 -n llm-tuner -y
63
  conda activate llm-tuner
 
64
  # Clone the LLaMA-LoRA Tuner repo and install its dependencies
65
  [ ! -d llm_tuner ] && git clone https://github.com/zetavg/LLaMA-LoRA-Tuner.git llm_tuner
66
  echo 'Installing dependencies...'
67
  pip install -r llm_tuner/requirements.lock.txt
 
68
  # Optional: install wandb to enable logging to Weights & Biases
69
  pip install wandb
 
70
  # Optional: patch bitsandbytes to workaround error "libbitsandbytes_cpu.so: undefined symbol: cget_col_row_stats"
71
  BITSANDBYTES_LOCATION="$(pip show bitsandbytes | grep 'Location' | awk '{print $2}')/bitsandbytes"
72
  [ -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so" ] && [ ! -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so.bak" ] && [ -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cuda121.so" ] && echo 'Patching bitsandbytes for GPU support...' && mv "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so" "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so.bak" && cp "$BITSANDBYTES_LOCATION/libbitsandbytes_cuda121.so" "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so"
73
  conda install -q cudatoolkit -y
 
74
  echo 'Dependencies installed.'
 
75
  # Optional: Install and setup Cloudflare Tunnel to expose the app to the internet with a custom domain name
76
  [ -f /data/secrets/cloudflared_tunnel_token.txt ] && echo "Installing Cloudflare" && curl -L --output cloudflared.deb https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb && sudo dpkg -i cloudflared.deb && sudo cloudflared service uninstall || : && sudo cloudflared service install "$(cat /data/secrets/cloudflared_tunnel_token.txt | tr -d '\n')"
 
77
  # Optional: pre-download models
78
  echo "Pre-downloading base models so that you won't have to wait for long once the app is ready..."
79
  python llm_tuner/download_base_model.py --base_model_names='decapoda-research/llama-7b-hf,nomic-ai/gpt4all-j'
80
 
81
- # Start the app. `hf_access_token`, `wandb_api_key` and `wandb_project_name` are optional.
82
  run: |
83
  conda activate llm-tuner
84
  python llm_tuner/app.py \
85
  --data_dir='/data' \
86
  --hf_access_token="$([ -f /data/secrets/hf_access_token.txt ] && cat /data/secrets/hf_access_token.txt | tr -d '\n')" \
87
  --wandb_api_key="$([ -f /data/secrets/wandb_api_key.txt ] && cat /data/secrets/wandb_api_key.txt | tr -d '\n')" \
88
- --wandb_project_name='llm-tuner' \
89
  --timezone='Atlantic/Reykjavik' \
90
  --base_model='decapoda-research/llama-7b-hf' \
91
  --base_model_choices='decapoda-research/llama-7b-hf,nomic-ai/gpt4all-j,databricks/dolly-v2-7b' \
 
61
  setup: |
62
  conda create -q python=3.8 -n llm-tuner -y
63
  conda activate llm-tuner
64
+
65
  # Clone the LLaMA-LoRA Tuner repo and install its dependencies
66
  [ ! -d llm_tuner ] && git clone https://github.com/zetavg/LLaMA-LoRA-Tuner.git llm_tuner
67
  echo 'Installing dependencies...'
68
  pip install -r llm_tuner/requirements.lock.txt
69
+
70
  # Optional: install wandb to enable logging to Weights & Biases
71
  pip install wandb
72
+
73
  # Optional: patch bitsandbytes to workaround error "libbitsandbytes_cpu.so: undefined symbol: cget_col_row_stats"
74
  BITSANDBYTES_LOCATION="$(pip show bitsandbytes | grep 'Location' | awk '{print $2}')/bitsandbytes"
75
  [ -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so" ] && [ ! -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so.bak" ] && [ -f "$BITSANDBYTES_LOCATION/libbitsandbytes_cuda121.so" ] && echo 'Patching bitsandbytes for GPU support...' && mv "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so" "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so.bak" && cp "$BITSANDBYTES_LOCATION/libbitsandbytes_cuda121.so" "$BITSANDBYTES_LOCATION/libbitsandbytes_cpu.so"
76
  conda install -q cudatoolkit -y
77
+
78
  echo 'Dependencies installed.'
79
+
80
  # Optional: Install and setup Cloudflare Tunnel to expose the app to the internet with a custom domain name
81
  [ -f /data/secrets/cloudflared_tunnel_token.txt ] && echo "Installing Cloudflare" && curl -L --output cloudflared.deb https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb && sudo dpkg -i cloudflared.deb && sudo cloudflared service uninstall || : && sudo cloudflared service install "$(cat /data/secrets/cloudflared_tunnel_token.txt | tr -d '\n')"
82
+
83
  # Optional: pre-download models
84
  echo "Pre-downloading base models so that you won't have to wait for long once the app is ready..."
85
  python llm_tuner/download_base_model.py --base_model_names='decapoda-research/llama-7b-hf,nomic-ai/gpt4all-j'
86
 
87
+ # Start the app. `hf_access_token`, `wandb_api_key` and `wandb_project` are optional.
88
  run: |
89
  conda activate llm-tuner
90
  python llm_tuner/app.py \
91
  --data_dir='/data' \
92
  --hf_access_token="$([ -f /data/secrets/hf_access_token.txt ] && cat /data/secrets/hf_access_token.txt | tr -d '\n')" \
93
  --wandb_api_key="$([ -f /data/secrets/wandb_api_key.txt ] && cat /data/secrets/wandb_api_key.txt | tr -d '\n')" \
94
+ --wandb_project='llm-tuner' \
95
  --timezone='Atlantic/Reykjavik' \
96
  --base_model='decapoda-research/llama-7b-hf' \
97
  --base_model_choices='decapoda-research/llama-7b-hf,nomic-ai/gpt4all-j,databricks/dolly-v2-7b' \
app.py CHANGED
@@ -84,6 +84,9 @@ def main(
84
  except ValueError:
85
  raise ValueError("--auth must be in the format <username>:<password>, e.g.: --auth='username:password'")
86
 
 
 
 
87
  if wandb_api_key is not None:
88
  Config.wandb_api_key = wandb_api_key
89
 
 
84
  except ValueError:
85
  raise ValueError("--auth must be in the format <username>:<password>, e.g.: --auth='username:password'")
86
 
87
+ if hf_access_token is not None:
88
+ Config.hf_access_token = hf_access_token
89
+
90
  if wandb_api_key is not None:
91
  Config.wandb_api_key = wandb_api_key
92