#!/bin/bash

# Cache Hugging Face model downloads under /root/.cache
export TRANSFORMERS_CACHE=/root/.cache

# Ingest the source documents on the GPU, then hand the process over to the
# localGPT app. `exec` replaces the shell, so it is used only on the final
# command; otherwise the second step would never run.
python3 ./ingest.py --device_type cuda
exec python3 ./run_localGPT.py --device_type cuda