import subprocess
import flask
# Commented out because the existing llama-cpp-python package was removed from requirements.txt
#subprocess.run("pip uninstall -y llama-cpp-python", shell=True)

#install_command = "CMAKE_ARGS='-DGGML_CUDA=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12.2/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64' FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir"
#subprocess.run(install_command, shell=True)
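# Note: the commented install command above would rebuild llama-cpp-python from source
# with CUDA support enabled (GGML_CUDA=on) against the CUDA 12.2 toolkit paths given in
# the CMake arguments; it is kept here for reference only.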

subprocess.run("app.run(debug=True)", shell=True)