Syed Junaid Iqbal committed • Commit 1feeea0
Parent(s): 5fa89db
Update app.py
app.py
CHANGED
@@ -1,5 +1,14 @@
 import subprocess
-
+
+# Define the command
+command = 'CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir'
+
+# Run the command using subprocess
+try:
+    subprocess.run(command, shell=True, check=True)
+    print("Command executed successfully.")
+except subprocess.CalledProcessError as e:
+    print(f"Error: {e}")
 
 import streamlit as st
 from langchain.llms import LlamaCpp
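
The commit installs llama-cpp-python with cuBLAS support at app startup and then imports Streamlit and LangChain's LlamaCpp wrapper. Below is a minimal sketch of how these pieces might fit together, assuming a local GGUF model file; the import guard, the @st.cache_resource caching, the model path, and the parameters are illustrative placeholders and not part of this commit.

# Sketch only, not part of the commit: skip the pip install if llama_cpp is
# already importable, then load the model once per Streamlit session.
import subprocess

try:
    import llama_cpp  # already installed from a previous run
except ImportError:
    cmd = 'CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir'
    subprocess.run(cmd, shell=True, check=True)

import streamlit as st
from langchain.llms import LlamaCpp

@st.cache_resource  # build the model once per session instead of on every rerun
def load_llm():
    return LlamaCpp(
        model_path="models/model.gguf",  # placeholder path, not taken from the commit
        n_gpu_layers=-1,                 # offload all layers when CUDA is available
        n_ctx=2048,
        verbose=False,
    )

llm = load_llm()
st.write(llm("Say hello."))  # minimal smoke test

Guarding the install and caching the model object avoids repeating the pip install and reloading the weights every time Streamlit reruns the script.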