ldhldh committed
Commit 46893aa
1 Parent(s): 123c1d1

Update app.py

Files changed (1): app.py (+4, −2)
app.py CHANGED
@@ -1,11 +1,13 @@
-!CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install --upgrade --force-reinstall llama-cpp-python --no-cache-dir
-
+import subprocess
 from threading import Thread
 from llama_cpp import Llama
 import torch
 import gradio as gr
 import re
 
+subprocess.call(f"""CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install --upgrade --force-reinstall llama-cpp-python --no-cache-dir
+""", shell=True)
+
 torch_device = "cuda" if torch.cuda.is_available() else "cpu"
 print("Running on device:", torch_device)
 print("CPU threads:", torch.get_num_threads())