Spaces:
Sleeping
Sleeping
up
Browse files- .gitignore +2 -2
- run.py +36 -0
.gitignore
CHANGED
@@ -1,2 +1,2 @@
|
|
1 |
-
|
2 |
-
|
|
|
1 |
+
develop/*
|
2 |
+
translations/*
|
run.py
CHANGED
@@ -10,7 +10,43 @@ import ollama
|
|
10 |
|
11 |
# Optionally specify the model to pull during startup:
|
12 |
model_name = "llama3" # Replace with the desired model name
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
|
15 |
if __name__ == '__main__':
|
16 |
import os
|
|
|
10 |
|
11 |
# Optionally specify the model to pull during startup:
|
12 |
model_name = "llama3" # Replace with the desired model name
|
13 |
+
import os
|
14 |
+
import subprocess
|
15 |
+
# Whitelist of model names users may pull via `ollama` (order preserved).
ALLOWED_MODELS = [
    'llama3', 'llama3:70b', 'phi3', 'mistral',
    'neural-chat', 'starling-lm', 'codellama', 'llama2-uncensored',
    'llava', 'gemma:2b', 'gemma:7b', 'solar',
]
|
30 |
|
31 |
+
# Directory where downloaded-model marker files are expected: the process's
# current working directory, captured once at import time.
MODEL_DIR = os.getcwd()

def is_model_downloaded(model_name):
    """Return True if a file for *model_name* already exists under MODEL_DIR.

    The on-disk name is derived by replacing ':' with '_' (e.g. 'gemma:2b'
    -> 'gemma_2b').

    NOTE(review): `ollama pull` keeps models in its own store; it is not
    obvious from this file that a matching file ever appears in MODEL_DIR —
    confirm against how the rest of run.py writes these markers.
    """
    on_disk_name = model_name.replace(':', '_')
    return os.path.exists(os.path.join(MODEL_DIR, on_disk_name))
|
38 |
+
|
39 |
+
def download_model(model_name):
    """Pull *model_name* with `ollama pull` if it is allowed and not present.

    Names outside ALLOWED_MODELS are reported and skipped; already-present
    models (per is_model_downloaded) are reported and skipped. Propagates
    subprocess.CalledProcessError if the pull command fails (check=True).
    """
    # Guard clauses instead of nested ifs: reject disallowed names first.
    if model_name not in ALLOWED_MODELS:
        print(f"Model {model_name} is not in the list of allowed models.")
        return
    if is_model_downloaded(model_name):
        print(f"Model {model_name} is already downloaded.")
        return
    print(f"Downloading model: {model_name}")
    # List-form argv (shell=False) so the model name is never shell-parsed.
    subprocess.run(['ollama', 'pull', model_name], check=True)
    print(f"Model {model_name} downloaded successfully.")
|
50 |
|
51 |
if __name__ == '__main__':
|
52 |
import os
|