Spaces:
Paused
Paused
Update start.sh
Browse files
start.sh
CHANGED
#!/bin/bash
# start.sh — container entrypoint.
# Logs in to Hugging Face, starts the Ollama server, pulls the private
# models, then keeps the container alive.
#
# Required env var:
#   HF_TOKEN — Hugging Face access token with read access to the private
#              Yaya86/* model repositories.
set -euo pipefail

# Fail fast with a clear message if the token is missing — without it every
# `ollama pull hf.co/...` below would fail much later and less legibly.
# (The original `export HF_TOKEN=$HF_TOKEN` was a no-op and is dropped.)
: "${HF_TOKEN:?HF_TOKEN must be set to a Hugging Face access token}"

# Log in to Hugging Face so the private hf.co model pulls are authorized.
# NOTE(review): the token appears in the process argv here; if the CLI in
# this image supports it, prefer an env-var or stdin login — TODO confirm.
huggingface-cli login --token "$HF_TOKEN"

# Start the Ollama server in the background.
ollama serve &

# Wait until the server actually answers (up to ~30 s) instead of the old
# blind `sleep 10`, which raced the server on slow cold starts.
for _ in {1..30}; do
  if ollama list >/dev/null 2>&1; then
    break
  fi
  sleep 1
done

# Private models to pull, in the same order as before.
models=(
  hf.co/Yaya86/Meta-Llama-3.1-8B-Instruct-bnb-4bit-v4.5:F16
  hf.co/Yaya86/Mistral-Nemo-Base-2407-bnb-4bit-v4:F16
  hf.co/Yaya86/Phi-3-medium-4k-instruct-v4.5:F16
  hf.co/Yaya86/gemma-2-9b-bnb-4bit-v4.5:F16
  hf.co/Yaya86/Llama-3.1-Minitron-4B-Width-Base-v4.5:F16
  hf.co/Yaya86/Mistral-NeMo-Minitron-8B-Base-v4.5:F16
)

# `ollama pull` blocks until the download completes, so the copy-pasted
# `sleep 10` between each pull in the original bought nothing but dead time.
for model in "${models[@]}"; do
  ollama pull "$model"
done

# Keep the container running; the Ollama server stays up in the background.
tail -f /dev/null