added bitsandbytes bug fix

Files changed:
- .ipynb_checkpoints/smartscraper_notebook-checkpoint.ipynb (+44 -57)
- alpaca-lora (+1 -0)
- smartscraper_notebook.ipynb (+44 -57)
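This commit swaps the Colab-specific cell outputs (the nvidia-smi table and the pip download logs) for outputs from a local zsh environment, makes the transformers uninstall non-interactive with pip uninstall -y, and appends three cells aimed at a bitsandbytes setup problem: installing cudatoolkit from conda-forge, optionally exporting CUDA paths, and optionally copying the CUDA build of the bitsandbytes shared library over the CPU stub. A minimal diagnostic sketch (not part of the commit; it assumes torch and bitsandbytes are installed in the active environment) for checking which bitsandbytes binaries the interpreter would find:

    # Hypothetical helper, not part of this commit: list the compiled
    # bitsandbytes libraries so you can tell whether a CUDA build
    # (libbitsandbytes_cuda*.so) or only the CPU stub is present.
    import glob
    import importlib.util
    import os

    import torch

    print("torch sees CUDA:", torch.cuda.is_available())

    spec = importlib.util.find_spec("bitsandbytes")
    pkg_dir = os.path.dirname(spec.origin)
    for lib in sorted(glob.glob(os.path.join(pkg_dir, "libbitsandbytes*.so"))):
        print(os.path.basename(lib))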
.ipynb_checkpoints/smartscraper_notebook-checkpoint.ipynb CHANGED

@@ -2,7 +2,7 @@
  "cells": [
  {
  "cell_type": "code",
- "execution_count":
+ "execution_count": 4,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/"
@@ -15,26 +15,7 @@
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "
- "+-----------------------------------------------------------------------------+\n",
- "| NVIDIA-SMI 525.85.12 Driver Version: 525.85.12 CUDA Version: 12.0 |\n",
- "|-------------------------------+----------------------+----------------------+\n",
- "| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n",
- "| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n",
- "| | | MIG M. |\n",
- "|===============================+======================+======================|\n",
- "| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n",
- "| N/A 40C P8 10W / 70W | 0MiB / 15360MiB | 0% Default |\n",
- "| | | N/A |\n",
- "+-------------------------------+----------------------+----------------------+\n",
- " \n",
- "+-----------------------------------------------------------------------------+\n",
- "| Processes: |\n",
- "| GPU GI CI PID Type Process name GPU Memory |\n",
- "| ID ID Usage |\n",
- "|=============================================================================|\n",
- "| No running processes found |\n",
- "+-----------------------------------------------------------------------------+\n"
+ "zsh:1: command not found: nvidia-smi\r\n"
  ]
  }
  ],
@@ -44,7 +25,7 @@
  },
  {
  "cell_type": "code",
- "execution_count":
+ "execution_count": 5,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/"
@@ -62,7 +43,7 @@
  "remote: Counting objects: 100% (51/51), done.\u001b[K\n",
  "remote: Compressing objects: 100% (32/32), done.\u001b[K\n",
  "remote: Total 607 (delta 28), reused 34 (delta 19), pack-reused 556\u001b[K\n",
- "Receiving objects: 100% (607/607), 27.78 MiB |
+ "Receiving objects: 100% (607/607), 27.78 MiB | 4.33 MiB/s, done.\n",
  "Resolving deltas: 100% (360/360), done.\n"
  ]
  }
@@ -82,7 +63,7 @@
  },
  {
  "cell_type": "code",
- "execution_count":
+ "execution_count": 6,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/"
@@ -95,7 +76,7 @@
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "/
+ "/Users/dano/DEV/smartscraper/smartscraper/alpaca-lora\n"
  ]
  }
  ],
@@ -105,7 +86,7 @@
  },
  {
  "cell_type": "code",
- "execution_count":
+ "execution_count": null,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/"
@@ -113,40 +94,11 @@
  "id": "JCB9UzMVwsSM",
  "outputId": "78c8e31d-d978-44fa-c250-b862e83eb3c1"
  },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m468.7/468.7 kB\u001b[0m \u001b[31m8.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m41.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m62.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m200.1/200.1 kB\u001b[0m \u001b[31m24.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m110.5/110.5 kB\u001b[0m \u001b[31m13.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m212.2/212.2 kB\u001b[0m \u001b[31m25.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m132.9/132.9 kB\u001b[0m \u001b[31m15.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m158.8/158.8 kB\u001b[0m \u001b[31m19.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.2/114.2 kB\u001b[0m \u001b[31m13.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m269.3/269.3 kB\u001b[0m \u001b[31m30.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25h\u001b[33mWARNING: Skipping transformers as it is not installed.\u001b[0m\u001b[33m\n",
- "\u001b[0m\u001b[33m WARNING: Did not find branch or tag 'c3dc391', assuming revision or ref.\u001b[0m\u001b[33m\n",
- "\u001b[0m Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
- " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
- " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.8/7.8 MB\u001b[0m \u001b[31m53.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25h Building wheel for transformers (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
- " Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
- " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
- " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m215.3/215.3 kB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25h Building wheel for peft (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n"
- ]
- }
- ],
+ "outputs": [],
  "source": [
  "!pip install -q datasets loralib sentencepiece\n",
  "\n",
- "!pip uninstall transformers\n",
+ "!pip uninstall -y transformers\n",
  "!pip install -q git+https://github.com/zphang/transformers@c3dc391\n",
  "# pip install git+https://github.com/zphang/transformers.git@llama_push\n",
  "\n",
@@ -683,6 +635,41 @@
  "uploaded = files.upload()"
  ]
  },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!conda install -y -c conda-forge cudatoolkit"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# import os\n",
+ "\n",
+ "# # Set PATH variable\n",
+ "# cuda_path = \"/usr/local/cuda/bin\" # Update this to the correct path on your system\n",
+ "# os.environ[\"PATH\"] = f\"{cuda_path}:{os.environ['PATH']}\"\n",
+ "\n",
+ "# # Set LD_LIBRARY_PATH variable\n",
+ "# cuda_lib_path = \"/usr/local/cuda/lib64\" # Update this to the correct path on your system\n",
+ "# os.environ[\"LD_LIBRARY_PATH\"] = f\"{cuda_lib_path}:{os.environ.get('LD_LIBRARY_PATH', '')}\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# !cp /opt/conda/lib/python3.10/site-packages/bitsandbytes/libbitsandbytes_cuda117.so /opt/conda/lib/python3.10/site-packages/bitsandbytes/libbitsandbytes_cpu.so"
+ ]
+ },
  {
  "cell_type": "code",
  "execution_count": 7,
alpaca-lora ADDED

@@ -0,0 +1 @@
+ Subproject commit 8bb8579e403dc78e37fe81ffbb253c413007323f
smartscraper_notebook.ipynb CHANGED

(The diff for this file is identical to the one shown above for .ipynb_checkpoints/smartscraper_notebook-checkpoint.ipynb; the notebook and its checkpoint copy received the same changes.)
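The commented-out !cp line in the last added cell is a commonly reported workaround for bitsandbytes loading its CPU stub (libbitsandbytes_cpu.so) even when a CUDA toolkit is available: the CUDA 11.7 build of the shared library is copied over the CPU library so the loader always picks up the GPU build. The path in that cell is specific to a conda Python 3.10 layout; a hedged sketch of the same idea that resolves the package directory dynamically (it assumes the installed bitsandbytes wheel ships libbitsandbytes_cuda117.so) could look like:

    # Sketch of the same workaround with the site-packages path resolved
    # dynamically rather than hard-coded to /opt/conda/lib/python3.10/...
    # Assumption: the installed bitsandbytes wheel includes libbitsandbytes_cuda117.so.
    import importlib.util
    import os
    import shutil

    pkg_dir = os.path.dirname(importlib.util.find_spec("bitsandbytes").origin)
    cuda_lib = os.path.join(pkg_dir, "libbitsandbytes_cuda117.so")
    cpu_lib = os.path.join(pkg_dir, "libbitsandbytes_cpu.so")

    if os.path.exists(cuda_lib):
        shutil.copyfile(cuda_lib, cpu_lib)  # force the CUDA build to be loaded
    else:
        print("No CUDA 11.7 build found; leaving the CPU library in place.")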