Update pyproject.toml with better install options & instructions for cpu/cuda
Files changed:
- backend/README.md (+38 -16)
- backend/pyproject.toml (+13 -10)
backend/README.md
CHANGED
````diff
@@ -5,34 +5,56 @@ The backend is built using Python & [FastAPI](https://fastapi.tiangolo.com/) boo
 ## Requirements
 
 1. Python >= 3.11
-2. …
+2. Miniconda (To manage Python versions)
+   - [Windows](https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe)
+   - [Linux](https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh)
+   - [MacOS](https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.pkg)
+   - ```conda create -n SmartRetrieval python=3.11```
+3. Pipx (To manage Python packages)
+   - ```pip install pipx``` (If you already have pipx installed, you can skip this step)
+4. Cuda > 12.1 (if you have a Nvidia GPU)
+   - [Windows](https://developer.nvidia.com/cuda-downloads)
+   - [Linux](https://developer.nvidia.com/cuda-downloads)
+   - [MacOS](https://developer.nvidia.com/cuda-downloads)
+5. Poetry (To manage dependencies)
    - ```pipx install poetry```
 
 ## Getting Started
 
-First, …
+First, ensure if you want to use the cuda version of pytorch, you have the correct version `cuda > 12.1` of cuda installed. You can check this by running `nvcc --version or nvidia-smi` in your terminal. If you do not have cuda installed, you can install it from [here](https://developer.nvidia.com/cuda-downloads).
 
-…
+Ensure you have followed the steps in the requirements section above.
+
+Then activate the conda environment:
 
 ```bash
-…
-torch = [
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1-cp311-none-macosx_11_0_arm64.whl", markers = "sys_platform == 'darwin'" },
-]
-## For GPU version: Windows and Linux and MacOS (arm64)
-# torch = [
-#     { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
-#     { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
-#     { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1-cp311-none-macosx_11_0_arm64.whl", markers = "sys_platform == 'darwin'" },
-# ]
+conda activate SmartRetrieval
 ```
 
 Second, setup the environment:
 
 ```bash
-…
+# Only run one of the following commands:
+-----------------------------------------------
+# Install dependencies and torch (cpu version)
+# Windows: Set env for llama-cpp-python with openblas support on cpu
+$env:CMAKE_ARGS = "-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
+# Linux: Set env for llama-cpp-python with openblas support on cpu
+CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
+# Then:
+poetry install --without torch-cuda
+-----------------------------------------------
+# Install dependencies and torch (cuda version)
+# Windows: Set env for llama-cpp-python with cuda support on gpu
+$env:CMAKE_ARGS = "-DLLAMA_CUBLAS=on"
+# Linux: Set env for llama-cpp-python with cuda support on gpu
+CMAKE_ARGS="-DLLAMA_CUBLAS=on"
+# Then:
+poetry install --without torch-cpu
+```
+
+```bash
+# Enter poetry shell
 poetry shell
 ```
````
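Not part of the commit, but a quick sanity check after either `poetry install` variant is to print the torch build that was actually resolved; the expected version suffixes in the comments are an assumption based on the wheels pinned in `pyproject.toml`:

```bash
# Sanity check (not in the commit): run from backend/ after installing.
# The cpu wheels report something like "2.1.1+cpu" with CUDA unavailable;
# the cu121 wheels report "2.1.1+cu121" with CUDA available when a driver is present.
poetry run python -c "import torch; print(torch.__version__, torch.cuda.is_available())"
```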
backend/pyproject.toml
CHANGED
````diff
@@ -15,21 +15,24 @@ pypdf = "^3.17.0"
 python-dotenv = "^1.0.0"
 llama-cpp-python = "^0.2.18"
 transformers = "^4.35.2"
-# For CPU version: Windows and Linux and MacOS (arm64)
-torch = [
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
-]
-## For GPU version: Windows and Linux and MacOS (arm64)
-# torch = [
-#     { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
-#     { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
-# ]
 docx2txt = "^0.8"
 
 # Dev Dependencies here
 [tool.poetry.group.dev.dependencies]
 
+# For CPU torch version: Windows and Linux
+[tool.poetry.group.torch-cpu.dependencies]
+torch = [
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
+]
+
+## For Cuda torch version: Windows and Linux
+[tool.poetry.group.torch-cuda.dependencies]
+torch = [
+    { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
+    { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
+]
 
 [build-system]
 requires = ["poetry-core"]
````
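Both torch groups above are regular (non-optional) groups, so they sit in Poetry's default install set and the README has to exclude one of them with `--without` on every install. A possible variation, sketched here as an assumption on Poetry >= 1.2 group syntax and not part of this commit, is to mark both groups optional so a bare `poetry install` skips torch entirely and the caller opts in with `--with`:

```toml
# Sketch only, not in this commit: optional groups are skipped by a bare
# `poetry install` and pulled in explicitly, e.g. `poetry install --with torch-cpu`.
[tool.poetry.group.torch-cpu]
optional = true

[tool.poetry.group.torch-cpu.dependencies]
torch = [
    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.1.1%2Bcpu-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
]

[tool.poetry.group.torch-cuda]
optional = true

[tool.poetry.group.torch-cuda.dependencies]
torch = [
    { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-win_amd64.whl", markers = "sys_platform == 'win32'" },
    { url = "https://download.pytorch.org/whl/cu121/torch-2.1.1%2Bcu121-cp311-cp311-linux_x86_64.whl", markers = "sys_platform == 'linux'" },
]
```

With that layout the CPU path becomes `poetry install --with torch-cpu` and the CUDA path `poetry install --with torch-cuda`, and the `--without` flags in the README would no longer be needed.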