Commit: Update README.md

File changed: README.md
```diff
@@ -74,7 +74,7 @@ modelscope download --model FlagRelease/MiniMax-M2-FlagOS --local_dir /share/Min
 ### Download FlagOS Image
 
 ```bash
-docker pull harbor.baai.ac.cn/flagrelease-public/
+docker pull harbor.baai.ac.cn/flagrelease-public/flagrelease-nvidia-release-model_minimax-m2-tree_none-gems_3.0-scale_0.8.0-cx_none-python_3.12.3-torch_2.8.0a0_5228986c39.nv25.6-pcp_cuda12.9-gpu_nvidia003-arc_amd64-driver_570.158.01:2511041437
 ```
 
 ### Start the inference service
@@ -85,7 +85,8 @@ docker run --init --detach --net=host --user 0 --ipc=host \
 -v /share:/share --security-opt=seccomp=unconfined \
 --privileged --ulimit=stack=67108864 --ulimit=memlock=-1 \
 --shm-size=512G --gpus all -e USE_FLAGGEMS=1 \
---name flagos harbor.baai.ac.cn/flagrelease-public/
+--name flagos harbor.baai.ac.cn/flagrelease-public/flagrelease-nvidia-release-model_minimax-m2-tree_none-gems_3.0-scale_0.8.0-cx_none-python_3.12.3-torch_2.8.0a0_5228986c39.nv25.6-pcp_cuda12.9-gpu_nvidia003-arc_amd64-driver_570.158.01:2511041437 sleep infinity
+docker exec -it flagos /bin/bash
 ```
 
 ### Serve
```