feat: update

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- .dockerignore +12 -0
- .env.example +6 -0
- .gitattributes +4 -35
- .gitignore +165 -0
- .pre-commit-config.yaml +46 -0
- .pylintrc +428 -0
- Dockerfile +27 -0
- LICENSE +201 -0
- README.md +174 -7
- README_zh-CN.md +140 -0
- app.py +288 -120
- dist/assets/logo-38417354.svg → assets/logo.svg +0 -0
- assets/mindsearch_openset.png +3 -0
- assets/teaser.gif +3 -0
- backend_example.py +37 -0
- dist/assets/background-95159880.png +0 -0
- dist/assets/index-327d01f5.js +0 -0
- dist/assets/index-ab4095ce.css +0 -1
- dist/assets/index-legacy-f2aa4b0e.js +0 -0
- dist/assets/pack-up-ad0b3cbc.svg +0 -4
- dist/assets/polyfills-legacy-0b55db5f.js +0 -1
- dist/assets/show-right-icon-12c14da5.png +0 -0
- dist/index.html +0 -21
- docker/README.md +125 -0
- docker/README_zh-CN.md +125 -0
- docker/msdl/__init__.py +0 -0
- docker/msdl/__main__.py +107 -0
- docker/msdl/config.py +57 -0
- docker/msdl/docker_manager.py +175 -0
- docker/msdl/i18n.py +64 -0
- docker/msdl/templates/backend/cloud_llm.dockerfile +25 -0
- docker/msdl/templates/backend/local_llm.dockerfile +30 -0
- docker/msdl/templates/docker-compose.yaml +62 -0
- docker/msdl/templates/frontend/react.dockerfile +35 -0
- docker/msdl/translations/en.yaml +77 -0
- docker/msdl/translations/zh_CN.yaml +77 -0
- docker/msdl/user_interaction.py +253 -0
- docker/msdl/utils.py +257 -0
- docker/setup.py +24 -0
- frontend/React/.gitignore +5 -0
- frontend/React/.prettierignore +1 -1
- frontend/React/README.md +130 -78
- frontend/React/README_zh-CN.md +135 -0
- frontend/React/package.json +6 -0
- frontend/React/src/App.module.less +2 -4
- frontend/React/src/App.tsx +3 -1
- frontend/React/src/assets/background.png +0 -0
- frontend/React/src/assets/show-right-icon.png +0 -0
- frontend/React/src/components/iconfont/index.tsx +0 -7
- frontend/React/src/config/cgi.ts +0 -2
.dockerignore
ADDED
@@ -0,0 +1,12 @@
+**/node_modules
+**/dist
+**/.git
+**/.gitignore
+**/.vscode
+**/README.md
+**/LICENSE
+**/.env
+**/npm-debug.log
+**/yarn-debug.log
+**/yarn-error.log
+**/.pnpm-debug.log
.env.example
ADDED
@@ -0,0 +1,6 @@
+OPENAI_API_KEY=
+OPENAI_API_BASE=
+OPENAI_MODEL=
+SILICON_API_KEY=
+SILICON_MODEL=
+InternLM_API_KEY=
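The `.env.example` template above only declares empty placeholders. As a hedged sketch (not part of this commit), the copied `.env` can be filled in and exported into the current shell before starting the backend:

```bash
# Hedged sketch: copy the template, fill in the keys, then export them for the current shell.
cp .env.example .env
# edit .env and set OPENAI_API_KEY / SILICON_API_KEY / InternLM_API_KEY as needed
set -a        # auto-export every variable defined while sourcing
source .env
set +a
```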
.gitattributes
CHANGED
@@ -1,35 +1,4 @@
-*.
-*.
-*.
-*.
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
+*.png filter=lfs diff=lfs merge=lfs -text
+*.jpg filter=lfs diff=lfs merge=lfs -text
+*.gif filter=lfs diff=lfs merge=lfs -text
+*.mp4 filter=lfs diff=lfs merge=lfs -text
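The new `.gitattributes` routes image and video assets through Git LFS. As a hedged illustration, the same patterns could be declared from the command line (assuming the git-lfs extension is installed); `git lfs track` simply rewrites `.gitattributes` with these entries:

```bash
# Hedged sketch: equivalent LFS tracking commands for the patterns added above.
git lfs install
git lfs track "*.png" "*.jpg" "*.gif" "*.mp4"
git add .gitattributes
```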
.gitignore
ADDED
@@ -0,0 +1,165 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[ciod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+.env
+temp
.pre-commit-config.yaml
ADDED
@@ -0,0 +1,46 @@
+exclude: ^(tests/data|scripts|frontend/React)/
+repos:
+  - repo: https://github.com/PyCQA/flake8
+    rev: 7.0.0
+    hooks:
+      - id: flake8
+        args: ["--max-line-length=120"]
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+  - repo: https://github.com/pre-commit/mirrors-yapf
+    rev: v0.32.0
+    hooks:
+      - id: yapf
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: trailing-whitespace
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: requirements-txt-fixer
+      - id: double-quote-string-fixer
+      - id: check-merge-conflict
+      - id: fix-encoding-pragma
+        args: ["--remove"]
+      - id: mixed-line-ending
+        args: ["--fix=lf"]
+  - repo: https://github.com/executablebooks/mdformat
+    rev: 0.7.17
+    hooks:
+      - id: mdformat
+        args: ["--number"]
+        additional_dependencies:
+          - mdformat-openmmlab
+          - mdformat_frontmatter
+          - linkify-it-py
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.2.6
+    hooks:
+      - id: codespell
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.15.0
+    hooks:
+      - id: pyupgrade
+        args: ["--py36-plus"]
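To activate the hooks defined in this config, the standard pre-commit workflow applies (shown here as a usage sketch; the pre-commit package itself is not added by this commit):

```bash
# Install pre-commit, register the git hook, and run all configured hooks once.
pip install pre-commit
pre-commit install
pre-commit run --all-files
```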
.pylintrc
ADDED
@@ -0,0 +1,428 @@
+# This Pylint rcfile contains a best-effort configuration to uphold the
+# best-practices and style described in the Google Python style guide:
+#   https://google.github.io/styleguide/pyguide.html
+#
+# Its canonical open-source location is:
+#   https://google.github.io/styleguide/pylintrc
+
+[MASTER]
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=third_party,storage
+
+# Files or directories matching the regex patterns are skipped. The regex
+# matches against base names, not paths.
+ignore-patterns=
+
+# Pickle collected data for later comparisons.
+persistent=no
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=4
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=abstract-method,
+        apply-builtin,
+        arguments-differ,
+        attribute-defined-outside-init,
+        backtick,
+        bad-option-value,
+        basestring-builtin,
+        buffer-builtin,
+        c-extension-no-member,
+        consider-using-enumerate,
+        cmp-builtin,
+        cmp-method,
+        coerce-builtin,
+        coerce-method,
+        delslice-method,
+        div-method,
+        duplicate-code,
+        eq-without-hash,
+        execfile-builtin,
+        file-builtin,
+        filter-builtin-not-iterating,
+        fixme,
+        getslice-method,
+        global-statement,
+        hex-method,
+        idiv-method,
+        implicit-str-concat,
+        import-error,
+        import-self,
+        import-star-module-level,
+        inconsistent-return-statements,
+        input-builtin,
+        intern-builtin,
+        invalid-str-codec,
+        locally-disabled,
+        long-builtin,
+        long-suffix,
+        map-builtin-not-iterating,
+        misplaced-comparison-constant,
+        missing-function-docstring,
+        metaclass-assignment,
+        next-method-called,
+        next-method-defined,
+        no-absolute-import,
+        no-else-break,
+        no-else-continue,
+        no-else-raise,
+        no-else-return,
+        no-init,  # added
+        no-member,
+        no-name-in-module,
+        no-self-use,
+        nonzero-method,
+        oct-method,
+        old-division,
+        old-ne-operator,
+        old-octal-literal,
+        old-raise-syntax,
+        parameter-unpacking,
+        print-statement,
+        raising-string,
+        range-builtin-not-iterating,
+        raw_input-builtin,
+        rdiv-method,
+        reduce-builtin,
+        relative-import,
+        reload-builtin,
+        round-builtin,
+        setslice-method,
+        signature-differs,
+        standarderror-builtin,
+        suppressed-message,
+        sys-max-int,
+        too-few-public-methods,
+        too-many-ancestors,
+        too-many-arguments,
+        too-many-boolean-expressions,
+        too-many-branches,
+        too-many-instance-attributes,
+        too-many-locals,
+        too-many-nested-blocks,
+        too-many-public-methods,
+        too-many-return-statements,
+        too-many-statements,
+        trailing-newlines,
+        unichr-builtin,
+        unicode-builtin,
+        unnecessary-pass,
+        unpacking-in-except,
+        useless-else-on-loop,
+        useless-object-inheritance,
+        useless-suppression,
+        using-cmp-argument,
+        wrong-import-order,
+        xrange-builtin,
+        zip-builtin-not-iterating,
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=colorized
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[BASIC]
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=main,_
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl
+
+# Regular expression matching correct function names
+function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
+
+# Regular expression matching correct variable names
+variable-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct constant names
+const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct attribute names
+attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
+
+# Regular expression matching correct argument names
+argument-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct class names
+class-rgx=^_?[A-Z][a-zA-Z0-9]*$
+
+# Regular expression matching correct module names
+module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$
+
+# Regular expression matching correct method names
+method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=10
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=120
+
+# TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt
+# lines made too long by directives to pytype.
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=(?x)(
+  ^\s*(\#\ )?<?https?://\S+>?$|
+  ^\s*(from\s+\S+\s+)?import\s+.+$)
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=yes
+
+# Maximum number of lines in a module
+max-module-lines=99999
+
+# String used as indentation unit. The internal Google style guide mandates 2
+# spaces. Google's externaly-published style guide says 4, consistent with
+# PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google
+# projects (like TensorFlow).
+indent-string='    '
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=TODO
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging,absl.logging,tensorflow.io.logging
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,
+                   TERMIOS,
+                   Bastion,
+                   rexec,
+                   sets
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant, absl
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+                      __new__,
+                      setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+                  _fields,
+                  _replace,
+                  _source,
+                  _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls,
+                            class_
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=builtins.BaseException,
+                       builtins.Exception
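A usage sketch for the rcfile above; the target path `mindsearch/` is an assumption based on the repository layout:

```bash
# Run pylint with the project configuration added in this commit.
pip install pylint
pylint --rcfile=.pylintrc mindsearch/
```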
Dockerfile
ADDED
@@ -0,0 +1,27 @@
+FROM continuumio/miniconda3
+
+ARG OPENAI_API_KEY
+ENV OPENAI_API_KEY=${OPENAI_API_KEY}
+
+ARG BING_API_KEY
+ENV BING_API_KEY=${BING_API_KEY}
+
+# Set environment variables
+ENV PATH=/opt/conda/bin:$PATH
+
+# Clone the git repository
+RUN git clone https://github.com/InternLM/MindSearch.git /app
+
+WORKDIR /app
+
+# Create the fastapi environment and install the dependencies
+RUN conda create --name fastapi python=3.10 -y && \
+    conda run -n fastapi pip install -r requirements.txt && \
+    conda clean --all -f -y
+
+# Expose the default FastAPI port
+EXPOSE 8000
+
+# Start the FastAPI service
+ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "fastapi"]
+CMD ["python3", "-m", "mindsearch.app", "--asy", "--host", "0.0.0.0", "--port", "8002"]
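A hedged sketch of building and running this image; the image tag and key values are placeholders, and port 8002 is mapped because that is the port passed to `mindsearch.app` in the CMD above:

```bash
# Build the backend image and run it, forwarding the FastAPI port.
docker build -t mindsearch-api \
  --build-arg OPENAI_API_KEY=YOUR_OPENAI_KEY \
  --build-arg BING_API_KEY=YOUR_BING_KEY .
docker run --rm -p 8002:8002 mindsearch-api
```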
LICENSE
ADDED
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2024 Shanghai AI Laboratory.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
README.md
CHANGED
@@ -1,13 +1,180 @@
 ---
-title:
-emoji:
-colorFrom:
-colorTo:
+title: MindSearch111
+emoji: 📊
+colorFrom: purple
+colorTo: yellow
 sdk: gradio
-sdk_version:
+sdk_version: 5.7.1
 app_file: app.py
 pinned: false
-license: apache-2.0
 ---
 
+<div id="top"></div>
+
+<div align="center">
+
+<img src="assets/logo.svg" style="width: 50%; height: auto;">
+
+[📃 Paper](https://arxiv.org/abs/2407.20183) | [💻 Demo](https://internlm-chat.intern-ai.org.cn/)
+
+English | [简体中文](README_zh-CN.md)
+
+<https://github.com/user-attachments/assets/44ffe4b9-be26-4b93-a77b-02fed16e33fe>
+
+</div>
+</p>
+
+## ✨ MindSearch: Mimicking Human Minds Elicits Deep AI Searcher
+
+## 📅 Changelog
+
+- 2024/11/05: 🥳 MindSearch is now deployed on Puyu! 👉 [Try it](https://internlm-chat.intern-ai.org.cn/) 👈
+  - Refactored the agent module based on [Lagent v0.5](https://github.com/InternLM/lagent) for better performance in concurrency.
+  - Improved the UI to embody the simultaneous multi-query search.
+
+## ⚽️ Build Your Own MindSearch
+
+### Step1: Dependencies Installation
+
+```bash
+git clone https://github.com/InternLM/MindSearch
+cd MindSearch
+pip install -r requirements.txt
+```
+
+### Step2: Setup Environment Variables
+
+Before setting up the API, you need to configure environment variables. Rename the `.env.example` file to `.env` and fill in the required values.
+
+```bash
+mv .env.example .env
+# Open .env and add your keys and model configurations
+```
+
+### Step3: Setup MindSearch API
+
+Set up the FastAPI server.
+
+```bash
+python -m mindsearch.app --lang en --model_format internlm_silicon --search_engine DuckDuckGoSearch --asy
+```
+
+- `--lang`: language of the model, `en` for English and `cn` for Chinese.
+- `--model_format`: format of the model.
+  - `internlm_server` for InternLM2.5-7b-chat with a local server. (InternLM2.5-7b-chat has been better optimized for Chinese.)
+  - `gpt4` for GPT4.
+  - If you want to use other models, please modify [models](./mindsearch/agent/models.py).
+- `--search_engine`: search engine.
+  - `DuckDuckGoSearch` for the DuckDuckGo search engine.
+  - `BingSearch` for the Bing search engine.
+  - `BraveSearch` for the Brave web search API.
+  - `GoogleSearch` for the Google Serper web search API.
+  - `TencentSearch` for the Tencent search API.
+
+  Please set your web search engine API key as the `WEB_SEARCH_API_KEY` environment variable, unless you are using `DuckDuckGoSearch` (no key required) or `TencentSearch`, which instead requires a secret id as `TENCENT_SEARCH_SECRET_ID` and a secret key as `TENCENT_SEARCH_SECRET_KEY`.
+- `--asy`: deploy asynchronous agents.
+
+### Step4: Setup MindSearch Frontend
+
+The following frontend interfaces are provided:
+
+- React
+
+First configure the backend URL for the Vite proxy.
+
+```bash
+HOST="127.0.0.1" # modify as you need
+PORT=8002
+sed -i -r "s/target:\s*\"\"/target: \"${HOST}:${PORT}\"/" frontend/React/vite.config.ts
+```
+
+```bash
+# Install Node.js and npm
+# for Ubuntu
+sudo apt install nodejs npm
+
+# for Windows
+# download from https://nodejs.org/zh-cn/download/prebuilt-installer
+
+# Install dependencies
+cd frontend/React
+npm install
+npm start
+```
+
+Details can be found in [React](./frontend/React/README.md).
+
+- Gradio
+
+```bash
+python frontend/mindsearch_gradio.py
+```
+
+- Streamlit
+
+```bash
+streamlit run frontend/mindsearch_streamlit.py
+```
+
+## 🌐 Change Web Search API
+
+To use a different type of web search API, modify the `searcher_type` attribute in the `searcher_cfg` located in `mindsearch/agent/__init__.py`. Currently supported web search APIs include:
+
+- `GoogleSearch`
+- `DuckDuckGoSearch`
+- `BraveSearch`
+- `BingSearch`
+- `TencentSearch`
+
+For example, to change to the Brave Search API, you would configure it as follows:
+
+```python
+BingBrowser(
+    searcher_type='BraveSearch',
+    topk=2,
+    api_key=os.environ.get('BRAVE_API_KEY', 'YOUR BRAVE API')
+)
+```
+
+## 🐞 Using the Backend Without Frontend
+
+For users who prefer to interact with the backend directly, use the `backend_example.py` script. This script demonstrates how to send a query to the backend and process the response.
+
+```bash
+python backend_example.py
+```
+
+Make sure you have set up the environment variables and the backend is running before executing the script.
+
+## 🐞 Debug Locally
+
+```bash
+python -m mindsearch.terminal
+```
+
+## 📝 License
+
+This project is released under the [Apache 2.0 license](LICENSE).
+
+## Citation
+
+If you find this project useful in your research, please consider citing:
+
+```
+@article{chen2024mindsearch,
+  title={MindSearch: Mimicking Human Minds Elicits Deep AI Searcher},
+  author={Chen, Zehui and Liu, Kuikun and Wang, Qiuchen and Liu, Jiangning and Zhang, Wenwei and Chen, Kai and Zhao, Feng},
+  journal={arXiv preprint arXiv:2407.20183},
+  year={2024}
+}
+```
+
+## Our Projects
+
+Explore our additional research on large language models, focusing on LLM agents.
+
+- [Lagent](https://github.com/InternLM/lagent): A lightweight framework for building LLM-based agents
+- [AgentFLAN](https://github.com/InternLM/Agent-FLAN): An innovative approach for constructing and training with high-quality agent datasets (ACL 2024 Findings)
+- [T-Eval](https://github.com/open-compass/T-Eval): A fine-grained tool utilization evaluation benchmark (ACL 2024)
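As a rough illustration of the "backend without frontend" workflow described in the README above, a request of the following shape could be sent directly. The `/solve` route and the `inputs` field are assumptions, not confirmed by this diff, so check `backend_example.py` for the actual endpoint and payload:

```bash
# Hedged sketch: stream a query from the running backend (route and field name are assumptions).
curl -N -X POST "http://127.0.0.1:8002/solve" \
  -H "Content-Type: application/json" \
  -d '{"inputs": "What is MindSearch?"}'
```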
README_zh-CN.md
ADDED
@@ -0,0 +1,140 @@
+<div id="top"></div>
+
+<div align="center">
+
+<img src="assets/logo.svg" style="width: 50%; height: auto;">
+
+[📃 Paper](https://arxiv.org/abs/2407.20183) | [💻 Puyu Portal](https://internlm-chat.intern-ai.org.cn/)
+
+[English](README.md) | Simplified Chinese
+
+<https://github.com/user-attachments/assets/b4312e9c-5b40-43e5-8c69-929c373e4965>
+
+</div>
+</p>
+
+## ✨ MindSearch: Mimicking Human Minds Elicits Deep AI Searcher
+
+MindSearch is an open-source AI search engine framework with performance comparable to Perplexity.ai Pro. You can easily deploy it to build your own search engine, using either closed-source LLMs (such as GPT or Claude) or open-source LLMs (the [InternLM2.5 series models](https://huggingface.co/internlm/internlm2_5-7b-chat) are specially optimized to deliver excellent performance within the MindSearch framework; other open-source models have not been specifically tested). It has the following features:
+
+- 🤔 **Answers anything you want to know**: MindSearch solves all kinds of questions in your life through search
+- 📚 **In-depth knowledge exploration**: MindSearch browses hundreds of web pages to provide broader and deeper answers
+- 🔍 **Transparent solution path**: MindSearch exposes its complete thinking path, search keywords, and related content, improving the credibility and usability of its responses
+- 💻 **Multiple user interfaces**: provides various interfaces, including React, Gradio, Streamlit, and local debugging; pick whichever you need
+- 🧠 **Dynamic graph construction**: MindSearch decomposes the user query into sub-question nodes of a graph and progressively expands the graph based on the search results of WebSearcher
+
+<div align="center">
+
+<img src="assets/teaser.gif">
+
+</div>
+
+## ⚡️ MindSearch vs. Other AI Search Engines
+
+Comparison of ChatGPT-Web, Perplexity.ai (Pro), and MindSearch in terms of the depth, breadth, and accuracy of generated responses. The evaluation is based on 100 real-world questions carefully designed by human experts and scored by 5 experts\*.
+
+<div align="center">
+<img src="assets/mindsearch_openset.png" width="90%">
+</div>
+\* All experiments were completed before July 7, 2024.
+
+## ⚽️ Build Your Own MindSearch
+
+### Step 1: Install Dependencies
+
+```bash
+pip install -r requirements.txt
+```
+
+### Step 2: Start the MindSearch API
+
+Start the FastAPI server.
+
+```bash
+python -m mindsearch.app --lang en --model_format internlm_server --search_engine DuckDuckGoSearch
+```
+
+- `--lang`: language of the model, `en` for English and `cn` for Chinese.
+- `--model_format`: format of the model.
+  - `internlm_server` for the local InternLM2.5-7b-chat server.
+  - `gpt4` for GPT4.
+  - If you want to use other models, please modify [models](./mindsearch/agent/models.py).
+- `--search_engine`: search engine.
+  - `DuckDuckGoSearch` for the DuckDuckGo search engine.
+  - `BingSearch` for the Bing search engine.
+  - `BraveSearch` for the Brave search engine.
+  - `GoogleSearch` for the Google Serper search engine.
+  - `TencentSearch` for the Tencent search engine.
+
+Please set the API key of any web search engine other than DuckDuckGo and Tencent as the `WEB_SEARCH_API_KEY` environment variable. No key is needed for DuckDuckGo; for Tencent, set `TENCENT_SEARCH_SECRET_ID` and `TENCENT_SEARCH_SECRET_KEY`.
+
+### Step 3: Start the MindSearch Frontend
+
+The following frontend interfaces are provided:
+
+- React
+
+First configure the Vite API proxy to point at the actual backend URL.
+
+```bash
+HOST="127.0.0.1"
+PORT=8002
+sed -i -r "s/target:\s*\"\"/target: \"${HOST}:${PORT}\"/" frontend/React/vite.config.ts
+```
+
+```bash
+# Install Node.js and npm
+# for Ubuntu
+sudo apt install nodejs npm
+# for Windows
+# download from https://nodejs.org/zh-cn/download/prebuilt-installer
+
+cd frontend/React
+npm install
+npm start
+```
+
+More details can be found in [React](./frontend/React/README.md).
+
+- Gradio
+
+```bash
+python frontend/mindsearch_gradio.py
+```
+
+- Streamlit
+
+```bash
+streamlit run frontend/mindsearch_streamlit.py
+```
+
+## 🐞 Debug Locally
+
+```bash
+python mindsearch/terminal.py
+```
+
+## 📝 License
+
+This project is released under the [Apache 2.0 license](LICENSE).
+
+## Citation
+
+If this project is helpful to your research, please cite it as follows:
+
+```
+@article{chen2024mindsearch,
+  title={MindSearch: Mimicking Human Minds Elicits Deep AI Searcher},
+  author={Chen, Zehui and Liu, Kuikun and Wang, Qiuchen and Liu, Jiangning and Zhang, Wenwei and Chen, Kai and Zhao, Feng},
+  journal={arXiv preprint arXiv:2407.20183},
+  year={2024}
+}
+```
+
+## Related Projects
+
+Check out our other explorations on large language models, mainly in the direction of LLM agents.
+
+- [Lagent](https://github.com/InternLM/lagent): A lightweight and concise framework for LLM-based agents
+- [AgentFLAN](https://github.com/InternLM/Agent-FLAN): An approach for constructing high-quality agent corpora and training models with them (ACL 2024 Findings)
+- [T-Eval](https://github.com/open-compass/T-Eval): A fine-grained benchmark for evaluating LLM tool utilization (ACL 2024)
app.py
CHANGED
@@ -1,155 +1,323 @@
(The previous 155-line app.py — a simpler Gradio demo that imported json, gradio, requests and lagent's `AgentStatusCode`, started the backend with `os.system("python -m mindsearch.app --lang en --model_format internlm_server &")`, kept `PLANNER_HISTORY`/`SEARCHER_HISTORY` lists, streamed results from the backend with `requests`, and finished with `demo.launch(inbrowser=True, share=True)` — is removed and replaced by the version below.)
import json
import mimetypes
import os
import sys
import tempfile

import gradio as gr
import requests

sys.path.insert(0, os.path.dirname(__file__))

import schemdraw
from frontend.gradio_agentchatbot.agentchatbot import AgentChatbot
from frontend.gradio_agentchatbot.utils import ChatFileMessage, ChatMessage, ThoughtMetadata
from lagent.schema import AgentStatusCode
from schemdraw import flow


import os


os.system("pip show starlette")
# os.system("pip install -r requirements.txt")
os.system("pip install tenacity")
os.system("python -m mindsearch.app --lang en --model_format internlm_silicon --search_engine DuckDuckGoSearch &")


print('MindSearch is running on http://')

PLANNER_HISTORY = []
SEARCHER_HISTORY = []


def create_search_graph(adjacency_list: dict):
    import matplotlib.pyplot as plt

    plt.rcParams["font.sans-serif"] = ["SimHei"]

    with schemdraw.Drawing(fontsize=10, unit=1) as graph:
        node_pos, nodes, edges = {}, {}, []
        if "root" in adjacency_list:
            queue, layer, response_level = ["root"], 0, 0
            while queue:
                layer_len = len(queue)
                for i in range(layer_len):
                    node_name = queue.pop(0)
                    node_pos[node_name] = (layer * 5, -i * 3)
                    for item in adjacency_list[node_name]:
                        if item["name"] == "response":
                            response_level = max(response_level, (layer + 1) * 5)
                        else:
                            queue.append(item["name"])
                        edges.append((node_name, item["name"]))
                layer += 1
            for node_name, (x, y) in node_pos.items():
                if node_name == "root":
                    node = flow.Terminal().label(node_name).at((x, y)).color("pink")
                else:
                    node = flow.RoundBox(w=3.5, h=1.75).label(node_name).at((x, y)).color("teal")
                nodes[node_name] = node
            if response_level:
                response_node = (
                    flow.Terminal().label("response").at((response_level, 0)).color("orange")
                )
                nodes["response"] = response_node
            for start, end in edges:
                flow.Arc3(arrow="->").linestyle("--" if end == "response" else "-").at(
                    nodes[start].E
                ).to(nodes[end].W).color("grey" if end == "response" else "lightblue")
    return graph


def draw_search_graph(adjacency_list: dict, suffix=".png", dpi=360) -> str:
    g = create_search_graph(adjacency_list)
    path = tempfile.mktemp(suffix=suffix)
    g.save(path, dpi=dpi)
    return path


def rst_mem():
    """Reset the chatbot memory."""
    if PLANNER_HISTORY:
        PLANNER_HISTORY.clear()
    return [], [], 0


def format_response(gr_history, message, response, idx=-1):
    if idx < 0:
        idx = len(gr_history) + idx
    if message["stream_state"] == AgentStatusCode.STREAM_ING:
        gr_history[idx].content = response
    elif message["stream_state"] == AgentStatusCode.CODING:
        if gr_history[idx].thought_metadata.tool_name is None:
            gr_history[idx].content = gr_history[idx].content.split("<|action_start|>")[0]
            gr_history.insert(
                idx + 1,
                ChatMessage(
                    role="assistant",
                    content=response,
                    thought_metadata=ThoughtMetadata(tool_name="🖥️ Code Interpreter"),
                ),
            )
        else:
            gr_history[idx].content = response
    elif message["stream_state"] == AgentStatusCode.PLUGIN_START:
        if isinstance(response, dict):
            response = json.dumps(response, ensure_ascii=False, indent=4)
        if gr_history[idx].thought_metadata.tool_name is None:
            gr_history[idx].content = gr_history[idx].content.split("<|action_start|>")[0]
            gr_history.insert(
                idx + 1,
                ChatMessage(
                    role="assistant",
                    content="```json\n" + response,
                    thought_metadata=ThoughtMetadata(tool_name="🌐 Web Browser"),
                ),
            )
        else:
            gr_history[idx].content = "```json\n" + response
    elif message["stream_state"] == AgentStatusCode.PLUGIN_END and isinstance(response, dict):
        gr_history[idx].content = (
            f"```json\n{json.dumps(response, ensure_ascii=False, indent=4)}\n```"
        )
    elif message["stream_state"] in [AgentStatusCode.CODE_RETURN, AgentStatusCode.PLUGIN_RETURN]:
        try:
            content = json.loads(message["content"])
        except json.decoder.JSONDecodeError:
            content = message["content"]
        if gr_history[idx].thought_metadata.tool_name:
            gr_history.insert(
                idx + 1,
                ChatMessage(
                    role="assistant",
                    content=(
                        content
                        if isinstance(content, str)
                        else f"\n```json\n{json.dumps(content, ensure_ascii=False, indent=4)}\n```\n"
                    ),
                    thought_metadata=ThoughtMetadata(tool_name="Execution"),
                ),
            )
            gr_history.insert(idx + 2, ChatMessage(role="assistant", content=""))


def predict(history_planner, history_searcher, node_cnt):

    def streaming(raw_response):
        for chunk in raw_response.iter_lines(
            chunk_size=8192, decode_unicode=False, delimiter=b"\n"
        ):
            if chunk:
                decoded = chunk.decode("utf-8")
                if decoded == "\r":
                    continue
                if decoded[:6] == "data: ":
                    decoded = decoded[6:]
                elif decoded.startswith(": ping - "):
                    continue
                response = json.loads(decoded)
                yield (
                    response["current_node"],
                    (
                        response["response"]["formatted"]["node"][response["current_node"]]
                        if response["current_node"]
                        else response["response"]
                    ),
                    response["response"]["formatted"]["adjacency_list"],
                )

    global PLANNER_HISTORY
    PLANNER_HISTORY.extend(history_planner[-3:])
    search_graph_msg = history_planner[-1]

    url = "http://localhost:8002/solve"
    data = {"inputs": PLANNER_HISTORY[-3].content}
    raw_response = requests.post(url, json=data, timeout=60, stream=True)

    node_id2msg_idx = {}
    for resp in streaming(raw_response):
        node_name, agent_message, adjacency_list = resp
        dedup_nodes = set(adjacency_list) | {
            val["name"] for vals in adjacency_list.values() for val in vals
        }
        if dedup_nodes and len(dedup_nodes) != node_cnt:
            node_cnt = len(dedup_nodes)
            graph_path = draw_search_graph(adjacency_list)
            search_graph_msg.file.path = graph_path
            search_graph_msg.file.mime_type = mimetypes.guess_type(graph_path)[0]
        if node_name:
            if node_name in ["root", "response"]:
                continue
            node_id = f'【{node_name}】{agent_message["content"]}'
            agent_message = agent_message["response"]
            response = (
                agent_message["formatted"]["action"]
                if agent_message["stream_state"]
                in [AgentStatusCode.PLUGIN_START, AgentStatusCode.PLUGIN_END]
                else agent_message["formatted"] and agent_message["formatted"].get("thought")
            )
            if node_id not in node_id2msg_idx:
                node_id2msg_idx[node_id] = len(history_searcher) + 1
                history_searcher.append(ChatMessage(role="user", content=node_id))
                history_searcher.append(ChatMessage(role="assistant", content=""))
            offset = len(history_searcher)
            format_response(history_searcher, agent_message, response, node_id2msg_idx[node_id])
            flag, incr = False, len(history_searcher) - offset
            for key, value in node_id2msg_idx.items():
                if flag or key == node_id:
                    node_id2msg_idx[key] = value + incr
                if not flag:
                    flag = True
            yield history_planner, history_searcher, node_cnt
        else:
            response = (
                agent_message["formatted"]["action"]
                if agent_message["stream_state"]
                in [AgentStatusCode.CODING, AgentStatusCode.CODE_END]
                else agent_message["formatted"] and agent_message["formatted"].get("thought")
            )
            format_response(history_planner, agent_message, response, -2)
            if agent_message["stream_state"] == AgentStatusCode.END:
                PLANNER_HISTORY = history_planner
            yield history_planner, history_searcher, node_cnt
    return history_planner, history_searcher, node_cnt


with gr.Blocks(css=os.path.join(os.path.dirname(__file__), "css", "gradio_front.css")) as demo:
    with gr.Column(elem_classes="chat-box"):
        gr.HTML("""<h1 align="center">MindSearch Gradio Demo</h1>""")
        gr.HTML(
            """<p style="text-align: center; font-family: Arial, sans-serif;">
            MindSearch is an open-source AI Search Engine Framework with Perplexity.ai Pro performance.
            You can deploy your own Perplexity.ai-style search engine using either
            closed-source LLMs (GPT, Claude)
            or open-source LLMs (InternLM2.5-7b-chat).</p> """
        )
        gr.HTML(
            """
            <div style="text-align: center; font-size: 16px;">
                <a href="https://github.com/InternLM/MindSearch" style="margin-right: 15px;
                text-decoration: none; color: #4A90E2;" target="_blank">🔗 GitHub</a>
                <a href="https://arxiv.org/abs/2407.20183" style="margin-right: 15px;
                text-decoration: none; color: #4A90E2;" target="_blank">📄 Arxiv</a>
                <a href="https://huggingface.co/papers/2407.20183" style="margin-right:
                15px; text-decoration: none; color: #4A90E2;" target="_blank">📚 Hugging Face Papers</a>
                <a href="https://huggingface.co/spaces/internlm/MindSearch"
                style="text-decoration: none; color: #4A90E2;" target="_blank">🤗 Hugging Face Demo</a>
            </div>"""
        )
        gr.HTML(
            """
            <h1 align='right'><img
            src=
            'https://raw.githubusercontent.com/InternLM/MindSearch/98fd84d566fe9e3adc5028727f72f2944098fd05/assets/logo.svg'
            alt='MindSearch Logo1' class="logo" width="200"></h1> """
        )
    node_count = gr.State(0)
    with gr.Row():
        planner = AgentChatbot(
            label="planner",
            height=600,
            show_label=True,
            show_copy_button=True,
            bubble_full_width=False,
            render_markdown=True,
            elem_classes="chatbot-container",
        )
        searcher = AgentChatbot(
            label="searcher",
            height=600,
            show_label=True,
            show_copy_button=True,
            bubble_full_width=False,
            render_markdown=True,
            elem_classes="chatbot-container",
        )
    with gr.Row(elem_classes="chat-box"):
        # Text input area
        user_input = gr.Textbox(
            show_label=False,
            placeholder="Type your message...",
            lines=1,
            container=False,
            elem_classes="editor",
            scale=4,
        )
        # Buttons (now in the same Row)
        submitBtn = gr.Button("submit", variant="primary", elem_classes="toolbarButton", scale=1)
        clearBtn = gr.Button("clear", variant="secondary", elem_classes="toolbarButton", scale=1)
    with gr.Row(elem_classes="examples-container"):
        examples_component = gr.Examples(
            [
                ["Find legal precedents in contract law."],
                ["What are the top 10 e-commerce websites?"],
                ["Generate a report on global climate change."],
            ],
            inputs=user_input,
            label="Try these examples:",
        )

    def user(query, history):
        history.append(ChatMessage(role="user", content=query))
        history.append(ChatMessage(role="assistant", content=""))
        graph_path = draw_search_graph({"root": []})
        history.append(
            ChatFileMessage(
                role="assistant",
                file=gr.FileData(path=graph_path, mime_type=mimetypes.guess_type(graph_path)[0]),
            )
        )
        return "", history

    submitBtn.click(user, [user_input, planner], [user_input, planner], queue=False).then(
        predict,
        [planner, searcher, node_count],
        [planner, searcher, node_count],
    )
    clearBtn.click(rst_mem, None, [planner, searcher, node_count], queue=False)

demo.queue()
demo.launch()
# demo.launch(server_name="0.0.0.0", inbrowser=True, share=False)
|
|
|
dist/assets/logo-38417354.svg → assets/logo.svg
RENAMED
File without changes
|
assets/mindsearch_openset.png
ADDED
Git LFS Details
|
assets/teaser.gif
ADDED
Git LFS Details
|
backend_example.py
ADDED
@@ -0,0 +1,37 @@
import json

import requests

# Define the backend URL
url = "http://localhost:8002/solve"
headers = {"Content-Type": "application/json"}


# Function to send a query to the backend and get the response
def get_response(query):
    # Prepare the input data
    data = {"inputs": query}

    # Send the request to the backend
    response = requests.post(url, headers=headers, data=json.dumps(data), timeout=20, stream=True)

    # Process the streaming response
    for chunk in response.iter_lines(chunk_size=8192, decode_unicode=False, delimiter=b"\n"):
        if chunk:
            decoded = chunk.decode("utf-8")
            if decoded == "\r":
                continue
            if decoded[:6] == "data: ":
                decoded = decoded[6:]
            elif decoded.startswith(": ping - "):
                continue
            response_data = json.loads(decoded)
            agent_return = response_data["response"]
            node_name = response_data["current_node"]
            print(f"Node: {node_name}, Response: {agent_return['response']}")


# Example usage
if __name__ == "__main__":
    query = "What is the weather like today in New York?"
    get_response(query)
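Assuming the MindSearch API from Step 2 is already serving on port 8002, this example can be run directly with `python backend_example.py`; each streamed chunk prints the node currently being processed along with its partial response.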
dist/assets/background-95159880.png
DELETED
Binary file (36.9 kB)
|
|
dist/assets/index-327d01f5.js
DELETED
The diff for this file is too large to render.
See raw diff
|
|
dist/assets/index-ab4095ce.css
DELETED
@@ -1 +0,0 @@
(minified stylesheet contents omitted)
dist/assets/index-legacy-f2aa4b0e.js
DELETED
The diff for this file is too large to render.
See raw diff
|
|
dist/assets/pack-up-ad0b3cbc.svg
DELETED
dist/assets/polyfills-legacy-0b55db5f.js
DELETED
@@ -1 +0,0 @@
(minified polyfill bundle contents omitted)
r},Bl=Gf,Wl=rs.CONSTRUCTOR||!Ul((function(t){Bl.all(t).then(void 0,(function(){}))})),Vl=f,Jl=dt,Kl=ns,Yl=zf,$l=Nl;Hn({target:"Promise",stat:!0,forced:Wl},{all:function(t){var e=this,r=Kl.f(e),n=r.resolve,o=r.reject,i=Yl((function(){var r=Jl(e.resolve),i=[],u=0,c=1;$l(t,(function(t){var a=u++,f=!1;c++,Vl(r,e,t).then((function(t){f||(f=!0,i[a]=t,--c||n(i))}),o)})),--c||n(i)}));return i.error&&o(i.value),r.promise}});var ql=Hn,Hl=rs.CONSTRUCTOR,Xl=Gf,Ql=V,Zl=z,th=$r,eh=Xl&&Xl.prototype;if(ql({target:"Promise",proto:!0,forced:Hl,real:!0},{catch:function(t){return this.then(void 0,t)}}),Zl(Xl)){var rh=Ql("Promise").prototype.catch;eh.catch!==rh&&th(eh,"catch",rh,{unsafe:!0})}var nh=f,oh=dt,ih=ns,uh=zf,ch=Nl;Hn({target:"Promise",stat:!0,forced:Wl},{race:function(t){var e=this,r=ih.f(e),n=r.reject,o=uh((function(){var o=oh(e.resolve);ch(t,(function(t){nh(o,e,t).then(r.resolve,n)}))}));return o.error&&n(o.value),r.promise}});var ah=ns;Hn({target:"Promise",stat:!0,forced:rs.CONSTRUCTOR},{reject:function(t){var e=ah.f(this);return(0,e.reject)(t),e.promise}});var fh=Re,sh=U,lh=ns,hh=function(t,e){if(fh(t),sh(e)&&e.constructor===t)return e;var r=lh.f(t);return(0,r.resolve)(e),r.promise},ph=Hn,vh=rs.CONSTRUCTOR,dh=hh;V("Promise"),ph({target:"Promise",stat:!0,forced:vh},{resolve:function(t){return dh(this,t)}});var yh=Hn,gh=Gf,mh=o,bh=V,wh=z,Sh=Ca,Oh=hh,xh=$r,Eh=gh&&gh.prototype;if(yh({target:"Promise",proto:!0,real:!0,forced:!!gh&&mh((function(){Eh.finally.call({then:function(){}},(function(){}))}))},{finally:function(t){var e=Sh(this,bh("Promise")),r=wh(t);return this.then(r?function(r){return Oh(e,t()).then((function(){return r}))}:t,r?function(r){return Oh(e,t()).then((function(){throw r}))}:t)}}),wh(gh)){var jh=bh("Promise").prototype.finally;Eh.finally!==jh&&xh(Eh,"finally",jh,{unsafe:!0})}var Ph=Zt,Th=Fo,Ih=je.f,Lh=Ph("unscopables"),Rh=Array.prototype;void 0===Rh[Lh]&&Ih(Rh,Lh,{configurable:!0,value:Th(null)});var Ah,kh,Ch,_h=!o((function(){function t(){}return t.prototype.constructor=null,Object.getPrototypeOf(new t)!==t.prototype})),Fh=zt,Nh=z,Mh=Nt,Dh=_h,zh=sr("IE_PROTO"),Gh=Object,Uh=Gh.prototype,Bh=Dh?Gh.getPrototypeOf:function(t){var e=Mh(t);if(Fh(e,zh))return e[zh];var r=e.constructor;return Nh(r)&&e instanceof r?r.prototype:e instanceof Gh?Uh:null},Wh=o,Vh=z,Jh=U,Kh=Bh,Yh=$r,$h=Zt("iterator"),qh=!1;[].keys&&("next"in(Ch=[].keys())?(kh=Kh(Kh(Ch)))!==Object.prototype&&(Ah=kh):qh=!0);var Hh=!Jh(Ah)||Wh((function(){var t={};return Ah[$h].call(t)!==t}));Hh&&(Ah={}),Vh(Ah[$h])||Yh(Ah,$h,(function(){return this}));var Xh={IteratorPrototype:Ah,BUGGY_SAFARI_ITERATORS:qh},Qh=Xh.IteratorPrototype,Zh=Fo,tp=y,ep=ui,rp=nl,np=function(){return this},op=function(t,e,r,n){var o=e+" Iterator";return t.prototype=Zh(Qh,{next:tp(+!n,r)}),ep(t,o,!1),rp[o]=np,t},ip=Hn,up=f,cp=z,ap=op,fp=Bh,sp=ga,lp=ui,hp=Ve,pp=$r,vp=nl,dp=Xe.PROPER,yp=Xe.CONFIGURABLE,gp=Xh.IteratorPrototype,mp=Xh.BUGGY_SAFARI_ITERATORS,bp=Zt("iterator"),wp="keys",Sp="values",Op="entries",xp=function(){return this},Ep=function(t,e,r,n,o,i,u){ap(r,e,n);var c,a,f,s=function(t){if(t===o&&d)return d;if(!mp&&t&&t in p)return p[t];switch(t){case wp:case Sp:case Op:return function(){return new r(this,t)}}return function(){return new r(this)}},l=e+" Iterator",h=!1,p=t.prototype,v=p[bp]||p["@@iterator"]||o&&p[o],d=!mp&&v||s(o),y="Array"===e&&p.entries||v;if(y&&(c=fp(y.call(new t)))!==Object.prototype&&c.next&&(fp(c)!==gp&&(sp?sp(c,gp):cp(c[bp])||pp(c,bp,xp)),lp(c,l,!0)),dp&&o===Sp&&v&&v.name!==Sp&&(yp?hp(p,"name",Sp):(h=!0,d=function(){return 
up(v,this)})),o)if(a={values:s(Sp),keys:i?d:s(wp),entries:s(Op)},u)for(f in a)(mp||h||!(f in p))&&pp(p,f,a[f]);else ip({target:e,proto:!0,forced:mp||h},a);return p[bp]!==d&&pp(p,bp,d,{name:o}),vp[e]=d,a},jp=function(t,e){return{value:t,done:e}},Pp=M,Tp=function(t){Rh[Lh][t]=!0},Ip=nl,Lp=jr,Rp=je.f,Ap=Ep,kp=jp,Cp=i,_p="Array Iterator",Fp=Lp.set,Np=Lp.getterFor(_p),Mp=Ap(Array,"Array",(function(t,e){Fp(this,{type:_p,target:Pp(t),index:0,kind:e})}),(function(){var t=Np(this),e=t.target,r=t.index++;if(!e||r>=e.length)return t.target=void 0,kp(void 0,!0);switch(t.kind){case"keys":return kp(r,!1);case"values":return kp(e[r],!1)}return kp([r,e[r]],!1)}),"values"),Dp=Ip.Arguments=Ip.Array;if(Tp("keys"),Tp("values"),Tp("entries"),Cp&&"values"!==Dp.name)try{Rp(Dp,"name",{value:"values"})}catch(wb){}var zp={exports:{}},Gp=o((function(){if("function"==typeof ArrayBuffer){var t=new ArrayBuffer(8);Object.isExtensible(t)&&Object.defineProperty(t,"a",{value:8})}})),Up=o,Bp=U,Wp=j,Vp=Gp,Jp=Object.isExtensible,Kp=Up((function(){Jp(1)}))||Vp?function(t){return!!Bp(t)&&((!Vp||"ArrayBuffer"!==Wp(t))&&(!Jp||Jp(t)))}:Jp,Yp=!o((function(){return Object.isExtensible(Object.preventExtensions({}))})),$p=Hn,qp=S,Hp=lr,Xp=U,Qp=zt,Zp=je.f,tv=qr,ev=No,rv=Kp,nv=Yp,ov=!1,iv=Vt("meta"),uv=0,cv=function(t){Zp(t,iv,{value:{objectID:"O"+uv++,weakData:{}}})},av=zp.exports={enable:function(){av.enable=function(){},ov=!0;var t=tv.f,e=qp([].splice),r={};r[iv]=1,t(r).length&&(tv.f=function(r){for(var n=t(r),o=0,i=n.length;o<i;o++)if(n[o]===iv){e(n,o,1);break}return n},$p({target:"Object",stat:!0,forced:!0},{getOwnPropertyNames:ev.f}))},fastKey:function(t,e){if(!Xp(t))return"symbol"==typeof t?t:("string"==typeof t?"S":"P")+t;if(!Qp(t,iv)){if(!rv(t))return"F";if(!e)return"E";cv(t)}return t[iv].objectID},getWeakData:function(t,e){if(!Qp(t,iv)){if(!rv(t))return!0;if(!e)return!1;cv(t)}return t[iv].weakData},onFreeze:function(t){return nv&&ov&&rv(t)&&!Qp(t,iv)&&cv(t),t}};Hp[iv]=!0;var fv=zp.exports,sv=z,lv=U,hv=ga,pv=Hn,vv=r,dv=S,yv=Bn,gv=$r,mv=fv,bv=Nl,wv=ja,Sv=z,Ov=A,xv=U,Ev=o,jv=Ul,Pv=ui,Tv=function(t,e,r){var n,o;return hv&&sv(n=e.constructor)&&n!==r&&lv(o=n.prototype)&&o!==r.prototype&&hv(t,o),t},Iv=function(t,e,r){var n=-1!==t.indexOf("Map"),o=-1!==t.indexOf("Weak"),i=n?"set":"add",u=vv[t],c=u&&u.prototype,a=u,f={},s=function(t){var e=dv(c[t]);gv(c,t,"add"===t?function(t){return e(this,0===t?0:t),this}:"delete"===t?function(t){return!(o&&!xv(t))&&e(this,0===t?0:t)}:"get"===t?function(t){return o&&!xv(t)?void 0:e(this,0===t?0:t)}:"has"===t?function(t){return!(o&&!xv(t))&&e(this,0===t?0:t)}:function(t,r){return e(this,0===t?0:t,r),this})};if(yv(t,!Sv(u)||!(o||c.forEach&&!Ev((function(){(new u).entries().next()})))))a=r.getConstructor(e,t,n,i),mv.enable();else if(yv(t,!0)){var l=new a,h=l[i](o?{}:-0,1)!==l,p=Ev((function(){l.has(1)})),v=jv((function(t){new u(t)})),d=!o&&Ev((function(){for(var t=new u,e=5;e--;)t[i](e,e);return!t.has(-0)}));v||((a=e((function(t,e){wv(t,c);var r=Tv(new u,t,a);return Ov(e)||bv(e,r[i],{that:r,AS_ENTRIES:n}),r}))).prototype=c,c.constructor=a),(p||d)&&(s("delete"),s("has"),n&&s("get")),(d||h)&&s(i),o&&c.clear&&delete c.clear}return f[t]=a,pv({global:!0,constructor:!0,forced:a!==u},f),Pv(a,t),o||r.setStrong(a,t,n),a},Lv=$r,Rv=Fo,Av=Jo,kv=function(t,e,r){for(var n in e)Lv(t,n,e[n],r);return t},Cv=pi,_v=ja,Fv=A,Nv=Nl,Mv=Ep,Dv=jp,zv=Oa,Gv=i,Uv=fv.fastKey,Bv=jr.set,Wv=jr.getterFor,Vv={getConstructor:function(t,e,r,n){var o=t((function(t,o){_v(t,i),Bv(t,{type:e,index:Rv(null),first:void 0,last:void 
0,size:0}),Gv||(t.size=0),Fv(o)||Nv(o,t[n],{that:t,AS_ENTRIES:r})})),i=o.prototype,u=Wv(e),c=function(t,e,r){var n,o,i=u(t),c=a(t,e);return c?c.value=r:(i.last=c={index:o=Uv(e,!0),key:e,value:r,previous:n=i.last,next:void 0,removed:!1},i.first||(i.first=c),n&&(n.next=c),Gv?i.size++:t.size++,"F"!==o&&(i.index[o]=c)),t},a=function(t,e){var r,n=u(t),o=Uv(e);if("F"!==o)return n.index[o];for(r=n.first;r;r=r.next)if(r.key===e)return r};return kv(i,{clear:function(){for(var t=u(this),e=t.first;e;)e.removed=!0,e.previous&&(e.previous=e.previous.next=void 0),e=e.next;t.first=t.last=void 0,t.index=Rv(null),Gv?t.size=0:this.size=0},delete:function(t){var e=this,r=u(e),n=a(e,t);if(n){var o=n.next,i=n.previous;delete r.index[n.index],n.removed=!0,i&&(i.next=o),o&&(o.previous=i),r.first===n&&(r.first=o),r.last===n&&(r.last=i),Gv?r.size--:e.size--}return!!n},forEach:function(t){for(var e,r=u(this),n=Cv(t,arguments.length>1?arguments[1]:void 0);e=e?e.next:r.first;)for(n(e.value,e.key,this);e&&e.removed;)e=e.previous},has:function(t){return!!a(this,t)}}),kv(i,r?{get:function(t){var e=a(this,t);return e&&e.value},set:function(t,e){return c(this,0===t?0:t,e)}}:{add:function(t){return c(this,t=0===t?0:t,t)}}),Gv&&Av(i,"size",{configurable:!0,get:function(){return u(this).size}}),o},setStrong:function(t,e,r){var n=e+" Iterator",o=Wv(e),i=Wv(n);Mv(t,e,(function(t,e){Bv(this,{type:n,target:t,state:o(t),kind:e,last:void 0})}),(function(){for(var t=i(this),e=t.kind,r=t.last;r&&r.removed;)r=r.previous;return t.target&&(t.last=r=r?r.next:t.state.first)?Dv("keys"===e?r.key:"values"===e?r.value:[r.key,r.value],!1):(t.target=void 0,Dv(void 0,!0))}),r?"entries":"values",!r,!0),zv(e)}};Iv("Map",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),Vv);var Jv=S,Kv=Map.prototype,Yv={Map:Map,set:Jv(Kv.set),get:Jv(Kv.get),has:Jv(Kv.has),remove:Jv(Kv.delete),proto:Kv},$v=Hn,qv=dt,Hv=_,Xv=Nl,Qv=o,Zv=Yv.Map,td=Yv.has,ed=Yv.get,rd=Yv.set,nd=S([].push);$v({target:"Map",stat:!0,forced:Qv((function(){return 1!==Zv.groupBy("ab",(function(t){return t})).get("a").length}))},{groupBy:function(t,e){Hv(t),qv(e);var r=new Zv,n=0;return Xv(t,(function(t){var o=e(t,n++);td(r,o)?nd(ed(r,o),t):rd(r,o,[t])})),r}});var od=io,id=Qn?{}.toString:function(){return"[object "+od(this)+"]"};Qn||$r(Object.prototype,"toString",id,{unsafe:!0});var ud=S,cd=Zr,ad=ao,fd=_,sd=ud("".charAt),ld=ud("".charCodeAt),hd=ud("".slice),pd=function(t){return function(e,r){var n,o,i=ad(fd(e)),u=cd(r),c=i.length;return u<0||u>=c?t?"":void 0:(n=ld(i,u))<55296||n>56319||u+1===c||(o=ld(i,u+1))<56320||o>57343?t?sd(i,u):n:t?hd(i,u,u+2):o-56320+(n-55296<<10)+65536}},vd={codeAt:pd(!1),charAt:pd(!0)},dd=vd.charAt,yd=ao,gd=jr,md=Ep,bd=jp,wd="String Iterator",Sd=gd.set,Od=gd.getterFor(wd);md(String,"String",(function(t){Sd(this,{type:wd,string:yd(t),index:0})}),(function(){var t,e=Od(this),r=e.string,n=e.index;return n>=r.length?bd(void 0,!0):(t=dd(r,n),e.index+=t.length,bd(t,!1))})),$o.Map,Iv("Set",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),Vv);var xd=S,Ed=Set.prototype,jd={Set:Set,add:xd(Ed.add),has:xd(Ed.has),remove:xd(Ed.delete),proto:Ed},Pd=jd.has,Td=function(t){return Pd(t),t},Id=f,Ld=function(t,e,r){for(var n,o,i=r?t:t.iterator,u=t.next;!(n=Id(u,i)).done;)if(void 0!==(o=e(n.value)))return o},Rd=S,Ad=Ld,kd=jd.Set,Cd=jd.proto,_d=Rd(Cd.forEach),Fd=Rd(Cd.keys),Nd=Fd(new kd).next,Md=function(t,e,r){return r?Ad({iterator:Fd(t),next:Nd},e):_d(t,e)},Dd=Md,zd=jd.Set,Gd=jd.add,Ud=function(t){var e=new 
zd;return Dd(t,(function(t){Gd(e,t)})),e},Bd=aa(jd.proto,"size","get")||function(t){return t.size},Wd=dt,Vd=Re,Jd=f,Kd=Zr,Yd=function(t){return{iterator:t,next:t.next,done:!1}},$d="Invalid size",qd=RangeError,Hd=TypeError,Xd=Math.max,Qd=function(t,e){this.set=t,this.size=Xd(e,0),this.has=Wd(t.has),this.keys=Wd(t.keys)};Qd.prototype={getIterator:function(){return Yd(Vd(Jd(this.keys,this.set)))},includes:function(t){return Jd(this.has,this.set,t)}};var Zd=function(t){Vd(t);var e=+t.size;if(e!=e)throw new Hd($d);var r=Kd(e);if(r<0)throw new qd($d);return new Qd(t,r)},ty=Td,ey=Ud,ry=Bd,ny=Zd,oy=Md,iy=Ld,uy=jd.has,cy=jd.remove,ay=V,fy=function(t){return{size:t,has:function(){return!1},keys:function(){return{next:function(){return{done:!0}}}}}},sy=function(t){var e=ay("Set");try{(new e)[t](fy(0));try{return(new e)[t](fy(-1)),!1}catch(r){return!0}}catch(wb){return!1}},ly=function(t){var e=ty(this),r=ny(t),n=ey(e);return ry(e)<=r.size?oy(e,(function(t){r.includes(t)&&cy(n,t)})):iy(r.getIterator(),(function(t){uy(e,t)&&cy(n,t)})),n};Hn({target:"Set",proto:!0,real:!0,forced:!sy("difference")},{difference:ly});var hy=Td,py=Bd,vy=Zd,dy=Md,yy=Ld,gy=jd.Set,my=jd.add,by=jd.has,wy=o,Sy=function(t){var e=hy(this),r=vy(t),n=new gy;return py(e)>r.size?yy(r.getIterator(),(function(t){by(e,t)&&my(n,t)})):dy(e,(function(t){r.includes(t)&&my(n,t)})),n};Hn({target:"Set",proto:!0,real:!0,forced:!sy("intersection")||wy((function(){return"3,2"!==String(Array.from(new Set([1,2,3]).intersection(new Set([3,2]))))}))},{intersection:Sy});var Oy=Td,xy=jd.has,Ey=Bd,jy=Zd,Py=Md,Ty=Ld,Iy=Ol,Ly=function(t){var e=Oy(this),r=jy(t);if(Ey(e)<=r.size)return!1!==Py(e,(function(t){if(r.includes(t))return!1}),!0);var n=r.getIterator();return!1!==Ty(n,(function(t){if(xy(e,t))return Iy(n,"normal",!1)}))};Hn({target:"Set",proto:!0,real:!0,forced:!sy("isDisjointFrom")},{isDisjointFrom:Ly});var Ry=Td,Ay=Bd,ky=Md,Cy=Zd,_y=function(t){var e=Ry(this),r=Cy(t);return!(Ay(e)>r.size)&&!1!==ky(e,(function(t){if(!r.includes(t))return!1}),!0)};Hn({target:"Set",proto:!0,real:!0,forced:!sy("isSubsetOf")},{isSubsetOf:_y});var Fy=Td,Ny=jd.has,My=Bd,Dy=Zd,zy=Ld,Gy=Ol,Uy=function(t){var e=Fy(this),r=Dy(t);if(My(e)<r.size)return!1;var n=r.getIterator();return!1!==zy(n,(function(t){if(!Ny(e,t))return Gy(n,"normal",!1)}))};Hn({target:"Set",proto:!0,real:!0,forced:!sy("isSupersetOf")},{isSupersetOf:Uy});var By=Td,Wy=Ud,Vy=Zd,Jy=Ld,Ky=jd.add,Yy=jd.has,$y=jd.remove,qy=function(t){var e=By(this),r=Vy(t).getIterator(),n=Wy(e);return Jy(r,(function(t){Yy(e,t)?$y(n,t):Ky(n,t)})),n};Hn({target:"Set",proto:!0,real:!0,forced:!sy("symmetricDifference")},{symmetricDifference:qy});var Hy=Td,Xy=jd.add,Qy=Ud,Zy=Zd,tg=Ld,eg=function(t){var e=Hy(this),r=Zy(t).getIterator(),n=Qy(e);return tg(r,(function(t){Xy(n,t)})),n};Hn({target:"Set",proto:!0,real:!0,forced:!sy("union")},{union:eg}),$o.Set;var rg=o,ng=Bi.forEach,og=function(t,e){var r=[][t];return!!r&&rg((function(){r.call(null,e||function(){return 1},1)}))},ig=og("forEach")?[].forEach:function(t){return ng(this,t,arguments.length>1?arguments[1]:void 0)};Hn({target:"Array",proto:!0,forced:[].forEach!==ig},{forEach:ig});var ug=Hn,cg=i,ag=fo.f;ug({target:"Object",stat:!0,forced:Object.defineProperties!==ag,sham:!cg},{defineProperties:ag});var fg=Hn,sg=i,lg=je.f;fg({target:"Object",stat:!0,forced:Object.defineProperty!==lg,sham:!sg},{defineProperty:lg});var hg=Hn,pg=o,vg=M,dg=n.f,yg=i;hg({target:"Object",stat:!0,forced:!yg||pg((function(){dg(1)})),sham:!yg},{getOwnPropertyDescriptor:function(t,e){return dg(vg(t),e)}});var 
gg=i,mg=je,bg=y,wg=Ln,Sg=M,Og=n,xg=function(t,e,r){gg?mg.f(t,e,bg(0,r)):t[e]=r};Hn({target:"Object",stat:!0,sham:!i},{getOwnPropertyDescriptors:function(t){for(var e,r,n=Sg(t),o=Og.f,i=wg(n),u={},c=0;i.length>c;)void 0!==(r=o(n,e=i[c++]))&&xg(u,e,r);return u}});var Eg=Nt,jg=ho;Hn({target:"Object",stat:!0,forced:o((function(){jg(1)}))},{keys:function(t){return jg(Eg(t))}});var Pg={CSSRuleList:0,CSSStyleDeclaration:0,CSSValueList:0,ClientRectList:0,DOMRectList:0,DOMStringList:0,DOMTokenList:1,DataTransferItemList:0,FileList:0,HTMLAllCollection:0,HTMLCollection:0,HTMLFormElement:0,HTMLSelectElement:0,MediaList:0,MimeTypeArray:0,NamedNodeMap:0,NodeList:1,PaintRequestList:0,Plugin:0,PluginArray:0,SVGLengthList:0,SVGNumberList:0,SVGPathSegList:0,SVGPointList:0,SVGStringList:0,SVGTransformList:0,SourceBufferList:0,StyleSheetList:0,TextTrackCueList:0,TextTrackList:0,TouchList:0},Tg=pe("span").classList,Ig=Tg&&Tg.constructor&&Tg.constructor.prototype,Lg=Ig===Object.prototype?void 0:Ig,Rg=r,Ag=Pg,kg=Lg,Cg=ig,_g=Ve,Fg=function(t){if(t&&t.forEach!==Cg)try{_g(t,"forEach",Cg)}catch(wb){t.forEach=Cg}};for(var Ng in Ag)Ag[Ng]&&Fg(Rg[Ng]&&Rg[Ng].prototype);Fg(kg);var Mg=r;Hn({global:!0,forced:Mg.globalThis!==Mg},{globalThis:Mg});var Dg,zg,Gg=U,Ug=j,Bg=Zt("match"),Wg=Re,Vg=function(){var t=Wg(this),e="";return t.hasIndices&&(e+="d"),t.global&&(e+="g"),t.ignoreCase&&(e+="i"),t.multiline&&(e+="m"),t.dotAll&&(e+="s"),t.unicode&&(e+="u"),t.unicodeSets&&(e+="v"),t.sticky&&(e+="y"),e},Jg=f,Kg=zt,Yg=J,$g=Vg,qg=RegExp.prototype,Hg=vd.charAt,Xg=o,Qg=r.RegExp,Zg=Xg((function(){var t=Qg("a","y");return t.lastIndex=2,null!==t.exec("abcd")})),tm=Zg||Xg((function(){return!Qg("a","y").sticky})),em={BROKEN_CARET:Zg||Xg((function(){var t=Qg("^r","gy");return t.lastIndex=2,null!==t.exec("str")})),MISSED_STICKY:tm,UNSUPPORTED_Y:Zg},rm=o,nm=r.RegExp,om=rm((function(){var t=nm(".","s");return!(t.dotAll&&t.test("\n")&&"s"===t.flags)})),im=o,um=r.RegExp,cm=im((function(){var t=um("(?<a>b)","g");return"b"!==t.exec("b").groups.a||"bc"!=="b".replace(t,"$<a>c")})),am=f,fm=S,sm=ao,lm=Vg,hm=em,pm=Fo,vm=jr.get,dm=om,ym=cm,gm=Ct("native-string-replace",String.prototype.replace),mm=RegExp.prototype.exec,bm=mm,wm=fm("".charAt),Sm=fm("".indexOf),Om=fm("".replace),xm=fm("".slice),Em=(zg=/b*/g,am(mm,Dg=/a/,"a"),am(mm,zg,"a"),0!==Dg.lastIndex||0!==zg.lastIndex),jm=hm.BROKEN_CARET,Pm=void 0!==/()??/.exec("")[1];(Em||Pm||jm||dm||ym)&&(bm=function(t){var e,r,n,o,i,u,c,a=this,f=vm(a),s=sm(t),l=f.raw;if(l)return l.lastIndex=a.lastIndex,e=am(bm,l,s),a.lastIndex=l.lastIndex,e;var h=f.groups,p=jm&&a.sticky,v=am(lm,a),d=a.source,y=0,g=s;if(p&&(v=Om(v,"y",""),-1===Sm(v,"g")&&(v+="g"),g=xm(s,a.lastIndex),a.lastIndex>0&&(!a.multiline||a.multiline&&"\n"!==wm(s,a.lastIndex-1))&&(d="(?: "+d+")",g=" "+g,y++),r=new RegExp("^(?:"+d+")",v)),Pm&&(r=new RegExp("^"+d+"$(?!\\s)",v)),Em&&(n=a.lastIndex),o=am(mm,p?r:a,g),p?o?(o.input=xm(o.input,y),o[0]=xm(o[0],y),o.index=a.lastIndex,a.lastIndex+=o[0].length):a.lastIndex=0:Em&&o&&(a.lastIndex=a.global?o.index+o[0].length:n),Pm&&o&&o.length>1&&am(gm,o[0],r,(function(){for(i=1;i<arguments.length-2;i++)void 0===arguments[i]&&(o[i]=void 0)})),o&&h)for(o.groups=u=pm(null),i=0;i<h.length;i++)u[(c=h[i])[0]]=o[c[1]];return o});var Tm=f,Im=Re,Lm=z,Rm=j,Am=bm,km=TypeError,Cm=Hn,_m=f,Fm=fi,Nm=op,Mm=jp,Dm=_,zm=un,Gm=ao,Um=Re,Bm=A,Wm=function(t){var e;return Gg(t)&&(void 0!==(e=t[Bg])?!!e:"RegExp"===Ug(t))},Vm=function(t){var e=t.flags;return void 0!==e||"flags"in 
qg||Kg(t,"flags")||!Yg(qg,t)?e:Jg($g,t)},Jm=mt,Km=$r,Ym=o,$m=Ca,qm=function(t,e,r){return e+(r?Hg(t,e).length:1)},Hm=function(t,e){var r=t.exec;if(Lm(r)){var n=Tm(r,t,e);return null!==n&&Im(n),n}if("RegExp"===Rm(t))return Tm(Am,t,e);throw new km("RegExp#exec called on incompatible receiver")},Xm=jr,Qm=Zt("matchAll"),Zm="RegExp String",tb=Zm+" Iterator",eb=Xm.set,rb=Xm.getterFor(tb),nb=RegExp.prototype,ob=TypeError,ib=Fm("".indexOf),ub=Fm("".matchAll),cb=!!ub&&!Ym((function(){ub("a",/./)})),ab=Nm((function(t,e,r,n){eb(this,{type:tb,regexp:t,string:e,global:r,unicode:n,done:!1})}),Zm,(function(){var t=rb(this);if(t.done)return Mm(void 0,!0);var e=t.regexp,r=t.string,n=Hm(e,r);return null===n?(t.done=!0,Mm(void 0,!0)):t.global?(""===Gm(n[0])&&(e.lastIndex=qm(r,zm(e.lastIndex),t.unicode)),Mm(n,!1)):(t.done=!0,Mm(n,!1))})),fb=function(t){var e,r,n,o=Um(this),i=Gm(t),u=$m(o,RegExp),c=Gm(Vm(o));return e=new u(u===RegExp?o.source:o,c),r=!!~ib(c,"g"),n=!!~ib(c,"u"),e.lastIndex=zm(o.lastIndex),new ab(e,i,r,n)};Cm({target:"String",proto:!0,forced:cb},{matchAll:function(t){var e,r,n,o=Dm(this);if(Bm(t)){if(cb)return ub(o,t)}else{if(Wm(t)&&(e=Gm(Dm(Vm(t))),!~ib(e,"g")))throw new ob("`.matchAll` does not allow non-global regexes");if(cb)return ub(o,t);if(n=Jm(t,Qm))return _m(n,t,o)}return r=Gm(o),new RegExp(t,"g")[Qm](r)}}),Qm in nb||Km(nb,Qm,fb);!function(t){var e=function(t){var e,r=Object.prototype,n=r.hasOwnProperty,o=Object.defineProperty||function(t,e,r){t[e]=r.value},i="function"==typeof Symbol?Symbol:{},u=i.iterator||"@@iterator",c=i.asyncIterator||"@@asyncIterator",a=i.toStringTag||"@@toStringTag";function f(t,e,r){return Object.defineProperty(t,e,{value:r,enumerable:!0,configurable:!0,writable:!0}),t[e]}try{f({},"")}catch(C){f=function(t,e,r){return t[e]=r}}function s(t,e,r,n){var i=e&&e.prototype instanceof g?e:g,u=Object.create(i.prototype),c=new R(n||[]);return o(u,"_invoke",{value:P(t,r,c)}),u}function l(t,e,r){try{return{type:"normal",arg:t.call(e,r)}}catch(C){return{type:"throw",arg:C}}}t.wrap=s;var h="suspendedStart",p="suspendedYield",v="executing",d="completed",y={};function g(){}function m(){}function b(){}var w={};f(w,u,(function(){return this}));var S=Object.getPrototypeOf,O=S&&S(S(A([])));O&&O!==r&&n.call(O,u)&&(w=O);var x=b.prototype=g.prototype=Object.create(w);function E(t){["next","throw","return"].forEach((function(e){f(t,e,(function(t){return this._invoke(e,t)}))}))}function j(t,e){function r(o,i,u,c){var a=l(t[o],t,i);if("throw"!==a.type){var f=a.arg,s=f.value;return s&&"object"==typeof s&&n.call(s,"__await")?e.resolve(s.__await).then((function(t){r("next",t,u,c)}),(function(t){r("throw",t,u,c)})):e.resolve(s).then((function(t){f.value=t,u(f)}),(function(t){return r("throw",t,u,c)}))}c(a.arg)}var i;o(this,"_invoke",{value:function(t,n){function o(){return new e((function(e,o){r(t,n,e,o)}))}return i=i?i.then(o,o):o()}})}function P(t,e,r){var n=h;return function(o,i){if(n===v)throw new Error("Generator is already running");if(n===d){if("throw"===o)throw i;return k()}for(r.method=o,r.arg=i;;){var u=r.delegate;if(u){var c=T(u,r);if(c){if(c===y)continue;return c}}if("next"===r.method)r.sent=r._sent=r.arg;else if("throw"===r.method){if(n===h)throw n=d,r.arg;r.dispatchException(r.arg)}else"return"===r.method&&r.abrupt("return",r.arg);n=v;var a=l(t,e,r);if("normal"===a.type){if(n=r.done?d:p,a.arg===y)continue;return{value:a.arg,done:r.done}}"throw"===a.type&&(n=d,r.method="throw",r.arg=a.arg)}}}function T(t,r){var n=r.method,o=t.iterator[n];if(o===e)return 
r.delegate=null,"throw"===n&&t.iterator.return&&(r.method="return",r.arg=e,T(t,r),"throw"===r.method)||"return"!==n&&(r.method="throw",r.arg=new TypeError("The iterator does not provide a '"+n+"' method")),y;var i=l(o,t.iterator,r.arg);if("throw"===i.type)return r.method="throw",r.arg=i.arg,r.delegate=null,y;var u=i.arg;return u?u.done?(r[t.resultName]=u.value,r.next=t.nextLoc,"return"!==r.method&&(r.method="next",r.arg=e),r.delegate=null,y):u:(r.method="throw",r.arg=new TypeError("iterator result is not an object"),r.delegate=null,y)}function I(t){var e={tryLoc:t[0]};1 in t&&(e.catchLoc=t[1]),2 in t&&(e.finallyLoc=t[2],e.afterLoc=t[3]),this.tryEntries.push(e)}function L(t){var e=t.completion||{};e.type="normal",delete e.arg,t.completion=e}function R(t){this.tryEntries=[{tryLoc:"root"}],t.forEach(I,this),this.reset(!0)}function A(t){if(t){var r=t[u];if(r)return r.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var o=-1,i=function r(){for(;++o<t.length;)if(n.call(t,o))return r.value=t[o],r.done=!1,r;return r.value=e,r.done=!0,r};return i.next=i}}return{next:k}}function k(){return{value:e,done:!0}}return m.prototype=b,o(x,"constructor",{value:b,configurable:!0}),o(b,"constructor",{value:m,configurable:!0}),m.displayName=f(b,a,"GeneratorFunction"),t.isGeneratorFunction=function(t){var e="function"==typeof t&&t.constructor;return!!e&&(e===m||"GeneratorFunction"===(e.displayName||e.name))},t.mark=function(t){return Object.setPrototypeOf?Object.setPrototypeOf(t,b):(t.__proto__=b,f(t,a,"GeneratorFunction")),t.prototype=Object.create(x),t},t.awrap=function(t){return{__await:t}},E(j.prototype),f(j.prototype,c,(function(){return this})),t.AsyncIterator=j,t.async=function(e,r,n,o,i){void 0===i&&(i=Promise);var u=new j(s(e,r,n,o),i);return t.isGeneratorFunction(r)?u:u.next().then((function(t){return t.done?t.value:u.next()}))},E(x),f(x,a,"Generator"),f(x,u,(function(){return this})),f(x,"toString",(function(){return"[object Generator]"})),t.keys=function(t){var e=Object(t),r=[];for(var n in e)r.push(n);return r.reverse(),function t(){for(;r.length;){var n=r.pop();if(n in e)return t.value=n,t.done=!1,t}return t.done=!0,t}},t.values=A,R.prototype={constructor:R,reset:function(t){if(this.prev=0,this.next=0,this.sent=this._sent=e,this.done=!1,this.delegate=null,this.method="next",this.arg=e,this.tryEntries.forEach(L),!t)for(var r in this)"t"===r.charAt(0)&&n.call(this,r)&&!isNaN(+r.slice(1))&&(this[r]=e)},stop:function(){this.done=!0;var t=this.tryEntries[0].completion;if("throw"===t.type)throw t.arg;return this.rval},dispatchException:function(t){if(this.done)throw t;var r=this;function o(n,o){return c.type="throw",c.arg=t,r.next=n,o&&(r.method="next",r.arg=e),!!o}for(var i=this.tryEntries.length-1;i>=0;--i){var u=this.tryEntries[i],c=u.completion;if("root"===u.tryLoc)return o("end");if(u.tryLoc<=this.prev){var a=n.call(u,"catchLoc"),f=n.call(u,"finallyLoc");if(a&&f){if(this.prev<u.catchLoc)return o(u.catchLoc,!0);if(this.prev<u.finallyLoc)return o(u.finallyLoc)}else if(a){if(this.prev<u.catchLoc)return o(u.catchLoc,!0)}else{if(!f)throw new Error("try statement without catch or finally");if(this.prev<u.finallyLoc)return o(u.finallyLoc)}}}},abrupt:function(t,e){for(var r=this.tryEntries.length-1;r>=0;--r){var o=this.tryEntries[r];if(o.tryLoc<=this.prev&&n.call(o,"finallyLoc")&&this.prev<o.finallyLoc){var i=o;break}}i&&("break"===t||"continue"===t)&&i.tryLoc<=e&&e<=i.finallyLoc&&(i=null);var u=i?i.completion:{};return 
u.type=t,u.arg=e,i?(this.method="next",this.next=i.finallyLoc,y):this.complete(u)},complete:function(t,e){if("throw"===t.type)throw t.arg;return"break"===t.type||"continue"===t.type?this.next=t.arg:"return"===t.type?(this.rval=this.arg=t.arg,this.method="return",this.next="end"):"normal"===t.type&&e&&(this.next=e),y},finish:function(t){for(var e=this.tryEntries.length-1;e>=0;--e){var r=this.tryEntries[e];if(r.finallyLoc===t)return this.complete(r.completion,r.afterLoc),L(r),y}},catch:function(t){for(var e=this.tryEntries.length-1;e>=0;--e){var r=this.tryEntries[e];if(r.tryLoc===t){var n=r.completion;if("throw"===n.type){var o=n.arg;L(r)}return o}}throw new Error("illegal catch attempt")},delegateYield:function(t,r,n){return this.delegate={iterator:A(t),resultName:r,nextLoc:n},"next"===this.method&&(this.arg=e),y}},t}(t.exports);try{regeneratorRuntime=e}catch(r){"object"==typeof globalThis?globalThis.regeneratorRuntime=e:Function("r","regeneratorRuntime = r")(e)}}({exports:{}});var sb=r,lb=Pg,hb=Lg,pb=Mp,vb=Ve,db=ui,yb=Zt("iterator"),gb=pb.values,mb=function(t,e){if(t){if(t[yb]!==gb)try{vb(t,yb,gb)}catch(wb){t[yb]=gb}if(db(t,e,!0),lb[e])for(var r in pb)if(t[r]!==pb[r])try{vb(t,r,pb[r])}catch(wb){t[r]=pb[r]}}};for(var bb in lb)mb(sb[bb]&&sb[bb].prototype,bb);mb(hb,"DOMTokenList"),function(){function e(t,e){return(e||"")+" (SystemJS https://github.com/systemjs/systemjs/blob/main/docs/errors.md#"+t+")"}function r(t,e){if(-1!==t.indexOf("\\")&&(t=t.replace(E,"/")),"/"===t[0]&&"/"===t[1])return e.slice(0,e.indexOf(":")+1)+t;if("."===t[0]&&("/"===t[1]||"."===t[1]&&("/"===t[2]||2===t.length&&(t+="/"))||1===t.length&&(t+="/"))||"/"===t[0]){var r,n=e.slice(0,e.indexOf(":")+1);if(r="/"===e[n.length+1]?"file:"!==n?(r=e.slice(n.length+2)).slice(r.indexOf("/")+1):e.slice(8):e.slice(n.length+("/"===e[n.length])),"/"===t[0])return e.slice(0,e.length-r.length-1)+t;for(var o=r.slice(0,r.lastIndexOf("/")+1)+t,i=[],u=-1,c=0;c<o.length;c++)-1!==u?"/"===o[c]&&(i.push(o.slice(u,c+1)),u=-1):"."===o[c]?"."!==o[c+1]||"/"!==o[c+2]&&c+2!==o.length?"/"===o[c+1]||c+1===o.length?c+=1:u=c:(i.pop(),c+=2):u=c;return-1!==u&&i.push(o.slice(u)),e.slice(0,e.length-r.length)+i.join("")}}function n(t,e){return r(t,e)||(-1!==t.indexOf(":")?t:r("./"+t,e))}function o(t,e,n,o,i){for(var u in t){var c=r(u,n)||u,s=t[u];if("string"==typeof s){var l=f(o,r(s,n)||s,i);l?e[c]=l:a("W1",u,s)}}}function i(t,e,r){var i;for(i in t.imports&&o(t.imports,r.imports,e,r,null),t.scopes||{}){var u=n(i,e);o(t.scopes[i],r.scopes[u]||(r.scopes[u]={}),e,r,u)}for(i in t.depcache||{})r.depcache[n(i,e)]=t.depcache[i];for(i in t.integrity||{})r.integrity[n(i,e)]=t.integrity[i]}function u(t,e){if(e[t])return t;var r=t.length;do{var n=t.slice(0,r+1);if(n in e)return n}while(-1!==(r=t.lastIndexOf("/",r-1)))}function c(t,e){var r=u(t,e);if(r){var n=e[r];if(null===n)return;if(!(t.length>r.length&&"/"!==n[n.length-1]))return n+t.slice(r.length);a("W2",r,n)}}function a(t,r,n){console.warn(e(t,[n,r].join(", ")))}function f(t,e,r){for(var n=t.scopes,o=r&&u(r,n);o;){var i=c(e,n[o]);if(i)return i;o=u(o.slice(0,o.lastIndexOf("/")),n)}return c(e,t.imports)||-1!==e.indexOf(":")&&e}function s(){this[P]={}}function l(t,r,n,o){var i=t[P][r];if(i)return i;var u=[],c=Object.create(null);j&&Object.defineProperty(c,j,{value:"Module"});var a=Promise.resolve().then((function(){return t.instantiate(r,n,o)})).then((function(n){if(!n)throw Error(e(2,r));var o=n[1]((function(t,e){i.h=!0;var r=!1;if("string"==typeof t)t in c&&c[t]===e||(c[t]=e,r=!0);else{for(var n in t)e=t[n],n in 
c&&c[n]===e||(c[n]=e,r=!0);t&&t.__esModule&&(c.__esModule=t.__esModule)}if(r)for(var o=0;o<u.length;o++){var a=u[o];a&&a(c)}return e}),2===n[1].length?{import:function(e,n){return t.import(e,r,n)},meta:t.createContext(r)}:void 0);return i.e=o.execute||function(){},[n[0],o.setters||[],n[2]||[]]}),(function(t){throw i.e=null,i.er=t,t})),f=a.then((function(e){return Promise.all(e[0].map((function(n,o){var i=e[1][o],u=e[2][o];return Promise.resolve(t.resolve(n,r)).then((function(e){var n=l(t,e,r,u);return Promise.resolve(n.I).then((function(){return i&&(n.i.push(i),!n.h&&n.I||i(n.n)),n}))}))}))).then((function(t){i.d=t}))}));return i=t[P][r]={id:r,i:u,n:c,m:o,I:a,L:f,h:!1,d:void 0,e:void 0,er:void 0,E:void 0,C:void 0,p:void 0}}function h(t,e,r,n){if(!n[e.id])return n[e.id]=!0,Promise.resolve(e.L).then((function(){return e.p&&null!==e.p.e||(e.p=r),Promise.all(e.d.map((function(e){return h(t,e,r,n)})))})).catch((function(t){if(e.er)throw t;throw e.e=null,t}))}function p(t,e){return e.C=h(t,e,e,{}).then((function(){return v(t,e,{})})).then((function(){return e.n}))}function v(t,e,r){function n(){try{var t=i.call(I);if(t)return t=t.then((function(){e.C=e.n,e.E=null}),(function(t){throw e.er=t,e.E=null,t})),e.E=t;e.C=e.n,e.L=e.I=void 0}catch(r){throw e.er=r,r}}if(!r[e.id]){if(r[e.id]=!0,!e.e){if(e.er)throw e.er;return e.E?e.E:void 0}var o,i=e.e;return e.e=null,e.d.forEach((function(n){try{var i=v(t,n,r);i&&(o=o||[]).push(i)}catch(c){throw e.er=c,c}})),o?Promise.all(o).then(n):n()}}function d(){[].forEach.call(document.querySelectorAll("script"),(function(t){if(!t.sp)if("systemjs-module"===t.type){if(t.sp=!0,!t.src)return;System.import("import:"===t.src.slice(0,7)?t.src.slice(7):n(t.src,y)).catch((function(e){if(e.message.indexOf("https://github.com/systemjs/systemjs/blob/main/docs/errors.md#3")>-1){var r=document.createEvent("Event");r.initEvent("error",!1,!1),t.dispatchEvent(r)}return Promise.reject(e)}))}else if("systemjs-importmap"===t.type){t.sp=!0;var r=t.src?(System.fetch||fetch)(t.src,{integrity:t.integrity,priority:t.fetchPriority,passThrough:!0}).then((function(t){if(!t.ok)throw Error(t.status);return t.text()})).catch((function(r){return r.message=e("W4",t.src)+"\n"+r.message,console.warn(r),"function"==typeof t.onerror&&t.onerror(),"{}"})):t.innerHTML;A=A.then((function(){return r})).then((function(r){!function(t,r,n){var o={};try{o=JSON.parse(r)}catch(c){console.warn(Error(e("W5")))}i(o,n,t)}(k,r,t.src||y)}))}}))}var y,g="undefined"!=typeof Symbol,m="undefined"!=typeof self,b="undefined"!=typeof document,w=m?self:t;if(b){var S=document.querySelector("base[href]");S&&(y=S.href)}if(!y&&"undefined"!=typeof location){var O=(y=location.href.split("#")[0].split("?")[0]).lastIndexOf("/");-1!==O&&(y=y.slice(0,O+1))}var x,E=/\\/g,j=g&&Symbol.toStringTag,P=g?Symbol():"@",T=s.prototype;T.import=function(t,e,r){var n=this;return e&&"object"==typeof e&&(r=e,e=void 0),Promise.resolve(n.prepareImport()).then((function(){return n.resolve(t,e,r)})).then((function(t){var e=l(n,t,void 0,r);return e.C||p(n,e)}))},T.createContext=function(t){var e=this;return{url:t,resolve:function(r,n){return Promise.resolve(e.resolve(r,n||t))}}},T.register=function(t,e,r){x=[t,e,r]},T.getRegister=function(){var t=x;return x=void 0,t};var I=Object.freeze(Object.create(null));w.System=new s;var L,R,A=Promise.resolve(),k={imports:{},scopes:{},depcache:{},integrity:{}},C=b;if(T.prepareImport=function(t){return(C||t)&&(d(),C=!1),A},T.getImportMap=function(){return 
JSON.parse(JSON.stringify(k))},b&&(d(),window.addEventListener("DOMContentLoaded",d)),T.addImportMap=function(t,e){i(t,e||y,k)},b){window.addEventListener("error",(function(t){F=t.filename,N=t.error}));var _=location.origin}T.createScript=function(t){var e=document.createElement("script");e.async=!0,t.indexOf(_+"/")&&(e.crossOrigin="anonymous");var r=k.integrity[t];return r&&(e.integrity=r),e.src=t,e};var F,N,M={},D=T.register;T.register=function(t,e){if(b&&"loading"===document.readyState&&"string"!=typeof t){var r=document.querySelectorAll("script[src]"),n=r[r.length-1];if(n){L=t;var o=this;R=setTimeout((function(){M[n.src]=[t,e],o.import(n.src)}))}}else L=void 0;return D.call(this,t,e)},T.instantiate=function(t,r){var n=M[t];if(n)return delete M[t],n;var o=this;return Promise.resolve(T.createScript(t)).then((function(n){return new Promise((function(i,u){n.addEventListener("error",(function(){u(Error(e(3,[t,r].join(", "))))})),n.addEventListener("load",(function(){if(document.head.removeChild(n),F===t)u(N);else{var e=o.getRegister(t);e&&e[0]===L&&clearTimeout(R),i(e)}})),document.head.appendChild(n)}))}))},T.shouldFetch=function(){return!1},"undefined"!=typeof fetch&&(T.fetch=fetch);var z=T.instantiate,G=/^(text|application)\/(x-)?javascript(;|$)/;T.instantiate=function(t,r,n){var o=this;return this.shouldFetch(t,r,n)?this.fetch(t,{credentials:"same-origin",integrity:k.integrity[t],meta:n}).then((function(n){if(!n.ok)throw Error(e(7,[n.status,n.statusText,t,r].join(", ")));var i=n.headers.get("content-type");if(!i||!G.test(i))throw Error(e(4,i));return n.text().then((function(e){return e.indexOf("//# sourceURL=")<0&&(e+="\n//# sourceURL="+t),(0,eval)(e),o.getRegister(t)}))})):z.apply(this,arguments)},T.resolve=function(t,n){return f(k,r(t,n=n||y)||t,n)||function(t,r){throw Error(e(8,[t,r].join(", ")))}(t,n)};var U=T.instantiate;T.instantiate=function(t,e,r){var n=k.depcache[t];if(n)for(var o=0;o<n.length;o++)l(this,this.resolve(n[o],t),t);return U.call(this,t,e,r)},m&&"function"==typeof importScripts&&(T.instantiate=function(t){var e=this;return Promise.resolve().then((function(){return importScripts(t),e.getRegister(t)}))})}()}();
|
|
|
|
dist/assets/show-right-icon-12c14da5.png
DELETED
Binary file (8.87 kB)
|
|
dist/index.html
DELETED
@@ -1,21 +0,0 @@
|
|
1 |
-
<!doctype html>
|
2 |
-
<html lang="en">
|
3 |
-
<head>
|
4 |
-
<meta charset="UTF-8" />
|
5 |
-
<link rel="icon" type="image/svg+xml" href="" />
|
6 |
-
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
7 |
-
<title></title>
|
8 |
-
<script type="module" crossorigin src="/assets/index-327d01f5.js"></script>
|
9 |
-
<link rel="stylesheet" href="/assets/index-ab4095ce.css">
|
10 |
-
<script type="module">import.meta.url;import("_").catch(()=>1);async function* g(){};if(location.protocol!="file:"){window.__vite_is_modern_browser=true}</script>
|
11 |
-
<script type="module">!function(){if(window.__vite_is_modern_browser)return;console.warn("vite: loading legacy chunks, syntax error above and the same error below should be ignored");var e=document.getElementById("vite-legacy-polyfill"),n=document.createElement("script");n.src=e.src,n.onload=function(){System.import(document.getElementById('vite-legacy-entry').getAttribute('data-src'))},document.body.appendChild(n)}();</script>
|
12 |
-
</head>
|
13 |
-
|
14 |
-
<body>
|
15 |
-
<div id="root"></div>
|
16 |
-
|
17 |
-
<script nomodule>!function(){var e=document,t=e.createElement("script");if(!("noModule"in t)&&"onbeforeload"in t){var n=!1;e.addEventListener("beforeload",(function(e){if(e.target===t)n=!0;else if(!e.target.hasAttribute("nomodule")||!n)return;e.preventDefault()}),!0),t.type="module",t.src=".",e.head.appendChild(t),t.remove()}}();</script>
|
18 |
-
<script nomodule crossorigin id="vite-legacy-polyfill" src="/assets/polyfills-legacy-0b55db5f.js"></script>
|
19 |
-
<script nomodule crossorigin id="vite-legacy-entry" data-src="/assets/index-legacy-f2aa4b0e.js">System.import(document.getElementById('vite-legacy-entry').getAttribute('data-src'))</script>
|
20 |
-
</body>
|
21 |
-
</html>
|
|
docker/README.md
ADDED
@@ -0,0 +1,125 @@
1 |
+
# MSDL (MindSearch Docker Launcher) User Guide
|
2 |
+
|
3 |
+
English | [简体中文](README_zh-CN.md)
|
4 |
+
|
5 |
+
## Introduction
|
6 |
+
|
7 |
+
MSDL (MindSearch Docker Launcher) is a command-line tool designed to simplify the deployment process of MindSearch. It helps users configure and launch the Docker environment for MindSearch through an interactive interface, reducing the complexity of deployment. MSDL primarily serves as a scaffold for deploying containers and does not involve optimization of MindSearch's core logic.
|
8 |
+
|
9 |
+
## Prerequisites
|
10 |
+
|
11 |
+
- Python 3.7 or higher
|
12 |
+
- Docker with Docker Compose (recent Docker versions include Compose as a built-in plugin)
|
13 |
+
- Git (for cloning the repository)
|
14 |
+
- Stable internet connection
|
15 |
+
- Sufficient disk space (required space varies depending on the selected deployment option)
|
16 |
+
|
17 |
+
## Installation Steps
|
18 |
+
|
19 |
+
1. Clone the MindSearch repository:
|
20 |
+
```bash
|
21 |
+
git clone https://github.com/InternLM/MindSearch.git # If you have already cloned the repository, you can skip this step.
|
22 |
+
cd MindSearch/docker
|
23 |
+
```
|
24 |
+
|
25 |
+
2. Install MSDL:
|
26 |
+
```bash
|
27 |
+
pip install -e .
|
28 |
+
```
|
29 |
+
|
30 |
+
## Usage
|
31 |
+
|
32 |
+
After installation, you can run the MSDL command from any directory:
|
33 |
+
|
34 |
+
```bash
|
35 |
+
msdl
|
36 |
+
```
|
37 |
+
|
38 |
+
Follow the interactive prompts for configuration:
|
39 |
+
- Choose the language for the Agent (Chinese or English; this only affects the language of prompts).
|
40 |
+
- Select the model deployment type (local model or cloud model).
|
41 |
+
- Choose the model format:
|
42 |
+
- Currently, only `internlm_silicon` works properly for cloud models.
|
43 |
+
- For local models, only `internlm_server` has passed tests and runs correctly.
|
44 |
+
- Enter the necessary API keys (e.g., SILICON_API_KEY).
|
45 |
+
|
46 |
+
MSDL will automatically perform the following actions (a rough manual equivalent is sketched after this list):
|
47 |
+
- Copy and configure the necessary Dockerfile and docker-compose.yaml files.
|
48 |
+
- Build Docker images.
|
49 |
+
- Launch Docker containers.
|
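For reference, these steps are roughly what you would run by hand from the working directory that MSDL prepares. A minimal sketch, assuming the default `docker/msdl/temp` layout described in this guide:

```bash
cd MindSearch/docker/msdl/temp   # working directory prepared by MSDL
docker compose build             # use `docker-compose build` on older Docker installations
docker compose up                # add -d to run the containers in the background
```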
50 |
+
|
51 |
+
## Deployment Options Comparison
|
52 |
+
|
53 |
+
### Cloud Model Deployment (Recommended)
|
54 |
+
|
55 |
+
**Advantages**:
|
56 |
+
- Lightweight deployment with minimal disk usage (frontend around 510MB, backend around 839MB).
|
57 |
+
- No need for high-performance hardware.
|
58 |
+
- Easy to deploy and maintain.
|
59 |
+
- The `internlm/internlm2_5-7b-chat` model can be used free of charge via SiliconCloud.
|
60 |
+
- High concurrency, fast inference speed.
|
61 |
+
|
62 |
+
**Instructions**:
|
63 |
+
- Select the "Cloud Model" option.
|
64 |
+
- Choose "internlm_silicon" as the model format.
|
65 |
+
- Enter the SiliconCloud API Key (register at https://cloud.siliconflow.cn/ to obtain it).
|
66 |
+
|
67 |
+
**Important Notes**:
|
68 |
+
- The `internlm/internlm2_5-7b-chat` model is freely accessible on SiliconCloud.
|
69 |
+
- MindSearch has no financial relationship with SiliconCloud; this service is recommended solely because it provides valuable resources to the open-source community.
|
70 |
+
|
71 |
+
### Local Model Deployment
|
72 |
+
|
73 |
+
**Features**:
|
74 |
+
- Uses the `openmmlab/lmdeploy` image.
|
75 |
+
- Based on the PyTorch environment.
|
76 |
+
- Requires significant disk space (backend container 15GB+, model 15GB+, totaling 30GB+).
|
77 |
+
- Requires a powerful GPU (12GB or more of VRAM recommended; a quick check is sketched below).
|
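If you are unsure whether your GPU meets the suggested 12GB threshold, one quick check (assuming the NVIDIA driver is installed) is:

```bash
nvidia-smi --query-gpu=name,memory.total --format=csv
```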
78 |
+
|
79 |
+
**Instructions**:
|
80 |
+
- Select the "Local Model" option.
|
81 |
+
- Choose "internlm_server" as the model format.
|
82 |
+
|
83 |
+
**Relevant Links**:
|
84 |
+
- lmdeploy image: https://hub.docker.com/r/openmmlab/lmdeploy/tags
|
85 |
+
- InternLM2.5 project: https://huggingface.co/internlm/internlm2_5-7b-chat
|
86 |
+
|
87 |
+
## Notes
|
88 |
+
|
89 |
+
- Currently, only the `internlm_silicon` format works properly for cloud models, and only the `internlm_server` format has passed tests for local models.
|
90 |
+
- The language selection only affects the language of the Agent's prompts and does not change the language of the React frontend.
|
91 |
+
- The first run might take a long time to download necessary model files and Docker images.
|
92 |
+
- When using cloud models, ensure a stable network connection.
|
93 |
+
|
94 |
+
## Troubleshooting
|
95 |
+
|
96 |
+
1. Ensure the Docker service is running.
|
97 |
+
2. Check if there is sufficient disk space.
|
98 |
+
3. Ensure all necessary environment variables are set correctly.
|
99 |
+
4. Check if the network connection is stable.
|
100 |
+
5. Verify the validity of API keys (e.g., for cloud models); a few quick shell checks are sketched below.
|
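The first few checks can be run directly from a shell, for example:

```bash
docker info               # fails if the Docker daemon is not running
docker compose version    # verifies that Docker Compose is available
df -h .                   # shows the free disk space on the current filesystem
```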
101 |
+
|
102 |
+
If problems persist, check the Issues section in the MindSearch GitHub repository or submit a new issue.
|
103 |
+
|
104 |
+
## Privacy and Security
|
105 |
+
|
106 |
+
MSDL is a locally executed tool and does not transmit any API keys or other sensitive information. All configuration information is stored in the `msdl/temp/.env` file and is used only to simplify the deployment process.
|
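For illustration, a cloud-model setup might leave a `.env` file similar to the following in `msdl/temp` (the variable shown is the one mentioned above; the exact set of variables depends on the chosen model format, and the value is a placeholder):

```bash
# msdl/temp/.env (illustrative example)
SILICON_API_KEY=sk-your-key-here
```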
107 |
+
|
108 |
+
## Updating MSDL
|
109 |
+
|
110 |
+
To update MSDL to the latest version, follow these steps:
|
111 |
+
|
112 |
+
1. Navigate to the MindSearch directory.
|
113 |
+
2. Pull the latest code:
|
114 |
+
```bash
|
115 |
+
git pull origin main
|
116 |
+
```
|
117 |
+
3. Reinstall MSDL:
|
118 |
+
```bash
|
119 |
+
cd docker
|
120 |
+
pip install -e .
|
121 |
+
```
|
122 |
+
|
123 |
+
## Conclusion
|
124 |
+
|
125 |
+
If you have any questions or suggestions, feel free to submit an issue on GitHub or contact us directly. Thank you for using MindSearch and MSDL!
|
docker/README_zh-CN.md
ADDED
@@ -0,0 +1,125 @@
1 |
+
# MSDL (MindSearch Docker Launcher) 使用指南
|
2 |
+
|
3 |
+
[English](README.md) | 简体中文
|
4 |
+
|
5 |
+
## 简介
|
6 |
+
|
7 |
+
MSDL (MindSearch Docker Launcher) 是一个专为简化 MindSearch 部署过程而设计的命令行工具。它通过交互式界面帮助用户轻松配置和启动 MindSearch 的 Docker 环境,降低了部署的复杂性。MSDL 主要作为部署容器的脚手架,不涉及 MindSearch 核心逻辑的优化。
|
8 |
+
|
9 |
+
## 环境要求
|
10 |
+
|
11 |
+
- Python 3.7 或更高版本
|
12 |
+
- Docker (需包含 Docker Compose,新版本的 Docker 通常已集成)
|
13 |
+
- Git (用于克隆仓库)
|
14 |
+
- 稳定的网络连接
|
15 |
+
- 充足的磁盘空间(根据选择的部署方案,所需空间有所不同)
|
16 |
+
|
17 |
+
## 安装步骤
|
18 |
+
|
19 |
+
1. 克隆 MindSearch 仓库:
|
20 |
+
```bash
|
21 |
+
git clone https://github.com/InternLM/MindSearch.git # 已经克隆过的,可以忽略执行此步骤
|
22 |
+
cd MindSearch/docker
|
23 |
+
```
|
24 |
+
|
25 |
+
2. 安装 MSDL:
|
26 |
+
```bash
|
27 |
+
pip install -e .
|
28 |
+
```
|
29 |
+
|
30 |
+
## 使用方法
|
31 |
+
|
32 |
+
安装完成后,您可以在任意目录下运行 MSDL 命令:
|
33 |
+
|
34 |
+
```bash
|
35 |
+
msdl
|
36 |
+
```
|
37 |
+
|
38 |
+
按照交互式提示进行配置:
|
39 |
+
- 选择 Agent 使用的语言(中文或英文,仅影响 Agent 的提示词语言)
|
40 |
+
- 选择模型部署类型(本地模型或云端模型)
|
41 |
+
- 选择模型格式
|
42 |
+
- 云端模型目前只有 internlm_silicon 能够正常运行
|
43 |
+
- 本地模型目前只有 internlm_server 通过测试,能正常运行
|
44 |
+
- 输入必要的 API 密钥(如 SILICON_API_KEY)
|
45 |
+
|
46 |
+
MSDL 将自动执行以下操作:
|
47 |
+
- 复制并配置必要的 Dockerfile 和 docker-compose.yaml 文件
|
48 |
+
- 构建 Docker 镜像
|
49 |
+
- 启动 Docker 容器
|
50 |
+
|
51 |
+
## 部署方案比较
|
52 |
+
|
53 |
+
### 云端模型部署(推荐)
|
54 |
+
|
55 |
+
**优势**:
|
56 |
+
- 轻量级部署,磁盘占用小(前端约 510MB,后端约 839MB)
|
57 |
+
- 无需高性能硬件
|
58 |
+
- 部署和维护简单
|
59 |
+
- 使用 SiliconCloud 可免费调用 internlm/internlm2_5-7b-chat 模型
|
60 |
+
- 高并发量,推理速度快
|
61 |
+
|
62 |
+
**使用说明**:
|
63 |
+
- 选择"云端模型"选项
|
64 |
+
- 选择 "internlm_silicon" 作为模型格式
|
65 |
+
- 输入 SiliconCloud API Key(需在 https://cloud.siliconflow.cn/ 注册获取)
|
66 |
+
|
67 |
+
**重要说明**:
|
68 |
+
- internlm/internlm2_5-7b-chat 模型在 SiliconCloud 上可以免费调用,但 API Key 仍需妥善保管好。
|
69 |
+
- MindSearch 项目与 SiliconCloud 并无利益关系,只是使用它能更好地体验 MindSearch 的效果,感谢 SiliconCloud 为开源社区所做的贡献。
|
70 |
+
|
71 |
+
### 本地模型部署
|
72 |
+
|
73 |
+
**特点**:
|
74 |
+
- 使用 openmmlab/lmdeploy 镜像
|
75 |
+
- 基于 PyTorch 环境
|
76 |
+
- 需要大量磁盘空间(后端容器 15GB+,模型 15GB+,总计 30GB 以上)
|
77 |
+
- 需要强大的 GPU(建议 12GB 或以上显存)
|
78 |
+
|
79 |
+
**使用说明**:
|
80 |
+
- 选择"本地模型"选项
|
81 |
+
- 选择 "internlm_server" 作为模型格式
|
82 |
+
|
83 |
+
**相关链接**:
|
84 |
+
- lmdeploy 镜像: https://hub.docker.com/r/openmmlab/lmdeploy/tags
|
85 |
+
- InternLM2.5 项目: https://huggingface.co/internlm/internlm2_5-7b-chat
|
86 |
+
|
87 |
+
## 注意事项
|
88 |
+
|
89 |
+
- 云端模型目前只有 internlm_silicon 格式能够正常运行,本地模型只有 internlm_server 格式通过测试能正常运行。
|
90 |
+
- 选择语言只会影响 Agent 的提示词语言,不会改变 React 前端的界面语言。
|
91 |
+
- 首次运行可能需要较长时间来下载必要的模型文件和 Docker 镜像。
|
92 |
+
- 使用云端模型时,请确保网络连接稳定。
|
93 |
+
|
94 |
+
## 故障排除
|
95 |
+
|
96 |
+
1. 确保 Docker 服务正在运行。
|
97 |
+
2. 检查是否有足够的磁盘空间。
|
98 |
+
3. 确保所有必要的环境变量已正确设置。
|
99 |
+
4. 检查网络连接是否正常。
|
100 |
+
5. 验证 API Key 是否有效(如使用云端模型)。
|
101 |
+
|
102 |
+
如果问题持续,请查看 MindSearch 的 GitHub 仓库中的 Issues 部分,或提交新的 Issue。
|
103 |
+
|
104 |
+
## 隐私和安全
|
105 |
+
|
106 |
+
MSDL 是纯本地执行的工具,不会上报任何 API Key 或其他敏感信息。所有配置信息存储在 `msdl/temp/.env` 文件中,仅用于简化部署过程。
|
107 |
+
|
108 |
+
## 更新 MSDL
|
109 |
+
|
110 |
+
要更新 MSDL 到最新版本,请执行以下步骤:
|
111 |
+
|
112 |
+
1. 进入 MindSearch 目录
|
113 |
+
2. 拉取最新的代码:
|
114 |
+
```bash
|
115 |
+
git pull origin main
|
116 |
+
```
|
117 |
+
3. 重新安装 MSDL:
|
118 |
+
```bash
|
119 |
+
cd docker
|
120 |
+
pip install -e .
|
121 |
+
```
|
122 |
+
|
123 |
+
## 结语
|
124 |
+
|
125 |
+
如有任何问题或建议,欢迎在 GitHub 上提交 Issue 或直接联系我们。感谢您使用 MindSearch 和 MSDL!
|
docker/msdl/__init__.py
ADDED
File without changes
|
docker/msdl/__main__.py
ADDED
@@ -0,0 +1,107 @@
1 |
+
# msdl/__main__.py
|
2 |
+
import signal
|
3 |
+
import sys
|
4 |
+
import argparse
|
5 |
+
import os
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
from msdl.config import (
|
9 |
+
BACKEND_DOCKERFILE_DIR,
|
10 |
+
FRONTEND_DOCKERFILE_DIR,
|
11 |
+
PACKAGE_DIR,
|
12 |
+
PROJECT_ROOT,
|
13 |
+
REACT_DOCKERFILE,
|
14 |
+
TEMP_DIR,
|
15 |
+
TEMPLATE_FILES,
|
16 |
+
)
|
17 |
+
from msdl.docker_manager import (
|
18 |
+
check_docker_install,
|
19 |
+
run_docker_compose,
|
20 |
+
stop_and_remove_containers,
|
21 |
+
update_docker_compose_paths,
|
22 |
+
)
|
23 |
+
from msdl.i18n import (
|
24 |
+
setup_i18n,
|
25 |
+
t,
|
26 |
+
)
|
27 |
+
from msdl.utils import (
|
28 |
+
copy_templates_to_temp,
|
29 |
+
copy_backend_dockerfile,
|
30 |
+
copy_frontend_dockerfile,
|
31 |
+
modify_docker_compose,
|
32 |
+
)
|
33 |
+
from msdl.user_interaction import get_user_choices
|
34 |
+
|
35 |
+
|
36 |
+
def signal_handler(signum, frame):
|
37 |
+
print(t("TERMINATION_SIGNAL"))
|
38 |
+
stop_and_remove_containers()
|
39 |
+
sys.exit(0)
|
40 |
+
|
41 |
+
|
42 |
+
def parse_args():
|
43 |
+
parser = argparse.ArgumentParser(description=t("CLI_DESCRIPTION"))
|
44 |
+
parser.add_argument('--language', '-l',
|
45 |
+
help=t("LANGUAGE_HELP"),
|
46 |
+
choices=["en", "zh_CN"],
|
47 |
+
default=None)
|
48 |
+
parser.add_argument('--config-language', action='store_true',
|
49 |
+
help=t("CONFIG_LANGUAGE_HELP"))
|
50 |
+
return parser.parse_args()
|
51 |
+
|
52 |
+
|
53 |
+
def main():
|
54 |
+
# Setup signal handler
|
55 |
+
signal.signal(signal.SIGINT, signal_handler)
|
56 |
+
signal.signal(signal.SIGTERM, signal_handler)
|
57 |
+
|
58 |
+
# Initialize i18n
|
59 |
+
setup_i18n()
|
60 |
+
|
61 |
+
# Parse command line arguments
|
62 |
+
args = parse_args()
|
63 |
+
if args.language:
|
64 |
+
# set_language(args.language)
|
65 |
+
# Reinitialize i18n with new language
|
66 |
+
setup_i18n()
|
67 |
+
|
68 |
+
try:
|
69 |
+
# Check if TEMP_DIR exists, if not, create it
|
70 |
+
if not TEMP_DIR.exists():
|
71 |
+
TEMP_DIR.mkdir(parents=True, exist_ok=True)
|
72 |
+
print(t("TEMP_DIR_CREATED", dir=str(TEMP_DIR)))
|
73 |
+
|
74 |
+
check_docker_install()
|
75 |
+
|
76 |
+
# Get user choices using the new module
|
77 |
+
backend_language, model, model_format, search_engine = get_user_choices()
|
78 |
+
|
79 |
+
# Copy template files
|
80 |
+
copy_templates_to_temp(TEMPLATE_FILES)
|
81 |
+
|
82 |
+
# Copy Dockerfiles
|
83 |
+
copy_backend_dockerfile(model)
|
84 |
+
copy_frontend_dockerfile()
|
85 |
+
|
86 |
+
# Update paths in docker-compose.yml
|
87 |
+
update_docker_compose_paths()
|
88 |
+
|
89 |
+
# Modify docker-compose.yml based on user choices
|
90 |
+
modify_docker_compose(model, backend_language, model_format, search_engine)
|
91 |
+
|
92 |
+
stop_and_remove_containers()
|
93 |
+
run_docker_compose()
|
94 |
+
|
95 |
+
print(t("DOCKER_LAUNCHER_COMPLETE"))
|
96 |
+
except KeyboardInterrupt:
|
97 |
+
print(t("KEYBOARD_INTERRUPT"))
|
98 |
+
# stop_and_remove_containers()
|
99 |
+
sys.exit(0)
|
100 |
+
except Exception as e:
|
101 |
+
print(t("UNEXPECTED_ERROR", error=str(e)))
|
102 |
+
# stop_and_remove_containers()
|
103 |
+
sys.exit(1)
|
104 |
+
|
105 |
+
|
106 |
+
if __name__ == "__main__":
|
107 |
+
main()
|
docker/msdl/config.py
ADDED
@@ -0,0 +1,57 @@
1 |
+
# msdl/config.py
|
2 |
+
|
3 |
+
from pathlib import Path
|
4 |
+
|
5 |
+
|
6 |
+
class FileSystemManager:
|
7 |
+
|
8 |
+
@staticmethod
|
9 |
+
def ensure_dir(dir_path):
|
10 |
+
"""Ensure the directory exists, create if it doesn't"""
|
11 |
+
path = Path(dir_path)
|
12 |
+
if not path.exists():
|
13 |
+
path.mkdir(parents=True, exist_ok=True)
|
14 |
+
return path
|
15 |
+
|
16 |
+
@staticmethod
|
17 |
+
def ensure_file(file_path, default_content=""):
|
18 |
+
"""Ensure the file exists, create if it doesn't"""
|
19 |
+
path = Path(file_path)
|
20 |
+
if not path.parent.exists():
|
21 |
+
FileSystemManager.ensure_dir(path.parent)
|
22 |
+
if not path.exists():
|
23 |
+
with open(path, "w") as f:
|
24 |
+
f.write(default_content)
|
25 |
+
return path
|
26 |
+
|
27 |
+
|
28 |
+
# Get the directory where the script is located
|
29 |
+
PACKAGE_DIR = Path(__file__).resolve().parent
|
30 |
+
|
31 |
+
# Get the root directory of the MindSearch project
|
32 |
+
PROJECT_ROOT = PACKAGE_DIR.parent.parent
|
33 |
+
|
34 |
+
# Get the temp directory path, which is actually the working directory for executing the docker compose up command
|
35 |
+
TEMP_DIR = FileSystemManager.ensure_dir(PACKAGE_DIR / "temp")
|
36 |
+
|
37 |
+
# Configuration file name list
|
38 |
+
TEMPLATE_FILES = ["docker-compose.yaml"]
|
39 |
+
|
40 |
+
# Backend Dockerfile directory
|
41 |
+
BACKEND_DOCKERFILE_DIR = "backend"
|
42 |
+
|
43 |
+
# Backend Dockerfile name
|
44 |
+
CLOUD_LLM_DOCKERFILE = "cloud_llm.dockerfile"
|
45 |
+
LOCAL_LLM_DOCKERFILE = "local_llm.dockerfile"
|
46 |
+
|
47 |
+
# Frontend Dockerfile directory
|
48 |
+
FRONTEND_DOCKERFILE_DIR = "frontend"
|
49 |
+
|
50 |
+
# Frontend Dockerfile name
|
51 |
+
REACT_DOCKERFILE = "react.dockerfile"
|
52 |
+
|
53 |
+
# i18n translations directory
|
54 |
+
TRANSLATIONS_DIR = FileSystemManager.ensure_dir(PACKAGE_DIR / "translations")
|
55 |
+
|
56 |
+
# Get the path of the .env file
|
57 |
+
ENV_FILE_PATH = FileSystemManager.ensure_file(TEMP_DIR / ".env")
|
docker/msdl/docker_manager.py
ADDED
@@ -0,0 +1,175 @@
1 |
+
# msdl/docker_manager.py
|
2 |
+
|
3 |
+
import os
|
4 |
+
import subprocess
|
5 |
+
import sys
|
6 |
+
from functools import lru_cache
|
7 |
+
|
8 |
+
import yaml
|
9 |
+
from msdl.config import PROJECT_ROOT, TEMP_DIR
|
10 |
+
from msdl.i18n import t
|
11 |
+
|
12 |
+
|
13 |
+
@lru_cache(maxsize=1)
|
14 |
+
def get_docker_command():
|
15 |
+
try:
|
16 |
+
subprocess.run(
|
17 |
+
["docker", "compose", "version"], check=True, capture_output=True
|
18 |
+
)
|
19 |
+
return ["docker", "compose"]
|
20 |
+
except subprocess.CalledProcessError:
|
21 |
+
try:
|
22 |
+
subprocess.run(
|
23 |
+
["docker-compose", "--version"], check=True, capture_output=True
|
24 |
+
)
|
25 |
+
return ["docker-compose"]
|
26 |
+
except subprocess.CalledProcessError:
|
27 |
+
print(t("DOCKER_COMPOSE_NOT_FOUND"))
|
28 |
+
sys.exit(1)
|
29 |
+
|
30 |
+
|
31 |
+
@lru_cache(maxsize=1)
|
32 |
+
def check_docker_install():
|
33 |
+
try:
|
34 |
+
subprocess.run(["docker", "--version"], check=True, capture_output=True)
|
35 |
+
docker_compose_cmd = get_docker_command()
|
36 |
+
subprocess.run(
|
37 |
+
docker_compose_cmd + ["version"], check=True, capture_output=True
|
38 |
+
)
|
39 |
+
print(t("DOCKER_INSTALLED"))
|
40 |
+
return True
|
41 |
+
except subprocess.CalledProcessError as e:
|
42 |
+
print(t("DOCKER_INSTALL_ERROR", error=str(e)))
|
43 |
+
return False
|
44 |
+
except FileNotFoundError:
|
45 |
+
print(t("DOCKER_NOT_FOUND"))
|
46 |
+
return False
|
47 |
+
|
48 |
+
|
49 |
+
def stop_and_remove_containers():
|
50 |
+
docker_compose_cmd = get_docker_command()
|
51 |
+
compose_file = os.path.join(TEMP_DIR, "docker-compose.yaml")
|
52 |
+
|
53 |
+
# Read the docker-compose.yaml file
|
54 |
+
try:
|
55 |
+
with open(compose_file, "r") as file:
|
56 |
+
compose_config = yaml.safe_load(file)
|
57 |
+
except Exception as e:
|
58 |
+
print(t("COMPOSE_FILE_READ_ERROR", error=str(e)))
|
59 |
+
return
|
60 |
+
|
61 |
+
# Get project name and service names
|
62 |
+
project_name = compose_config.get("name", "mindsearch")
|
63 |
+
service_names = list(compose_config.get("services", {}).keys())
|
64 |
+
|
65 |
+
# Use only the project name as the container prefix
|
66 |
+
container_prefix = f"{project_name}_"
|
67 |
+
|
68 |
+
try:
|
69 |
+
# 1. Try to stop containers using the current docker-compose.yaml
|
70 |
+
subprocess.run(
|
71 |
+
docker_compose_cmd + ["-f", compose_file, "down", "-v", "--remove-orphans"],
|
72 |
+
check=True,
|
73 |
+
)
|
74 |
+
except subprocess.CalledProcessError:
|
75 |
+
print(t("CURRENT_COMPOSE_STOP_FAILED"))
|
76 |
+
|
77 |
+
# 2. Attempt to clean up potentially existing containers, regardless of the success of the previous step
|
78 |
+
try:
|
79 |
+
# List all containers (including stopped ones)
|
80 |
+
result = subprocess.run(
|
81 |
+
["docker", "ps", "-a", "--format", "{{.Names}}"],
|
82 |
+
check=True,
|
83 |
+
capture_output=True,
|
84 |
+
text=True,
|
85 |
+
)
|
86 |
+
all_containers = result.stdout.splitlines()
|
87 |
+
|
88 |
+
# 3. Filter out containers belonging to our project
|
89 |
+
project_containers = [
|
90 |
+
c
|
91 |
+
for c in all_containers
|
92 |
+
if c.startswith(container_prefix)
|
93 |
+
or any(c == f"{project_name}-{service}" for service in service_names)
|
94 |
+
]
|
95 |
+
|
96 |
+
if project_containers:
|
97 |
+
# 4. Force stop and remove these containers
|
98 |
+
for container in project_containers:
|
99 |
+
try:
|
100 |
+
subprocess.run(["docker", "stop", container], check=True)
|
101 |
+
subprocess.run(["docker", "rm", "-f", container], check=True)
|
102 |
+
print(t("CONTAINER_STOPPED_AND_REMOVED", container=container))
|
103 |
+
except subprocess.CalledProcessError as e:
|
104 |
+
print(t("CONTAINER_STOP_ERROR", container=container, error=str(e)))
|
105 |
+
|
106 |
+
# 5. Clean up potentially leftover networks
|
107 |
+
try:
|
108 |
+
subprocess.run(["docker", "network", "prune", "-f"], check=True)
|
109 |
+
print(t("NETWORKS_PRUNED"))
|
110 |
+
except subprocess.CalledProcessError as e:
|
111 |
+
print(t("NETWORK_PRUNE_ERROR", error=str(e)))
|
112 |
+
|
113 |
+
except subprocess.CalledProcessError as e:
|
114 |
+
print(t("DOCKER_LIST_ERROR", error=str(e)))
|
115 |
+
|
116 |
+
print(t("CONTAINERS_STOPPED_AND_REMOVED"))
|
117 |
+
|
118 |
+
|
119 |
+
def run_docker_compose():
|
120 |
+
docker_compose_cmd = get_docker_command()
|
121 |
+
try:
|
122 |
+
print(t("STARTING_CONTAINERS_WITH_BUILD"))
|
123 |
+
subprocess.run(
|
124 |
+
docker_compose_cmd
|
125 |
+
+ [
|
126 |
+
"-f",
|
127 |
+
os.path.join(TEMP_DIR, "docker-compose.yaml"),
|
128 |
+
"--env-file",
|
129 |
+
os.path.join(TEMP_DIR, ".env"),
|
130 |
+
"up",
|
131 |
+
"-d",
|
132 |
+
"--build",
|
133 |
+
],
|
134 |
+
check=True,
|
135 |
+
)
|
136 |
+
print(t("CONTAINERS_STARTED"))
|
137 |
+
except subprocess.CalledProcessError as e:
|
138 |
+
print(t("DOCKER_ERROR", error=str(e)))
|
139 |
+
print(t("DOCKER_OUTPUT"))
|
140 |
+
print(e.output.decode() if e.output else "No output")
|
141 |
+
stop_and_remove_containers()
|
142 |
+
sys.exit(1)
|
143 |
+
|
144 |
+
|
145 |
+
def update_docker_compose_paths(project_root=PROJECT_ROOT):
|
146 |
+
docker_compose_path = os.path.join(TEMP_DIR, "docker-compose.yaml")
|
147 |
+
with open(docker_compose_path, "r") as file:
|
148 |
+
compose_data = yaml.safe_load(file)
|
149 |
+
for service in compose_data["services"].values():
|
150 |
+
if "build" in service:
|
151 |
+
if "context" in service["build"]:
|
152 |
+
if service["build"]["context"] == "..":
|
153 |
+
service["build"]["context"] = project_root
|
154 |
+
else:
|
155 |
+
service["build"]["context"] = os.path.join(
|
156 |
+
project_root, service["build"]["context"]
|
157 |
+
)
|
158 |
+
if "dockerfile" in service["build"]:
|
159 |
+
dockerfile_name = os.path.basename(service["build"]["dockerfile"])
|
160 |
+
service["build"]["dockerfile"] = os.path.join(TEMP_DIR, dockerfile_name)
|
161 |
+
with open(docker_compose_path, "w") as file:
|
162 |
+
yaml.dump(compose_data, file)
|
163 |
+
print(t("PATHS_UPDATED"))
|
164 |
+
|
165 |
+
|
166 |
+
def main():
|
167 |
+
if check_docker_install():
|
168 |
+
update_docker_compose_paths()
|
169 |
+
run_docker_compose()
|
170 |
+
else:
|
171 |
+
sys.exit(1)
|
172 |
+
|
173 |
+
|
174 |
+
if __name__ == "__main__":
|
175 |
+
main()
|
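For reference, the module can also be driven step by step; a minimal sketch of the same sequence `main()` runs, assuming `docker-compose.yaml` and `.env` have already been placed in `msdl/temp` by the launcher:

```python
# Sketch only: mirrors docker_manager.main(); assumes the compose file and
# .env already exist in TEMP_DIR (normally prepared by msdl.__main__).
import sys
from msdl.docker_manager import (
    check_docker_install,
    update_docker_compose_paths,
    run_docker_compose,
)

if check_docker_install():          # verifies docker plus the compose plugin/binary
    update_docker_compose_paths()   # rewrites build contexts to absolute host paths
    run_docker_compose()            # docker compose up -d --build
else:
    sys.exit(1)
```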
docker/msdl/i18n.py
ADDED
@@ -0,0 +1,64 @@
# msdl/translations/i18n.py

import os
import i18n
import locale
from dotenv import load_dotenv, set_key, find_dotenv
from msdl.config import TRANSLATIONS_DIR, ENV_FILE_PATH
from pathlib import Path

# Load environment variables at module level
load_dotenv(ENV_FILE_PATH)


def get_env_variable(var_name, default=None):
    return os.getenv(var_name, default)


def set_env_variable(var_name, value):
    dotenv_file = find_dotenv(ENV_FILE_PATH)
    set_key(dotenv_file, var_name, value)
    # Reload environment variables after setting
    os.environ[var_name] = value


def get_system_language():
    try:
        return locale.getlocale()[0].split("_")[0]
    except:
        return "en"


def get_available_languages():
    """Get list of available language codes from translation files"""
    translations_path = Path(TRANSLATIONS_DIR)
    if not translations_path.exists():
        return ["en"]
    return [f.stem for f in translations_path.glob("*.yaml")]


def set_language(language_code):
    """Set the interaction language and persist it to .env file"""
    available_langs = get_available_languages()
    if language_code not in available_langs:
        print(f"Warning: Language '{language_code}' not available. Using 'en' instead.")
        language_code = "en"

    set_env_variable("LAUNCHER_INTERACTION_LANGUAGE", language_code)
    i18n.set("locale", language_code)


def setup_i18n():
    # Initialize i18n settings
    i18n.load_path.append(TRANSLATIONS_DIR)
    i18n.set("filename_format", "{locale}.{format}")
    i18n.set("file_format", "yaml")

    # Get language from environment
    env_language = get_env_variable("LAUNCHER_INTERACTION_LANGUAGE")
    if not env_language:
        # If no language is set, use English as default without saving to .env
        env_language = "en"

    # Force reload translations
    i18n.set("locale", None)  # Clear current locale
    i18n.set("locale", env_language)  # Set new locale


def t(key, **kwargs):
    return i18n.t(key, **kwargs)
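Callers are expected to run `setup_i18n()` once before the first `t(...)` lookup so the YAML catalogs are on the load path. A small usage sketch (key names taken from the translation files in this commit):

```python
# Sketch only: assumes the msdl package and its translations/ directory are importable.
from msdl.i18n import setup_i18n, set_language, t

setup_i18n()                   # register load path, pick locale from .env or fall back to "en"
print(t("DOCKER_INSTALLED"))   # resolved from en.yaml

set_language("zh_CN")          # persists LAUNCHER_INTERACTION_LANGUAGE and switches locale
print(t("DOCKER_INSTALLED"))   # resolved from zh_CN.yaml
```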
docker/msdl/templates/backend/cloud_llm.dockerfile
ADDED
@@ -0,0 +1,25 @@
# Use Python 3.11.9 as the base image
FROM python:3.11.9-slim

# Set the working directory
WORKDIR /root

# Install Git
RUN apt-get update && apt-get install -y git && apt-get clean && rm -rf /var/lib/apt/lists/*

# Install specified dependency packages
RUN pip install --no-cache-dir \
    duckduckgo_search==5.3.1b1 \
    einops \
    fastapi \
    janus \
    pyvis \
    sse-starlette \
    termcolor \
    uvicorn \
    griffe==0.48.0 \
    python-dotenv \
    lagent==0.5.0rc1

# Copy the mindsearch folder to the /root directory of the container
COPY mindsearch /root/mindsearch
docker/msdl/templates/backend/local_llm.dockerfile
ADDED
@@ -0,0 +1,30 @@
# Use openmmlab/lmdeploy:latest-cu12 as the base image
# Note: Before using this Dockerfile, you should visit https://hub.docker.com/r/openmmlab/lmdeploy/tags
# to select a base image that's compatible with your specific GPU architecture.
# The 'latest-cu12' tag is used here as an example, but you should choose the most
# appropriate tag for your setup (e.g., cu11 for CUDA 11, cu12 for CUDA 12, etc.)
FROM openmmlab/lmdeploy:latest-cu12

# Set the working directory
WORKDIR /root

# Install Git
RUN apt-get update && apt-get install -y git && apt-get clean && rm -rf /var/lib/apt/lists/*

# Install specified dependency packages
# Note: lmdeploy dependency is already included in the base image, no need to reinstall
RUN pip install --no-cache-dir \
    duckduckgo_search==5.3.1b1 \
    einops \
    fastapi \
    janus \
    pyvis \
    sse-starlette \
    termcolor \
    uvicorn \
    griffe==0.48.0 \
    python-dotenv \
    lagent==0.5.0rc1

# Copy the mindsearch folder to the /root directory of the container
COPY mindsearch /root/mindsearch
docker/msdl/templates/docker-compose.yaml
ADDED
@@ -0,0 +1,62 @@
services:
  backend:
    container_name: mindsearch-backend
    build:
      context: .
      dockerfile: backend.dockerfile
    image: mindsearch/backend:latest
    restart: unless-stopped
    # Uncomment the following line to force using local build
    # pull: never
    ports:
      - "8002:8002"
    environment:
      - PYTHONUNBUFFERED=1
      # - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
      # - QWEN_API_KEY=${QWEN_API_KEY:-}
      # - SILICON_API_KEY=${SILICON_API_KEY:-}
    command: python -m mindsearch.app --lang ${LANG:-cn} --model_format ${MODEL_FORMAT:-internlm_server}
    volumes:
      - /root/.cache:/root/.cache
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    # GPU support explanation:
    # The current configuration has been tested with NVIDIA GPUs. If you use other types of GPUs, you may need to adjust the configuration.
    # For AMD GPUs, you can try using the ROCm driver by modifying the configuration as follows:
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: amd
    #           count: 1
    #           capabilities: [gpu]
    #
    # For other GPU types, you may need to consult the respective Docker GPU support documentation.
    # In theory, any GPU supported by PyTorch should be configurable here.
    # If you encounter issues, try the following steps:
    # 1. Ensure the correct GPU drivers are installed on the host
    # 2. Check if your Docker version supports your GPU type
    # 3. Install necessary GPU-related libraries in the Dockerfile
    # 4. Adjust the deploy configuration here to match your GPU type
    #
    # Note: After changing GPU configuration, you may need to rebuild the image.

  frontend:
    container_name: mindsearch-frontend
    build:
      context: .
      dockerfile: frontend.dockerfile
    image: mindsearch/frontend:latest
    restart: unless-stopped
    # Uncomment the following line to force using local build
    # pull: never
    ports:
      - "8080:8080"
    depends_on:
      - backend
docker/msdl/templates/frontend/react.dockerfile
ADDED
@@ -0,0 +1,35 @@
# Use Node.js 18 as the base image
FROM node:18-alpine

# Set the working directory
WORKDIR /app

# Copy package files first to leverage Docker cache
COPY frontend/React/package*.json ./

# Install dependencies
RUN npm install

# Copy source code after npm install to prevent unnecessary reinstalls
COPY frontend/React/ ./

# Modify vite.config.ts for Docker environment
# Because we use Docker Compose to manage the backend and frontend services, we can use the service name as the hostname
RUN sed -i '/server: {/,/},/c\
server: {\
  host: "0.0.0.0",\
  port: 8080,\
  proxy: {\
    "/solve": {\
      target: "http://backend:8002",\
      changeOrigin: true,\
    },\
    // "/solve": {\
    //   target: "https://mindsearch.openxlab.org.cn",\
    //   changeOrigin: true,\
    // },\
  },\
},' vite.config.ts

# Start the development server
CMD ["npm", "start"]
docker/msdl/translations/en.yaml
ADDED
@@ -0,0 +1,77 @@
en:
  SCRIPT_DIR: "Script directory: %{dir}"
  PROJECT_ROOT: "Project root directory: %{dir}"
  TEMP_DIR: "Temporary directory: %{dir}"
  DOCKER_LAUNCHER_START: "Starting Docker launcher process"
  DOCKER_LAUNCHER_COMPLETE: "Docker launcher process completed"
  DIR_CREATED: "Directory created: %{dir}"
  FILE_COPIED: "Copied %{file} to the temp directory"
  FILE_NOT_FOUND: "Error: %{file} not found in the templates directory"
  CONTAINERS_STOPPED: "Existing containers and volumes stopped and removed"
  CONTAINER_STOP_ERROR: "Error stopping and removing containers (this may be normal if there were no running containers): %{error}"
  BUILDING_IMAGES: "Starting to build Docker images..."
  IMAGES_BUILT: "Docker images built successfully"
  STARTING_CONTAINERS: "Starting Docker containers..."
  STARTING_CONTAINERS_WITH_BUILD: "Starting to build and start Docker containers..."
  CONTAINERS_STARTED: "Docker containers started successfully"
  DOCKER_ERROR: "Error while building or starting Docker containers: %{error}"
  DOCKER_OUTPUT: "Docker command output:"
  DOCKER_INSTALLED: "Docker and Docker Compose installed correctly"
  DOCKER_INSTALL_ERROR: "Error: Docker or Docker Compose may not be installed correctly: %{error}"
  DOCKER_NOT_FOUND: "Error: Docker or Docker Compose command not found. Please ensure they are correctly installed and added to the PATH."
  DOCKER_COMPOSE_NOT_FOUND: "Error: Docker Compose command not found. Please ensure it is correctly installed and added to the PATH."
  PATHS_UPDATED: "Paths updated in docker-compose.yaml"
  COMPOSE_FILE_CONTENT: "docker-compose.yaml file content:"
  COMPOSE_FILE_NOT_FOUND: "Error: %{file} file not found"
  COMPOSE_FILE_READ_ERROR: "Error reading docker-compose.yaml file: %{error}"
  TERMINATION_SIGNAL: "Termination signal caught. Exiting gracefully..."
  KEYBOARD_INTERRUPT: "Keyboard interrupt caught. Exiting gracefully..."
  UNEXPECTED_ERROR: "An unexpected error occurred: %{error}"
  BACKEND_LANGUAGE_CHOICE: "Select MindSearch backend language (default is cn)"
  CHINESE: "Chinese (cn)"
  ENGLISH: "English (en)"
  MODEL_DEPLOYMENT_TYPE: "Select model deployment type:"
  CLOUD_MODEL: "Cloud model"
  LOCAL_MODEL: "Local model"
  MODEL_FORMAT_CHOICE: "Select model format:"
  CONFIRM_USE_EXISTING_API_KEY: "Do you want to use the existing %{ENV_VAR_NAME} API key?"
  CONFIRM_OVERWRITE_EXISTING_API_KEY: "Do you want to overwrite the existing %{ENV_VAR_NAME} API key?"
  PLEASE_INPUT_NEW_API_KEY: "Please enter a new %{ENV_VAR_NAME} API key:"
  PLEASE_INPUT_NEW_API_KEY_FROM_ZERO: "Please enter a new %{ENV_VAR_NAME} API key:"
  INVALID_API_KEY_FORMAT: "Invalid API key format"
  RETRY_API_KEY_INPUT: "Retry API key input"
  API_KEY_INPUT_CANCELLED: "API key input cancelled"
  UNKNOWN_API_KEY_TYPE: "Unknown API key type: %{KEY_TYPE}"
  UNKNOWN_MODEL_FORMAT: "Unknown model format: %{MODEL_FORMAT}"
  INVALID_API_KEY: "Invalid API key: %{KEY_TYPE}"
  API_KEY_SAVED: "API key for %{ENV_VAR_NAME} saved"
  UNKNOWN_DOCKERFILE: "Unknown Dockerfile: %{dockerfile}"
  UNKNOWN_MODEL_TYPE: "Unknown model type: %{model_type}"
  BACKEND_DOCKERFILE_COPIED: "Backend Dockerfile copied from %{source_path} to %{dest_path}"
  FRONTEND_DOCKERFILE_COPIED: "Frontend Dockerfile copied from %{source_path} to %{dest_path}"
  TEMP_DIR_CREATED: "Temporary directory created at %{dir}"
  CURRENT_COMPOSE_STOP_FAILED: "Current containers stop failed"
  CONTAINER_STOPPED_AND_REMOVED: "Container stopped and removed"
  NETWORKS_PRUNED: "Corresponding Docker networks pruned"
  NETWORK_PRUNE_ERROR: "Error pruning corresponding Docker networks: %{error}"
  DOCKER_LIST_ERROR: "Error listing Docker containers: %{error}"
  CONTAINERS_STOPPED_AND_REMOVED: "Docker containers stopped and removed"
  CLI_DESCRIPTION: "MindSearch Docker Launcher - A tool to manage MindSearch docker containers"
  LANGUAGE_HELP: "Set the msdl tool interface language (e.g. en, zh_CN)"
  CONFIG_LANGUAGE_HELP: "Show language configuration prompt"
  LANGUAGE_NOT_AVAILABLE: "Warning: Language '%{lang}' not available. Using English instead."
  SELECT_INTERFACE_LANGUAGE: "Select msdl tool interface language"
  SELECT_BACKEND_LANGUAGE: "Select MindSearch backend language (default is cn)"
  LANGUAGE_CHANGED_RESTARTING: "Language changed, restarting msdl..."
  SELECT_SEARCH_ENGINE: "Select search engine:"
  NO_API_KEY_NEEDED: "No API key needed"
  API_KEY_REQUIRED: "API key required"
  SEARCH_ENGINE_GOOGLE: "Google Search"
  SEARCH_ENGINE_BING: "Bing Search"
  SEARCH_ENGINE_DUCKDUCKGO: "DuckDuckGo Search"
  SEARCH_ENGINE_BRAVE: "Brave Search"
  SEARCH_ENGINE_TENCENT: "Tencent Search"
  TENCENT_ID_REQUIRED: "Please enter your Tencent Search Secret ID"
  TENCENT_KEY_REQUIRED: "Please enter your Tencent Search Secret Key"
  WEB_SEARCH_KEY_REQUIRED: "Please enter your Web Search API Key"
  SEARCH_ENGINE_CONFIGURED: "Search engine %{engine} configured successfully"
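The `%{...}` placeholders in these messages are filled from the keyword arguments that callers pass through `t()`, as seen throughout `docker_manager.py`. A minimal sketch (the error string below is a dummy value):

```python
# Sketch only: shows how a %{error} placeholder from en.yaml is interpolated.
from msdl.i18n import setup_i18n, t

setup_i18n()
msg = t("UNEXPECTED_ERROR", error="connection refused")
print(msg)  # the %{error} slot is replaced by the keyword argument
```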
docker/msdl/translations/zh_CN.yaml
ADDED
@@ -0,0 +1,77 @@
zh_CN:
  SCRIPT_DIR: "脚本目录:%{dir}"
  PROJECT_ROOT: "项目根目录:%{dir}"
  TEMP_DIR: "临时目录:%{dir}"
  DOCKER_LAUNCHER_START: "开始 Docker 启动器流程"
  DOCKER_LAUNCHER_COMPLETE: "Docker 启动器流程完成"
  DIR_CREATED: "创建目录:%{dir}"
  FILE_COPIED: "已复制 %{file} 到 temp 目录"
  FILE_NOT_FOUND: "错误:%{file} 在 templates 目录中不存在"
  CONTAINERS_STOPPED: "已停止并删除现有容器和卷"
  CONTAINER_STOP_ERROR: "停止和删除容器时出错(这可能是正常的,如果没有正在运行的容器):%{error}"
  BUILDING_IMAGES: "开始构建Docker镜像..."
  IMAGES_BUILT: "Docker镜像构建成功"
  STARTING_CONTAINERS: "开始启动Docker容器..."
  STARTING_CONTAINERS_WITH_BUILD: "开始构建并启动Docker容器..."
  CONTAINERS_STARTED: "Docker 容器已成功启动"
  DOCKER_ERROR: "构建或启动 Docker 容器时出错:%{error}"
  DOCKER_OUTPUT: "Docker 命令输出:"
  DOCKER_INSTALLED: "Docker 和 Docker Compose 安装正确"
  DOCKER_INSTALL_ERROR: "错误:Docker 或 Docker Compose 可能没有正确安装:%{error}"
  DOCKER_NOT_FOUND: "错误:Docker 或 Docker Compose 命令未找到。请确保它们已正确安装并添加到PATH中。"
  DOCKER_COMPOSE_NOT_FOUND: "错误:Docker Compose 命令未找到。请确保它已正确安装并添加到PATH中。"
  PATHS_UPDATED: "已更新 docker-compose.yaml 中的路径"
  COMPOSE_FILE_CONTENT: "docker-compose.yaml 文件内容:"
  COMPOSE_FILE_NOT_FOUND: "错误:%{file} 文件不存在"
  COMPOSE_FILE_READ_ERROR: "读取 docker-compose.yaml 文件时出错:%{error}"
  TERMINATION_SIGNAL: "捕获到终止信号。正在优雅地退出..."
  KEYBOARD_INTERRUPT: "捕获到键盘中断。正在优雅地退出..."
  UNEXPECTED_ERROR: "发生未预期的错误:%{error}"
  BACKEND_LANGUAGE_CHOICE: "选择 MindSearch 后端语言(默认为中文)"
  SELECT_INTERFACE_LANGUAGE: "选择 msdl 工具界面语言"
  SELECT_BACKEND_LANGUAGE: "选择 MindSearch 后端语言(默认为中文)"
  CHINESE: "中文 (cn)"
  ENGLISH: "英文 (en)"
  MODEL_DEPLOYMENT_TYPE: "选择模型部署类型:"
  CLOUD_MODEL: "云端模型"
  LOCAL_MODEL: "本地模型"
  MODEL_FORMAT_CHOICE: "选择模型格式:"
  CONFIRM_USE_EXISTING_API_KEY: "是否使用现有的 %{ENV_VAR_NAME} API 密钥?"
  CONFIRM_OVERWRITE_EXISTING_API_KEY: "是否覆盖现有的 %{ENV_VAR_NAME} API 密钥?"
  PLEASE_INPUT_NEW_API_KEY: "请输入新的 %{ENV_VAR_NAME} API 密钥:"
  PLEASE_INPUT_NEW_API_KEY_FROM_ZERO: "请输入新的 %{ENV_VAR_NAME} API 密钥:"
  INVALID_API_KEY_FORMAT: "无效的 API 密钥格式"
  RETRY_API_KEY_INPUT: "重试 API 密钥输入"
  API_KEY_INPUT_CANCELLED: "API 密钥输入已取消"
  UNKNOWN_API_KEY_TYPE: "未知的 API 密钥类型:%{KEY_TYPE}"
  UNKNOWN_MODEL_FORMAT: "未知的模型格式:%{MODEL_FORMAT}"
  INVALID_API_KEY: "无效的 API 密钥:%{KEY_TYPE}"
  API_KEY_SAVED: "%{ENV_VAR_NAME} 的 API 密钥已保存"
  UNKNOWN_DOCKERFILE: "未知的 Dockerfile:%{dockerfile}"
  UNKNOWN_MODEL_TYPE: "未知的模型类型:%{model_type}"
  BACKEND_DOCKERFILE_COPIED: "后端 Dockerfile 已经从 %{source_path} 复制为 %{dest_path}"
  FRONTEND_DOCKERFILE_COPIED: "前端 Dockerfile 已经从 %{source_path} 复制为 %{dest_path}"
  TEMP_DIR_CREATED: "已在 %{dir} 创建临时目录"
  CURRENT_COMPOSE_STOP_FAILED: "当前的容器停止失败"
  CONTAINER_STOPPED_AND_REMOVED: "容器已停止并删除"
  NETWORKS_PRUNED: "已清理对应的Docker网络"
  NETWORK_PRUNE_ERROR: "清理对应的Docker网络时出错:%{error}"
  DOCKER_LIST_ERROR: "列出 Docker 容器时出错:%{error}"
  CONTAINERS_STOPPED_AND_REMOVED: "已停止并删除 Docker 容器"
  CLI_DESCRIPTION: "MindSearch Docker 启动器 - 用于管理 MindSearch docker 容器的工具"
  LANGUAGE_HELP: "设置 msdl 工具界面语言(例如:en, zh_CN)"
  CONFIG_LANGUAGE_HELP: "显示语言配置提示"
  LANGUAGE_NOT_AVAILABLE: "警告:语言'%{lang}'不可用。使用英语作为替代。"
  LANGUAGE_CHANGED_RESTARTING: "语言已更改,正在重启 msdl..."
  SELECT_SEARCH_ENGINE: "选择搜索引擎:"
  NO_API_KEY_NEEDED: "无需 API 密钥"
  API_KEY_REQUIRED: "需要 API 密钥"
  SEARCH_ENGINE_DUCKDUCKGO: "DuckDuckGo 搜索"
  SEARCH_ENGINE_BING: "必应搜索"
  SEARCH_ENGINE_BRAVE: "Brave 搜索"
  SEARCH_ENGINE_GOOGLE: "Google 搜索"
  SEARCH_ENGINE_TENCENT: "腾讯搜索"
  TENCENT_ID_REQUIRED: "请输入您的腾讯搜索 Secret ID"
  TENCENT_KEY_REQUIRED: "请输入您的腾讯搜索 Secret Key"
  WEB_SEARCH_KEY_REQUIRED: "请输入您的网页搜索 API 密钥"
  SEARCH_ENGINE_CONFIGURED: "搜索引擎 %{engine} 配置成功"
docker/msdl/user_interaction.py
ADDED
@@ -0,0 +1,253 @@
from InquirerPy import inquirer
import sys
import os
from pathlib import Path

from msdl.config import (
    CLOUD_LLM_DOCKERFILE,
    LOCAL_LLM_DOCKERFILE,
)
from msdl.i18n import (
    t,
    get_available_languages,
    set_language,
    get_env_variable,
)
from msdl.utils import (
    clean_api_key,
    get_model_formats,
    get_existing_api_key,
    save_api_key_to_env,
    validate_api_key,
)

SEARCH_ENGINES = {
    "DuckDuckGoSearch": {
        "name": "DuckDuckGo",
        "key": "DUCKDUCKGO",
        "requires_key": False,
        "env_var": None
    },
    "BingSearch": {
        "name": "Bing",
        "key": "BING",
        "requires_key": True,
        "env_var": "BING_SEARCH_API_KEY"
    },
    "BraveSearch": {
        "name": "Brave",
        "key": "BRAVE",
        "requires_key": True,
        "env_var": "BRAVE_SEARCH_API_KEY"
    },
    "GoogleSearch": {
        "name": "Google Serper",
        "key": "GOOGLE",
        "requires_key": True,
        "env_var": "GOOGLE_SERPER_API_KEY"
    },
    "TencentSearch": {
        "name": "Tencent",
        "key": "TENCENT",
        "requires_key": True,
        "env_vars": ["TENCENT_SEARCH_SECRET_ID", "TENCENT_SEARCH_SECRET_KEY"]
    }
}


def get_language_choice():
    """Get user's language preference"""
    def _get_language_options():
        available_langs = get_available_languages()
        lang_choices = {
            "en": "English",
            "zh_CN": "中文"
        }
        return [{"name": f"{lang_choices.get(lang, lang)}", "value": lang} for lang in available_langs]

    current_lang = get_env_variable("LAUNCHER_INTERACTION_LANGUAGE")
    if not current_lang:
        lang_options = _get_language_options()
        language = inquirer.select(
            message=t("SELECT_INTERFACE_LANGUAGE"),
            choices=lang_options,
            default="en"
        ).execute()

        if language:
            set_language(language)
            sys.stdout.flush()
            restart_program()


def get_backend_language():
    """Get user's backend language preference"""
    return inquirer.select(
        message=t("SELECT_BACKEND_LANGUAGE"),
        choices=[
            {"name": t("CHINESE"), "value": "cn"},
            {"name": t("ENGLISH"), "value": "en"},
        ],
        default="cn",
    ).execute()


def get_model_choice():
    """Get user's model deployment type preference"""
    model_deployment_type = [
        {
            "name": t("CLOUD_MODEL"),
            "value": CLOUD_LLM_DOCKERFILE
        },
        {
            "name": t("LOCAL_MODEL"),
            "value": LOCAL_LLM_DOCKERFILE
        },
    ]

    return inquirer.select(
        message=t("MODEL_DEPLOYMENT_TYPE"),
        choices=model_deployment_type,
    ).execute()


def get_model_format(model):
    """Get user's model format preference"""
    model_formats = get_model_formats(model)
    return inquirer.select(
        message=t("MODEL_FORMAT_CHOICE"),
        choices=[{
            "name": format,
            "value": format
        } for format in model_formats],
    ).execute()


def _handle_api_key_input(env_var_name, message=None):
    """Handle API key input and validation for a given environment variable"""
    if message is None:
        message = t("PLEASE_INPUT_NEW_API_KEY", ENV_VAR_NAME=env_var_name)
    print(message)

    while True:
        api_key = inquirer.secret(
            message=t("PLEASE_INPUT_NEW_API_KEY_FROM_ZERO", ENV_VAR_NAME=env_var_name)
        ).execute()
        cleaned_api_key = clean_api_key(api_key)

        try:
            save_api_key_to_env(env_var_name, cleaned_api_key, t)
            break
        except ValueError as e:
            print(str(e))
            retry = inquirer.confirm(
                message=t("RETRY_API_KEY_INPUT"), default=True
            ).execute()
            if not retry:
                print(t("API_KEY_INPUT_CANCELLED"))
                sys.exit(1)


def handle_api_key_input(model, model_format):
    """Handle API key input and validation"""
    if model != CLOUD_LLM_DOCKERFILE:
        return

    env_var_name = {
        "internlm_silicon": "SILICON_API_KEY",
        "gpt4": "OPENAI_API_KEY",
        "qwen": "QWEN_API_KEY",
    }.get(model_format)

    existing_api_key = get_existing_api_key(env_var_name)

    if existing_api_key:
        use_existing = inquirer.confirm(
            message=t("CONFIRM_USE_EXISTING_API_KEY", ENV_VAR_NAME=env_var_name),
            default=True,
        ).execute()

        if use_existing:
            return

        print(t("CONFIRM_OVERWRITE_EXISTING_API_KEY", ENV_VAR_NAME=env_var_name))

    try:
        save_api_key_to_env(model_format, clean_api_key(inquirer.secret(
            message=t("PLEASE_INPUT_NEW_API_KEY_FROM_ZERO", ENV_VAR_NAME=env_var_name)
        ).execute()), t)
    except ValueError as e:
        print(str(e))
        retry = inquirer.confirm(
            message=t("RETRY_API_KEY_INPUT"), default=True
        ).execute()
        if not retry:
            print(t("API_KEY_INPUT_CANCELLED"))
            sys.exit(1)


def get_search_engine():
    """Get user's preferred search engine and handle API key if needed"""
    search_engine = inquirer.select(
        message=t("SELECT_SEARCH_ENGINE"),
        choices=[{
            # Note: quoting rewritten here so the nested lookup stays valid on Python < 3.12
            "name": f"{t('SEARCH_ENGINE_' + info['key'])} ({t('NO_API_KEY_NEEDED') if not info['requires_key'] else t('API_KEY_REQUIRED')})",
            "value": engine
        } for engine, info in SEARCH_ENGINES.items()],
    ).execute()

    engine_info = SEARCH_ENGINES[search_engine]

    if engine_info['requires_key']:
        if search_engine == "TencentSearch":
            # Handle Tencent's special case with two keys
            for env_var in engine_info['env_vars']:
                is_id = "ID" in env_var
                message = t("TENCENT_ID_REQUIRED") if is_id else t("TENCENT_KEY_REQUIRED")
                existing_key = get_existing_api_key(env_var)
                if existing_key:
                    use_existing = inquirer.confirm(
                        message=t("CONFIRM_USE_EXISTING_API_KEY", ENV_VAR_NAME=env_var),
                        default=True,
                    ).execute()
                    if not use_existing:
                        _handle_api_key_input(env_var, message)
                else:
                    _handle_api_key_input(env_var, message)
        else:
            # Handle standard case with a single search API key
            env_var = engine_info['env_var']
            existing_key = get_existing_api_key(env_var)
            if existing_key:
                use_existing = inquirer.confirm(
                    message=t("CONFIRM_USE_EXISTING_API_KEY", ENV_VAR_NAME=env_var),
                    default=True,
                ).execute()
                if not use_existing:
                    _handle_api_key_input(env_var, t("WEB_SEARCH_KEY_REQUIRED"))
            else:
                _handle_api_key_input(env_var, t("WEB_SEARCH_KEY_REQUIRED"))

    print(t("SEARCH_ENGINE_CONFIGURED", engine=engine_info['name']))
    return search_engine


def restart_program():
    """Restart the current program with the same arguments"""
    print(t("LANGUAGE_CHANGED_RESTARTING"))
    python = sys.executable
    os.execl(python, python, *sys.argv)


def get_user_choices():
    """Get all user choices in a single function"""
    # Get language preference
    get_language_choice()

    # Get backend language
    backend_language = get_backend_language()

    # Get model choice
    model = get_model_choice()

    # Get model format
    model_format = get_model_format(model)

    # Get search engine choice
    search_engine = get_search_engine()

    # Handle API key if needed
    handle_api_key_input(model, model_format)

    return backend_language, model, model_format, search_engine
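The `SEARCH_ENGINES` table is the single place engines are declared; `get_search_engine()` builds both its menu labels and its key prompts from it. A hypothetical new entry (the engine name and env var below are made up purely for illustration) would follow the same shape:

```python
# Sketch only: "ExampleSearch" and EXAMPLE_SEARCH_API_KEY are hypothetical,
# shown just to illustrate the structure get_search_engine() expects.
SEARCH_ENGINES["ExampleSearch"] = {
    "name": "Example",
    "key": "EXAMPLE",            # would also need SEARCH_ENGINE_EXAMPLE entries in the translation YAMLs
    "requires_key": True,
    "env_var": "EXAMPLE_SEARCH_API_KEY",
}
```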
docker/msdl/utils.py
ADDED
@@ -0,0 +1,257 @@
# msdl/utils.py

import os
import re
import shutil
import sys
import yaml
from functools import lru_cache
from pathlib import Path
from msdl.config import (
    BACKEND_DOCKERFILE_DIR,
    CLOUD_LLM_DOCKERFILE,
    FRONTEND_DOCKERFILE_DIR,
    LOCAL_LLM_DOCKERFILE,
    PACKAGE_DIR,
    REACT_DOCKERFILE,
    TEMP_DIR,
    ENV_FILE_PATH,
)
from msdl.i18n import t


@lru_cache(maxsize=None)
def get_env_variable(var_name, default=None):
    if ENV_FILE_PATH.exists():
        with ENV_FILE_PATH.open("r") as env_file:
            for line in env_file:
                if line.startswith(f"{var_name}="):
                    return line.strip().split("=", 1)[1]
    return os.getenv(var_name, default)


@lru_cache(maxsize=None)
def get_existing_api_key(env_var_name):
    env_vars = read_env_file()
    return env_vars.get(env_var_name)


@lru_cache(maxsize=None)
def read_env_file():
    env_vars = {}
    if ENV_FILE_PATH.exists():
        with ENV_FILE_PATH.open("r") as env_file:
            for line in env_file:
                if "=" in line and not line.strip().startswith("#"):
                    key, value = line.strip().split("=", 1)
                    env_vars[key] = value.strip('"').strip("'")
    return env_vars


def clean_api_key(api_key):
    cleaned_key = api_key.strip()
    cleaned_key = re.sub(r"\s+", "", cleaned_key)
    return cleaned_key


@lru_cache(maxsize=None)
def validate_api_key(api_key, key_type, t):
    basic_pattern = r"^sk-[A-Za-z0-9]+$"
    web_search_pattern = r"^[A-Za-z0-9_\-\.]+$"
    tencent_pattern = r"^[A-Za-z0-9]+$"

    validation_rules = {
        # Model API Keys
        "SILICON_API_KEY": basic_pattern,
        "OPENAI_API_KEY": basic_pattern,
        "QWEN_API_KEY": basic_pattern,
        # Search Engine API Keys
        "BING_SEARCH_API_KEY": web_search_pattern,
        "BRAVE_SEARCH_API_KEY": web_search_pattern,
        "GOOGLE_SERPER_API_KEY": web_search_pattern,
        "TENCENT_SEARCH_SECRET_ID": tencent_pattern,
        "TENCENT_SEARCH_SECRET_KEY": tencent_pattern,
        # Legacy support
        "WEB_SEARCH_API_KEY": web_search_pattern,
    }

    if key_type not in validation_rules:
        raise ValueError(t("UNKNOWN_API_KEY_TYPE", KEY_TYPE=key_type))

    pattern = validation_rules[key_type]
    return re.match(pattern, api_key) is not None


def save_api_key_to_env(key_type, api_key, t):
    """Save API key to .env file

    Args:
        key_type: Environment variable name or model format
        api_key: API key value
        t: Translation function
    """
    # Convert model format to env var name if needed
    env_var_name = {
        "internlm_silicon": "SILICON_API_KEY",
        "gpt4": "OPENAI_API_KEY",
        "qwen": "QWEN_API_KEY",
    }.get(key_type, key_type)  # If not a model format, use key_type directly

    if not validate_api_key(api_key, env_var_name, t):
        raise ValueError(t("INVALID_API_KEY", KEY_TYPE=env_var_name))

    env_vars = read_env_file()
    env_vars[env_var_name] = api_key

    with ENV_FILE_PATH.open("w") as env_file:
        for key, value in env_vars.items():
            env_file.write(f"{key}={value}\n")

    print(t("API_KEY_SAVED", ENV_VAR_NAME=env_var_name))


def ensure_directory(path):
    path = Path(path)
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)
        print(t("DIR_CREATED", dir=path))


def copy_templates_to_temp(template_files):
    template_dir = PACKAGE_DIR / "templates"

    ensure_directory(TEMP_DIR)

    for filename in template_files:
        src = template_dir / filename
        dst = TEMP_DIR / filename
        if src.exists():
            shutil.copy2(src, dst)
            print(t("FILE_COPIED", file=filename))
        else:
            print(t("FILE_NOT_FOUND", file=filename))
            sys.exit(1)


def modify_docker_compose(model_type, backend_language, model_format, search_engine):
    """Modify docker-compose.yaml based on user choices"""
    docker_compose_path = os.path.join(TEMP_DIR, "docker-compose.yaml")
    with open(docker_compose_path, "r") as file:
        compose_data = yaml.safe_load(file)

    # Set the name of the project
    compose_data["name"] = "mindsearch"

    # Configure backend service
    backend_service = compose_data["services"]["backend"]

    # Set environment variables
    if "environment" not in backend_service:
        backend_service["environment"] = []

    # Add or update environment variables
    env_vars = {
        "LANG": backend_language,
        "MODEL_FORMAT": model_format,
        "SEARCH_ENGINE": search_engine
    }

    # Ensure .env file is included
    if "env_file" not in backend_service:
        backend_service["env_file"] = [".env"]
    elif ".env" not in backend_service["env_file"]:
        backend_service["env_file"].append(".env")

    # Set command with all parameters
    command = f"python -m mindsearch.app --lang {backend_language} --model_format {model_format} --search_engine {search_engine}"
    backend_service["command"] = command

    # Convert environment variables to docker-compose format
    backend_service["environment"] = [
        f"{key}={value}" for key, value in env_vars.items()
    ]

    # Configure based on model type
    if model_type == CLOUD_LLM_DOCKERFILE:
        if "deploy" in backend_service:
            del backend_service["deploy"]
        # Remove volumes for cloud deployment
        if "volumes" in backend_service:
            del backend_service["volumes"]
    elif model_type == LOCAL_LLM_DOCKERFILE:
        # Add GPU configuration for local deployment
        if "deploy" not in backend_service:
            backend_service["deploy"] = {
                "resources": {
                    "reservations": {
                        "devices": [
                            {"driver": "nvidia", "count": 1, "capabilities": ["gpu"]}
                        ]
                    }
                }
            }
        # Add volume for cache in local deployment
        backend_service["volumes"] = ["/root/.cache:/root/.cache"]
    else:
        raise ValueError(t("UNKNOWN_DOCKERFILE", dockerfile=model_type))

    # Save the modified docker-compose.yaml
    with open(docker_compose_path, "w") as file:
        yaml.dump(compose_data, file)

    print(
        t(
            "docker_compose_updated",
            mode=(t("CLOUD") if model_type == CLOUD_LLM_DOCKERFILE else t("LOCAL")),
            format=model_format,
        )
    )


def get_model_formats(model_type):
    if model_type == CLOUD_LLM_DOCKERFILE:
        return ["internlm_silicon", "qwen", "gpt4"]
    elif model_type == LOCAL_LLM_DOCKERFILE:
        return ["internlm_server", "internlm_client", "internlm_hf"]
    else:
        raise ValueError(t("UNKNOWN_MODEL_TYPE", model_type=model_type))


def copy_backend_dockerfile(choice):
    """Copy backend Dockerfile to temp directory based on user choice"""
    source_file = Path(BACKEND_DOCKERFILE_DIR) / choice
    dest_file = "backend.dockerfile"
    source_path = PACKAGE_DIR / "templates" / source_file
    dest_path = TEMP_DIR / dest_file

    if not source_path.exists():
        raise FileNotFoundError(t("FILE_NOT_FOUND", file=source_file))

    dest_path.parent.mkdir(parents=True, exist_ok=True)
    dest_path.write_text(source_path.read_text())
    print(
        t(
            "BACKEND_DOCKERFILE_COPIED",
            source_path=str(source_path),
            dest_path=str(dest_path),
        ))


def copy_frontend_dockerfile():
    """Copy frontend Dockerfile to temp directory"""
    source_file = Path(FRONTEND_DOCKERFILE_DIR) / REACT_DOCKERFILE
    dest_file = "frontend.dockerfile"
    source_path = PACKAGE_DIR / "templates" / source_file
    dest_path = TEMP_DIR / dest_file

    if not source_path.exists():
        raise FileNotFoundError(t("FILE_NOT_FOUND", file=source_file))

    dest_path.parent.mkdir(parents=True, exist_ok=True)
    dest_path.write_text(source_path.read_text())
    print(
        t(
            "FRONTEND_DOCKERFILE_COPIED",
            source_path=str(source_path),
            dest_path=str(dest_path),
        ))
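These key-handling helpers are the ones `user_interaction.py` chains together. A condensed sketch of that flow (the key string below is a dummy value for illustration):

```python
# Sketch only: the literal key is a placeholder, not a real credential.
from msdl.utils import clean_api_key, validate_api_key, save_api_key_to_env
from msdl.i18n import setup_i18n, t

setup_i18n()
raw = "  sk-abc123DEF456  "
key = clean_api_key(raw)                           # strips all whitespace
if validate_api_key(key, "OPENAI_API_KEY", t):     # regex check per key type
    save_api_key_to_env("gpt4", key, t)            # "gpt4" maps to OPENAI_API_KEY in .env
```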
docker/setup.py
ADDED
@@ -0,0 +1,24 @@
from setuptools import find_packages, setup

setup(
    name="msdl",
    version="0.1.1",
    description="MindSearch Docker Launcher",
    packages=find_packages(),
    python_requires=">=3.7",
    install_requires=[
        "pyyaml>=6.0",
        "python-i18n>=0.3.9",
        "inquirerpy>=0.3.4",
        "python-dotenv>=0.19.1",
    ],
    entry_points={
        "console_scripts": [
            "msdl=msdl.__main__:main",
        ],
    },
    include_package_data=True,
    package_data={
        "msdl": ["translations/*.yaml", "templates/*"],
    },
)
frontend/React/.gitignore
CHANGED
@@ -14,7 +14,12 @@
 
 # misc
 .DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
 
 npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
+
frontend/React/.prettierignore
CHANGED
@@ -4,4 +4,4 @@ values
 node_modules
 .gitignore
 .prettierignore
-.husky
+.husky
frontend/React/README.md
CHANGED
@@ -1,132 +1,184 @@
|
|
1 |
-
#
|
2 |
-
|
3 |
-
|
4 |
|
5 |
-
|
6 |
-
-
|
7 |
|
8 |
-
|
|
|
|
|
9 |
|
10 |
-
|
11 |
|
12 |
-
|
13 |
-
![windows install](./windows-.png)
|
14 |
|
15 |
-
-
|
16 |
|
17 |
-
|
18 |
|
19 |
-
|
20 |
-
安装完成后,点击“Finish”结束安装。
|
21 |
|
22 |
-
-
|
23 |
|
24 |
-
|
25 |
-
输入 node -v 并回车,如果系统返回了 Node.js 的版本号,说明安装成功。
|
26 |
-
接着,输入 npm -v 并回车,npm 是 Node.js 的包管理器,如果返回了版本号,表示 npm 也已正确安装。
|
27 |
|
28 |
-
|
29 |
-
注意: 由于 Linux 发行版众多,以下以 Ubuntu 为例说明,其他发行版(如 CentOS、Debian 等)的安装方式可能略有不同,可自行查询对应的安装办法。
|
30 |
|
31 |
-
|
32 |
|
33 |
-
|
34 |
|
35 |
-
|
36 |
|
37 |
-
|
38 |
|
39 |
-
|
40 |
-
|
41 |
-
|
|
|
42 |
|
43 |
-
-
|
|
|
44 |
|
45 |
-
|
46 |
|
47 |
-
|
48 |
|
49 |
-
|
50 |
-
- 步骤 1: 访问 Node.js 官网
|
51 |
|
52 |
-
|
53 |
|
54 |
-
|
|
|
|
|
|
|
55 |
|
56 |
-
|
|
|
|
|
57 |
|
58 |
-
|
|
|
|
|
59 |
|
60 |
-
|
61 |
-
|
62 |
-
|
|
|
63 |
|
64 |
-
-
|
|
|
65 |
|
66 |
-
|
|
|
|
|
67 |
|
68 |
-
|
|
|
69 |
|
70 |
-
|
71 |
-
|
72 |
-
```
|
73 |
-
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
74 |
-
```
|
75 |
-
按照提示输入密码以确认安装。安装过程中,可能需要你同意许可协议等。
|
76 |
|
77 |
-
|
78 |
-
|
|
|
|
|
79 |
|
80 |
-
|
81 |
-
在终端中输入以下命令来安装最新版本的Node.js
|
82 |
-
```
|
83 |
-
brew install node
|
84 |
-
```
|
85 |
-
Homebrew会自动下载Node.js的安装包,并处理相关的依赖项和安装过程。你需要等待一段时间,直到安装完成。
|
86 |
|
87 |
-
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
93 |
|
94 |
-
|
95 |
|
96 |
-
|
97 |
-
如需了解更多,可参照:https://nodejs.org/en
|
98 |
|
99 |
-
|
|
|
|
|
|
|
100 |
|
101 |
-
|
102 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
103 |
```
|
104 |
npm install
|
105 |
```
|
106 |
|
107 |
-
##
|
108 |
```
|
109 |
npm start
|
110 |
```
|
111 |
|
112 |
-
|
|
|
|
|
|
|
|
|
|
|
113 |
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
-
|
118 |
|
119 |
```
|
120 |
server: {
|
121 |
port: 8080,
|
122 |
proxy: {
|
123 |
"/solve": {
|
124 |
-
target: "
|
125 |
changeOrigin: true,
|
126 |
}
|
127 |
}
|
128 |
}
|
129 |
-
```
|
130 |
-
|
131 |
-
## 知悉
|
132 |
-
- 前端服务基于react开发,如需了解react相关知识,可参考:https://react.dev/
|
|
|
1 |
+
# Notice
|
2 |
+
- If you leave the page (Make the page invisible) and come back again, it will cause sse to reconnect.
|
3 |
+
- the project requires Node.js version >= 18.0.0.
|
4 |
|
5 |
+
# Prepare your dev-environment for frontend
|
6 |
+
[Node.js](https://nodejs.org/en)® is a free, open-source, cross-platform JavaScript runtime environment that lets developers create servers, web apps, command line tools and scripts.
|
7 |
|
8 |
+
# Node.js Installation Guide (Windows, Linux, macOS)
|
9 |
+
## Windows Installation
|
10 |
+
- Step 1: Download Node.js
|
11 |
|
12 |
+
1. Open your web browser and visit the [Node.js official website](https://nodejs.org/en).
|
13 |
|
14 |
+
2. Navigate to the "Downloads" section.
|
|
|
15 |
|
16 |
+
3. Select the desired version (LTS recommended for long-term stability). As of August 2024, the latest LTS version might be v20.x.x.
|
17 |
|
18 |
+
4. Click on the "Windows Installer (.msi)" link to download the installation package.
|
19 |
|
20 |
+
- Step 2: Install Node.js
|
|
|
21 |
|
22 |
+
1. Double-click the downloaded .msi file to start the installation wizard.
|
23 |
|
24 |
+
2. Click "Next" to proceed.
|
|
|
|
|
25 |
|
26 |
+
3. Read and accept the license agreement by checking the "I accept the terms in the License Agreement" box.
|
|
|
27 |
|
28 |
+
4. Click "Next" again and select the installation directory. It's recommended to change the default location to avoid installing in the C drive.
|
29 |
|
30 |
+
5. Continue clicking "Next" to use the default settings until you reach the "Install" button.
|
31 |
|
32 |
+
6. Click "Install" to start the installation process.
|
33 |
|
34 |
+
7. Wait for the installation to complete and click "Finish" to exit the installation wizard.
|
35 |
|
36 |
+
- Step 3: Verify Installation
|
37 |
+
1. Open the Command Prompt (cmd) by pressing `Win + R`, typing `cmd`, and pressing Enter.
|
38 |
+
2. Type `node -v` and press Enter. You should see the installed Node.js version displayed.
|
39 |
+
3. Type `npm -v` and press Enter to verify the installed npm version. npm is the package manager that comes bundled with Node.js.
|
40 |
|
41 |
+
- Step 4: Configure npm Global Path (Optional)
|
42 |
+
If you want to change the default global installation path for npm, follow these steps:
|
43 |
|
44 |
+
1. Open the Command Prompt (cmd) as an administrator.
|
45 |
|
46 |
+
2. Navigate to your Node.js installation directory (e.g., C:\Program Files\nodejs).
|
47 |
|
48 |
+
3. Create two new folders named node_global and node_cache.
|
|
|
49 |
|
50 |
+
4. Run the following commands to set the new paths:
|
51 |
|
52 |
+
```bash
|
53 |
+
npm config set prefix "C:\Program Files\nodejs\node_global"
|
54 |
+
npm config set cache "C:\Program Files\nodejs\node_cache"
|
55 |
+
```
|
56 |
|
57 |
+
5. Open the Environment Variables settings in the System Properties.
|
58 |
+
6. Add `C:\Program Files\nodejs\node_global` to the `PATH` variable under User Variables.
|
59 |
+
7. Optionally, create a new system variable named `NODE_PATH` and set its value to ` C:\Program Files\nodejs\node_global\node_modules`.
|
60 |
|
61 |
+
## Linux Installation
|
62 |
+
- Step 1: Update Your System
|
63 |
+
Before installing Node.js, ensure your Linux system is up-to-date:
|
64 |
|
65 |
+
```bash
|
66 |
+
sudo apt-get update
|
67 |
+
sudo apt-get upgrade
|
68 |
+
```
|
69 |
|
70 |
+
- Step 2: Install Dependencies
|
71 |
+
Node.js requires certain dependencies to function properly:
|
72 |
|
73 |
+
```bash
|
74 |
+
sudo apt-get install build-essential libssl-dev
|
75 |
+
```
|
76 |
|
77 |
+
- Step 3: Download and Install Node.js
|
78 |
+
You can download the Node.js source code or use a package manager like `curl` or `wget` to download a pre-built binary. For simplicity, this guide assumes you're using a package manager.
|
79 |
|
80 |
+
1. Navigate to the Node.js download page for package managers.
|
81 |
+
Follow the instructions for your Linux distribution. For example, on Ubuntu, you can use:
|
|
|
|
|
|
|
|
|
82 |
|
83 |
+
```bash
|
84 |
+
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
85 |
+
sudo apt-get install -y nodejs
|
86 |
+
```
|
87 |
|
88 |
+
Replace 20.x with the desired version number if you don't want the latest version.
|
|
|
|
|
|
|
|
|
|
|
89 |
|
90 |
+
- Step 4: Verify Installation
|
91 |
+
1. Open a terminal.
|
92 |
+
2. Type `node -v` and press Enter to check the Node.js version.
|
93 |
+
3. Type `npm -v` and press Enter to verify the npm version.
|
94 |
+
|
95 |
+
|
96 |
+
## Installing Node.js on macOS
|
97 |
+
|
98 |
+
Installing Node.js on macOS is a straightforward process that can be accomplished using the official installer from the Node.js website or through package managers like Homebrew. This guide will cover both methods.
|
99 |
+
|
100 |
+
### Method 1: Using the Official Installer
|
101 |
+
- Visit the Node.js Website
|
102 |
+
- Open your web browser and navigate to https://nodejs.org/.
|
103 |
+
- Download the Installer
|
104 |
+
- Scroll down to the "Downloads" section.
|
105 |
+
- Click on the "macOS Installer" button to download the .pkg file. Ensure you download the latest version, which as of August 2024, might be v20.x.x or higher.
|
106 |
+
- Install Node.js
|
107 |
+
- Once the download is complete, locate the .pkg file in your Downloads folder.
|
108 |
+
- Double-click the file to start the installation process.
|
109 |
+
- Follow the on-screen instructions. Typically, you'll need to agree to the license agreement, select an installation location (the default is usually fine), and click "Continue" or "Install" until the installation is complete.
|
110 |
+
- Verify the Installation
|
111 |
+
- Open the Terminal application by going to "Finder" > "Applications" > "Utilities" > "Terminal" or using Spotlight Search (press `Cmd + Space` and type "Terminal").
|
112 |
+
- Type `node -v` and press Enter. This command should display the installed version of Node.js.
|
113 |
+
- Type `npm -v` and press Enter to verify that npm, the Node.js package manager, is also installed.
|
114 |
+
|
115 |
+
### Method 2: Using Homebrew
|
116 |
+
If you prefer to use a package manager, Homebrew is a popular choice for macOS.
|
117 |
|
118 |
+
- Install Homebrew (if not already installed)
|
119 |
|
120 |
+
- Open the Terminal.
|
|
|
121 |
|
122 |
+
- Copy and paste the following command into the Terminal and press Enter:
|
123 |
+
```bash
|
124 |
+
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
125 |
+
```
|
126 |
|
127 |
+
- Follow the on-screen instructions to complete the Homebrew installation.
|
128 |
+
|
129 |
+
- Install Node.js with Homebrew
|
130 |
+
- Once Homebrew is installed, update your package list by running brew update in the Terminal.
|
131 |
+
- To install Node.js, run the following command in the Terminal:
|
132 |
+
```bash
|
133 |
+
brew install node
|
134 |
+
```
|
135 |
+
- Homebrew will download and install the latest version of Node.js and npm.
|
136 |
+
- Verify the Installation
|
137 |
+
- As with the official installer method, you can verify the installation by typing node -v and npm -v in the Terminal and pressing Enter.
|
138 |
+
|
139 |
+
### Additional Configuration (Optional)
|
140 |
+
- Configure npm's Global Installation Path (if desired):
|
141 |
+
- You may want to change the default location where globally installed npm packages are stored. Follow the steps outlined in the Node.js documentation or search for guides online to configure this.
|
142 |
+
- Switch to a Different Node.js Version (if needed):
|
143 |
+
- If you need to switch between multiple Node.js versions, consider using a version manager like nvm (Node Version Manager). Follow the instructions on the nvm GitHub page to install and use it.
|
144 |
+
|
145 |
+
|
146 |
+
By following these steps, you should be able to install Node.js successfully on your system. Remember to keep your Node.js and npm versions up to date to take advantage of the latest features and security fixes.

If your environment is already prepared, you can move straight on to the installation and setup instructions below.

# Installation and Setup Instructions

## Installation
```
npm install
```

## Start Server
```
npm start
```

## Visit Server
```
http://localhost:8080
```

Pay attention to the actual port printed in your terminal; it may not be 8080.

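For illustration only, the startup output looks roughly like the lines below; the exact wording, port, and addresses depend on your Vite version and configuration, so always use the URL your own terminal prints:

```
  ➜  Local:   http://localhost:8080/
  ➜  Network: http://192.168.1.5:8080/
```
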
# Config

## How to modify the request URL

- Open the file `vite.config.ts` and modify the proxy target, for example:

```
server: {
  port: 8080,
  proxy: {
    "/solve": {
      target: "{HOST}:{PORT}",
      changeOrigin: true,
    }
  }
}
```
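
For context, here is a minimal sketch of how such a `server` block typically sits inside a complete `vite.config.ts`. The plugin import and the backend address are assumptions for illustration only; keep your project's existing plugins and substitute your backend's real host and port for the `{HOST}:{PORT}` placeholder.

```ts
// vite.config.ts – illustrative sketch, not the project's exact file
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react"; // assumption: your project may use a different plugin

export default defineConfig({
  plugins: [react()],
  server: {
    port: 8080,
    proxy: {
      // forward /solve requests to the backend, e.g. http://127.0.0.1:8002 (example address)
      "/solve": {
        target: "http://127.0.0.1:8002", // replace with your backend's {HOST}:{PORT}
        changeOrigin: true,
      },
    },
  },
});
```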
frontend/React/README_zh-CN.md
ADDED
@@ -0,0 +1,135 @@
# Notice

If you leave the page and come back while a question is still being answered, the SSE connection will reconnect!

# Getting Started

## Please use a Node.js version greater than 18.0.0

## Prepare the Node.js development environment

Node.js is a JavaScript runtime built on Chrome's V8 engine that lets you run JavaScript on the server side. The following are detailed steps for installing Node.js on Windows, Linux, and macOS.

### Installing Node.js on Windows

- Step 1: Visit the Node.js website

  Open your browser and go to the official [Node.js](https://nodejs.org/zh-cn/download/prebuilt-installer) website.

- Step 2: Download the Node.js installer

  Choose the Node.js version and device type you need, then click download, as shown in the image below:
  ![windows install](./windows-.png)

- Step 3: Install Node.js

  Double-click the downloaded installer to start the installation.

  Follow the installation wizard. During installation you can choose the install location and whether to add Node.js to the system PATH environment variable; choosing "Add to PATH" is recommended so that Node.js can be used from the command line anywhere.
  When the installation finishes, click "Finish" to close the installer.

- Step 4: Verify the installation

  Open Command Prompt (CMD) or PowerShell.
  Type `node -v` and press Enter; if the Node.js version number is returned, the installation succeeded.
  Then type `npm -v` and press Enter; npm is the Node.js package manager, and if a version number is returned, npm has also been installed correctly.

### Installing Node.js on Linux

Note: there are many Linux distributions; the steps below use Ubuntu as an example. Installation on other distributions (such as CentOS or Debian) may differ slightly, so look up the instructions for your distribution.

- Step 1: Update your package manager

  Open a terminal.

  Type `sudo apt update` and press Enter to update Ubuntu's package index.

- Step 2: Install Node.js

  On Ubuntu 18.04 and later, Node.js can be installed directly from the Ubuntu repositories.
  Type `sudo apt install nodejs npm` and press Enter.
  For older Ubuntu releases, or if you need a specific Node.js version, you may need a third-party repository such as NodeSource (see the sketch after this section).

- Step 3: Verify the installation

  In the terminal, type `node -v` and `npm -v` to verify that Node.js and npm were installed correctly.

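A hedged NodeSource sketch for that case: `setup_20.x` is just one of the setup scripts NodeSource publishes, so check the NodeSource documentation for the release you actually want before running anything.

```bash
# Example: install a Node.js 20.x release from NodeSource on Ubuntu/Debian
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
sudo apt install -y nodejs   # the NodeSource package bundles npm as well
```
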
### Installing Node.js on macOS

#### Install from the official download

- Step 1: Visit the Node.js website

  Open your browser and go to the official Node.js website.

- Step 2: Download the Node.js installer

  On the homepage, find the installer for macOS (usually a .pkg file) and click to download it.

- Step 3: Install Node.js

  Locate the downloaded .pkg file and double-click to open it.
  Follow the installation wizard.
  When the installation finishes, click "Close" to close the installer.

- Step 4: Verify the installation

  Open Terminal.

  Type `node -v` and `npm -v` to verify that Node.js and npm were installed correctly.

#### Install with Homebrew

Prerequisite: make sure Homebrew is already installed on your macOS. If it is not, you can install it with the following command (run in the terminal):
```
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
```
Enter your password when prompted to confirm the installation. During the process you may also need to accept a license agreement.

- Open the terminal:
  Find and open the "Terminal" application on macOS.

- Install Node.js with Homebrew:
  Enter the following command in the terminal to install the latest version of Node.js:
```
brew install node
```
Homebrew will automatically download the Node.js package and handle the related dependencies and installation. Wait until the installation completes.

- Verify the installation:
  After the installation completes, verify that Node.js was installed successfully by entering:
```
node -v
```
If the terminal prints the Node.js version number, the installation succeeded. You can also enter `npm -v` to verify that npm (the Node.js package manager) was installed as well.

After completing the steps above, you should be able to install and run Node.js on your Windows, Linux, or macOS system.

### More

For more information, see: https://nodejs.org/en

If your environment is already prepared, continue to the next step.

## Install Dependencies

Go to the root directory of the frontend project:
```
npm install
```

## Start

```
npm start
```

After a successful start, an accessible local URL will be printed.

## Configuration

### API Request Configuration

- Configure the proxy in `vite.config.ts`, for example:

```
server: {
  port: 8080,
  proxy: {
    "/solve": {
      target: "{HOST}:{PORT}",
      changeOrigin: true,
    }
  }
}
```

## Notes

- The frontend service is built with React. To learn more about React, see: https://react.dev/
|
frontend/React/package.json
CHANGED
@@ -5,7 +5,13 @@
    "type": "module",
    "scripts": {
      "start": "vite --host --mode dev",
+     "start:dev": "vite --host --mode dev",
+     "start:staging": "vite --host --mode staging",
+     "start:prod": "vite --host --mode production",
      "build": "tsc && vite build",
+     "build:dev": "tsc && vite build --mode dev",
+     "build:staging": "tsc && vite build --mode staging",
+     "build:prod": "tsc && vite build --mode production",
      "preview": "vite preview",
      "prettier": "prettier --write ."
    },
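The scripts added above are thin wrappers around Vite's `--mode` flag; as documented by Vite, a mode also selects the matching `.env.[mode]` file if one exists. A quick usage sketch (script names taken from the diff above):

```bash
npm run start:staging   # dev server using the "staging" mode
npm run build:prod      # type-check, then build using the "production" mode
```
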
frontend/React/src/App.module.less
CHANGED
@@ -8,12 +8,10 @@
  }

  .content {
-   padding
+   padding: 64px 0 16px 0;
    width: 100%;
    height: 100%;
    box-sizing: border-box;
-   // display: flex;
-   // justify-content: center;
  }

  .header {
@@ -51,4 +49,4 @@
    display: flex;
    align-items: center;
  }
- }
+ }
|
frontend/React/src/App.tsx
CHANGED
@@ -1,7 +1,8 @@
  import style from "./App.module.less";
-
+
  import { BrowserRouter } from "react-router-dom";
  import RouterRoutes from "@/routes/routes";
+ import Logo from "@/assets/logo.svg";

  function App() {
    return (
@@ -12,6 +13,7 @@ function App() {
        <img src={Logo} />
      </div>
    </div>
+
    <div className={style.content}>
      <RouterRoutes />
    </div>
|
frontend/React/src/assets/background.png
CHANGED
Git LFS Details
|
frontend/React/src/assets/show-right-icon.png
CHANGED
Git LFS Details
|
frontend/React/src/components/iconfont/index.tsx
DELETED
@@ -1,7 +0,0 @@
- import { createFromIconfontCN } from "@ant-design/icons";
-
- const IconFont = createFromIconfontCN({
-   scriptUrl: "//at.alicdn.com/t/c/font_3858115_p8dw9q83s0h.js"
- });
-
- export default IconFont;
|
frontend/React/src/config/cgi.ts
DELETED
@@ -1,2 +0,0 @@
- export const mode = import.meta.env.MODE;
- export const GET_SSE_DATA = '/solve';