Upload 11 files
- dev/pytest/pytest_all_tests.sh +14 -0
- dev/pytest/pytest_artifacts.sh +4 -0
- dev/pytest/pytest_config_tests.py +111 -0
- dev/pytest/pytest_model_runtime.sh +13 -0
- dev/pytest/pytest_tools.sh +4 -0
- dev/pytest/pytest_unit_tests.sh +5 -0
- dev/pytest/pytest_vdb.sh +17 -0
- dev/pytest/pytest_workflow.sh +4 -0
- dev/reformat +18 -0
- dev/sync-poetry +18 -0
- dev/update-poetry +13 -0
dev/pytest/pytest_all_tests.sh
ADDED
@@ -0,0 +1,14 @@
#!/bin/bash
set -x

# ModelRuntime
dev/pytest/pytest_model_runtime.sh

# Tools
dev/pytest/pytest_tools.sh

# Workflow
dev/pytest/pytest_workflow.sh

# Unit tests
dev/pytest/pytest_unit_tests.sh
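All of the runners below use paths relative to the repository root, and the aggregate script calls them directly, so they are expected to keep their execute bit. A minimal usage sketch (the checkout path is a placeholder, not part of this commit):

cd /path/to/repo
./dev/pytest/pytest_all_tests.sh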
dev/pytest/pytest_artifacts.sh
ADDED
@@ -0,0 +1,4 @@
#!/bin/bash
set -x

pytest api/tests/artifact_tests/
dev/pytest/pytest_config_tests.py
ADDED
@@ -0,0 +1,111 @@
import yaml  # type: ignore
from dotenv import dotenv_values
from pathlib import Path

BASE_API_AND_DOCKER_CONFIG_SET_DIFF = {
    "APP_MAX_EXECUTION_TIME",
    "BATCH_UPLOAD_LIMIT",
    "CELERY_BEAT_SCHEDULER_TIME",
    "CODE_EXECUTION_API_KEY",
    "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
    "HTTP_REQUEST_MAX_READ_TIMEOUT",
    "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
    "KEYWORD_DATA_SOURCE_TYPE",
    "LOGIN_LOCKOUT_DURATION",
    "LOG_FORMAT",
    "OCI_ACCESS_KEY",
    "OCI_BUCKET_NAME",
    "OCI_ENDPOINT",
    "OCI_REGION",
    "OCI_SECRET_KEY",
    "REDIS_DB",
    "RESEND_API_URL",
    "RESPECT_XFORWARD_HEADERS_ENABLED",
    "SENTRY_DSN",
    "SSRF_DEFAULT_CONNECT_TIME_OUT",
    "SSRF_DEFAULT_MAX_RETRIES",
    "SSRF_DEFAULT_READ_TIME_OUT",
    "SSRF_DEFAULT_TIME_OUT",
    "SSRF_DEFAULT_WRITE_TIME_OUT",
    "UPSTASH_VECTOR_TOKEN",
    "UPSTASH_VECTOR_URL",
    "USING_UGC_INDEX",
    "WEAVIATE_BATCH_SIZE",
    "WEAVIATE_GRPC_ENABLED",
}

BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = {
    "BATCH_UPLOAD_LIMIT",
    "CELERY_BEAT_SCHEDULER_TIME",
    "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
    "HTTP_REQUEST_MAX_READ_TIMEOUT",
    "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
    "KEYWORD_DATA_SOURCE_TYPE",
    "LOGIN_LOCKOUT_DURATION",
    "LOG_FORMAT",
    "OPENDAL_FS_ROOT",
    "OPENDAL_S3_ACCESS_KEY_ID",
    "OPENDAL_S3_BUCKET",
    "OPENDAL_S3_ENDPOINT",
    "OPENDAL_S3_REGION",
    "OPENDAL_S3_ROOT",
    "OPENDAL_S3_SECRET_ACCESS_KEY",
    "OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
    "PGVECTOR_MAX_CONNECTION",
    "PGVECTOR_MIN_CONNECTION",
    "PGVECTO_RS_DATABASE",
    "PGVECTO_RS_HOST",
    "PGVECTO_RS_PASSWORD",
    "PGVECTO_RS_PORT",
    "PGVECTO_RS_USER",
    "RESPECT_XFORWARD_HEADERS_ENABLED",
    "SCARF_NO_ANALYTICS",
    "SSRF_DEFAULT_CONNECT_TIME_OUT",
    "SSRF_DEFAULT_MAX_RETRIES",
    "SSRF_DEFAULT_READ_TIME_OUT",
    "SSRF_DEFAULT_TIME_OUT",
    "SSRF_DEFAULT_WRITE_TIME_OUT",
    "STORAGE_OPENDAL_SCHEME",
    "SUPABASE_API_KEY",
    "SUPABASE_BUCKET_NAME",
    "SUPABASE_URL",
    "USING_UGC_INDEX",
    "VIKINGDB_CONNECTION_TIMEOUT",
    "VIKINGDB_SOCKET_TIMEOUT",
    "WEAVIATE_BATCH_SIZE",
    "WEAVIATE_GRPC_ENABLED",
}

API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())
DOCKER_COMPOSE_CONFIG_SET = set()

with open(Path("docker") / Path("docker-compose.yaml")) as f:
    DOCKER_COMPOSE_CONFIG_SET = set(yaml.safe_load(f.read())["x-shared-env"].keys())


def test_yaml_config():
    # set difference: keys present only in api/.env.example, minus the allowed exceptions
    DIFF_API_WITH_DOCKER = (
        API_CONFIG_SET - DOCKER_CONFIG_SET - BASE_API_AND_DOCKER_CONFIG_SET_DIFF
    )
    if DIFF_API_WITH_DOCKER:
        print(
            f"API and Docker config sets are different with key: {DIFF_API_WITH_DOCKER}"
        )
        raise Exception("API and Docker config sets are different")
    DIFF_API_WITH_DOCKER_COMPOSE = (
        API_CONFIG_SET
        - DOCKER_COMPOSE_CONFIG_SET
        - BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF
    )
    if DIFF_API_WITH_DOCKER_COMPOSE:
        print(
            f"API and Docker Compose config sets are different with key: {DIFF_API_WITH_DOCKER_COMPOSE}"
        )
        raise Exception("API and Docker Compose config sets are different")
    print("All tests passed!")


if __name__ == "__main__":
    test_yaml_config()
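The test above compares the keys of api/.env.example, docker/.env.example, and the x-shared-env block of docker/docker-compose.yaml, after subtracting the exception sets. Because those paths are relative, a plausible way to run it (the invocation is an assumption, not part of this commit) is from the repository root:

pytest dev/pytest/pytest_config_tests.py
# or directly, via the __main__ guard:
python dev/pytest/pytest_config_tests.py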
dev/pytest/pytest_model_runtime.sh
ADDED
@@ -0,0 +1,13 @@
#!/bin/bash
set -x

pytest api/tests/integration_tests/model_runtime/anthropic \
  api/tests/integration_tests/model_runtime/azure_openai \
  api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \
  api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference \
  api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py \
  api/tests/integration_tests/model_runtime/upstage \
  api/tests/integration_tests/model_runtime/fireworks \
  api/tests/integration_tests/model_runtime/nomic \
  api/tests/integration_tests/model_runtime/mixedbread \
  api/tests/integration_tests/model_runtime/voyage
dev/pytest/pytest_tools.sh
ADDED
@@ -0,0 +1,4 @@
#!/bin/bash
set -x

pytest api/tests/integration_tests/tools/test_all_provider.py
dev/pytest/pytest_unit_tests.sh
ADDED
@@ -0,0 +1,5 @@
#!/bin/bash
set -x

# libs
pytest api/tests/unit_tests
dev/pytest/pytest_vdb.sh
ADDED
@@ -0,0 +1,17 @@
#!/bin/bash
set -x

pytest api/tests/integration_tests/vdb/chroma \
  api/tests/integration_tests/vdb/milvus \
  api/tests/integration_tests/vdb/pgvecto_rs \
  api/tests/integration_tests/vdb/pgvector \
  api/tests/integration_tests/vdb/qdrant \
  api/tests/integration_tests/vdb/weaviate \
  api/tests/integration_tests/vdb/elasticsearch \
  api/tests/integration_tests/vdb/vikingdb \
  api/tests/integration_tests/vdb/baidu \
  api/tests/integration_tests/vdb/tcvectordb \
  api/tests/integration_tests/vdb/upstash \
  api/tests/integration_tests/vdb/couchbase \
  api/tests/integration_tests/vdb/oceanbase \
  api/tests/integration_tests/vdb/tidb_vector
dev/pytest/pytest_workflow.sh
ADDED
@@ -0,0 +1,4 @@
#!/bin/bash
set -x

pytest api/tests/integration_tests/workflow
dev/reformat
ADDED
@@ -0,0 +1,18 @@
#!/bin/bash

set -x

# style checks rely on commands in path
if ! command -v ruff &> /dev/null || ! command -v dotenv-linter &> /dev/null; then
    echo "Installing linting tools (Ruff, dotenv-linter ...) ..."
    poetry install -C api --only lint
fi

# run ruff linter
poetry run -C api ruff check --fix ./

# run ruff formatter
poetry run -C api ruff format ./

# run dotenv-linter linter
poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example
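dev/reformat installs the api project's lint dependency group on demand, then runs Ruff (check with --fix, then format) and dotenv-linter over the two .env.example files. A hedged usage sketch, assuming it is invoked from the repository root before committing:

./dev/reformat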
dev/sync-poetry
ADDED
@@ -0,0 +1,18 @@
#!/bin/bash

# rely on `poetry` in path
if ! command -v poetry &> /dev/null; then
    echo "Installing Poetry ..."
    pip install poetry
fi

# check poetry.lock in sync with pyproject.toml
poetry check -C api --lock
if [ $? -ne 0 ]; then
    # update poetry.lock
    # refreshing lockfile only without updating locked versions
    echo "poetry.lock is outdated, refreshing without updating locked versions ..."
    poetry lock -C api
else
    echo "poetry.lock is ready."
fi
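dev/sync-poetry only refreshes poetry.lock when `poetry check --lock` reports drift from pyproject.toml, whereas dev/update-poetry below bumps the locked versions themselves. A sketch of how the two might be used after editing api/pyproject.toml (the workflow shown is an assumption, not part of this commit):

./dev/sync-poetry      # re-lock without changing locked versions
./dev/update-poetry    # or: update dependencies to the latest allowed versions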
dev/update-poetry
ADDED
@@ -0,0 +1,13 @@
#!/bin/bash

# rely on `poetry` in path
if ! command -v poetry &> /dev/null; then
    echo "Installing Poetry ..."
    pip install poetry
fi

# refreshing lockfile, updating locked versions
poetry update -C api

# check poetry.lock in sync with pyproject.toml
poetry check -C api --lock