Upload 181 files
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- api/.idea/icon.png +0 -0
- api/.idea/vcs.xml +17 -0
- api/.vscode/launch.json.example +61 -0
- api/configs/__init__.py +3 -0
- api/configs/app_config.py +96 -0
- api/configs/deploy/__init__.py +28 -0
- api/configs/enterprise/__init__.py +20 -0
- api/configs/extra/__init__.py +10 -0
- api/configs/extra/notion_config.py +36 -0
- api/configs/extra/sentry_config.py +28 -0
- api/configs/feature/__init__.py +822 -0
- api/configs/feature/hosted_service/__init__.py +239 -0
- api/configs/middleware/__init__.py +279 -0
- api/configs/middleware/cache/__init__.py +0 -0
- api/configs/middleware/cache/redis_config.py +85 -0
- api/configs/middleware/storage/aliyun_oss_storage_config.py +45 -0
- api/configs/middleware/storage/amazon_s3_storage_config.py +45 -0
- api/configs/middleware/storage/azure_blob_storage_config.py +30 -0
- api/configs/middleware/storage/baidu_obs_storage_config.py +30 -0
- api/configs/middleware/storage/google_cloud_storage_config.py +20 -0
- api/configs/middleware/storage/huawei_obs_storage_config.py +30 -0
- api/configs/middleware/storage/oci_storage_config.py +35 -0
- api/configs/middleware/storage/opendal_storage_config.py +9 -0
- api/configs/middleware/storage/supabase_storage_config.py +25 -0
- api/configs/middleware/storage/tencent_cos_storage_config.py +35 -0
- api/configs/middleware/storage/volcengine_tos_storage_config.py +35 -0
- api/configs/middleware/vdb/analyticdb_config.py +51 -0
- api/configs/middleware/vdb/baidu_vector_config.py +45 -0
- api/configs/middleware/vdb/chroma_config.py +40 -0
- api/configs/middleware/vdb/couchbase_config.py +35 -0
- api/configs/middleware/vdb/elasticsearch_config.py +30 -0
- api/configs/middleware/vdb/lindorm_config.py +34 -0
- api/configs/middleware/vdb/milvus_config.py +41 -0
- api/configs/middleware/vdb/myscale_config.py +38 -0
- api/configs/middleware/vdb/oceanbase_config.py +35 -0
- api/configs/middleware/vdb/opensearch_config.py +35 -0
- api/configs/middleware/vdb/oracle_config.py +35 -0
- api/configs/middleware/vdb/pgvector_config.py +45 -0
- api/configs/middleware/vdb/pgvectors_config.py +35 -0
- api/configs/middleware/vdb/qdrant_config.py +35 -0
- api/configs/middleware/vdb/relyt_config.py +35 -0
- api/configs/middleware/vdb/tencent_vector_config.py +50 -0
- api/configs/middleware/vdb/tidb_on_qdrant_config.py +70 -0
- api/configs/middleware/vdb/tidb_vector_config.py +35 -0
- api/configs/middleware/vdb/upstash_config.py +20 -0
- api/configs/middleware/vdb/vikingdb_config.py +50 -0
- api/configs/middleware/vdb/weaviate_config.py +30 -0
- api/configs/packaging/__init__.py +18 -0
- api/configs/remote_settings_sources/__init__.py +17 -0
- api/configs/remote_settings_sources/apollo/__init__.py +55 -0
api/.idea/icon.png
ADDED
![]() |
api/.idea/vcs.xml
ADDED
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="IssueNavigationConfiguration">
    <option name="links">
      <list>
        <IssueNavigationLink>
          <option name="issueRegexp" value="#(\d+)" />
          <option name="linkRegexp" value="https://github.com/langgenius/dify/issues/$1" />
        </IssueNavigationLink>
      </list>
    </option>
  </component>
  <component name="VcsDirectoryMappings">
    <mapping directory="" vcs="Git" />
    <mapping directory="$PROJECT_DIR$/.." vcs="Git" />
  </component>
</project>
api/.vscode/launch.json.example
ADDED
@@ -0,0 +1,61 @@
{
  "version": "0.2.0",
  "compounds": [
    {
      "name": "Launch Flask and Celery",
      "configurations": ["Python: Flask", "Python: Celery"]
    }
  ],
  "configurations": [
    {
      "name": "Python: Flask",
      "consoleName": "Flask",
      "type": "debugpy",
      "request": "launch",
      "python": "${workspaceFolder}/.venv/bin/python",
      "cwd": "${workspaceFolder}",
      "envFile": ".env",
      "module": "flask",
      "justMyCode": true,
      "jinja": true,
      "env": {
        "FLASK_APP": "app.py",
        "GEVENT_SUPPORT": "True"
      },
      "args": [
        "run",
        "--port=5001"
      ]
    },
    {
      "name": "Python: Celery",
      "consoleName": "Celery",
      "type": "debugpy",
      "request": "launch",
      "python": "${workspaceFolder}/.venv/bin/python",
      "cwd": "${workspaceFolder}",
      "module": "celery",
      "justMyCode": true,
      "envFile": ".env",
      "console": "integratedTerminal",
      "env": {
        "FLASK_APP": "app.py",
        "FLASK_DEBUG": "1",
        "GEVENT_SUPPORT": "True"
      },
      "args": [
        "-A",
        "app.celery",
        "worker",
        "-P",
        "gevent",
        "-c",
        "1",
        "--loglevel",
        "DEBUG",
        "-Q",
        "dataset,generation,mail,ops_trace,app_deletion"
      ]
    }
  ]
}
api/configs/__init__.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
from .app_config import DifyConfig

# Module-level singleton: the rest of the application imports `dify_config`
# instead of instantiating DifyConfig itself.
dify_config = DifyConfig()
|
api/configs/app_config.py
ADDED
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
from typing import Any
|
3 |
+
|
4 |
+
from pydantic.fields import FieldInfo
|
5 |
+
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
|
6 |
+
|
7 |
+
from .deploy import DeploymentConfig
|
8 |
+
from .enterprise import EnterpriseFeatureConfig
|
9 |
+
from .extra import ExtraServiceConfig
|
10 |
+
from .feature import FeatureConfig
|
11 |
+
from .middleware import MiddlewareConfig
|
12 |
+
from .packaging import PackagingInfo
|
13 |
+
from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
|
14 |
+
from .remote_settings_sources.apollo import ApolloSettingsSource
|
15 |
+
|
16 |
+
logger = logging.getLogger(__name__)
|
17 |
+
|
18 |
+
|
19 |
+
class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
|
20 |
+
def __init__(self, settings_cls: type[BaseSettings]):
|
21 |
+
super().__init__(settings_cls)
|
22 |
+
|
23 |
+
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
|
24 |
+
raise NotImplementedError
|
25 |
+
|
26 |
+
def __call__(self) -> dict[str, Any]:
|
27 |
+
current_state = self.current_state
|
28 |
+
remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
|
29 |
+
if not remote_source_name:
|
30 |
+
return {}
|
31 |
+
|
32 |
+
remote_source: RemoteSettingsSource | None = None
|
33 |
+
match remote_source_name:
|
34 |
+
case RemoteSettingsSourceName.APOLLO:
|
35 |
+
remote_source = ApolloSettingsSource(current_state)
|
36 |
+
case _:
|
37 |
+
logger.warning(f"Unsupported remote source: {remote_source_name}")
|
38 |
+
return {}
|
39 |
+
|
40 |
+
d: dict[str, Any] = {}
|
41 |
+
|
42 |
+
for field_name, field in self.settings_cls.model_fields.items():
|
43 |
+
field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
|
44 |
+
field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
|
45 |
+
if field_value is not None:
|
46 |
+
d[field_key] = field_value
|
47 |
+
|
48 |
+
return d
|
49 |
+
|
50 |
+
|
51 |
+
class DifyConfig(
    # Packaging info
    PackagingInfo,
    # Deployment configs
    DeploymentConfig,
    # Feature configs
    FeatureConfig,
    # Middleware configs
    MiddlewareConfig,
    # Extra service configs
    ExtraServiceConfig,
    # Remote source configs
    RemoteSettingsSourceConfig,
    # Enterprise feature configs
    # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    EnterpriseFeatureConfig,
):
    """Top-level application settings, composed from every config group via MRO."""

    model_config = SettingsConfigDict(
        # read from dotenv format config file
        env_file=".env",
        env_file_encoding="utf-8",
        # ignore extra attributes
        extra="ignore",
    )

    # Before adding any config,
    # please consider to arrange it in the proper config group of existed or added
    # for better readability and maintainability.
    # Thanks for your concentration and consideration.

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # Source priority, highest first: constructor args, process env vars,
        # the remote settings provider, the .env file, then secret files.
        return (
            init_settings,
            env_settings,
            RemoteSettingsSourceFactory(settings_cls),
            dotenv_settings,
            file_secret_settings,
        )
|
api/configs/deploy/__init__.py
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pydantic import Field
|
2 |
+
from pydantic_settings import BaseSettings
|
3 |
+
|
4 |
+
|
5 |
+
class DeploymentConfig(BaseSettings):
    """
    Configuration settings for application deployment
    """

    APPLICATION_NAME: str = Field(
        description="Name of the application, used for identification and logging purposes",
        default="langgenius/dify",
    )

    DEBUG: bool = Field(
        description="Enable debug mode for additional logging and development features",
        default=False,
    )

    EDITION: str = Field(
        description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
        default="SELF_HOSTED",
    )

    DEPLOY_ENV: str = Field(
        description="Deployment environment (e.g., 'PRODUCTION', 'DEVELOPMENT'), default to PRODUCTION",
        default="PRODUCTION",
    )
|
api/configs/enterprise/__init__.py
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pydantic import Field
|
2 |
+
from pydantic_settings import BaseSettings
|
3 |
+
|
4 |
+
|
5 |
+
class EnterpriseFeatureConfig(BaseSettings):
    """
    Configuration for enterprise-level features.
    **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    """

    ENTERPRISE_ENABLED: bool = Field(
        description="Enable or disable enterprise-level features."
        "Before using, please contact business@dify.ai by email to inquire about licensing matters.",
        default=False,
    )

    CAN_REPLACE_LOGO: bool = Field(
        description="Allow customization of the enterprise logo.",
        default=False,
    )
|
api/configs/extra/__init__.py
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from configs.extra.notion_config import NotionConfig
|
2 |
+
from configs.extra.sentry_config import SentryConfig
|
3 |
+
|
4 |
+
|
5 |
+
class ExtraServiceConfig(
    # place the configs in alphabet order
    NotionConfig,
    SentryConfig,
):
    """Aggregate of optional third-party service configurations (no fields of its own)."""
|
api/configs/extra/notion_config.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class NotionConfig(BaseSettings):
    """
    Configuration settings for Notion integration
    """

    NOTION_CLIENT_ID: Optional[str] = Field(
        description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_CLIENT_SECRET: Optional[str] = Field(
        description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_INTEGRATION_TYPE: Optional[str] = Field(
        description="Type of Notion integration."
        " Set to 'internal' for internal integrations, or None for public integrations.",
        default=None,
    )

    NOTION_INTERNAL_SECRET: Optional[str] = Field(
        description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
        default=None,
    )

    NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
        description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
        default=None,
    )
|
api/configs/extra/sentry_config.py
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, NonNegativeFloat
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class SentryConfig(BaseSettings):
    """
    Configuration settings for Sentry error tracking and performance monitoring
    """

    SENTRY_DSN: Optional[str] = Field(
        description="Sentry Data Source Name (DSN)."
        " This is the unique identifier of your Sentry project, used to send events to the correct project.",
        default=None,
    )

    SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry performance monitoring traces."
        " Value between 0.0 and 1.0, where 1.0 means 100% of traces are sent to Sentry.",
        default=1.0,
    )

    SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry profiling."
        " Value between 0.0 and 1.0, where 1.0 means 100% of profiles are sent to Sentry.",
        default=1.0,
    )
|
api/configs/feature/__init__.py
ADDED
@@ -0,0 +1,822 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Annotated, Literal, Optional
|
2 |
+
|
3 |
+
from pydantic import (
|
4 |
+
AliasChoices,
|
5 |
+
Field,
|
6 |
+
HttpUrl,
|
7 |
+
NegativeInt,
|
8 |
+
NonNegativeInt,
|
9 |
+
PositiveFloat,
|
10 |
+
PositiveInt,
|
11 |
+
computed_field,
|
12 |
+
)
|
13 |
+
from pydantic_settings import BaseSettings
|
14 |
+
|
15 |
+
from configs.feature.hosted_service import HostedServiceConfig
|
16 |
+
|
17 |
+
|
18 |
+
class SecurityConfig(BaseSettings):
    """
    Security-related configurations for the application
    """

    SECRET_KEY: str = Field(
        description="Secret key for secure session cookie signing."
        "Make sure you are changing this key for your deployment with a strong key."
        "Generate a strong key using `openssl rand -base64 42` or set via the `SECRET_KEY` environment variable.",
        default="",
    )

    RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
        description="Duration in minutes for which a password reset token remains valid",
        default=5,
    )

    LOGIN_DISABLED: bool = Field(
        description="Whether to disable login checks",
        default=False,
    )

    ADMIN_API_KEY_ENABLE: bool = Field(
        description="Whether to enable admin api key for authentication",
        default=False,
    )

    ADMIN_API_KEY: Optional[str] = Field(
        description="admin api key for authentication",
        default=None,
    )
|
49 |
+
|
50 |
+
|
51 |
+
class AppExecutionConfig(BaseSettings):
    """
    Configuration parameters for application execution
    """

    APP_MAX_EXECUTION_TIME: PositiveInt = Field(
        description="Maximum allowed execution time for the application in seconds",
        default=1200,
    )
    APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
        description="Maximum number of concurrent active requests per app (0 for unlimited)",
        default=0,
    )
|
64 |
+
|
65 |
+
|
66 |
+
class CodeExecutionSandboxConfig(BaseSettings):
    """
    Configuration for the code execution sandbox environment
    """

    CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
        description="URL endpoint for the code execution service",
        default="http://sandbox:8194",
    )

    CODE_EXECUTION_API_KEY: str = Field(
        description="API key for accessing the code execution service",
        default="dify-sandbox",
    )

    CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
        description="Connection timeout in seconds for code execution requests",
        default=10.0,
    )

    CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
        description="Read timeout in seconds for code execution requests",
        default=60.0,
    )

    CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
        description="Write timeout in seconds for code execution request",
        default=10.0,
    )

    # Numeric bounds below mirror signed 64-bit integer limits.
    CODE_MAX_NUMBER: PositiveInt = Field(
        description="Maximum allowed numeric value in code execution",
        default=9223372036854775807,
    )

    CODE_MIN_NUMBER: NegativeInt = Field(
        description="Minimum allowed numeric value in code execution",
        default=-9223372036854775807,
    )

    CODE_MAX_DEPTH: PositiveInt = Field(
        description="Maximum allowed depth for nested structures in code execution",
        default=5,
    )

    CODE_MAX_PRECISION: PositiveInt = Field(
        description="Maximum number of decimal places for floating-point numbers in code execution",
        default=20,
    )

    CODE_MAX_STRING_LENGTH: PositiveInt = Field(
        description="Maximum allowed length for strings in code execution",
        default=80000,
    )

    CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
        description="Maximum allowed length for string arrays in code execution",
        default=30,
    )

    CODE_MAX_OBJECT_ARRAY_LENGTH: PositiveInt = Field(
        description="Maximum allowed length for object arrays in code execution",
        default=30,
    )

    CODE_MAX_NUMBER_ARRAY_LENGTH: PositiveInt = Field(
        description="Maximum allowed length for numeric arrays in code execution",
        default=1000,
    )
|
135 |
+
|
136 |
+
|
137 |
+
class EndpointConfig(BaseSettings):
    """
    Configuration for various application endpoints and URLs
    """

    CONSOLE_API_URL: str = Field(
        description="Base URL for the console API,"
        "used for login authentication callback or notion integration callbacks",
        default="",
    )

    CONSOLE_WEB_URL: str = Field(
        description="Base URL for the console web interface,used for frontend references and CORS configuration",
        default="",
    )

    SERVICE_API_URL: str = Field(
        description="Base URL for the service API, displayed to users for API access",
        default="",
    )

    APP_WEB_URL: str = Field(
        description="Base URL for the web application, used for frontend references",
        default="",
    )
|
162 |
+
|
163 |
+
|
164 |
+
class FileAccessConfig(BaseSettings):
    """
    Configuration for file access and handling
    """

    # Falls back to CONSOLE_API_URL when FILES_URL is not set (alias order below).
    FILES_URL: str = Field(
        description="Base URL for file preview or download,"
        " used for frontend display and multi-model inputs"
        "Url is signed and has expiration time.",
        validation_alias=AliasChoices("FILES_URL", "CONSOLE_API_URL"),
        alias_priority=1,
        default="",
    )

    FILES_ACCESS_TIMEOUT: int = Field(
        description="Expiration time in seconds for file access URLs",
        default=300,
    )
|
182 |
+
|
183 |
+
|
184 |
+
class FileUploadConfig(BaseSettings):
    """
    Configuration for file upload limitations
    """

    UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description="Maximum allowed file size for uploads in megabytes",
        default=15,
    )

    UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
        description="Maximum number of files allowed in a single upload batch",
        default=5,
    )

    UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description="Maximum allowed image file size for uploads in megabytes",
        default=10,
    )

    UPLOAD_VIDEO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description="video file size limit in Megabytes for uploading files",
        default=100,
    )

    UPLOAD_AUDIO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description="audio file size limit in Megabytes for uploading files",
        default=50,
    )

    BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
        description="Maximum number of files allowed in a batch upload operation",
        default=20,
    )

    WORKFLOW_FILE_UPLOAD_LIMIT: PositiveInt = Field(
        description="Maximum number of files allowed in a workflow upload operation",
        default=10,
    )
|
223 |
+
|
224 |
+
|
225 |
+
class HttpConfig(BaseSettings):
    """
    HTTP-related configurations for the application
    """

    API_COMPRESSION_ENABLED: bool = Field(
        description="Enable or disable gzip compression for HTTP responses",
        default=False,
    )

    # Raw comma-separated value; exposed as a list via the computed field below.
    inner_CONSOLE_CORS_ALLOW_ORIGINS: str = Field(
        description="Comma-separated list of allowed origins for CORS in the console",
        validation_alias=AliasChoices("CONSOLE_CORS_ALLOW_ORIGINS", "CONSOLE_WEB_URL"),
        default="",
    )

    @computed_field
    def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
        return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")

    inner_WEB_API_CORS_ALLOW_ORIGINS: str = Field(
        description="",
        validation_alias=AliasChoices("WEB_API_CORS_ALLOW_ORIGINS"),
        default="*",
    )

    @computed_field
    def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
        return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")

    HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
        PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
    ] = 10

    HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
        PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
    ] = 60

    HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
        PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
    ] = 20

    HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
        description="Maximum allowed size in bytes for binary data in HTTP requests",
        default=10 * 1024 * 1024,
    )

    HTTP_REQUEST_NODE_MAX_TEXT_SIZE: PositiveInt = Field(
        description="Maximum allowed size in bytes for text data in HTTP requests",
        default=1 * 1024 * 1024,
    )

    SSRF_DEFAULT_MAX_RETRIES: PositiveInt = Field(
        description="Maximum number of retries for network requests (SSRF)",
        default=3,
    )

    SSRF_PROXY_ALL_URL: Optional[str] = Field(
        description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
        default=None,
    )

    SSRF_PROXY_HTTP_URL: Optional[str] = Field(
        description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
        default=None,
    )

    SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
        description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
        default=None,
    )

    SSRF_DEFAULT_TIME_OUT: PositiveFloat = Field(
        description="The default timeout period used for network requests (SSRF)",
        default=5,
    )

    SSRF_DEFAULT_CONNECT_TIME_OUT: PositiveFloat = Field(
        description="The default connect timeout period used for network requests (SSRF)",
        default=5,
    )

    SSRF_DEFAULT_READ_TIME_OUT: PositiveFloat = Field(
        description="The default read timeout period used for network requests (SSRF)",
        default=5,
    )

    SSRF_DEFAULT_WRITE_TIME_OUT: PositiveFloat = Field(
        description="The default write timeout period used for network requests (SSRF)",
        default=5,
    )

    RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
        description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
        " when the app is behind a single trusted reverse proxy.",
        default=False,
    )
|
322 |
+
|
323 |
+
|
324 |
+
class InnerAPIConfig(BaseSettings):
    """
    Configuration for internal API functionality
    """

    INNER_API: bool = Field(
        description="Enable or disable the internal API",
        default=False,
    )

    INNER_API_KEY: Optional[str] = Field(
        description="API key for accessing the internal API",
        default=None,
    )
|
338 |
+
|
339 |
+
|
340 |
+
class LoggingConfig(BaseSettings):
    """
    Configuration for application logging
    """

    LOG_LEVEL: str = Field(
        description="Logging level, default to INFO. Set to ERROR for production environments.",
        default="INFO",
    )

    LOG_FILE: Optional[str] = Field(
        description="File path for log output.",
        default=None,
    )

    LOG_FILE_MAX_SIZE: PositiveInt = Field(
        description="Maximum file size for file rotation retention, the unit is megabytes (MB)",
        default=20,
    )

    LOG_FILE_BACKUP_COUNT: PositiveInt = Field(
        description="Maximum file backup count file rotation retention",
        default=5,
    )

    LOG_FORMAT: str = Field(
        description="Format string for log messages",
        default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
    )

    LOG_DATEFORMAT: Optional[str] = Field(
        description="Date format string for log timestamps",
        default=None,
    )

    LOG_TZ: Optional[str] = Field(
        description="Timezone for log timestamps (e.g., 'America/New_York')",
        default="UTC",
    )
|
379 |
+
|
380 |
+
|
381 |
+
class ModelLoadBalanceConfig(BaseSettings):
    """
    Configuration for model load balancing
    """

    MODEL_LB_ENABLED: bool = Field(
        description="Enable or disable load balancing for models",
        default=False,
    )
|
390 |
+
|
391 |
+
|
392 |
+
class BillingConfig(BaseSettings):
    """
    Configuration for platform billing features.
    """

    # Feature flag; off by default (self-hosted deployments presumably leave this off — confirm).
    BILLING_ENABLED: bool = Field(
        description="Enable or disable billing functionality",
        default=False,
    )
|
401 |
+
|
402 |
+
|
403 |
+
class UpdateConfig(BaseSettings):
    """
    Configuration for application update checks.
    """

    # Endpoint queried for new-version information.
    CHECK_UPDATE_URL: str = Field(
        description="URL to check for application updates",
        default="https://updates.dify.ai",
    )
|
412 |
+
|
413 |
+
|
414 |
+
class WorkflowConfig(BaseSettings):
    """
    Configuration for workflow execution.

    These caps bound a single workflow run (step count, wall time, nesting
    depth) and per-variable payload size.
    """

    WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
        description="Maximum number of steps allowed in a single workflow execution",
        default=500,
    )

    # Seconds; default 1200 = 20 minutes.
    WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
        description="Maximum execution time in seconds for a single workflow",
        default=1200,
    )

    # Depth limit for workflows invoking other workflows.
    WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
        description="Maximum allowed depth for nested workflow calls",
        default=5,
    )

    # Depth limit for parallel branches nested inside parallel branches.
    WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
        description="Maximum allowed depth for nested parallel executions",
        default=3,
    )

    # Bytes; default 200 KiB per workflow variable.
    MAX_VARIABLE_SIZE: PositiveInt = Field(
        description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
        default=200 * 1024,
    )
|
443 |
+
|
444 |
+
|
445 |
+
class WorkflowNodeExecutionConfig(BaseSettings):
    """
    Configuration for workflow node execution.
    """

    # Upper bound on tasks submitted to the parallel-node thread pool.
    MAX_SUBMIT_COUNT: PositiveInt = Field(
        description="Maximum number of submitted thread count in a ThreadPool for parallel node execution",
        default=100,
    )
|
454 |
+
|
455 |
+
|
456 |
+
class AuthConfig(BaseSettings):
    """
    Configuration for authentication and OAuth.

    Covers OAuth client credentials (GitHub/Google), token lifetimes, and
    lockout windows for login / password-reset rate limiting.
    """

    # Path only — the host is presumably taken from the console URL; confirm in OAuth handler.
    OAUTH_REDIRECT_PATH: str = Field(
        description="Redirect path for OAuth authentication callbacks",
        default="/console/api/oauth/authorize",
    )

    GITHUB_CLIENT_ID: Optional[str] = Field(
        description="GitHub OAuth client ID",
        default=None,
    )

    GITHUB_CLIENT_SECRET: Optional[str] = Field(
        description="GitHub OAuth client secret",
        default=None,
    )

    GOOGLE_CLIENT_ID: Optional[str] = Field(
        description="Google OAuth client ID",
        default=None,
    )

    GOOGLE_CLIENT_SECRET: Optional[str] = Field(
        description="Google OAuth client secret",
        default=None,
    )

    # Minutes.
    ACCESS_TOKEN_EXPIRE_MINUTES: PositiveInt = Field(
        description="Expiration time for access tokens in minutes",
        default=60,
    )

    # Days; float-typed so fractional days are accepted.
    REFRESH_TOKEN_EXPIRE_DAYS: PositiveFloat = Field(
        description="Expiration time for refresh tokens in days",
        default=30,
    )

    # Seconds; default 86400 = 24 hours.
    LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
        description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
        default=86400,
    )

    # Seconds; default 86400 = 24 hours.
    FORGOT_PASSWORD_LOCKOUT_DURATION: PositiveInt = Field(
        description="Time (in seconds) a user must wait before retrying password reset after exceeding the rate limit.",
        default=86400,
    )
|
505 |
+
|
506 |
+
|
507 |
+
class ModerationConfig(BaseSettings):
    """
    Configuration for content moderation.
    """

    # Buffer size for moderation processing; unit not specified here — presumably characters or tokens, confirm at the call site.
    MODERATION_BUFFER_SIZE: PositiveInt = Field(
        description="Size of the buffer for content moderation processing",
        default=300,
    )
|
516 |
+
|
517 |
+
|
518 |
+
class ToolConfig(BaseSettings):
    """
    Configuration for tool management.
    """

    # HTTP cache max-age for tool icons, in seconds (default 1 hour).
    TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
        description="Maximum age in seconds for caching tool icons",
        default=3600,
    )
|
527 |
+
|
528 |
+
|
529 |
+
class MailConfig(BaseSettings):
    """
    Configuration for email services.

    Supports two providers: SMTP (SMTP_* fields) and Resend (RESEND_* fields),
    selected via MAIL_TYPE.
    """

    # Provider selector: 'smtp', 'resend', or None (mail disabled, presumably — confirm in mail service).
    MAIL_TYPE: Optional[str] = Field(
        description="Email service provider type ('smtp' or 'resend'), default to None.",
        default=None,
    )

    MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
        description="Default email address to use as the sender",
        default=None,
    )

    RESEND_API_KEY: Optional[str] = Field(
        description="API key for Resend email service",
        default=None,
    )

    RESEND_API_URL: Optional[str] = Field(
        description="API URL for Resend email service",
        default=None,
    )

    SMTP_SERVER: Optional[str] = Field(
        description="SMTP server hostname",
        default=None,
    )

    # Default 465 is the conventional implicit-TLS SMTP port.
    SMTP_PORT: Optional[int] = Field(
        description="SMTP server port number",
        default=465,
    )

    SMTP_USERNAME: Optional[str] = Field(
        description="Username for SMTP authentication",
        default=None,
    )

    SMTP_PASSWORD: Optional[str] = Field(
        description="Password for SMTP authentication",
        default=None,
    )

    SMTP_USE_TLS: bool = Field(
        description="Enable TLS encryption for SMTP connections",
        default=False,
    )

    # Opportunistic TLS = STARTTLS upgrade on a plain connection.
    SMTP_OPPORTUNISTIC_TLS: bool = Field(
        description="Enable opportunistic TLS for SMTP connections",
        default=False,
    )

    # Per-IP send rate limit (emails/minute).
    EMAIL_SEND_IP_LIMIT_PER_MINUTE: PositiveInt = Field(
        description="Maximum number of emails allowed to be sent from the same IP address in a minute",
        default=50,
    )
|
588 |
+
|
589 |
+
|
590 |
+
class RagEtlConfig(BaseSettings):
    """
    Configuration for RAG ETL processes.
    """

    # TODO: This config is not only for rag etl, it is also for file upload, we should move it to file upload config
    ETL_TYPE: str = Field(
        description="RAG ETL type ('dify' or 'Unstructured'), default to 'dify'",
        default="dify",
    )

    KEYWORD_DATA_SOURCE_TYPE: str = Field(
        description="Data source type for keyword extraction"
        " ('database' or other supported types), default to 'database'",
        default="database",
    )

    UNSTRUCTURED_API_URL: Optional[str] = Field(
        description="API URL for Unstructured.io service",
        default=None,
    )

    # NOTE(review): default is "" rather than None, unlike UNSTRUCTURED_API_URL — confirm whether that asymmetry is intentional.
    UNSTRUCTURED_API_KEY: Optional[str] = Field(
        description="API key for Unstructured.io service",
        default="",
    )

    # String flag ("false"/"true") consumed by the Unstructured library via environment, presumably — confirm.
    SCARF_NO_ANALYTICS: Optional[str] = Field(
        description="This is about whether to disable Scarf analytics in Unstructured library.",
        default="false",
    )
|
621 |
+
|
622 |
+
|
623 |
+
class DataSetConfig(BaseSettings):
    """
    Configuration for dataset management.

    Cleanup intervals are expressed in days and vary per billing plan.
    """

    # Cleanup interval (days) for sandbox-plan datasets.
    PLAN_SANDBOX_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: sandbox",
        default=30,
    )

    # Cleanup interval (days) for pro/team-plan datasets.
    PLAN_PRO_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: pro and team",
        default=7,
    )

    DATASET_OPERATOR_ENABLED: bool = Field(
        description="Enable or disable dataset operator functionality",
        default=False,
    )

    # Count of TiDB serverless clusters, not a size in bytes.
    TIDB_SERVERLESS_NUMBER: PositiveInt = Field(
        description="number of tidb serverless cluster",
        default=500,
    )

    CREATE_TIDB_SERVICE_JOB_ENABLED: bool = Field(
        description="Enable or disable create tidb service job",
        default=False,
    )

    # Message (not dataset) cleanup interval for the sandbox plan.
    PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for message cleanup operations - plan: sandbox",
        default=30,
    )
|
657 |
+
|
658 |
+
|
659 |
+
class WorkspaceConfig(BaseSettings):
    """
    Configuration for workspace management.
    """

    # Hours; default 72 = 3 days.
    INVITE_EXPIRY_HOURS: PositiveInt = Field(
        description="Expiration time in hours for workspace invitation links",
        default=72,
    )
|
668 |
+
|
669 |
+
|
670 |
+
class IndexingConfig(BaseSettings):
    """
    Configuration for indexing operations.
    """

    # Token cap per segment during document indexing.
    INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
        description="Maximum token length for text segmentation during indexing",
        default=4000,
    )

    # Cap on child chunks shown in preview UIs.
    CHILD_CHUNKS_PREVIEW_NUMBER: PositiveInt = Field(
        description="Maximum number of child chunks to preview",
        default=50,
    )
|
684 |
+
|
685 |
+
|
686 |
+
class MultiModalTransferConfig(BaseSettings):
    """
    Configuration for transferring multimodal (file) content to models.
    """

    # Literal constrains the value to exactly 'base64' or 'url'.
    MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
        description="Format for sending files in multimodal contexts ('base64' or 'url'), default is base64",
        default="base64",
    )
|
691 |
+
|
692 |
+
|
693 |
+
class CeleryBeatConfig(BaseSettings):
    """
    Configuration for the Celery Beat periodic-task scheduler.
    """

    # Interval in days (per the description), despite the generic field name.
    CELERY_BEAT_SCHEDULER_TIME: int = Field(
        description="Interval in days for Celery Beat scheduler execution, default to 1 day",
        default=1,
    )
|
698 |
+
|
699 |
+
|
700 |
+
class PositionConfig(BaseSettings):
    """
    Configuration for ordering and filtering model providers and tools.

    Each raw field is a comma-separated string; the derived properties expose
    them as a stripped list (pins, where order matters) or set (includes /
    excludes, where only membership matters). Empty entries are dropped.
    """

    POSITION_PROVIDER_PINS: str = Field(
        description="Comma-separated list of pinned model providers",
        default="",
    )

    POSITION_PROVIDER_INCLUDES: str = Field(
        description="Comma-separated list of included model providers",
        default="",
    )

    POSITION_PROVIDER_EXCLUDES: str = Field(
        description="Comma-separated list of excluded model providers",
        default="",
    )

    POSITION_TOOL_PINS: str = Field(
        description="Comma-separated list of pinned tools",
        default="",
    )

    POSITION_TOOL_INCLUDES: str = Field(
        description="Comma-separated list of included tools",
        default="",
    )

    POSITION_TOOL_EXCLUDES: str = Field(
        description="Comma-separated list of excluded tools",
        default="",
    )

    @property
    def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
        # List (ordered) because pin position is significant.
        return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]

    @property
    def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
        return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}

    @property
    def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
        return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}

    @property
    def POSITION_TOOL_PINS_LIST(self) -> list[str]:
        # List (ordered) because pin position is significant.
        return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]

    @property
    def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
        return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}

    @property
    def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
        return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
|
754 |
+
|
755 |
+
|
756 |
+
class LoginConfig(BaseSettings):
    """
    Configuration for login methods and self-service registration.
    """

    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
        description="whether to enable email code login",
        default=False,
    )
    ENABLE_EMAIL_PASSWORD_LOGIN: bool = Field(
        description="whether to enable email password login",
        default=True,
    )
    ENABLE_SOCIAL_OAUTH_LOGIN: bool = Field(
        description="whether to enable github/google oauth login",
        default=False,
    )
    # Minutes.
    EMAIL_CODE_LOGIN_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
        description="expiry time in minutes for email code login token",
        default=5,
    )
    ALLOW_REGISTER: bool = Field(
        description="whether to enable register",
        default=False,
    )
    ALLOW_CREATE_WORKSPACE: bool = Field(
        description="whether to enable create workspace",
        default=False,
    )
|
781 |
+
|
782 |
+
|
783 |
+
class AccountConfig(BaseSettings):
    """
    Configuration for account lifecycle operations.
    """

    # Minutes; validity window of the account-deletion confirmation token.
    ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
        # Fixed grammar in the exposed description ("a account" -> "an account").
        description="Duration in minutes for which an account deletion token remains valid",
        default=5,
    )
|
788 |
+
|
789 |
+
|
790 |
+
class FeatureConfig(
    # Aggregate of all feature-level settings groups; pydantic-settings merges
    # the fields of every base class into one settings object.
    # place the configs in alphabet order
    AppExecutionConfig,
    AuthConfig,  # Changed from OAuthConfig to AuthConfig
    BillingConfig,
    CodeExecutionSandboxConfig,
    DataSetConfig,
    EndpointConfig,
    FileAccessConfig,
    FileUploadConfig,
    HttpConfig,
    InnerAPIConfig,
    IndexingConfig,
    LoggingConfig,
    MailConfig,
    ModelLoadBalanceConfig,
    ModerationConfig,
    MultiModalTransferConfig,
    PositionConfig,
    RagEtlConfig,
    SecurityConfig,
    ToolConfig,
    UpdateConfig,
    WorkflowConfig,
    WorkflowNodeExecutionConfig,
    WorkspaceConfig,
    # NOTE(review): LoginConfig/AccountConfig break the alphabetical ordering claimed above.
    LoginConfig,
    AccountConfig,
    # hosted services config
    HostedServiceConfig,
    CeleryBeatConfig,
):
    pass
|
api/configs/feature/hosted_service/__init__.py
ADDED
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, NonNegativeInt, computed_field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class HostedCreditConfig(BaseSettings):
    """
    Configuration for per-model credit pricing on hosted services.
    """

    # Raw mapping string, e.g. 'gpt-4:20,gpt-4o:10'; parsed on demand by get_model_credits.
    HOSTED_MODEL_CREDIT_CONFIG: str = Field(
        description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
        default="",
    )

    def get_model_credits(self, model_name: str) -> int:
        """
        Get credit value for a specific model name.
        Returns 1 if model is not found in configuration (default credit).

        :param model_name: The name of the model to search for
        :return: The credit value for the model
        """
        if not self.HOSTED_MODEL_CREDIT_CONFIG:
            return 1

        try:
            # Single pass over the entries: the original built an intermediate
            # dict and then linearly scanned it, re-stripping each key per
            # iteration. Parsing entry-by-entry avoids the throwaway dict.
            for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(","):
                if ":" not in item:
                    continue  # malformed entry without a credit value; skip
                pattern, credit = item.strip().split(":", 1)
                if pattern.strip() == model_name:
                    return int(credit)
            return 1  # Default quota if no match found
        except ValueError:
            # int(credit) failed on a non-numeric credit value.
            return 1  # Return default quota if parsing fails
|
36 |
+
|
37 |
+
|
38 |
+
class HostedOpenAiConfig(BaseSettings):
    """
    Configuration for hosted OpenAI service.

    Separate model allowlists and flags exist for trial and paid tiers.
    """

    HOSTED_OPENAI_API_KEY: Optional[str] = Field(
        description="API key for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_API_BASE: Optional[str] = Field(
        description="Base URL for hosted OpenAI API",
        default=None,
    )

    HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
        description="Organization ID for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
        description="Enable trial access to hosted OpenAI service",
        default=False,
    )

    # Default is one comma-separated string built from adjacent literals.
    HOSTED_OPENAI_TRIAL_MODELS: str = Field(
        description="Comma-separated list of available models for trial access",
        default="gpt-3.5-turbo,"
        "gpt-3.5-turbo-1106,"
        "gpt-3.5-turbo-instruct,"
        "gpt-3.5-turbo-16k,"
        "gpt-3.5-turbo-16k-0613,"
        "gpt-3.5-turbo-0613,"
        "gpt-3.5-turbo-0125,"
        "text-davinci-003",
    )

    # NonNegativeInt: 0 is allowed (unit not specified here — confirm at usage site).
    HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted OpenAI service usage",
        default=200,
    )

    HOSTED_OPENAI_PAID_ENABLED: bool = Field(
        description="Enable paid access to hosted OpenAI service",
        default=False,
    )

    HOSTED_OPENAI_PAID_MODELS: str = Field(
        description="Comma-separated list of available models for paid access",
        default="gpt-4,"
        "gpt-4-turbo-preview,"
        "gpt-4-turbo-2024-04-09,"
        "gpt-4-1106-preview,"
        "gpt-4-0125-preview,"
        "gpt-3.5-turbo,"
        "gpt-3.5-turbo-16k,"
        "gpt-3.5-turbo-16k-0613,"
        "gpt-3.5-turbo-1106,"
        "gpt-3.5-turbo-0613,"
        "gpt-3.5-turbo-0125,"
        "gpt-3.5-turbo-instruct,"
        "text-davinci-003",
    )
|
101 |
+
|
102 |
+
|
103 |
+
class HostedAzureOpenAiConfig(BaseSettings):
    """
    Configuration for hosted Azure OpenAI service.
    """

    HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
        description="Enable hosted Azure OpenAI service",
        default=False,
    )

    HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
        description="API key for hosted Azure OpenAI service",
        default=None,
    )

    HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
        description="Base URL for hosted Azure OpenAI API",
        default=None,
    )

    # NonNegativeInt: 0 is allowed.
    HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted Azure OpenAI service usage",
        default=200,
    )
|
127 |
+
|
128 |
+
|
129 |
+
class HostedAnthropicConfig(BaseSettings):
    """
    Configuration for hosted Anthropic service.
    """

    HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
        description="Base URL for hosted Anthropic API",
        default=None,
    )

    HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
        description="API key for hosted Anthropic service",
        default=None,
    )

    HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
        description="Enable trial access to hosted Anthropic service",
        default=False,
    )

    # Much larger default than the OpenAI quota — presumably a token count rather than a request count; confirm at usage site.
    HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted Anthropic service usage",
        default=600000,
    )

    HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
        description="Enable paid access to hosted Anthropic service",
        default=False,
    )
|
158 |
+
|
159 |
+
|
160 |
+
class HostedMinmaxConfig(BaseSettings):
    """
    Configuration for hosted Minmax service.
    """

    # NOTE(review): class/docstring say "Minmax" but the field says "MINIMAX" — the
    # env var spelling (MINIMAX) is the one that matters for configuration.
    HOSTED_MINIMAX_ENABLED: bool = Field(
        description="Enable hosted Minmax service",
        default=False,
    )
|
169 |
+
|
170 |
+
|
171 |
+
class HostedSparkConfig(BaseSettings):
    """
    Configuration for hosted Spark service.
    """

    HOSTED_SPARK_ENABLED: bool = Field(
        description="Enable hosted Spark service",
        default=False,
    )
|
180 |
+
|
181 |
+
|
182 |
+
class HostedZhipuAIConfig(BaseSettings):
    """
    Configuration for hosted ZhipuAI service.
    """

    HOSTED_ZHIPUAI_ENABLED: bool = Field(
        description="Enable hosted ZhipuAI service",
        default=False,
    )
|
191 |
+
|
192 |
+
|
193 |
+
class HostedModerationConfig(BaseSettings):
    """
    Configuration for hosted Moderation service.
    """

    HOSTED_MODERATION_ENABLED: bool = Field(
        description="Enable hosted Moderation service",
        default=False,
    )

    # Comma-separated provider names; empty string means none configured.
    HOSTED_MODERATION_PROVIDERS: str = Field(
        description="Comma-separated list of moderation providers",
        default="",
    )
|
207 |
+
|
208 |
+
|
209 |
+
class HostedFetchAppTemplateConfig(BaseSettings):
    """
    Configuration for fetching app templates.
    """

    HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
        # Fixed the garbled description (stray trailing comma, missing punctuation).
        description="Mode for fetching app templates: 'remote', 'db', or 'builtin'. Default is 'remote'.",
        default="remote",
    )

    # Only consulted in 'remote' mode, presumably — confirm at the fetch call site.
    HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
        description="Domain for fetching remote app templates",
        default="https://tmpl.dify.ai",
    )
|
223 |
+
|
224 |
+
|
225 |
+
class HostedServiceConfig(
    # Aggregate of all hosted-service settings groups; pydantic-settings merges
    # the fields of every base class into one settings object.
    # place the configs in alphabet order
    HostedAnthropicConfig,
    HostedAzureOpenAiConfig,
    HostedFetchAppTemplateConfig,
    HostedMinmaxConfig,
    HostedOpenAiConfig,
    HostedSparkConfig,
    HostedZhipuAIConfig,
    # moderation
    HostedModerationConfig,
    # credit config
    HostedCreditConfig,
):
    pass
|
api/configs/middleware/__init__.py
ADDED
@@ -0,0 +1,279 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Any, Literal, Optional
|
2 |
+
from urllib.parse import quote_plus
|
3 |
+
|
4 |
+
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
|
5 |
+
from pydantic_settings import BaseSettings
|
6 |
+
|
7 |
+
from .cache.redis_config import RedisConfig
|
8 |
+
from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
|
9 |
+
from .storage.amazon_s3_storage_config import S3StorageConfig
|
10 |
+
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
|
11 |
+
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
|
12 |
+
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
|
13 |
+
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
|
14 |
+
from .storage.oci_storage_config import OCIStorageConfig
|
15 |
+
from .storage.opendal_storage_config import OpenDALStorageConfig
|
16 |
+
from .storage.supabase_storage_config import SupabaseStorageConfig
|
17 |
+
from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
|
18 |
+
from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
|
19 |
+
from .vdb.analyticdb_config import AnalyticdbConfig
|
20 |
+
from .vdb.baidu_vector_config import BaiduVectorDBConfig
|
21 |
+
from .vdb.chroma_config import ChromaConfig
|
22 |
+
from .vdb.couchbase_config import CouchbaseConfig
|
23 |
+
from .vdb.elasticsearch_config import ElasticsearchConfig
|
24 |
+
from .vdb.lindorm_config import LindormConfig
|
25 |
+
from .vdb.milvus_config import MilvusConfig
|
26 |
+
from .vdb.myscale_config import MyScaleConfig
|
27 |
+
from .vdb.oceanbase_config import OceanBaseVectorConfig
|
28 |
+
from .vdb.opensearch_config import OpenSearchConfig
|
29 |
+
from .vdb.oracle_config import OracleConfig
|
30 |
+
from .vdb.pgvector_config import PGVectorConfig
|
31 |
+
from .vdb.pgvectors_config import PGVectoRSConfig
|
32 |
+
from .vdb.qdrant_config import QdrantConfig
|
33 |
+
from .vdb.relyt_config import RelytConfig
|
34 |
+
from .vdb.tencent_vector_config import TencentVectorDBConfig
|
35 |
+
from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
|
36 |
+
from .vdb.tidb_vector_config import TiDBVectorConfig
|
37 |
+
from .vdb.upstash_config import UpstashConfig
|
38 |
+
from .vdb.vikingdb_config import VikingDBConfig
|
39 |
+
from .vdb.weaviate_config import WeaviateConfig
|
40 |
+
|
41 |
+
|
42 |
+
class StorageConfig(BaseSettings):
    """
    Configuration for the object-storage backend.
    """

    # Literal restricts the value to exactly the supported backend identifiers.
    STORAGE_TYPE: Literal[
        "opendal",
        "s3",
        "aliyun-oss",
        "azure-blob",
        "baidu-obs",
        "google-storage",
        "huawei-obs",
        "oci-storage",
        "tencent-cos",
        "volcengine-tos",
        "supabase",
        "local",
    ] = Field(
        description="Type of storage to use."
        " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
        "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
        default="opendal",
    )

    # Deprecated along with the 'local' backend; kept for backward compatibility.
    STORAGE_LOCAL_PATH: str = Field(
        description="Path for local storage when STORAGE_TYPE is set to 'local'.",
        default="storage",
        deprecated=True,
    )
|
68 |
+
|
69 |
+
|
70 |
+
class VectorStoreConfig(BaseSettings):
    """
    Configuration for the vector store backend.
    """

    # None disables vector-store usage entirely.
    VECTOR_STORE: Optional[str] = Field(
        description="Type of vector store to use for efficient similarity search."
        " Set to None if not using a vector store.",
        default=None,
    )

    VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
        description="Enable whitelist for vector store.",
        default=False,
    )
|
81 |
+
|
82 |
+
|
83 |
+
class KeywordStoreConfig(BaseSettings):
    """
    Configuration for keyword extraction and storage.
    """

    KEYWORD_STORE: str = Field(
        description="Method for keyword extraction and storage."
        " Default is 'jieba', a Chinese text segmentation library.",
        default="jieba",
    )
|
89 |
+
|
90 |
+
|
91 |
+
class DatabaseConfig(BaseSettings):
    """
    Configuration for the primary relational database and its SQLAlchemy engine.

    SQLALCHEMY_DATABASE_URI and SQLALCHEMY_ENGINE_OPTIONS are computed fields
    derived from the DB_* and SQLALCHEMY_* settings below.
    """

    DB_HOST: str = Field(
        description="Hostname or IP address of the database server.",
        default="localhost",
    )

    DB_PORT: PositiveInt = Field(
        description="Port number for database connection.",
        default=5432,
    )

    DB_USERNAME: str = Field(
        description="Username for database authentication.",
        default="postgres",
    )

    DB_PASSWORD: str = Field(
        description="Password for database authentication.",
        default="",
    )

    DB_DATABASE: str = Field(
        description="Name of the database to connect to.",
        default="dify",
    )

    # Empty string means no explicit client_encoding is added to the URI.
    DB_CHARSET: str = Field(
        description="Character set for database connection.",
        default="",
    )

    # Appended verbatim to the URI query string (already '&'-joined).
    DB_EXTRAS: str = Field(
        description="Additional database connection parameters. Example: 'keepalives_idle=60&keepalives=1'",
        default="",
    )

    SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
        description="Database URI scheme for SQLAlchemy connection.",
        default="postgresql",
    )

    @computed_field
    def SQLALCHEMY_DATABASE_URI(self) -> str:
        # Assemble the query-string tail: DB_EXTRAS plus an optional
        # client_encoding from DB_CHARSET; strip('&') normalizes the case
        # where DB_EXTRAS is empty but DB_CHARSET is set.
        db_extras = (
            f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
        ).strip("&")
        db_extras = f"?{db_extras}" if db_extras else ""
        # Username/password are percent-encoded so special characters survive the URI.
        return (
            f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
            f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
            f"{db_extras}"
        )

    SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
        description="Maximum number of database connections in the pool.",
        default=30,
    )

    SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
        description="Maximum number of connections that can be created beyond the pool_size.",
        default=10,
    )

    SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
        description="Number of seconds after which a connection is automatically recycled.",
        default=3600,
    )

    SQLALCHEMY_POOL_PRE_PING: bool = Field(
        description="If True, enables connection pool pre-ping feature to check connections.",
        default=False,
    )

    # bool | str because SQLAlchemy's echo also accepts the string "debug".
    SQLALCHEMY_ECHO: bool | str = Field(
        description="If True, SQLAlchemy will log all SQL statements.",
        default=False,
    )

    @computed_field
    def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
        # Engine kwargs bundle consumed by SQLAlchemy's create_engine;
        # connections are forced to the UTC timezone.
        return {
            "pool_size": self.SQLALCHEMY_POOL_SIZE,
            "max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
            "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
            "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
            "connect_args": {"options": "-c timezone=UTC"},
        }
|
178 |
+
|
179 |
+
|
180 |
+
class CeleryConfig(DatabaseConfig):
    # Inherits DatabaseConfig so the 'database' result backend can reuse
    # SQLALCHEMY_DATABASE_URI.
    CELERY_BACKEND: str = Field(
        default="database",
        description="Backend for Celery task results. Options: 'database', 'redis'.",
    )

    CELERY_BROKER_URL: Optional[str] = Field(
        default=None,
        description="URL of the message broker for Celery tasks.",
    )

    CELERY_USE_SENTINEL: Optional[bool] = Field(
        default=False,
        description="Whether to use Redis Sentinel for high availability.",
    )

    CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
        default=None,
        description="Name of the Redis Sentinel master.",
    )

    CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
        default=0.1,
        description="Timeout for Redis Sentinel socket operations in seconds.",
    )

    @computed_field
    def CELERY_RESULT_BACKEND(self) -> str | None:
        # 'database' results are stored via SQLAlchemy ("db+<uri>"); any other
        # backend value falls back to the broker URL (which may be None).
        if self.CELERY_BACKEND == "database":
            return f"db+{self.SQLALCHEMY_DATABASE_URI}"
        return self.CELERY_BROKER_URL

    @property
    def BROKER_USE_SSL(self) -> bool:
        # A 'rediss://' scheme marks a TLS-enabled Redis broker.
        broker_url = self.CELERY_BROKER_URL
        return bool(broker_url) and broker_url.startswith("rediss://")
|
217 |
+
|
218 |
+
|
219 |
+
class InternalTestConfig(BaseSettings):
    """
    Configuration settings for Internal Test
    """

    # AWS credentials used only by internal test tooling.
    AWS_SECRET_ACCESS_KEY: Optional[str] = Field(default=None, description="Internal test AWS secret access key")

    AWS_ACCESS_KEY_ID: Optional[str] = Field(default=None, description="Internal test AWS access key ID")
|
233 |
+
|
234 |
+
|
235 |
+
# Aggregate settings class: merges every middleware-related config group into
# one settings object via multiple inheritance.
# NOTE(review): the base list is not strictly alphabetical despite the comment
# below; reordering it would change the MRO, so it is left as-is.
class MiddlewareConfig(
    # place the configs in alphabet order
    CeleryConfig,
    DatabaseConfig,
    KeywordStoreConfig,
    RedisConfig,
    # configs of storage and storage providers
    StorageConfig,
    AliyunOSSStorageConfig,
    AzureBlobStorageConfig,
    BaiduOBSStorageConfig,
    GoogleCloudStorageConfig,
    HuaweiCloudOBSStorageConfig,
    OCIStorageConfig,
    OpenDALStorageConfig,
    S3StorageConfig,
    SupabaseStorageConfig,
    TencentCloudCOSStorageConfig,
    VolcengineTOSStorageConfig,
    # configs of vdb and vdb providers
    VectorStoreConfig,
    AnalyticdbConfig,
    ChromaConfig,
    MilvusConfig,
    MyScaleConfig,
    OpenSearchConfig,
    OracleConfig,
    PGVectorConfig,
    PGVectoRSConfig,
    QdrantConfig,
    RelytConfig,
    TencentVectorDBConfig,
    TiDBVectorConfig,
    WeaviateConfig,
    ElasticsearchConfig,
    CouchbaseConfig,
    InternalTestConfig,
    VikingDBConfig,
    UpstashConfig,
    TidbOnQdrantConfig,
    LindormConfig,
    OceanBaseVectorConfig,
    BaiduVectorDBConfig,
):
    pass
|
api/configs/middleware/cache/__init__.py
ADDED
File without changes
|
api/configs/middleware/cache/redis_config.py
ADDED
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class RedisConfig(BaseSettings):
    """
    Configuration settings for Redis connection
    """

    # --- basic connection ---
    REDIS_HOST: str = Field(default="localhost", description="Hostname or IP address of the Redis server")

    REDIS_PORT: PositiveInt = Field(default=6379, description="Port number on which the Redis server is listening")

    REDIS_USERNAME: Optional[str] = Field(default=None, description="Username for Redis authentication (if required)")

    REDIS_PASSWORD: Optional[str] = Field(default=None, description="Password for Redis authentication (if required)")

    REDIS_DB: NonNegativeInt = Field(default=0, description="Redis database number to use (0-15)")

    REDIS_USE_SSL: bool = Field(default=False, description="Enable SSL/TLS for the Redis connection")

    # --- Sentinel (high availability) ---
    REDIS_USE_SENTINEL: Optional[bool] = Field(default=False, description="Enable Redis Sentinel mode for high availability")

    REDIS_SENTINELS: Optional[str] = Field(default=None, description="Comma-separated list of Redis Sentinel nodes (host:port)")

    REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(default=None, description="Name of the Redis Sentinel service to monitor")

    REDIS_SENTINEL_USERNAME: Optional[str] = Field(default=None, description="Username for Redis Sentinel authentication (if required)")

    REDIS_SENTINEL_PASSWORD: Optional[str] = Field(default=None, description="Password for Redis Sentinel authentication (if required)")

    REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(default=0.1, description="Socket timeout in seconds for Redis Sentinel connections")

    # --- Cluster mode ---
    REDIS_USE_CLUSTERS: bool = Field(default=False, description="Enable Redis Clusters mode for high availability")

    REDIS_CLUSTERS: Optional[str] = Field(default=None, description="Comma-separated list of Redis Clusters nodes (host:port)")

    REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(default=None, description="Password for Redis Clusters authentication (if required)")
|
api/configs/middleware/storage/aliyun_oss_storage_config.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class AliyunOSSStorageConfig(BaseSettings):
    """
    Configuration settings for Aliyun Object Storage Service (OSS)
    """

    ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Aliyun OSS bucket to store and retrieve objects")

    ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(default=None, description="Access key ID for authenticating with Aliyun OSS")

    ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(default=None, description="Secret access key for authenticating with Aliyun OSS")

    ALIYUN_OSS_ENDPOINT: Optional[str] = Field(default=None, description="URL of the Aliyun OSS endpoint for your chosen region")

    ALIYUN_OSS_REGION: Optional[str] = Field(default=None, description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')")

    ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(default=None, description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')")

    ALIYUN_OSS_PATH: Optional[str] = Field(default=None, description="Base path within the bucket to store objects (e.g., 'my-app-data/')")
|
api/configs/middleware/storage/amazon_s3_storage_config.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class S3StorageConfig(BaseSettings):
    """
    Configuration settings for S3-compatible object storage
    """

    S3_ENDPOINT: Optional[str] = Field(default=None, description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')")

    S3_REGION: Optional[str] = Field(default=None, description="Region where the S3 bucket is located (e.g., 'us-east-1')")

    S3_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the S3 bucket to store and retrieve objects")

    S3_ACCESS_KEY: Optional[str] = Field(default=None, description="Access key ID for authenticating with the S3 service")

    S3_SECRET_KEY: Optional[str] = Field(default=None, description="Secret access key for authenticating with the S3 service")

    S3_ADDRESS_STYLE: str = Field(default="auto", description="S3 addressing style: 'auto', 'path', or 'virtual'")

    # When enabled, access/secret keys above are ignored in favor of IAM roles.
    S3_USE_AWS_MANAGED_IAM: bool = Field(default=False, description="Use AWS managed IAM roles for authentication instead of access/secret keys")
|
api/configs/middleware/storage/azure_blob_storage_config.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class AzureBlobStorageConfig(BaseSettings):
    """
    Configuration settings for Azure Blob Storage
    """

    AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(default=None, description="Name of the Azure Storage account (e.g., 'mystorageaccount')")

    AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(default=None, description="Access key for authenticating with the Azure Storage account")

    AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(default=None, description="Name of the Azure Blob container to store and retrieve objects")

    AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(default=None, description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')")
|
api/configs/middleware/storage/baidu_obs_storage_config.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class BaiduOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Baidu Object Storage Service (OBS)
    """

    BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')")

    BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(default=None, description="Access Key ID for authenticating with Baidu OBS")

    BAIDU_OBS_SECRET_KEY: Optional[str] = Field(default=None, description="Secret Access Key for authenticating with Baidu OBS")

    BAIDU_OBS_ENDPOINT: Optional[str] = Field(default=None, description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')")
|
api/configs/middleware/storage/google_cloud_storage_config.py
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class GoogleCloudStorageConfig(BaseSettings):
    """
    Configuration settings for Google Cloud Storage
    """

    GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')")

    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(default=None, description="Base64-encoded JSON key file for Google Cloud service account authentication")
|
api/configs/middleware/storage/huawei_obs_storage_config.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class HuaweiCloudOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Huawei Cloud Object Storage Service (OBS)
    """

    HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')")

    HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(default=None, description="Access Key ID for authenticating with Huawei Cloud OBS")

    HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(default=None, description="Secret Access Key for authenticating with Huawei Cloud OBS")

    HUAWEI_OBS_SERVER: Optional[str] = Field(default=None, description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')")
|
api/configs/middleware/storage/oci_storage_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class OCIStorageConfig(BaseSettings):
    """
    Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
    """

    OCI_ENDPOINT: Optional[str] = Field(default=None, description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')")

    OCI_REGION: Optional[str] = Field(default=None, description="OCI region where the bucket is located (e.g., 'us-phoenix-1')")

    OCI_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')")

    OCI_ACCESS_KEY: Optional[str] = Field(default=None, description="Access key (also known as API key) for authenticating with OCI Object Storage")

    OCI_SECRET_KEY: Optional[str] = Field(default=None, description="Secret key associated with the access key for authenticating with OCI Object Storage")
|
api/configs/middleware/storage/opendal_storage_config.py
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pydantic import Field
|
2 |
+
from pydantic_settings import BaseSettings
|
3 |
+
|
4 |
+
|
5 |
+
class OpenDALStorageConfig(BaseSettings):
    # Storage backend selector for OpenDAL; "fs" targets the local filesystem.
    OPENDAL_SCHEME: str = Field(
        description="OpenDAL scheme.",
        default="fs",
    )
|
api/configs/middleware/storage/supabase_storage_config.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class SupabaseStorageConfig(BaseSettings):
    """
    Configuration settings for Supabase Object Storage Service
    """

    SUPABASE_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')")

    SUPABASE_API_KEY: Optional[str] = Field(default=None, description="API KEY for authenticating with Supabase")

    SUPABASE_URL: Optional[str] = Field(default=None, description="URL of the Supabase")
|
api/configs/middleware/storage/tencent_cos_storage_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class TencentCloudCOSStorageConfig(BaseSettings):
    """
    Configuration settings for Tencent Cloud Object Storage (COS)
    """

    TENCENT_COS_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Tencent Cloud COS bucket to store and retrieve objects")

    TENCENT_COS_REGION: Optional[str] = Field(default=None, description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')")

    TENCENT_COS_SECRET_ID: Optional[str] = Field(default=None, description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)")

    TENCENT_COS_SECRET_KEY: Optional[str] = Field(default=None, description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)")

    TENCENT_COS_SCHEME: Optional[str] = Field(default=None, description="Protocol scheme for COS requests: 'https' (recommended) or 'http'")
|
api/configs/middleware/storage/volcengine_tos_storage_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class VolcengineTOSStorageConfig(BaseSettings):
    """
    Configuration settings for Volcengine Tinder Object Storage (TOS)
    """

    VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(default=None, description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')")

    VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(default=None, description="Access Key ID for authenticating with Volcengine TOS")

    VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(default=None, description="Secret Access Key for authenticating with Volcengine TOS")

    VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(default=None, description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')")

    VOLCENGINE_TOS_REGION: Optional[str] = Field(default=None, description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')")
|
api/configs/middleware/vdb/analyticdb_config.py
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, PositiveInt
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class AnalyticdbConfig(BaseSettings):
    """
    Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
    Refer to the following documentation for details on obtaining credentials:
    https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
    """

    # --- API credentials ---
    ANALYTICDB_KEY_ID: Optional[str] = Field(
        description="The Access Key ID provided by Alibaba Cloud for API authentication.",
        default=None,
    )
    ANALYTICDB_KEY_SECRET: Optional[str] = Field(
        description="The Secret Access Key corresponding to the Access Key ID for secure API access.",
        default=None,
    )

    # --- instance location ---
    ANALYTICDB_REGION_ID: Optional[str] = Field(
        description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
        default=None,
    )
    ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
        description="The unique identifier of the AnalyticDB instance you want to connect to.",
        default=None,
    )

    # --- database login ---
    ANALYTICDB_ACCOUNT: Optional[str] = Field(
        description="The account name used to log in to the AnalyticDB instance (usually the initial account created with the instance).",
        default=None,
    )
    ANALYTICDB_PASSWORD: Optional[str] = Field(
        description="The password associated with the AnalyticDB account for database authentication.",
        default=None,
    )
    ANALYTICDB_NAMESPACE: Optional[str] = Field(
        description="The namespace within AnalyticDB for schema isolation (if using namespace feature).",
        default=None,
    )
    ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
        description="The password for accessing the specified namespace within the AnalyticDB instance (if namespace feature is enabled).",
        default=None,
    )

    # --- network & pooling ---
    ANALYTICDB_HOST: Optional[str] = Field(
        description="The host of the AnalyticDB instance you want to connect to.",
        default=None,
    )
    ANALYTICDB_PORT: PositiveInt = Field(
        description="The port of the AnalyticDB instance you want to connect to.",
        default=5432,
    )
    ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(
        description="Min connection of the AnalyticDB database.",
        default=1,
    )
    ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(
        description="Max connection of the AnalyticDB database.",
        default=5,
    )
|
api/configs/middleware/vdb/baidu_vector_config.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, NonNegativeInt, PositiveInt
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class BaiduVectorDBConfig(BaseSettings):
    """
    Configuration settings for Baidu Vector Database
    """

    BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(default=None, description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')")

    BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field(default=30000, description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)")

    BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(default=None, description="Account for authenticating with the Baidu Vector Database")

    BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(default=None, description="API key for authenticating with the Baidu Vector Database service")

    BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(default=None, description="Name of the specific Baidu Vector Database to connect to")

    BAIDU_VECTOR_DB_SHARD: PositiveInt = Field(default=1, description="Number of shards for the Baidu Vector Database (default is 1)")

    BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field(default=3, description="Number of replicas for the Baidu Vector Database (default is 3)")
|
api/configs/middleware/vdb/chroma_config.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, PositiveInt
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class ChromaConfig(BaseSettings):
    """
    Configuration settings for Chroma vector database
    """

    CHROMA_HOST: Optional[str] = Field(default=None, description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')")

    CHROMA_PORT: PositiveInt = Field(default=8000, description="Port number on which the Chroma server is listening (default is 8000)")

    CHROMA_TENANT: Optional[str] = Field(default=None, description="Tenant identifier for multi-tenancy support in Chroma")

    CHROMA_DATABASE: Optional[str] = Field(default=None, description="Name of the Chroma database to connect to")

    CHROMA_AUTH_PROVIDER: Optional[str] = Field(default=None, description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)")

    CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(default=None, description="Authentication credentials for Chroma (format depends on the auth provider)")
|
api/configs/middleware/vdb/couchbase_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class CouchbaseConfig(BaseSettings):
    """
    Couchbase configs
    """

    COUCHBASE_CONNECTION_STRING: Optional[str] = Field(default=None, description="COUCHBASE connection string")

    COUCHBASE_USER: Optional[str] = Field(default=None, description="COUCHBASE user")

    COUCHBASE_PASSWORD: Optional[str] = Field(default=None, description="COUCHBASE password")

    COUCHBASE_BUCKET_NAME: Optional[str] = Field(default=None, description="COUCHBASE bucket name")

    COUCHBASE_SCOPE_NAME: Optional[str] = Field(default=None, description="COUCHBASE scope name")
|
api/configs/middleware/vdb/elasticsearch_config.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field, PositiveInt
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class ElasticsearchConfig(BaseSettings):
    """
    Configuration settings for Elasticsearch
    """

    ELASTICSEARCH_HOST: Optional[str] = Field(default="127.0.0.1", description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')")

    ELASTICSEARCH_PORT: PositiveInt = Field(default=9200, description="Port number on which the Elasticsearch server is listening (default is 9200)")

    ELASTICSEARCH_USERNAME: Optional[str] = Field(default="elastic", description="Username for authenticating with Elasticsearch (default is 'elastic')")

    ELASTICSEARCH_PASSWORD: Optional[str] = Field(default="elastic", description="Password for authenticating with Elasticsearch (default is 'elastic')")
|
api/configs/middleware/vdb/lindorm_config.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Optional
|
2 |
+
|
3 |
+
from pydantic import Field
|
4 |
+
from pydantic_settings import BaseSettings
|
5 |
+
|
6 |
+
|
7 |
+
class LindormConfig(BaseSettings):
    """
    Lindorm configs
    """

    LINDORM_URL: Optional[str] = Field(default=None, description="Lindorm url")
    LINDORM_USERNAME: Optional[str] = Field(default=None, description="Lindorm user")
    LINDORM_PASSWORD: Optional[str] = Field(default=None, description="Lindorm password")
    DEFAULT_INDEX_TYPE: Optional[str] = Field(default="hnsw", description="Lindorm Vector Index Type, hnsw or flat is available in dify")
    DEFAULT_DISTANCE_TYPE: Optional[str] = Field(default="l2", description="Vector Distance Type, support l2, cosinesimil, innerproduct")
    USING_UGC_INDEX: Optional[bool] = Field(default=False, description="Using UGC index will store the same type of Index in a single index but can retrieve separately.")
|
api/configs/middleware/vdb/milvus_config.py
ADDED
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class MilvusConfig(BaseSettings):
    """Connection and feature settings for the Milvus vector database."""

    # Full URI of the Milvus endpoint; defaults to a local standalone deployment.
    MILVUS_URI: Optional[str] = Field(
        default="http://127.0.0.1:19530",
        description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
    )
    # Token-based auth takes precedence over user/password when provided.
    MILVUS_TOKEN: Optional[str] = Field(
        default=None, description="Authentication token for Milvus, if token-based authentication is enabled"
    )
    MILVUS_USER: Optional[str] = Field(
        default=None, description="Username for authenticating with Milvus, if username/password authentication is enabled"
    )
    MILVUS_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with Milvus, if username/password authentication is enabled"
    )
    # Logical database inside the Milvus instance.
    MILVUS_DATABASE: str = Field(
        default="default", description="Name of the Milvus database to connect to (default is 'default')"
    )
    # Disable for Milvus servers older than 2.5.0.
    MILVUS_ENABLE_HYBRID_SEARCH: bool = Field(
        default=True,
        description="Enable hybrid search features (requires Milvus >= 2.5.0). Set to false for compatibility with older versions",
    )
api/configs/middleware/vdb/myscale_config.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class MyScaleConfig(BaseSettings):
    """
    Configuration settings for MyScale vector database
    """

    MYSCALE_HOST: str = Field(
        description="Hostname or IP address of the MyScale server (e.g., 'localhost' or 'myscale.example.com')",
        default="localhost",
    )

    MYSCALE_PORT: PositiveInt = Field(
        description="Port number on which the MyScale server is listening (default is 8123)",
        default=8123,
    )

    MYSCALE_USER: str = Field(
        description="Username for authenticating with MyScale (default is 'default')",
        default="default",
    )

    MYSCALE_PASSWORD: str = Field(
        description="Password for authenticating with MyScale (default is an empty string)",
        default="",
    )

    MYSCALE_DATABASE: str = Field(
        description="Name of the MyScale database to connect to (default is 'default')",
        default="default",
    )

    # Extra options appended when building the full-text-search index.
    MYSCALE_FTS_PARAMS: str = Field(
        # Fixed: description previously ended with a stray unmatched ")".
        description="Additional parameters for MyScale Full Text Search index",
        default="",
    )
api/configs/middleware/vdb/oceanbase_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class OceanBaseVectorConfig(BaseSettings):
    """Connection settings for the OceanBase Vector database."""

    # Server address; unset means OceanBase Vector is not configured.
    OCEANBASE_VECTOR_HOST: Optional[str] = Field(
        default=None, description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')"
    )
    OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
        default=2881, description="Port number on which the OceanBase Vector server is listening (default is 2881)"
    )
    # Authentication credentials.
    OCEANBASE_VECTOR_USER: Optional[str] = Field(
        default=None, description="Username for authenticating with the OceanBase Vector database"
    )
    OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the OceanBase Vector database"
    )
    # Target database name.
    OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
        default=None, description="Name of the OceanBase Vector database to connect to"
    )
api/configs/middleware/vdb/opensearch_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class OpenSearchConfig(BaseSettings):
    """Connection settings for an OpenSearch cluster."""

    # Cluster address; unset means OpenSearch is not configured.
    OPENSEARCH_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
    )
    OPENSEARCH_PORT: PositiveInt = Field(
        default=9200, description="Port number on which the OpenSearch server is listening (default is 9200)"
    )
    # Basic-auth credentials.
    OPENSEARCH_USER: Optional[str] = Field(default=None, description="Username for authenticating with OpenSearch")
    OPENSEARCH_PASSWORD: Optional[str] = Field(default=None, description="Password for authenticating with OpenSearch")
    # Toggles HTTPS vs plain HTTP transport.
    OPENSEARCH_SECURE: bool = Field(
        default=False,
        description="Whether to use SSL/TLS encrypted connection for OpenSearch (True for HTTPS, False for HTTP)",
    )
api/configs/middleware/vdb/oracle_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class OracleConfig(BaseSettings):
    """Connection settings for an Oracle database."""

    # Server address; unset means Oracle is not configured.
    ORACLE_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the Oracle database server (e.g., 'localhost' or 'oracle.example.com')",
    )
    ORACLE_PORT: PositiveInt = Field(
        default=1521, description="Port number on which the Oracle database server is listening (default is 1521)"
    )
    # Authentication credentials.
    ORACLE_USER: Optional[str] = Field(default=None, description="Username for authenticating with the Oracle database")
    ORACLE_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the Oracle database"
    )
    # SID or service name of the target database.
    ORACLE_DATABASE: Optional[str] = Field(
        default=None, description="Name of the Oracle database or service to connect to (e.g., 'ORCL' or 'pdborcl')"
    )
api/configs/middleware/vdb/pgvector_config.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class PGVectorConfig(BaseSettings):
    """Connection and pooling settings for PostgreSQL with the pgvector extension."""

    # Server address; unset means PGVector is not configured.
    PGVECTOR_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
    )
    PGVECTOR_PORT: PositiveInt = Field(
        default=5433, description="Port number on which the PostgreSQL server is listening (default is 5433)"
    )
    # Authentication credentials.
    PGVECTOR_USER: Optional[str] = Field(
        default=None, description="Username for authenticating with the PostgreSQL database"
    )
    PGVECTOR_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the PostgreSQL database"
    )
    PGVECTOR_DATABASE: Optional[str] = Field(
        default=None, description="Name of the PostgreSQL database to connect to"
    )
    # Bounds of the client-side connection pool.
    PGVECTOR_MIN_CONNECTION: PositiveInt = Field(
        default=1, description="Min connection of the PostgreSQL database"
    )
    PGVECTOR_MAX_CONNECTION: PositiveInt = Field(
        default=5, description="Max connection of the PostgreSQL database"
    )
api/configs/middleware/vdb/pgvectors_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class PGVectoRSConfig(BaseSettings):
    """Connection settings for PostgreSQL with the PGVecto.RS extension."""

    # Server address; unset means PGVecto.RS is not configured.
    PGVECTO_RS_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
    )
    PGVECTO_RS_PORT: PositiveInt = Field(
        default=5431,
        description="Port number on which the PostgreSQL server with PGVecto.RS is listening (default is 5431)",
    )
    # Authentication credentials.
    PGVECTO_RS_USER: Optional[str] = Field(
        default=None, description="Username for authenticating with the PostgreSQL database using PGVecto.RS"
    )
    PGVECTO_RS_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the PostgreSQL database using PGVecto.RS"
    )
    PGVECTO_RS_DATABASE: Optional[str] = Field(
        default=None, description="Name of the PostgreSQL database with PGVecto.RS extension to connect to"
    )
api/configs/middleware/vdb/qdrant_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings


class QdrantConfig(BaseSettings):
    """Connection settings for the Qdrant vector database."""

    # Service endpoint; unset means Qdrant is not configured.
    QDRANT_URL: Optional[str] = Field(
        default=None,
        description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
    )
    QDRANT_API_KEY: Optional[str] = Field(
        default=None, description="API key for authenticating with the Qdrant server"
    )
    # Per-operation client timeout.
    QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
        default=20, description="Timeout in seconds for Qdrant client operations (default is 20 seconds)"
    )
    # gRPC transport settings; the gRPC port is only used when gRPC is enabled.
    QDRANT_GRPC_ENABLED: bool = Field(
        default=False,
        description="Whether to enable gRPC support for Qdrant connection (True for gRPC, False for HTTP)",
    )
    QDRANT_GRPC_PORT: PositiveInt = Field(
        default=6334, description="Port number for gRPC connection to Qdrant server (default is 6334)"
    )
api/configs/middleware/vdb/relyt_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class RelytConfig(BaseSettings):
    """Connection settings for the Relyt database."""

    # Server address; unset means Relyt is not configured.
    RELYT_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
    )
    RELYT_PORT: PositiveInt = Field(
        default=9200, description="Port number on which the Relyt server is listening (default is 9200)"
    )
    # Authentication credentials.
    RELYT_USER: Optional[str] = Field(default=None, description="Username for authenticating with the Relyt database")
    RELYT_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the Relyt database"
    )
    RELYT_DATABASE: Optional[str] = Field(
        default="default", description="Name of the Relyt database to connect to (default is 'default')"
    )
api/configs/middleware/vdb/tencent_vector_config.py
ADDED
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings


class TencentVectorDBConfig(BaseSettings):
    """Connection and topology settings for Tencent Vector Database."""

    # Service endpoint; unset means Tencent Vector DB is not configured.
    TENCENT_VECTOR_DB_URL: Optional[str] = Field(
        default=None,
        description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
    )
    TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
        default=None, description="API key for authenticating with the Tencent Vector Database service"
    )
    TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field(
        default=30, description="Timeout in seconds for Tencent Vector Database operations (default is 30 seconds)"
    )
    # Optional username/password credentials.
    TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
        default=None, description="Username for authenticating with the Tencent Vector Database (if required)"
    )
    TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the Tencent Vector Database (if required)"
    )
    # Sharding/replication topology applied to created collections.
    TENCENT_VECTOR_DB_SHARD: PositiveInt = Field(
        default=1, description="Number of shards for the Tencent Vector Database (default is 1)"
    )
    TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
        default=2, description="Number of replicas for the Tencent Vector Database (default is 2)"
    )
    TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
        default=None, description="Name of the specific Tencent Vector Database to connect to"
    )
api/configs/middleware/vdb/tidb_on_qdrant_config.py
ADDED
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings


class TidbOnQdrantConfig(BaseSettings):
    """Settings for the TiDB-on-Qdrant vector store and the TiDB Cloud management APIs."""

    # Qdrant-compatible endpoint exposed by TiDB; unset means the store is not configured.
    TIDB_ON_QDRANT_URL: Optional[str] = Field(default=None, description="Tidb on Qdrant url")
    TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(default=None, description="Tidb on Qdrant api key")
    TIDB_ON_QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
        default=20, description="Tidb on Qdrant client timeout in seconds"
    )
    # gRPC transport settings; the port applies only when gRPC is enabled.
    TIDB_ON_QDRANT_GRPC_ENABLED: bool = Field(
        default=False, description="whether enable grpc support for Tidb on Qdrant connection"
    )
    TIDB_ON_QDRANT_GRPC_PORT: PositiveInt = Field(default=6334, description="Tidb on Qdrant grpc port")
    # TiDB Cloud account API key pair.
    TIDB_PUBLIC_KEY: Optional[str] = Field(default=None, description="Tidb account public key")
    TIDB_PRIVATE_KEY: Optional[str] = Field(default=None, description="Tidb account private key")
    # TiDB Cloud management endpoints.
    TIDB_API_URL: Optional[str] = Field(default=None, description="Tidb API url")
    TIDB_IAM_API_URL: Optional[str] = Field(default=None, description="Tidb IAM API url")
    # Serverless cluster placement and limits.
    TIDB_REGION: Optional[str] = Field(default="regions/aws-us-east-1", description="Tidb serverless region")
    TIDB_PROJECT_ID: Optional[str] = Field(default=None, description="Tidb project id")
    TIDB_SPEND_LIMIT: Optional[int] = Field(default=100, description="Tidb spend limit")
api/configs/middleware/vdb/tidb_vector_config.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class TiDBVectorConfig(BaseSettings):
    """Connection settings for the TiDB Vector database."""

    # Server address; unset means TiDB Vector is not configured.
    TIDB_VECTOR_HOST: Optional[str] = Field(
        default=None,
        description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
    )
    TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
        default=4000, description="Port number on which the TiDB Vector server is listening (default is 4000)"
    )
    # Authentication credentials.
    TIDB_VECTOR_USER: Optional[str] = Field(
        default=None, description="Username for authenticating with the TiDB Vector database"
    )
    TIDB_VECTOR_PASSWORD: Optional[str] = Field(
        default=None, description="Password for authenticating with the TiDB Vector database"
    )
    TIDB_VECTOR_DATABASE: Optional[str] = Field(
        default=None, description="Name of the TiDB Vector database to connect to"
    )
api/configs/middleware/vdb/upstash_config.py
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class UpstashConfig(BaseSettings):
    """Connection settings for the Upstash vector database."""

    # REST endpoint of the Upstash vector service; unset means Upstash is not configured.
    UPSTASH_VECTOR_URL: Optional[str] = Field(
        default=None, description="URL of the upstash server (e.g., 'https://vector.upstash.io')"
    )
    # Bearer token used to authenticate requests.
    UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
        default=None, description="Token for authenticating with the upstash server"
    )
api/configs/middleware/vdb/vikingdb_config.py
ADDED
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class VikingDBConfig(BaseSettings):
    """
    Configuration for connecting to Volcengine VikingDB.
    Refer to the following documentation for details on obtaining credentials:
    https://www.volcengine.com/docs/6291/65568
    """

    VIKINGDB_ACCESS_KEY: Optional[str] = Field(
        # Fixed: the concatenated description pieces previously ran together with no
        # separating spaces ("...authentication.Refer...credentials:https://...").
        description="The Access Key provided by Volcengine VikingDB for API authentication. "
        "Refer to the following documentation for details on obtaining credentials: "
        "https://www.volcengine.com/docs/6291/65568",
        default=None,
    )

    VIKINGDB_SECRET_KEY: Optional[str] = Field(
        description="The Secret Key provided by Volcengine VikingDB for API authentication.",
        default=None,
    )

    VIKINGDB_REGION: str = Field(
        description="The region of the Volcengine VikingDB service.(e.g., 'cn-shanghai', 'cn-beijing').",
        default="cn-shanghai",
    )

    VIKINGDB_HOST: str = Field(
        # Fixed: a backslash line-continuation inside the string literal embedded the
        # next line's indentation whitespace into the description text.
        description="The host of the Volcengine VikingDB service.(e.g., 'api-vikingdb.volces.com', "
        "'api-vikingdb.mlp.cn-shanghai.volces.com')",
        default="api-vikingdb.mlp.cn-shanghai.volces.com",
    )

    VIKINGDB_SCHEME: str = Field(
        description="The scheme of the Volcengine VikingDB service.(e.g., 'http', 'https').",
        default="http",
    )

    VIKINGDB_CONNECTION_TIMEOUT: int = Field(
        description="The connection timeout of the Volcengine VikingDB service.",
        default=30,
    )

    VIKINGDB_SOCKET_TIMEOUT: int = Field(
        description="The socket timeout of the Volcengine VikingDB service.",
        default=30,
    )
api/configs/middleware/vdb/weaviate_config.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class WeaviateConfig(BaseSettings):
    """Connection and batching settings for the Weaviate vector database."""

    # Service endpoint; unset means Weaviate is not configured.
    WEAVIATE_ENDPOINT: Optional[str] = Field(
        default=None,
        description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
    )
    WEAVIATE_API_KEY: Optional[str] = Field(
        default=None, description="API key for authenticating with the Weaviate server"
    )
    # Transport selection between gRPC and HTTP.
    WEAVIATE_GRPC_ENABLED: bool = Field(
        default=True, description="Whether to enable gRPC for Weaviate connection (True for gRPC, False for HTTP)"
    )
    # Upper bound on objects per batched write.
    WEAVIATE_BATCH_SIZE: PositiveInt = Field(
        default=100, description="Number of objects to be processed in a single batch operation (default is 100)"
    )
api/configs/packaging/__init__.py
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from pydantic import Field
from pydantic_settings import BaseSettings


class PackagingInfo(BaseSettings):
    """Version and build metadata stamped into the application at packaging time."""

    # Released application version string.
    CURRENT_VERSION: str = Field(default="0.15.3", description="Dify version")
    # Populated by CI with the commit the artifact was built from; empty for local builds.
    COMMIT_SHA: str = Field(default="", description="SHA-1 checksum of the git commit used to build the app")
api/configs/remote_settings_sources/__init__.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Optional

from pydantic import Field

from .apollo import ApolloSettingsSourceInfo
from .base import RemoteSettingsSource
from .enums import RemoteSettingsSourceName


class RemoteSettingsSourceConfig(ApolloSettingsSourceInfo):
    """Aggregated configuration for all supported remote settings sources."""

    # Selects which remote source to use; empty string disables remote settings.
    REMOTE_SETTINGS_SOURCE_NAME: RemoteSettingsSourceName | str = Field(
        default="", description="name of remote config source"
    )


__all__ = ["RemoteSettingsSource", "RemoteSettingsSourceConfig", "RemoteSettingsSourceName"]
api/configs/remote_settings_sources/apollo/__init__.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from collections.abc import Mapping
from typing import Any, Optional

from pydantic import Field
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings

from configs.remote_settings_sources.base import RemoteSettingsSource

from .client import ApolloClient


class ApolloSettingsSourceInfo(BaseSettings):
    """
    Connection information for the Apollo remote settings source
    """
    # NOTE: the original docstring read "Packaging build information" — copy-pasted
    # from PackagingInfo; this class describes Apollo connection settings.

    APOLLO_APP_ID: Optional[str] = Field(
        description="apollo app_id",
        default=None,
    )

    APOLLO_CLUSTER: Optional[str] = Field(
        description="apollo cluster",
        default=None,
    )

    APOLLO_CONFIG_URL: Optional[str] = Field(
        description="apollo config url",
        default=None,
    )

    APOLLO_NAMESPACE: Optional[str] = Field(
        description="apollo namespace",
        default=None,
    )
class ApolloSettingsSource(RemoteSettingsSource):
    """Remote settings source that reads configuration values from an Apollo service."""

    def __init__(self, configs: Mapping[str, Any]):
        # Fetch the whole namespace once at construction; hot updates are disabled.
        namespace = configs["APOLLO_NAMESPACE"]
        self.client = ApolloClient(
            app_id=configs["APOLLO_APP_ID"],
            cluster=configs["APOLLO_CLUSTER"],
            config_url=configs["APOLLO_CONFIG_URL"],
            start_hot_update=False,
            _notification_map={namespace: -1},
        )
        self.namespace = namespace
        self.remote_configs = self.client.get_all_dicts(namespace)

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        """Look up *field_name* in the cached remote configs; the trailing False
        tells the settings machinery not to treat the value as complex/JSON."""
        if not isinstance(self.remote_configs, dict):
            raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
        return self.remote_configs.get(field_name), field_name, False