Deploy Open WebUI
This view is limited to 50 files because it contains too many changes.
- .gitattributes +19 -0
- Dockerfile +201 -10
- README.md +1 -4
- backend/.dockerignore +14 -0
- backend/.gitignore +12 -0
- backend/dev.sh +3 -0
- backend/open_webui/__init__.py +96 -0
- backend/open_webui/alembic.ini +114 -0
- backend/open_webui/config.py +0 -0
- backend/open_webui/constants.py +119 -0
- backend/open_webui/env.py +1058 -0
- backend/open_webui/functions.py +348 -0
- backend/open_webui/internal/db.py +409 -0
- backend/open_webui/internal/migrations/001_initial_schema.py +253 -0
- backend/open_webui/internal/migrations/002_add_local_sharing.py +45 -0
- backend/open_webui/internal/migrations/003_add_auth_api_key.py +45 -0
- backend/open_webui/internal/migrations/004_add_archived.py +45 -0
- backend/open_webui/internal/migrations/005_add_updated_at.py +125 -0
- backend/open_webui/internal/migrations/006_migrate_timestamps_and_charfields.py +129 -0
- backend/open_webui/internal/migrations/007_add_user_last_active_at.py +78 -0
- backend/open_webui/internal/migrations/008_add_memory.py +52 -0
- backend/open_webui/internal/migrations/009_add_models.py +60 -0
- backend/open_webui/internal/migrations/010_migrate_modelfiles_to_models.py +130 -0
- backend/open_webui/internal/migrations/011_add_user_settings.py +47 -0
- backend/open_webui/internal/migrations/012_add_tools.py +60 -0
- backend/open_webui/internal/migrations/013_add_user_info.py +47 -0
- backend/open_webui/internal/migrations/014_add_files.py +54 -0
- backend/open_webui/internal/migrations/015_add_functions.py +60 -0
- backend/open_webui/internal/migrations/016_add_valves_and_is_active.py +49 -0
- backend/open_webui/internal/migrations/017_add_user_oauth_sub.py +44 -0
- backend/open_webui/internal/migrations/018_add_function_is_global.py +48 -0
- backend/open_webui/internal/wrappers.py +84 -0
- backend/open_webui/main.py +0 -0
- backend/open_webui/migrations/README +4 -0
- backend/open_webui/migrations/env.py +120 -0
- backend/open_webui/migrations/script.py.mako +27 -0
- backend/open_webui/migrations/util.py +15 -0
- backend/open_webui/migrations/versions/018012973d35_add_indexes.py +46 -0
- backend/open_webui/migrations/versions/1af9b942657b_migrate_tags.py +140 -0
- backend/open_webui/migrations/versions/242a2047eae0_update_chat_table.py +97 -0
- backend/open_webui/migrations/versions/2f1211949ecc_update_message_and_channel_member_table.py +94 -0
- backend/open_webui/migrations/versions/374d2f66af06_add_prompt_history_table.py +245 -0
- backend/open_webui/migrations/versions/3781e22d8b01_update_message_table.py +58 -0
- backend/open_webui/migrations/versions/37f288994c47_add_group_member_table.py +137 -0
- backend/open_webui/migrations/versions/38d63c18f30f_add_oauth_session_table.py +75 -0
- backend/open_webui/migrations/versions/3ab32c4b8f59_update_tags.py +78 -0
- backend/open_webui/migrations/versions/3af16a1c9fb6_update_user_table.py +32 -0
- backend/open_webui/migrations/versions/3e0e00844bb0_add_knowledge_file_table.py +161 -0
- backend/open_webui/migrations/versions/4ace53fd72c8_update_folder_table_datetime.py +67 -0
- backend/open_webui/migrations/versions/56359461a091_add_calendar_tables.py +83 -0
.gitattributes
CHANGED
@@ -33,3 +33,22 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSans-Bold.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSans-Italic.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSans-Regular.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSans-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansJP-Regular.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansJP-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansKR-Regular.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansKR-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansSC-Regular.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/NotoSansSC-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+backend/open_webui/static/fonts/Twemoji.ttf filter=lfs diff=lfs merge=lfs -text
+static/assets/fonts/Archivo-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+static/assets/fonts/Inter-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+static/assets/fonts/Mona-Sans.woff2 filter=lfs diff=lfs merge=lfs -text
+static/assets/fonts/Vazirmatn-Variable.ttf filter=lfs diff=lfs merge=lfs -text
+static/assets/images/adam.jpg filter=lfs diff=lfs merge=lfs -text
+static/assets/images/earth.jpg filter=lfs diff=lfs merge=lfs -text
+static/assets/images/galaxy.jpg filter=lfs diff=lfs merge=lfs -text
+static/assets/images/space.jpg filter=lfs diff=lfs merge=lfs -text
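Each attribute line above is the form that `git lfs track` writes into .gitattributes. As a hedged sketch (the glob patterns are illustrative assumptions; the diff tracks each file individually), entries like these could be produced with:

import subprocess

# Hypothetical patterns; 'git lfs track' appends matching filter lines
# to .gitattributes, as seen in the diff above.
for pattern in [
    'backend/open_webui/static/fonts/*.ttf',
    'static/assets/fonts/*',
    'static/assets/images/*.jpg',
]:
    subprocess.run(['git', 'lfs', 'track', pattern], check=True)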
Dockerfile
CHANGED
@@ -1,17 +1,208 @@
# syntax=docker/dockerfile:1
# Initialize device type args
# use build args in the docker build command with --build-arg="BUILDARG=true"
ARG USE_CUDA=false
ARG USE_OLLAMA=false
ARG USE_SLIM=false
ARG USE_PERMISSION_HARDENING=false
# Tested with cu117 for CUDA 11 and cu121 for CUDA 12 (default)
ARG USE_CUDA_VER=cu128
# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers
# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard
# for better performance and multilingual support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB)
# IMPORTANT: If you change the embedding model (default: sentence-transformers/all-MiniLM-L6-v2), RAG Chat cannot use documents embedded with the previous model; you need to re-embed them.
ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
ARG USE_RERANKING_MODEL=""
ARG USE_AUXILIARY_EMBEDDING_MODEL=TaylorAI/bge-micro-v2

# Tiktoken encoding name; models to use can be found at https://huggingface.co/models?library=tiktoken
ARG USE_TIKTOKEN_ENCODING_NAME="cl100k_base"

ARG BUILD_HASH=dev-build
# Override at your own risk - non-root configurations are untested
ARG UID=0
ARG GID=0

######## WebUI frontend ########
FROM --platform=$BUILDPLATFORM node:22-alpine3.20 AS build
ARG BUILD_HASH

# Set Node.js options (heap limit: "Allocation failed - JavaScript heap out of memory")
# ENV NODE_OPTIONS="--max-old-space-size=4096"

WORKDIR /app

# to store git revision in build
RUN apk add --no-cache git

COPY package.json package-lock.json ./
RUN npm ci --force

COPY . .
ENV APP_BUILD_HASH=${BUILD_HASH}
RUN npm run build

######## WebUI backend ########
FROM python:3.11.14-slim-bookworm AS base

# Use args
ARG USE_CUDA
ARG USE_OLLAMA
ARG USE_CUDA_VER
ARG USE_SLIM
ARG USE_PERMISSION_HARDENING
ARG USE_EMBEDDING_MODEL
ARG USE_RERANKING_MODEL
ARG USE_AUXILIARY_EMBEDDING_MODEL
ARG UID
ARG GID

# Python settings
ENV PYTHONUNBUFFERED=1

## Basis ##
ENV ENV=prod \
    PORT=8080 \
    # pass build args to the build
    USE_OLLAMA_DOCKER=${USE_OLLAMA} \
    USE_CUDA_DOCKER=${USE_CUDA} \
    USE_SLIM_DOCKER=${USE_SLIM} \
    USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
    USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} \
    USE_AUXILIARY_EMBEDDING_MODEL_DOCKER=${USE_AUXILIARY_EMBEDDING_MODEL}

## Basis URL Config ##
ENV OLLAMA_BASE_URL="/ollama" \
    OPENAI_API_BASE_URL=""

## API Key and Security Config ##
ENV OPENAI_API_KEY="" \
    WEBUI_SECRET_KEY="" \
    SCARF_NO_ANALYTICS=true \
    DO_NOT_TRACK=true \
    ANONYMIZED_TELEMETRY=false

#### Other models #########################################################
## whisper TTS model settings ##
ENV WHISPER_MODEL="base" \
    WHISPER_MODEL_DIR="/app/backend/data/cache/whisper/models"

## RAG Embedding model settings ##
ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \
    RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \
    AUXILIARY_EMBEDDING_MODEL="$USE_AUXILIARY_EMBEDDING_MODEL_DOCKER" \
    SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models"

## Tiktoken model settings ##
ENV TIKTOKEN_ENCODING_NAME="cl100k_base" \
    TIKTOKEN_CACHE_DIR="/app/backend/data/cache/tiktoken"

## Hugging Face download cache ##
ENV HF_HOME="/app/backend/data/cache/embedding/models"

## Torch Extensions ##
# ENV TORCH_EXTENSIONS_DIR="/.cache/torch_extensions"

#### Other models ##########################################################

WORKDIR /app/backend

ENV HOME=/root
# Create user and group if not root
RUN if [ $UID -ne 0 ]; then \
    if [ $GID -ne 0 ]; then \
    addgroup --gid $GID app; \
    fi; \
    adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \
    fi

RUN mkdir -p $HOME/.cache/chroma
RUN echo -n 00000000-0000-0000-0000-000000000000 > $HOME/.cache/chroma/telemetry_user_id

# Make sure the user has access to the app and root directory
RUN chown -R $UID:$GID /app $HOME

# Install common system dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    git build-essential pandoc gcc netcat-openbsd curl jq \
    libmariadb-dev \
    python3-dev \
    ffmpeg libsm6 libxext6 zstd \
    && rm -rf /var/lib/apt/lists/*

# install python dependencies
COPY --chown=$UID:$GID ./backend/requirements.txt ./requirements.txt

# Set UV_LINK_MODE to copy to prevent 0-byte file corruption in QEMU arm64 cross-builds
ENV UV_LINK_MODE=copy

RUN set -e; \
    pip3 install --no-cache-dir uv; \
    if [ "$USE_CUDA" = "true" ]; then \
    # If you use CUDA the whisper and embedding model will be downloaded on first use
    # fix: pin torch<=2.9.1 - torch 2.10.0 aarch64 wheels cause SIGILL on ARM devices (RPi 4 Cortex-A72) #21349
    pip3 install 'torch<=2.9.1' torchvision torchaudio --index-url https://download.pytorch.org/whl/$USE_CUDA_DOCKER_VER --no-cache-dir; \
    uv pip install --system -r requirements.txt --no-cache-dir; \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')"; \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ.get('AUXILIARY_EMBEDDING_MODEL', 'TaylorAI/bge-micro-v2'), device='cpu')"; \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \
    python -c "import nltk; nltk.download('punkt_tab')"; \
    else \
    pip3 install 'torch<=2.9.1' torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir; \
    uv pip install --system -r requirements.txt --no-cache-dir; \
    if [ "$USE_SLIM" != "true" ]; then \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')"; \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ.get('AUXILIARY_EMBEDDING_MODEL', 'TaylorAI/bge-micro-v2'), device='cpu')"; \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \
    python -c "import nltk; nltk.download('punkt_tab')"; \
    fi; \
    fi; \
    mkdir -p /app/backend/data; chown -R $UID:$GID /app/backend/data/; \
    rm -rf /var/lib/apt/lists/*;

# Install Ollama if requested
RUN if [ "$USE_OLLAMA" = "true" ]; then \
    date +%s > /tmp/ollama_build_hash && \
    echo "Cache broken at timestamp: `cat /tmp/ollama_build_hash`" && \
    curl -fsSL https://ollama.com/install.sh | sh && \
    rm -rf /var/lib/apt/lists/*; \
    fi

# copy embedding weight from build
# RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
# COPY --from=build /app/onnx /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2/onnx

# copy built frontend files
COPY --chown=$UID:$GID --from=build /app/build /app/build
COPY --chown=$UID:$GID --from=build /app/CHANGELOG.md /app/CHANGELOG.md
COPY --chown=$UID:$GID --from=build /app/package.json /app/package.json

# copy backend files
COPY --chown=$UID:$GID ./backend .

EXPOSE 8080

HEALTHCHECK CMD curl --silent --fail http://localhost:${PORT:-8080}/health | jq -ne 'input.status == true' || exit 1

# Minimal, atomic permission hardening for OpenShift (arbitrary UID):
# - Group 0 owns /app and /root
# - Directories are group-writable and have SGID so new files inherit GID 0
RUN if [ "$USE_PERMISSION_HARDENING" = "true" ]; then \
    set -eux; \
    chgrp -R 0 /app /root || true; \
    chmod -R g+rwX /app /root || true; \
    find /app -type d -exec chmod g+s {} + || true; \
    find /root -type d -exec chmod g+s {} + || true; \
    fi

USER $UID:$GID

ARG BUILD_HASH
ENV WEBUI_BUILD_VERSION=${BUILD_HASH}
ENV DOCKER=true

CMD [ "bash", "start.sh"]
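The ARG block at the top of this Dockerfile is meant to be driven with --build-arg flags, as its own comments note. A minimal sketch of one such build invocation, wrapped in Python's subprocess (the image tag and argument values are illustrative assumptions, not values used by this Space):

import subprocess

# Build a CUDA-enabled variant; each --build-arg matches an ARG declared above.
subprocess.run(
    [
        'docker', 'build',
        '--build-arg', 'USE_CUDA=true',
        '--build-arg', 'USE_CUDA_VER=cu128',
        '--build-arg', 'USE_EMBEDDING_MODEL=intfloat/multilingual-e5-base',
        '-t', 'open-webui:cuda',  # hypothetical tag
        '.',
    ],
    check=True,
)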
README.md
CHANGED
@@ -1,4 +1,4 @@
 title: Open Webui
 emoji: ♥︎
 colorFrom: green
@@ -6,6 +6,3 @@ colorTo: blue
 sdk: docker
 pinned: false
 license: mit
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
backend/.dockerignore
ADDED
@@ -0,0 +1,14 @@
__pycache__
.env
_old
uploads
.ipynb_checkpoints
*.db
_test
!/data
/data/*
!/data/litellm
/data/litellm/*
!data/litellm/config.yaml

!data/config.json
backend/.gitignore
ADDED
@@ -0,0 +1,12 @@
__pycache__
.env
_old
uploads
.ipynb_checkpoints
*.db
_test
Pipfile
!/data
/data/*
/open_webui/data/*
.webui_secret_key
backend/dev.sh
ADDED
@@ -0,0 +1,3 @@
export CORS_ALLOW_ORIGIN="http://localhost:5173;http://localhost:8080"
PORT="${PORT:-8080}"
uvicorn open_webui.main:app --port $PORT --host 0.0.0.0 --forwarded-allow-ips "${FORWARDED_ALLOW_IPS:-*}" --reload
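For comparison, a hedged Python equivalent of dev.sh using uvicorn's programmatic API (same defaults as the script; assumes the open_webui package is importable):

import os
import uvicorn

# Mirrors dev.sh: permissive dev CORS origins, PORT fallback to 8080, auto-reload.
os.environ.setdefault('CORS_ALLOW_ORIGIN', 'http://localhost:5173;http://localhost:8080')

uvicorn.run(
    'open_webui.main:app',
    host='0.0.0.0',
    port=int(os.environ.get('PORT', 8080)),
    forwarded_allow_ips=os.environ.get('FORWARDED_ALLOW_IPS', '*'),
    reload=True,
)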
backend/open_webui/__init__.py
ADDED
@@ -0,0 +1,96 @@
import base64
import os
import random
from pathlib import Path
from typing import Annotated

import typer
import uvicorn

app = typer.Typer()

KEY_FILE = Path.cwd() / '.webui_secret_key'


def version_callback(value: bool) -> None:
    if value:
        from open_webui.env import VERSION

        typer.echo(f'Open WebUI version: {VERSION}')
        raise typer.Exit()


@app.command()
def main(
    version: Annotated[bool | None, typer.Option('--version', callback=version_callback)] = None,
):
    pass


@app.command()
def serve(
    host: str = '0.0.0.0',
    port: int = 8080,
):
    os.environ['FROM_INIT_PY'] = 'true'
    if os.getenv('WEBUI_SECRET_KEY') is None:
        typer.echo('Loading WEBUI_SECRET_KEY from file, not provided as an environment variable.')
        if not KEY_FILE.exists():
            typer.echo(f'Generating a new secret key and saving it to {KEY_FILE}')
            KEY_FILE.write_bytes(base64.b64encode(random.randbytes(12)))
        typer.echo(f'Loading WEBUI_SECRET_KEY from {KEY_FILE}')
        os.environ['WEBUI_SECRET_KEY'] = KEY_FILE.read_text()

    if os.getenv('USE_CUDA_DOCKER', 'false') == 'true':
        typer.echo('CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries.')
        LD_LIBRARY_PATH = os.getenv('LD_LIBRARY_PATH', '').split(':')
        os.environ['LD_LIBRARY_PATH'] = ':'.join(
            LD_LIBRARY_PATH
            + [
                '/usr/local/lib/python3.11/site-packages/torch/lib',
                '/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib',
            ]
        )
        try:
            import torch

            assert torch.cuda.is_available(), 'CUDA not available'
            typer.echo('CUDA seems to be working')
        except Exception as e:
            typer.echo(
                'Error when testing CUDA but USE_CUDA_DOCKER is true. '
                'Resetting USE_CUDA_DOCKER to false and removing '
                f'LD_LIBRARY_PATH modifications: {e}'
            )
            os.environ['USE_CUDA_DOCKER'] = 'false'
            os.environ['LD_LIBRARY_PATH'] = ':'.join(LD_LIBRARY_PATH)

    import open_webui.main  # noqa: F401
    from open_webui.env import UVICORN_WORKERS  # Import the workers setting

    uvicorn.run(
        'open_webui.main:app',
        host=host,
        port=port,
        forwarded_allow_ips='*',
        workers=UVICORN_WORKERS,
    )


@app.command()
def dev(
    host: str = '0.0.0.0',
    port: int = 8080,
    reload: bool = True,
):
    uvicorn.run(
        'open_webui.main:app',
        host=host,
        port=port,
        reload=reload,
        forwarded_allow_ips='*',
    )


if __name__ == '__main__':
    app()
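Since the module exposes a Typer app with main, serve, and dev commands, it can be exercised without starting a server. A minimal sketch using Typer's bundled test runner (assumes the package is installed and importable as open_webui):

from typer.testing import CliRunner

from open_webui import app

runner = CliRunner()

# 'main --version' fires version_callback, which echoes the version and exits 0.
result = runner.invoke(app, ['main', '--version'])
print(result.exit_code, result.output)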
backend/open_webui/alembic.ini
ADDED
@@ -0,0 +1,114 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = ..

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = REPLACE_WITH_DATABASE_URL


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
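This configuration is normally consumed by Open WebUI's own startup migration run, but the revisions under migrations/versions/ can also be applied by hand. A hedged sketch via Alembic's Python API (the paths and database URL are assumptions for a local checkout):

from alembic import command
from alembic.config import Config

cfg = Config('backend/open_webui/alembic.ini')
cfg.set_main_option('script_location', 'backend/open_webui/migrations')
cfg.set_main_option('sqlalchemy.url', 'sqlite:///backend/data/webui.db')

command.upgrade(cfg, 'head')  # apply every revision up to the latest head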
backend/open_webui/config.py
ADDED
The diff for this file is too large to render; see the raw diff.
backend/open_webui/constants.py
ADDED
@@ -0,0 +1,119 @@
from enum import Enum


class MESSAGES(str, Enum):
    DEFAULT = lambda msg='': f'{msg if msg else ""}'
    MODEL_ADDED = lambda model='': f"The model '{model}' has been added successfully."
    MODEL_DELETED = lambda model='': f"The model '{model}' has been deleted successfully."


class WEBHOOK_MESSAGES(str, Enum):
    DEFAULT = lambda msg='': f'{msg if msg else ""}'
    USER_SIGNUP = lambda username='': f'New user signed up: {username}' if username else 'New user signed up'


class ERROR_MESSAGES(str, Enum):
    def __str__(self) -> str:
        return super().__str__()

    DEFAULT = lambda err='': f'{"Something went wrong :/" if err == "" else "[ERROR: " + str(err) + "]"}'
    ENV_VAR_NOT_FOUND = 'Required environment variable not found. Terminating now.'
    CREATE_USER_ERROR = 'Oops! Something went wrong while creating your account. Please try again later. If the issue persists, contact support for assistance.'
    DELETE_USER_ERROR = 'Oops! Something went wrong. We encountered an issue while trying to delete the user. Please give it another shot.'
    EMAIL_MISMATCH = 'Uh-oh! This email does not match the email your provider is registered with. Please check your email and try again.'
    EMAIL_TAKEN = 'Uh-oh! This email is already registered. Sign in with your existing account or choose another email to start anew.'
    USERNAME_TAKEN = 'Uh-oh! This username is already registered. Please choose another username.'
    PASSWORD_TOO_LONG = (
        'Uh-oh! The password you entered is too long. Please make sure your password is less than 72 bytes long.'
    )
    COMMAND_TAKEN = 'Uh-oh! This command is already registered. Please choose another command string.'
    FILE_EXISTS = 'Uh-oh! This file is already registered. Please choose another file.'

    ID_TAKEN = 'Uh-oh! This id is already registered. Please choose another id string.'
    MODEL_ID_TAKEN = 'Uh-oh! This model id is already registered. Please choose another model id string.'
    NAME_TAG_TAKEN = 'Uh-oh! This name tag is already registered. Please choose another name tag string.'
    MODEL_ID_TOO_LONG = 'The model id is too long. Please make sure your model id is less than 256 characters long.'

    INVALID_TOKEN = 'Your session has expired or the token is invalid. Please sign in again.'
    INVALID_CRED = 'The email or password provided is incorrect. Please check for typos and try logging in again.'
    INVALID_EMAIL_FORMAT = "The email format you entered is invalid. Please double-check and make sure you're using a valid email address (e.g., yourname@example.com)."
    INCORRECT_PASSWORD = 'The password provided is incorrect. Please check for typos and try again.'
    INVALID_TRUSTED_HEADER = (
        'Your provider has not provided a trusted header. Please contact your administrator for assistance.'
    )

    EXISTING_USERS = "You can't turn off authentication because there are existing users. If you want to disable WEBUI_AUTH, make sure your web interface doesn't have any existing users and is a fresh installation."

    UNAUTHORIZED = '401 Unauthorized'
    ACCESS_PROHIBITED = (
        'You do not have permission to access this resource. Please contact your administrator for assistance.'
    )
    ACTION_PROHIBITED = 'The requested action has been restricted as a security measure.'

    FILE_NOT_SENT = 'FILE_NOT_SENT'
    FILE_NOT_SUPPORTED = "Oops! It seems like the file format you're trying to upload is not supported. Please upload a file with a supported format and try again."

    NOT_FOUND = "We could not find what you're looking for :/"
    USER_NOT_FOUND = "We could not find what you're looking for :/"
    API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature."
    API_KEY_NOT_ALLOWED = 'Use of API key is not enabled in the environment.'

    MALICIOUS = 'Unusual activities detected, please try again in a few minutes.'

    PANDOC_NOT_INSTALLED = 'Pandoc is not installed on the server. Please contact your administrator for assistance.'
    INCORRECT_FORMAT = lambda err='': f'Invalid format. Please use the correct format{err}'
    RATE_LIMIT_EXCEEDED = 'API rate limit exceeded'

    MODEL_NOT_FOUND = lambda name='': f"Model '{name}' was not found"
    OPENAI_NOT_FOUND = lambda name='': 'OpenAI API was not found'
    OLLAMA_NOT_FOUND = 'WebUI could not connect to Ollama'
    CREATE_API_KEY_ERROR = 'Oops! Something went wrong while creating your API key. Please try again later. If the issue persists, contact support for assistance.'
    API_KEY_CREATION_NOT_ALLOWED = 'API key creation is not allowed in the environment.'

    EMPTY_CONTENT = 'The content provided is empty. Please ensure that there is text or data present before proceeding.'

    DB_NOT_SQLITE = 'This feature is only available when running with SQLite databases.'

    INVALID_URL = 'Oops! The URL you provided is invalid. Please double-check and try again.'

    WEB_SEARCH_ERROR = lambda err='': f'{err if err else "Oops! Something went wrong while searching the web."}'

    OLLAMA_API_DISABLED = 'The Ollama API is disabled. Please enable it to use this feature.'

    FILE_TOO_LARGE = lambda size='': (
        f"Oops! The file you're trying to upload is too large. Please upload a file that is less than {size}."
    )

    DUPLICATE_CONTENT = 'Duplicate content detected. Please provide unique content to proceed.'
    FILE_NOT_PROCESSED = (
        'Extracted content is not available for this file. Please ensure that the file is processed before proceeding.'
    )

    INVALID_PASSWORD = lambda err='': err if err else 'The password does not meet the required validation criteria.'

    AUTOMATION_LIMIT_EXCEEDED = lambda size='': f'Automation limit reached ({size})'
    AUTOMATION_TOO_FREQUENT = lambda interval='': f'Schedule too frequent. Minimum interval is {interval} seconds.'
    AUTOMATION_INVALID_RRULE = lambda err='': f'Invalid RRULE: {err}'
    AUTOMATION_NO_FUTURE_RUNS = 'RRULE has no future occurrences'

    FEATURE_DISABLED = lambda name='': f'{name} is disabled'
    INPUT_TOO_LONG = lambda size='': f'Input prompt exceeds maximum length of {size}'
    SERVER_CONNECTION_ERROR = 'Open WebUI: Server Connection Error'
    REQUIRED_FIELD_EMPTY = lambda name='': f'Required field {name} is empty'
    OAUTH_NOT_CONFIGURED = lambda name='': f"Provider '{name}' is not configured"


class TASKS(str, Enum):
    def __str__(self) -> str:
        return super().__str__()

    DEFAULT = lambda task='': f'{task if task else "generation"}'
    TITLE_GENERATION = 'title_generation'
    FOLLOW_UP_GENERATION = 'follow_up_generation'
    TAGS_GENERATION = 'tags_generation'
    EMOJI_GENERATION = 'emoji_generation'
    QUERY_GENERATION = 'query_generation'
    IMAGE_PROMPT_GENERATION = 'image_prompt_generation'
    AUTOCOMPLETE_GENERATION = 'autocomplete_generation'
    FUNCTION_CALLING = 'function_calling'
    MOA_RESPONSE_GENERATION = 'moa_response_generation'
backend/open_webui/env.py
ADDED
|
@@ -0,0 +1,1058 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.metadata
|
| 2 |
+
import json
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import pkgutil
|
| 6 |
+
import sys
|
| 7 |
+
import shutil
|
| 8 |
+
import traceback
|
| 9 |
+
from datetime import datetime, timezone
|
| 10 |
+
from typing import Any
|
| 11 |
+
from uuid import uuid4
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from cryptography.hazmat.primitives import serialization
|
| 14 |
+
import re
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
import markdown
|
| 18 |
+
from bs4 import BeautifulSoup
|
| 19 |
+
from open_webui.constants import ERROR_MESSAGES
|
| 20 |
+
|
| 21 |
+
####################################
|
| 22 |
+
# Load .env file
|
| 23 |
+
####################################
|
| 24 |
+
|
| 25 |
+
# Use .resolve() to get the canonical path, removing any '..' or '.' components
|
| 26 |
+
ENV_FILE_PATH = Path(__file__).resolve()
|
| 27 |
+
|
| 28 |
+
# OPEN_WEBUI_DIR should be the directory where env.py resides (open_webui/)
|
| 29 |
+
OPEN_WEBUI_DIR = ENV_FILE_PATH.parent
|
| 30 |
+
|
| 31 |
+
# BACKEND_DIR is the parent of OPEN_WEBUI_DIR (backend/)
|
| 32 |
+
BACKEND_DIR = OPEN_WEBUI_DIR.parent
|
| 33 |
+
|
| 34 |
+
# BASE_DIR is the parent of BACKEND_DIR (open-webui-dev/)
|
| 35 |
+
BASE_DIR = BACKEND_DIR.parent
|
| 36 |
+
|
| 37 |
+
try:
|
| 38 |
+
from dotenv import find_dotenv, load_dotenv
|
| 39 |
+
|
| 40 |
+
load_dotenv(find_dotenv(str(BASE_DIR / '.env')))
|
| 41 |
+
except ImportError:
|
| 42 |
+
print('dotenv not installed, skipping...')
|
| 43 |
+
|
| 44 |
+
DOCKER = os.environ.get('DOCKER', 'False').lower() == 'true'
|
| 45 |
+
|
| 46 |
+
# device type embedding models - "cpu" (default), "cuda" (nvidia gpu required) or "mps" (apple silicon) - choosing this right can lead to better performance
|
| 47 |
+
USE_CUDA = os.environ.get('USE_CUDA_DOCKER', 'false')
|
| 48 |
+
|
| 49 |
+
if USE_CUDA.lower() == 'true':
|
| 50 |
+
try:
|
| 51 |
+
import torch
|
| 52 |
+
|
| 53 |
+
assert torch.cuda.is_available(), 'CUDA not available'
|
| 54 |
+
DEVICE_TYPE = 'cuda'
|
| 55 |
+
except Exception as e:
|
| 56 |
+
cuda_error = f'Error when testing CUDA but USE_CUDA_DOCKER is true. Resetting USE_CUDA_DOCKER to false: {e}'
|
| 57 |
+
os.environ['USE_CUDA_DOCKER'] = 'false'
|
| 58 |
+
USE_CUDA = 'false'
|
| 59 |
+
DEVICE_TYPE = 'cpu'
|
| 60 |
+
else:
|
| 61 |
+
DEVICE_TYPE = 'cpu'
|
| 62 |
+
|
| 63 |
+
if sys.platform == 'darwin':
|
| 64 |
+
try:
|
| 65 |
+
import torch
|
| 66 |
+
|
| 67 |
+
if torch.backends.mps.is_available() and torch.backends.mps.is_built():
|
| 68 |
+
DEVICE_TYPE = 'mps'
|
| 69 |
+
except Exception:
|
| 70 |
+
pass
|
| 71 |
+
|
| 72 |
+
####################################
|
| 73 |
+
# LOGGING
|
| 74 |
+
####################################
|
| 75 |
+
|
| 76 |
+
_LEVEL_MAP = {
|
| 77 |
+
'DEBUG': 'debug',
|
| 78 |
+
'INFO': 'info',
|
| 79 |
+
'WARNING': 'warn',
|
| 80 |
+
'ERROR': 'error',
|
| 81 |
+
'CRITICAL': 'fatal',
|
| 82 |
+
}
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class JSONFormatter(logging.Formatter):
|
| 86 |
+
"""Format log records as single-line JSON objects for structured logging."""
|
| 87 |
+
|
| 88 |
+
def format(self, record: logging.LogRecord) -> str:
|
| 89 |
+
log_entry: dict[str, Any] = {
|
| 90 |
+
'ts': datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(timespec='milliseconds'),
|
| 91 |
+
'level': _LEVEL_MAP.get(record.levelname, record.levelname.lower()),
|
| 92 |
+
'msg': record.getMessage(),
|
| 93 |
+
'caller': record.name,
|
| 94 |
+
}
|
| 95 |
+
|
| 96 |
+
if record.exc_info and record.exc_info[0] is not None:
|
| 97 |
+
log_entry['error'] = ''.join(traceback.format_exception(*record.exc_info)).rstrip()
|
| 98 |
+
elif record.exc_text:
|
| 99 |
+
log_entry['error'] = record.exc_text
|
| 100 |
+
|
| 101 |
+
if record.stack_info:
|
| 102 |
+
log_entry['stacktrace'] = record.stack_info
|
| 103 |
+
|
| 104 |
+
return json.dumps(log_entry, ensure_ascii=False, default=str)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
LOG_FORMAT = os.environ.get('LOG_FORMAT', '').lower()
|
| 108 |
+
|
| 109 |
+
GLOBAL_LOG_LEVEL = os.environ.get('GLOBAL_LOG_LEVEL', '').upper()
|
| 110 |
+
if GLOBAL_LOG_LEVEL in logging.getLevelNamesMapping():
|
| 111 |
+
if LOG_FORMAT == 'json':
|
| 112 |
+
_handler = logging.StreamHandler(sys.stdout)
|
| 113 |
+
_handler.setFormatter(JSONFormatter())
|
| 114 |
+
logging.basicConfig(handlers=[_handler], level=GLOBAL_LOG_LEVEL, force=True)
|
| 115 |
+
else:
|
| 116 |
+
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
|
| 117 |
+
else:
|
| 118 |
+
GLOBAL_LOG_LEVEL = 'INFO'
|
| 119 |
+
|
| 120 |
+
log = logging.getLogger(__name__)
|
| 121 |
+
log.info(f'GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}')
|
| 122 |
+
|
| 123 |
+
if 'cuda_error' in locals():
|
| 124 |
+
log.exception(cuda_error)
|
| 125 |
+
del cuda_error
|
| 126 |
+
|
| 127 |
+
SRC_LOG_LEVELS = {} # Legacy variable, do not remove
|
| 128 |
+
|
| 129 |
+
WEBUI_NAME = os.environ.get('WEBUI_NAME', 'Open WebUI')
|
| 130 |
+
if WEBUI_NAME != 'Open WebUI':
|
| 131 |
+
WEBUI_NAME += ' (Open WebUI)'
|
| 132 |
+
|
| 133 |
+
WEBUI_FAVICON_URL = 'https://openwebui.com/favicon.png'
|
| 134 |
+
|
| 135 |
+
TRUSTED_SIGNATURE_KEY = os.environ.get('TRUSTED_SIGNATURE_KEY', '')
|
| 136 |
+
|
| 137 |
+
####################################
|
| 138 |
+
# ENV (dev,test,prod)
|
| 139 |
+
####################################
|
| 140 |
+
|
| 141 |
+
ENV = os.environ.get('ENV', 'dev')
|
| 142 |
+
|
| 143 |
+
FROM_INIT_PY = os.environ.get('FROM_INIT_PY', 'False').lower() == 'true'
|
| 144 |
+
|
| 145 |
+
if FROM_INIT_PY:
|
| 146 |
+
PACKAGE_DATA = {'version': importlib.metadata.version('open-webui')}
|
| 147 |
+
else:
|
| 148 |
+
try:
|
| 149 |
+
PACKAGE_DATA = json.loads((BASE_DIR / 'package.json').read_text())
|
| 150 |
+
except Exception:
|
| 151 |
+
PACKAGE_DATA = {'version': '0.0.0'}
|
| 152 |
+
|
| 153 |
+
VERSION = PACKAGE_DATA['version']
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
DEPLOYMENT_ID = os.environ.get('DEPLOYMENT_ID', '')
|
| 157 |
+
INSTANCE_ID = os.environ.get('INSTANCE_ID', str(uuid4()))
|
| 158 |
+
|
| 159 |
+
ENABLE_DB_MIGRATIONS = os.environ.get('ENABLE_DB_MIGRATIONS', 'True').lower() == 'true'
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
# Function to parse each section
|
| 163 |
+
def parse_section(section):
|
| 164 |
+
items = []
|
| 165 |
+
for li in section.find_all('li'):
|
| 166 |
+
# Extract raw HTML string
|
| 167 |
+
raw_html = str(li)
|
| 168 |
+
|
| 169 |
+
# Extract text without HTML tags
|
| 170 |
+
text = li.get_text(separator=' ', strip=True)
|
| 171 |
+
|
| 172 |
+
# Split into title and content
|
| 173 |
+
parts = text.split(': ', 1)
|
| 174 |
+
title = parts[0].strip() if len(parts) > 1 else ''
|
| 175 |
+
content = parts[1].strip() if len(parts) > 1 else text
|
| 176 |
+
|
| 177 |
+
items.append({'title': title, 'content': content, 'raw': raw_html})
|
| 178 |
+
return items
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
try:
|
| 182 |
+
changelog_path = BASE_DIR / 'CHANGELOG.md'
|
| 183 |
+
with open(str(changelog_path.absolute()), 'r', encoding='utf8') as file:
|
| 184 |
+
changelog_content = file.read()
|
| 185 |
+
|
| 186 |
+
except Exception:
|
| 187 |
+
changelog_content = (pkgutil.get_data('open_webui', 'CHANGELOG.md') or b'').decode()
|
| 188 |
+
|
| 189 |
+
# Convert markdown content to HTML
|
| 190 |
+
html_content = markdown.markdown(changelog_content)
|
| 191 |
+
|
| 192 |
+
# Parse the HTML content
|
| 193 |
+
soup = BeautifulSoup(html_content, 'html.parser')
|
| 194 |
+
|
| 195 |
+
# Initialize JSON structure
|
| 196 |
+
changelog_json = {}
|
| 197 |
+
|
| 198 |
+
# Iterate over each version
|
| 199 |
+
for version in soup.find_all('h2'):
|
| 200 |
+
version_number = version.get_text().strip().split(' - ')[0][1:-1] # Remove brackets
|
| 201 |
+
date = version.get_text().strip().split(' - ')[1]
|
| 202 |
+
|
| 203 |
+
version_data = {'date': date}
|
| 204 |
+
|
| 205 |
+
# Find the next sibling that is a h3 tag (section title)
|
| 206 |
+
current = version.find_next_sibling()
|
| 207 |
+
|
| 208 |
+
while current and current.name != 'h2':
|
| 209 |
+
if current.name == 'h3':
|
| 210 |
+
section_title = current.get_text().lower() # e.g., "added", "fixed"
|
| 211 |
+
section_items = parse_section(current.find_next_sibling('ul'))
|
| 212 |
+
version_data[section_title] = section_items
|
| 213 |
+
|
| 214 |
+
# Move to the next element
|
| 215 |
+
current = current.find_next_sibling()
|
| 216 |
+
|
| 217 |
+
changelog_json[version_number] = version_data
|
| 218 |
+
|
| 219 |
+
CHANGELOG = changelog_json
|
| 220 |
+
|
| 221 |
+
####################################
|
| 222 |
+
# SAFE_MODE
|
| 223 |
+
####################################
|
| 224 |
+
|
| 225 |
+
SAFE_MODE = os.environ.get('SAFE_MODE', 'false').lower() == 'true'
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
####################################
|
| 229 |
+
# ENABLE_FORWARD_USER_INFO_HEADERS
|
| 230 |
+
####################################
|
| 231 |
+
|
| 232 |
+
ENABLE_FORWARD_USER_INFO_HEADERS = os.environ.get('ENABLE_FORWARD_USER_INFO_HEADERS', 'False').lower() == 'true'
|
| 233 |
+
|
| 234 |
+
# Header names for user info forwarding (customizable via environment variables)
|
| 235 |
+
FORWARD_USER_INFO_HEADER_USER_NAME = os.environ.get('FORWARD_USER_INFO_HEADER_USER_NAME', 'X-OpenWebUI-User-Name')
|
| 236 |
+
FORWARD_USER_INFO_HEADER_USER_ID = os.environ.get('FORWARD_USER_INFO_HEADER_USER_ID', 'X-OpenWebUI-User-Id')
|
| 237 |
+
FORWARD_USER_INFO_HEADER_USER_EMAIL = os.environ.get('FORWARD_USER_INFO_HEADER_USER_EMAIL', 'X-OpenWebUI-User-Email')
|
| 238 |
+
FORWARD_USER_INFO_HEADER_USER_ROLE = os.environ.get('FORWARD_USER_INFO_HEADER_USER_ROLE', 'X-OpenWebUI-User-Role')
|
| 239 |
+
|
| 240 |
+
# Header name for chat ID forwarding (customizable via environment variable)
|
| 241 |
+
FORWARD_SESSION_INFO_HEADER_MESSAGE_ID = os.environ.get(
|
| 242 |
+
'FORWARD_SESSION_INFO_HEADER_MESSAGE_ID', 'X-OpenWebUI-Message-Id'
|
| 243 |
+
)
|
| 244 |
+
FORWARD_SESSION_INFO_HEADER_CHAT_ID = os.environ.get('FORWARD_SESSION_INFO_HEADER_CHAT_ID', 'X-OpenWebUI-Chat-Id')
|
| 245 |
+
|
| 246 |
+
# Experimental feature, may be removed in future
|
| 247 |
+
ENABLE_STAR_SESSIONS_MIDDLEWARE = os.environ.get('ENABLE_STAR_SESSIONS_MIDDLEWARE', 'False').lower() == 'true'
|
| 248 |
+
|
| 249 |
+
ENABLE_EASTER_EGGS = os.environ.get('ENABLE_EASTER_EGGS', 'True').lower() == 'true'
|
| 250 |
+
|
| 251 |
+
####################################
# WEBUI_BUILD_HASH
####################################

WEBUI_BUILD_HASH = os.environ.get('WEBUI_BUILD_HASH', 'dev-build')

####################################
# DATA/FRONTEND BUILD DIR
####################################

DATA_DIR = Path(os.getenv('DATA_DIR', BACKEND_DIR / 'data')).resolve()

if FROM_INIT_PY:
    NEW_DATA_DIR = Path(os.getenv('DATA_DIR', OPEN_WEBUI_DIR / 'data')).resolve()
    NEW_DATA_DIR.mkdir(parents=True, exist_ok=True)

    # Check if the data directory exists in the package directory
    if DATA_DIR.exists() and DATA_DIR != NEW_DATA_DIR:
        log.info(f'Moving {DATA_DIR} to {NEW_DATA_DIR}')
        for item in DATA_DIR.iterdir():
            dest = NEW_DATA_DIR / item.name
            if item.is_dir():
                shutil.copytree(item, dest, dirs_exist_ok=True)
            else:
                shutil.copy2(item, dest)

        # Zip the data directory
        shutil.make_archive(DATA_DIR.parent / 'open_webui_data', 'zip', DATA_DIR)

        # Remove the old data directory
        shutil.rmtree(DATA_DIR)

    DATA_DIR = Path(os.getenv('DATA_DIR', OPEN_WEBUI_DIR / 'data'))

STATIC_DIR = Path(os.getenv('STATIC_DIR', OPEN_WEBUI_DIR / 'static'))

FONTS_DIR = Path(os.getenv('FONTS_DIR', OPEN_WEBUI_DIR / 'static' / 'fonts'))

FRONTEND_BUILD_DIR = Path(os.getenv('FRONTEND_BUILD_DIR', BASE_DIR / 'build')).resolve()

if FROM_INIT_PY:
    FRONTEND_BUILD_DIR = Path(os.getenv('FRONTEND_BUILD_DIR', OPEN_WEBUI_DIR / 'frontend')).resolve()

####################################
# Database
####################################

# Migrate the legacy Ollama-WebUI database file if it exists
if os.path.exists(f'{DATA_DIR}/ollama.db'):
    os.rename(f'{DATA_DIR}/ollama.db', f'{DATA_DIR}/webui.db')
    log.info('Database migrated from Ollama-WebUI successfully.')

DATABASE_URL = os.environ.get('DATABASE_URL', f'sqlite:///{DATA_DIR}/webui.db')

DATABASE_TYPE = os.environ.get('DATABASE_TYPE')
DATABASE_USER = os.environ.get('DATABASE_USER')
DATABASE_PASSWORD = os.environ.get('DATABASE_PASSWORD')

DATABASE_CRED = ''
if DATABASE_USER:
    DATABASE_CRED += f'{DATABASE_USER}'
if DATABASE_PASSWORD:
    DATABASE_CRED += f':{DATABASE_PASSWORD}'

DB_VARS = {
    'db_type': DATABASE_TYPE,
    'db_cred': DATABASE_CRED,
    'db_host': os.environ.get('DATABASE_HOST'),
    'db_port': os.environ.get('DATABASE_PORT'),
    'db_name': os.environ.get('DATABASE_NAME'),
}

if all(DB_VARS.values()):
    DATABASE_URL = (
        f'{DB_VARS["db_type"]}://{DB_VARS["db_cred"]}@{DB_VARS["db_host"]}:{DB_VARS["db_port"]}/{DB_VARS["db_name"]}'
    )
elif DATABASE_TYPE == 'sqlite+sqlcipher' and not os.environ.get('DATABASE_URL'):
    # Handle SQLCipher with local file when DATABASE_URL wasn't explicitly set
    DATABASE_URL = f'sqlite+sqlcipher:///{DATA_DIR}/webui.db'

# Replace the postgres:// scheme with postgresql://
if 'postgres://' in DATABASE_URL:
    DATABASE_URL = DATABASE_URL.replace('postgres://', 'postgresql://')
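
# Editor's worked example (values hypothetical): with DATABASE_TYPE=postgresql,
# DATABASE_USER=webui, DATABASE_PASSWORD=s3cret, DATABASE_HOST=db,
# DATABASE_PORT=5432 and DATABASE_NAME=openwebui, the all(DB_VARS.values())
# branch above assembles:
#     postgresql://webui:s3cret@db:5432/openwebui
# The postgres:// rewrite exists because SQLAlchemy accepts only the
# postgresql:// scheme, while some providers still hand out postgres:// URLs.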
DATABASE_SCHEMA = os.environ.get('DATABASE_SCHEMA', None)

DATABASE_POOL_SIZE = os.environ.get('DATABASE_POOL_SIZE', None)

if DATABASE_POOL_SIZE is not None:
    try:
        DATABASE_POOL_SIZE = int(DATABASE_POOL_SIZE)
    except Exception:
        DATABASE_POOL_SIZE = None
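
# --- Editor's illustrative sketch (hypothetical helper, not in the file):
# --- the integer settings below all repeat the same parse-with-fallback
# --- dance, which a helper like `_env_int` would capture.
def _env_int(name: str, default: int) -> int:
    """Parse an integer env var, treating '' or garbage as the default."""
    raw = os.environ.get(name, str(default))
    if raw == '':
        return default
    try:
        return int(raw)
    except ValueError:
        return default
# e.g. _env_int('DATABASE_POOL_TIMEOUT', 30) matches the block below.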
DATABASE_POOL_MAX_OVERFLOW = os.environ.get('DATABASE_POOL_MAX_OVERFLOW', 0)

if DATABASE_POOL_MAX_OVERFLOW == '':
    DATABASE_POOL_MAX_OVERFLOW = 0
else:
    try:
        DATABASE_POOL_MAX_OVERFLOW = int(DATABASE_POOL_MAX_OVERFLOW)
    except Exception:
        DATABASE_POOL_MAX_OVERFLOW = 0

DATABASE_POOL_TIMEOUT = os.environ.get('DATABASE_POOL_TIMEOUT', 30)

if DATABASE_POOL_TIMEOUT == '':
    DATABASE_POOL_TIMEOUT = 30
else:
    try:
        DATABASE_POOL_TIMEOUT = int(DATABASE_POOL_TIMEOUT)
    except Exception:
        DATABASE_POOL_TIMEOUT = 30

DATABASE_POOL_RECYCLE = os.environ.get('DATABASE_POOL_RECYCLE', 3600)

if DATABASE_POOL_RECYCLE == '':
    DATABASE_POOL_RECYCLE = 3600
else:
    try:
        DATABASE_POOL_RECYCLE = int(DATABASE_POOL_RECYCLE)
    except Exception:
        DATABASE_POOL_RECYCLE = 3600

DATABASE_ENABLE_SQLITE_WAL = os.environ.get('DATABASE_ENABLE_SQLITE_WAL', 'True').lower() == 'true'

# SQLite PRAGMA tuning — these defaults are optimised for WAL-mode web-server
# workloads. Each can be overridden via its environment variable.
# Set any value to an empty string to skip that PRAGMA entirely.

# PRAGMA synchronous: NORMAL (1) is safe with WAL and avoids an fsync per
# transaction. Valid values: OFF (0), NORMAL (1), FULL (2), EXTRA (3).
DATABASE_SQLITE_PRAGMA_SYNCHRONOUS = os.environ.get('DATABASE_SQLITE_PRAGMA_SYNCHRONOUS', 'NORMAL')

# PRAGMA busy_timeout (ms): how long a connection waits for a write lock
# before raising SQLITE_BUSY.
DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT = os.environ.get('DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT', '5000')

# PRAGMA cache_size: negative value = KiB. -65536 ≈ 64 MB page cache.
DATABASE_SQLITE_PRAGMA_CACHE_SIZE = os.environ.get('DATABASE_SQLITE_PRAGMA_CACHE_SIZE', '-65536')

# PRAGMA temp_store: MEMORY (2) keeps temp tables and indices in RAM.
# Valid values: DEFAULT (0), FILE (1), MEMORY (2).
DATABASE_SQLITE_PRAGMA_TEMP_STORE = os.environ.get('DATABASE_SQLITE_PRAGMA_TEMP_STORE', 'MEMORY')

# PRAGMA mmap_size (bytes): memory-mapped I/O size. 268435456 ≈ 256 MB.
# Set to 0 to disable mmap.
DATABASE_SQLITE_PRAGMA_MMAP_SIZE = os.environ.get('DATABASE_SQLITE_PRAGMA_MMAP_SIZE', '268435456')

# PRAGMA journal_size_limit (bytes): caps the WAL file size after checkpoint.
# Without this the WAL grows unbounded during write bursts and is never
# truncated. 67108864 ≈ 64 MB. Set to -1 for no limit (SQLite default).
DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT = os.environ.get('DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT', '67108864')
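
# --- Editor's sketch of how PRAGMAs like these are typically applied with
# --- SQLAlchemy; a minimal example only, not the wiring db.py actually uses.
def _apply_sqlite_pragmas_sketch(engine):
    """Attach a connect-time listener that issues the PRAGMAs above."""
    from sqlalchemy import event

    @event.listens_for(engine, 'connect')
    def _on_connect(dbapi_conn, _connection_record):
        cur = dbapi_conn.cursor()
        for pragma, value in (
            ('synchronous', DATABASE_SQLITE_PRAGMA_SYNCHRONOUS),
            ('busy_timeout', DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT),
            ('cache_size', DATABASE_SQLITE_PRAGMA_CACHE_SIZE),
            ('temp_store', DATABASE_SQLITE_PRAGMA_TEMP_STORE),
            ('mmap_size', DATABASE_SQLITE_PRAGMA_MMAP_SIZE),
            ('journal_size_limit', DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT),
        ):
            if value != '':  # an empty string means "skip this PRAGMA"
                cur.execute(f'PRAGMA {pragma}={value}')
        cur.close()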
DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL = os.environ.get('DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL', None)
if DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL is not None:
    try:
        DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL = float(DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL)
    except Exception:
        DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL = 0.0

# When enabled, get_db_context reuses existing sessions; set to False to always create new sessions
DATABASE_ENABLE_SESSION_SHARING = os.environ.get('DATABASE_ENABLE_SESSION_SHARING', 'False').lower() == 'true'

# Enable public visibility of active user count (when disabled, only admins can see it)
ENABLE_PUBLIC_ACTIVE_USERS_COUNT = os.environ.get('ENABLE_PUBLIC_ACTIVE_USERS_COUNT', 'True').lower() == 'true'

RESET_CONFIG_ON_START = os.environ.get('RESET_CONFIG_ON_START', 'False').lower() == 'true'

ENABLE_REALTIME_CHAT_SAVE = os.environ.get('ENABLE_REALTIME_CHAT_SAVE', 'False').lower() == 'true'

ENABLE_QUERIES_CACHE = os.environ.get('ENABLE_QUERIES_CACHE', 'False').lower() == 'true'

RAG_SYSTEM_CONTEXT = os.environ.get('RAG_SYSTEM_CONTEXT', 'False').lower() == 'true'

####################################
# REDIS
####################################

REDIS_URL = os.environ.get('REDIS_URL', '')
REDIS_CLUSTER = os.environ.get('REDIS_CLUSTER', 'False').lower() == 'true'

REDIS_KEY_PREFIX = os.environ.get('REDIS_KEY_PREFIX', 'open-webui')

REDIS_SENTINEL_HOSTS = os.environ.get('REDIS_SENTINEL_HOSTS', '')
REDIS_SENTINEL_PORT = os.environ.get('REDIS_SENTINEL_PORT', '26379')

# Maximum number of retries for Redis operations when using Sentinel fail-over
REDIS_SENTINEL_MAX_RETRY_COUNT = os.environ.get('REDIS_SENTINEL_MAX_RETRY_COUNT', '2')
try:
    REDIS_SENTINEL_MAX_RETRY_COUNT = int(REDIS_SENTINEL_MAX_RETRY_COUNT)
    if REDIS_SENTINEL_MAX_RETRY_COUNT < 1:
        REDIS_SENTINEL_MAX_RETRY_COUNT = 2
except ValueError:
    REDIS_SENTINEL_MAX_RETRY_COUNT = 2


REDIS_SOCKET_CONNECT_TIMEOUT = os.environ.get('REDIS_SOCKET_CONNECT_TIMEOUT', '')
try:
    REDIS_SOCKET_CONNECT_TIMEOUT = float(REDIS_SOCKET_CONNECT_TIMEOUT)
except ValueError:
    REDIS_SOCKET_CONNECT_TIMEOUT = None

# Whether to enable TCP SO_KEEPALIVE on Redis client sockets. Opt-in:
# defaults to off so behavior is unchanged for existing deployments. When
# enabled, the kernel sends TCP keepalive probes on idle connections so
# half-closed sockets (e.g. after a silent firewall/LB reset or a NIC
# flap) are detected before the next command lands on them.
REDIS_SOCKET_KEEPALIVE = os.environ.get('REDIS_SOCKET_KEEPALIVE', 'False').lower() == 'true'

# How often (in seconds) redis-py should PING an idle pooled connection
# before reusing it. Opt-in: defaults to unset (empty string) so behavior
# is unchanged for existing deployments. When set, should be shorter than
# the Redis server `timeout` setting and any firewall/LB idle timeout on
# the path to Redis, so stale connections are detected before a real
# command lands on them. Set to 0 or empty to disable.
REDIS_HEALTH_CHECK_INTERVAL = os.environ.get('REDIS_HEALTH_CHECK_INTERVAL', '')
try:
    REDIS_HEALTH_CHECK_INTERVAL = int(REDIS_HEALTH_CHECK_INTERVAL)
    if REDIS_HEALTH_CHECK_INTERVAL <= 0:
        REDIS_HEALTH_CHECK_INTERVAL = None
except ValueError:
    REDIS_HEALTH_CHECK_INTERVAL = None

REDIS_RECONNECT_DELAY = os.environ.get('REDIS_RECONNECT_DELAY', '')

if REDIS_RECONNECT_DELAY == '':
    REDIS_RECONNECT_DELAY = None
else:
    try:
        REDIS_RECONNECT_DELAY = float(REDIS_RECONNECT_DELAY)
        if REDIS_RECONNECT_DELAY < 0:
            REDIS_RECONNECT_DELAY = None
    except Exception:
        REDIS_RECONNECT_DELAY = None
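
# --- Editor's sketch (assumption: a redis-py client; not the app's actual
# --- connection code). The knobs above map onto redis-py keyword arguments:
def _redis_client_sketch():
    import redis

    return redis.Redis.from_url(
        REDIS_URL or 'redis://localhost:6379/0',
        socket_connect_timeout=REDIS_SOCKET_CONNECT_TIMEOUT,
        socket_keepalive=REDIS_SOCKET_KEEPALIVE,
        health_check_interval=REDIS_HEALTH_CHECK_INTERVAL or 0,  # 0 disables
        decode_responses=True,
    )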
####################################
# UVICORN WORKERS
####################################

# Number of uvicorn worker processes for handling requests
UVICORN_WORKERS = os.environ.get('UVICORN_WORKERS', '1')
try:
    UVICORN_WORKERS = int(UVICORN_WORKERS)
    if UVICORN_WORKERS < 1:
        UVICORN_WORKERS = 1
except ValueError:
    UVICORN_WORKERS = 1
    log.info(f'Invalid UVICORN_WORKERS value, defaulting to {UVICORN_WORKERS}')

####################################
# WEBUI_AUTH (Required for security)
####################################

WEBUI_AUTH = os.environ.get('WEBUI_AUTH', 'True').lower() == 'true'

ENABLE_INITIAL_ADMIN_SIGNUP = os.environ.get('ENABLE_INITIAL_ADMIN_SIGNUP', 'False').lower() == 'true'
ENABLE_SIGNUP_PASSWORD_CONFIRMATION = os.environ.get('ENABLE_SIGNUP_PASSWORD_CONFIRMATION', 'False').lower() == 'true'

####################################
# Admin Account Runtime Creation
####################################

# Optional env vars for creating an admin account on startup
# Useful for headless/automated deployments
WEBUI_ADMIN_EMAIL = os.environ.get('WEBUI_ADMIN_EMAIL', '')
WEBUI_ADMIN_PASSWORD = os.environ.get('WEBUI_ADMIN_PASSWORD', '')
WEBUI_ADMIN_NAME = os.environ.get('WEBUI_ADMIN_NAME', 'Admin')

WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get('WEBUI_AUTH_TRUSTED_EMAIL_HEADER', None)
WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get('WEBUI_AUTH_TRUSTED_NAME_HEADER', None)
WEBUI_AUTH_TRUSTED_GROUPS_HEADER = os.environ.get('WEBUI_AUTH_TRUSTED_GROUPS_HEADER', None)
WEBUI_AUTH_TRUSTED_ROLE_HEADER = os.environ.get('WEBUI_AUTH_TRUSTED_ROLE_HEADER', None)

# Custom header name for API key authentication. Defaults to 'x-api-key'.
# Useful when Open WebUI sits behind a reverse proxy / API gateway that
# already uses the Authorization header for its own authentication — set
# this to a unique header (e.g. 'X-OpenWebUI-Key') so the middleware
# checks the custom header instead and avoids the 401 short-circuit.
CUSTOM_API_KEY_HEADER = os.environ.get('CUSTOM_API_KEY_HEADER', 'x-api-key')

ENABLE_PASSWORD_VALIDATION = os.environ.get('ENABLE_PASSWORD_VALIDATION', 'False').lower() == 'true'
PASSWORD_VALIDATION_REGEX_PATTERN = os.environ.get(
    'PASSWORD_VALIDATION_REGEX_PATTERN',
    r'^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^\w\s]).{8,}$',
)


try:
    PASSWORD_VALIDATION_REGEX_PATTERN = rf'{PASSWORD_VALIDATION_REGEX_PATTERN}'
    PASSWORD_VALIDATION_REGEX_PATTERN = re.compile(PASSWORD_VALIDATION_REGEX_PATTERN)
except Exception as e:
    log.error(f'Invalid PASSWORD_VALIDATION_REGEX_PATTERN: {e}')
    PASSWORD_VALIDATION_REGEX_PATTERN = re.compile(r'^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^\w\s]).{8,}$')

PASSWORD_VALIDATION_HINT = os.environ.get('PASSWORD_VALIDATION_HINT', '')
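
# Editor's usage example for the compiled pattern above:
#     bool(PASSWORD_VALIDATION_REGEX_PATTERN.match('Sup3r-secret'))  # True
#     bool(PASSWORD_VALIDATION_REGEX_PATTERN.match('short'))         # False
# The default rule requires a lowercase letter, an uppercase letter, a digit,
# a symbol, and at least 8 characters.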
BYPASS_MODEL_ACCESS_CONTROL = os.environ.get('BYPASS_MODEL_ACCESS_CONTROL', 'False').lower() == 'true'

# When enabled, skips pydub-based preprocessing (format conversion, compression,
# and chunked splitting) before sending files to processing engines. Useful when
# the upstream provider handles these steps or when ffmpeg is unavailable.
BYPASS_PYDUB_PREPROCESSING = os.environ.get('BYPASS_PYDUB_PREPROCESSING', 'False').lower() == 'true'

# When disabled (default), the OpenAI catch-all proxy endpoint (/{path:path})
# is blocked. Enable only if you need direct passthrough to upstream OpenAI-
# compatible APIs for endpoints not natively handled by Open WebUI.
ENABLE_OPENAI_API_PASSTHROUGH = os.environ.get('ENABLE_OPENAI_API_PASSTHROUGH', 'False').lower() == 'true'

WEBUI_AUTH_SIGNOUT_REDIRECT_URL = os.environ.get('WEBUI_AUTH_SIGNOUT_REDIRECT_URL', None)

####################################
# WEBUI_SECRET_KEY
####################################

WEBUI_SECRET_KEY = os.environ.get(
    'WEBUI_SECRET_KEY',
    os.environ.get('WEBUI_JWT_SECRET_KEY', 't0p-s3cr3t'),  # DEPRECATED: remove at next major version
)

WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get('WEBUI_SESSION_COOKIE_SAME_SITE', 'lax')

WEBUI_SESSION_COOKIE_SECURE = os.environ.get('WEBUI_SESSION_COOKIE_SECURE', 'false').lower() == 'true'

WEBUI_AUTH_COOKIE_SAME_SITE = os.environ.get('WEBUI_AUTH_COOKIE_SAME_SITE', WEBUI_SESSION_COOKIE_SAME_SITE)

WEBUI_AUTH_COOKIE_SECURE = (
    os.environ.get(
        'WEBUI_AUTH_COOKIE_SECURE',
        os.environ.get('WEBUI_SESSION_COOKIE_SECURE', 'false'),
    ).lower()
    == 'true'
)

if WEBUI_AUTH and WEBUI_SECRET_KEY == '':
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)

ENABLE_COMPRESSION_MIDDLEWARE = os.environ.get('ENABLE_COMPRESSION_MIDDLEWARE', 'True').lower() == 'true'

####################################
# OAUTH Configuration
####################################
ENABLE_OAUTH_EMAIL_FALLBACK = os.environ.get('ENABLE_OAUTH_EMAIL_FALLBACK', 'False').lower() == 'true'

ENABLE_OAUTH_ID_TOKEN_COOKIE = os.environ.get('ENABLE_OAUTH_ID_TOKEN_COOKIE', 'True').lower() == 'true'

OAUTH_CLIENT_INFO_ENCRYPTION_KEY = os.environ.get('OAUTH_CLIENT_INFO_ENCRYPTION_KEY', WEBUI_SECRET_KEY)

OAUTH_SESSION_TOKEN_ENCRYPTION_KEY = os.environ.get('OAUTH_SESSION_TOKEN_ENCRYPTION_KEY', WEBUI_SECRET_KEY)

# Maximum number of concurrent OAuth sessions per user per provider.
# This prevents unbounded session growth while allowing multi-device usage.
OAUTH_MAX_SESSIONS_PER_USER = int(os.environ.get('OAUTH_MAX_SESSIONS_PER_USER', '10'))

# Token Exchange Configuration
# Allows external apps to exchange OAuth tokens for OpenWebUI tokens
ENABLE_OAUTH_TOKEN_EXCHANGE = os.environ.get('ENABLE_OAUTH_TOKEN_EXCHANGE', 'False').lower() == 'true'

# Back-Channel Logout Configuration
# When enabled, exposes POST /oauth/backchannel-logout for IdP-initiated logout
# per the OpenID Connect Back-Channel Logout 1.0 spec.
# Requires Redis for JWT revocation.
ENABLE_OAUTH_BACKCHANNEL_LOGOUT = os.environ.get('ENABLE_OAUTH_BACKCHANNEL_LOGOUT', 'False').lower() == 'true'

####################################
# SCIM Configuration
####################################

ENABLE_SCIM = os.environ.get('ENABLE_SCIM', os.environ.get('SCIM_ENABLED', 'False')).lower() == 'true'
SCIM_TOKEN = os.environ.get('SCIM_TOKEN', '')
SCIM_AUTH_PROVIDER = os.environ.get('SCIM_AUTH_PROVIDER', '')

if ENABLE_SCIM and not SCIM_AUTH_PROVIDER:
    log.warning(
        'SCIM is enabled but SCIM_AUTH_PROVIDER is not set. '
        "Set SCIM_AUTH_PROVIDER to the OAuth provider name (e.g. 'microsoft', 'oidc') "
        'to enable externalId storage.'
    )

####################################
# LICENSE_KEY
####################################

LICENSE_KEY = os.environ.get('LICENSE_KEY', '')

LICENSE_BLOB = None
LICENSE_BLOB_PATH = os.environ.get('LICENSE_BLOB_PATH', DATA_DIR / 'l.data')
if LICENSE_BLOB_PATH and os.path.exists(LICENSE_BLOB_PATH):
    with open(LICENSE_BLOB_PATH, 'rb') as f:
        LICENSE_BLOB = f.read()

LICENSE_PUBLIC_KEY = os.environ.get('LICENSE_PUBLIC_KEY', '')

pk = None
if LICENSE_PUBLIC_KEY:
    pk = serialization.load_pem_public_key(
        f"""
-----BEGIN PUBLIC KEY-----
{LICENSE_PUBLIC_KEY}
-----END PUBLIC KEY-----
""".encode('utf-8')
    )
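
# Editor's note on the wrapping above: LICENSE_PUBLIC_KEY is expected to hold
# only the base64 body of the PEM, since the BEGIN/END armor lines are added
# by the f-string before load_pem_public_key() parses it.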
####################################
# MODELS
####################################

ENABLE_CUSTOM_MODEL_FALLBACK = os.environ.get('ENABLE_CUSTOM_MODEL_FALLBACK', 'False').lower() == 'true'

MODELS_CACHE_TTL = os.environ.get('MODELS_CACHE_TTL', '1')
if MODELS_CACHE_TTL == '':
    MODELS_CACHE_TTL = None
else:
    try:
        MODELS_CACHE_TTL = int(MODELS_CACHE_TTL)
    except Exception:
        MODELS_CACHE_TTL = 1


####################################
# CHAT
####################################

ENABLE_CHAT_RESPONSE_BASE64_IMAGE_URL_CONVERSION = (
    os.environ.get('ENABLE_CHAT_RESPONSE_BASE64_IMAGE_URL_CONVERSION', 'False').lower() == 'true'
)

# When enabled, uses a hardcoded extension-to-MIME dictionary as a last-resort
# fallback when both mimetypes.guess_type() and file.meta.content_type fail to
# determine the content type. This can help on minimal container images (e.g.
# wolfi-base) that lack /etc/mime.types AND have legacy files without stored
# content_type metadata.
ENABLE_IMAGE_CONTENT_TYPE_EXTENSION_FALLBACK = (
    os.environ.get('ENABLE_IMAGE_CONTENT_TYPE_EXTENSION_FALLBACK', 'False').lower() == 'true'
)

CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE = os.environ.get('CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE', '1')

if CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE == '':
    CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE = 1
else:
    try:
        CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE = int(CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE)
    except Exception:
        CHAT_RESPONSE_STREAM_DELTA_CHUNK_SIZE = 1


CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = os.environ.get('CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES', '30')

if CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES == '':
    CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = 30
else:
    try:
        CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = int(CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES)
    except Exception:
        CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = 30


# WARNING: Experimental. Only enable if your upstream Responses API endpoint
# supports stateful sessions (i.e. server-side response storage with
# previous_response_id anchoring). Most proxies and third-party endpoints
# are stateless and will break if this is enabled.
ENABLE_RESPONSES_API_STATEFUL = os.environ.get('ENABLE_RESPONSES_API_STATEFUL', 'False').lower() == 'true'


CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE = os.environ.get('CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE', '')

if CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE == '':
    CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE = None
else:
    try:
        CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE = int(CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE)
    except Exception:
        CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE = None


####################################
# WEBSOCKET SUPPORT
####################################

ENABLE_WEBSOCKET_SUPPORT = os.environ.get('ENABLE_WEBSOCKET_SUPPORT', 'True').lower() == 'true'

WEBSOCKET_MANAGER = os.environ.get('WEBSOCKET_MANAGER', '')

WEBSOCKET_REDIS_OPTIONS = os.environ.get('WEBSOCKET_REDIS_OPTIONS', '')

if WEBSOCKET_REDIS_OPTIONS == '':
    if REDIS_SOCKET_CONNECT_TIMEOUT:
        WEBSOCKET_REDIS_OPTIONS = {'socket_connect_timeout': REDIS_SOCKET_CONNECT_TIMEOUT}
    else:
        log.debug('No WEBSOCKET_REDIS_OPTIONS provided, defaulting to None')
        WEBSOCKET_REDIS_OPTIONS = None
else:
    try:
        WEBSOCKET_REDIS_OPTIONS = json.loads(WEBSOCKET_REDIS_OPTIONS)
    except Exception:
        log.warning('Invalid WEBSOCKET_REDIS_OPTIONS, defaulting to None')
        WEBSOCKET_REDIS_OPTIONS = None
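
# Editor's worked example: WEBSOCKET_REDIS_OPTIONS is parsed as JSON, so a
# deployment might set (values hypothetical):
#     WEBSOCKET_REDIS_OPTIONS='{"socket_timeout": 5, "retry_on_timeout": true}'
# which json.loads() above turns into the dict handed to the Redis client.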
WEBSOCKET_REDIS_URL = os.environ.get('WEBSOCKET_REDIS_URL', REDIS_URL)
WEBSOCKET_REDIS_CLUSTER = os.environ.get('WEBSOCKET_REDIS_CLUSTER', str(REDIS_CLUSTER)).lower() == 'true'

websocket_redis_lock_timeout = os.environ.get('WEBSOCKET_REDIS_LOCK_TIMEOUT', '60')

try:
    WEBSOCKET_REDIS_LOCK_TIMEOUT = int(websocket_redis_lock_timeout)
except ValueError:
    WEBSOCKET_REDIS_LOCK_TIMEOUT = 60

WEBSOCKET_SENTINEL_HOSTS = os.environ.get('WEBSOCKET_SENTINEL_HOSTS', '')
WEBSOCKET_SENTINEL_PORT = os.environ.get('WEBSOCKET_SENTINEL_PORT', '26379')
WEBSOCKET_SERVER_LOGGING = os.environ.get('WEBSOCKET_SERVER_LOGGING', 'False').lower() == 'true'
WEBSOCKET_SERVER_ENGINEIO_LOGGING = (
    os.environ.get(
        'WEBSOCKET_SERVER_ENGINEIO_LOGGING',
        os.environ.get('WEBSOCKET_SERVER_LOGGING', 'False'),
    ).lower()
    == 'true'
)
WEBSOCKET_SERVER_PING_TIMEOUT = os.environ.get('WEBSOCKET_SERVER_PING_TIMEOUT', '20')
try:
    WEBSOCKET_SERVER_PING_TIMEOUT = int(WEBSOCKET_SERVER_PING_TIMEOUT)
except ValueError:
    WEBSOCKET_SERVER_PING_TIMEOUT = 20

WEBSOCKET_SERVER_PING_INTERVAL = os.environ.get('WEBSOCKET_SERVER_PING_INTERVAL', '25')
try:
    WEBSOCKET_SERVER_PING_INTERVAL = int(WEBSOCKET_SERVER_PING_INTERVAL)
except ValueError:
    WEBSOCKET_SERVER_PING_INTERVAL = 25

WEBSOCKET_EVENT_CALLER_TIMEOUT = os.environ.get('WEBSOCKET_EVENT_CALLER_TIMEOUT', '')

if WEBSOCKET_EVENT_CALLER_TIMEOUT == '':
    WEBSOCKET_EVENT_CALLER_TIMEOUT = None
else:
    try:
        WEBSOCKET_EVENT_CALLER_TIMEOUT = int(WEBSOCKET_EVENT_CALLER_TIMEOUT)
    except ValueError:
        WEBSOCKET_EVENT_CALLER_TIMEOUT = 300


REQUESTS_VERIFY = os.environ.get('REQUESTS_VERIFY', 'True').lower() == 'true'

AIOHTTP_CLIENT_TIMEOUT = os.environ.get('AIOHTTP_CLIENT_TIMEOUT', '')

if AIOHTTP_CLIENT_TIMEOUT == '':
    AIOHTTP_CLIENT_TIMEOUT = None
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT)
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT = 300


AIOHTTP_CLIENT_SESSION_SSL = os.environ.get('AIOHTTP_CLIENT_SESSION_SSL', 'True').lower() == 'true'

AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = os.environ.get(
    'AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST',
    os.environ.get('AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST', '10'),
)

if AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST == '':
    AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = None
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = int(AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST)
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = 10


AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA = os.environ.get('AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA', '10')

if AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA == '':
    AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA = None
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA = int(AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA)
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA = 10


AIOHTTP_CLIENT_SESSION_TOOL_SERVER_SSL = (
    os.environ.get('AIOHTTP_CLIENT_SESSION_TOOL_SERVER_SSL', 'True').lower() == 'true'
)

AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER = os.environ.get('AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER', '')

if AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER == '':
    AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER = AIOHTTP_CLIENT_TIMEOUT
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER = int(AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER)
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER = AIOHTTP_CLIENT_TIMEOUT
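
# --- Editor's sketch (assumption: these timeouts feed aiohttp sessions; shown
# --- only to illustrate the unit semantics: seconds, with None meaning no limit):
async def _aiohttp_session_sketch():
    import aiohttp

    timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
    return aiohttp.ClientSession(timeout=timeout)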
####################################
# AIOHTTP Connection Pool
####################################

AIOHTTP_POOL_CONNECTIONS = os.environ.get('AIOHTTP_POOL_CONNECTIONS', '')
if AIOHTTP_POOL_CONNECTIONS == '':
    AIOHTTP_POOL_CONNECTIONS = None
else:
    try:
        AIOHTTP_POOL_CONNECTIONS = int(AIOHTTP_POOL_CONNECTIONS)
    except ValueError:
        AIOHTTP_POOL_CONNECTIONS = None

AIOHTTP_POOL_CONNECTIONS_PER_HOST = os.environ.get('AIOHTTP_POOL_CONNECTIONS_PER_HOST', '')
if AIOHTTP_POOL_CONNECTIONS_PER_HOST == '':
    AIOHTTP_POOL_CONNECTIONS_PER_HOST = None
else:
    try:
        AIOHTTP_POOL_CONNECTIONS_PER_HOST = int(AIOHTTP_POOL_CONNECTIONS_PER_HOST)
    except ValueError:
        AIOHTTP_POOL_CONNECTIONS_PER_HOST = None

AIOHTTP_POOL_DNS_TTL = os.environ.get('AIOHTTP_POOL_DNS_TTL', '300')
try:
    AIOHTTP_POOL_DNS_TTL = int(AIOHTTP_POOL_DNS_TTL)
    if AIOHTTP_POOL_DNS_TTL < 0:
        AIOHTTP_POOL_DNS_TTL = 300
except ValueError:
    AIOHTTP_POOL_DNS_TTL = 300

RAG_EMBEDDING_TIMEOUT = os.environ.get('RAG_EMBEDDING_TIMEOUT', '')

if RAG_EMBEDDING_TIMEOUT == '':
    RAG_EMBEDDING_TIMEOUT = None
else:
    try:
        RAG_EMBEDDING_TIMEOUT = int(RAG_EMBEDDING_TIMEOUT)
    except Exception:
        RAG_EMBEDDING_TIMEOUT = None
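
# --- Editor's sketch (assumption: the pool knobs above map onto
# --- aiohttp.TCPConnector; the real wiring lives elsewhere in the backend):
async def _aiohttp_connector_sketch():
    import aiohttp

    return aiohttp.TCPConnector(
        limit=AIOHTTP_POOL_CONNECTIONS or 100,  # total connection pool size
        limit_per_host=AIOHTTP_POOL_CONNECTIONS_PER_HOST or 0,  # 0 = unlimited
        ttl_dns_cache=AIOHTTP_POOL_DNS_TTL,  # DNS cache lifetime in seconds
    )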
####################################
# SENTENCE TRANSFORMERS
####################################


SENTENCE_TRANSFORMERS_BACKEND = os.environ.get('SENTENCE_TRANSFORMERS_BACKEND', '')
if SENTENCE_TRANSFORMERS_BACKEND == '':
    SENTENCE_TRANSFORMERS_BACKEND = 'torch'


SENTENCE_TRANSFORMERS_MODEL_KWARGS = os.environ.get('SENTENCE_TRANSFORMERS_MODEL_KWARGS', '')
if SENTENCE_TRANSFORMERS_MODEL_KWARGS == '':
    SENTENCE_TRANSFORMERS_MODEL_KWARGS = None
else:
    try:
        SENTENCE_TRANSFORMERS_MODEL_KWARGS = json.loads(SENTENCE_TRANSFORMERS_MODEL_KWARGS)
    except Exception:
        SENTENCE_TRANSFORMERS_MODEL_KWARGS = None


SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND = os.environ.get('SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND', '')
if SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND == '':
    SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND = 'torch'


SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS = os.environ.get(
    'SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS', ''
)
if SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS == '':
    SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS = None
else:
    try:
        SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS = json.loads(SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS)
    except Exception:
        SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS = None

# Whether to apply sigmoid normalization to CrossEncoder reranking scores.
# When enabled (default), scores are normalized to the 0-1 range for proper
# relevance threshold behavior with MS MARCO models.
SENTENCE_TRANSFORMERS_CROSS_ENCODER_SIGMOID_ACTIVATION_FUNCTION = (
    os.environ.get('SENTENCE_TRANSFORMERS_CROSS_ENCODER_SIGMOID_ACTIVATION_FUNCTION', 'True').lower() == 'true'
)

####################################
# OFFLINE_MODE
####################################

ENABLE_VERSION_UPDATE_CHECK = os.environ.get('ENABLE_VERSION_UPDATE_CHECK', 'true').lower() == 'true'
OFFLINE_MODE = os.environ.get('OFFLINE_MODE', 'false').lower() == 'true'

if OFFLINE_MODE:
    os.environ['HF_HUB_OFFLINE'] = '1'
    ENABLE_VERSION_UPDATE_CHECK = False

####################################
# AUDIT LOGGING
####################################


ENABLE_AUDIT_STDOUT = os.getenv('ENABLE_AUDIT_STDOUT', 'False').lower() == 'true'
ENABLE_AUDIT_LOGS_FILE = os.getenv('ENABLE_AUDIT_LOGS_FILE', 'True').lower() == 'true'

# Where to store the audit log file.
# Defaults to DATA_DIR/audit.log. To set AUDIT_LOGS_FILE_PATH you need to
# provide the whole path, like: /app/audit.log
AUDIT_LOGS_FILE_PATH = os.getenv('AUDIT_LOGS_FILE_PATH', f'{DATA_DIR}/audit.log')
# Maximum size of a file before rotating into a new log file
AUDIT_LOG_FILE_ROTATION_SIZE = os.getenv('AUDIT_LOG_FILE_ROTATION_SIZE', '10MB')

# Comma-separated list of logger names to use for audit logging.
# Default is "uvicorn.access", which is the access log for Uvicorn.
# You can add more logger names to this list if you want to capture more logs.
AUDIT_UVICORN_LOGGER_NAMES = os.getenv('AUDIT_UVICORN_LOGGER_NAMES', 'uvicorn.access').split(',')

# METADATA | REQUEST | REQUEST_RESPONSE
AUDIT_LOG_LEVEL = os.getenv('AUDIT_LOG_LEVEL', 'NONE').upper()
try:
    MAX_BODY_LOG_SIZE = int(os.environ.get('MAX_BODY_LOG_SIZE') or 2048)
except ValueError:
    MAX_BODY_LOG_SIZE = 2048

# Comma-separated list of URLs to exclude from audit
AUDIT_EXCLUDED_PATHS = os.getenv('AUDIT_EXCLUDED_PATHS', '/chats,/chat,/folders').split(',')
AUDIT_EXCLUDED_PATHS = [path.strip() for path in AUDIT_EXCLUDED_PATHS]
AUDIT_EXCLUDED_PATHS = [path.lstrip('/') for path in AUDIT_EXCLUDED_PATHS]

# Comma-separated list of URLs to include in audit (whitelist mode).
# When set, only these paths are audited and AUDIT_EXCLUDED_PATHS is ignored.
AUDIT_INCLUDED_PATHS = os.getenv('AUDIT_INCLUDED_PATHS', '').split(',')
AUDIT_INCLUDED_PATHS = [path.strip() for path in AUDIT_INCLUDED_PATHS]
AUDIT_INCLUDED_PATHS = [path.lstrip('/') for path in AUDIT_INCLUDED_PATHS if path]
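
# Editor's worked example: with the default '/chats,/chat,/folders', the two
# normalization passes above yield ['chats', 'chat', 'folders']; leading
# slashes and stray whitespace are stripped so path matching is uniform.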
# When enabled, GET requests are also audited (disabled by default to avoid log noise)
ENABLE_AUDIT_GET_REQUESTS = os.getenv('ENABLE_AUDIT_GET_REQUESTS', 'False').lower() == 'true'


####################################
# OPENTELEMETRY
####################################

ENABLE_OTEL = os.environ.get('ENABLE_OTEL', 'False').lower() == 'true'
ENABLE_OTEL_TRACES = os.environ.get('ENABLE_OTEL_TRACES', 'False').lower() == 'true'
ENABLE_OTEL_METRICS = os.environ.get('ENABLE_OTEL_METRICS', 'False').lower() == 'true'
ENABLE_OTEL_LOGS = os.environ.get('ENABLE_OTEL_LOGS', 'False').lower() == 'true'

OTEL_EXPORTER_OTLP_ENDPOINT = os.environ.get('OTEL_EXPORTER_OTLP_ENDPOINT', 'http://localhost:4317')
OTEL_METRICS_EXPORTER_OTLP_ENDPOINT = os.environ.get('OTEL_METRICS_EXPORTER_OTLP_ENDPOINT', OTEL_EXPORTER_OTLP_ENDPOINT)
OTEL_LOGS_EXPORTER_OTLP_ENDPOINT = os.environ.get('OTEL_LOGS_EXPORTER_OTLP_ENDPOINT', OTEL_EXPORTER_OTLP_ENDPOINT)
OTEL_EXPORTER_OTLP_INSECURE = os.environ.get('OTEL_EXPORTER_OTLP_INSECURE', 'False').lower() == 'true'
OTEL_METRICS_EXPORTER_OTLP_INSECURE = (
    os.environ.get('OTEL_METRICS_EXPORTER_OTLP_INSECURE', str(OTEL_EXPORTER_OTLP_INSECURE)).lower() == 'true'
)
OTEL_LOGS_EXPORTER_OTLP_INSECURE = (
    os.environ.get('OTEL_LOGS_EXPORTER_OTLP_INSECURE', str(OTEL_EXPORTER_OTLP_INSECURE)).lower() == 'true'
)
OTEL_SERVICE_NAME = os.environ.get('OTEL_SERVICE_NAME', 'open-webui')
OTEL_RESOURCE_ATTRIBUTES = os.environ.get('OTEL_RESOURCE_ATTRIBUTES', '')  # e.g. key1=val1,key2=val2
OTEL_TRACES_SAMPLER = os.environ.get('OTEL_TRACES_SAMPLER', 'parentbased_always_on').lower()
OTEL_BASIC_AUTH_USERNAME = os.environ.get('OTEL_BASIC_AUTH_USERNAME', '')
OTEL_BASIC_AUTH_PASSWORD = os.environ.get('OTEL_BASIC_AUTH_PASSWORD', '')
OTEL_METRICS_EXPORT_INTERVAL_MILLIS = int(os.environ.get('OTEL_METRICS_EXPORT_INTERVAL_MILLIS', '10000'))

OTEL_METRICS_BASIC_AUTH_USERNAME = os.environ.get('OTEL_METRICS_BASIC_AUTH_USERNAME', OTEL_BASIC_AUTH_USERNAME)
OTEL_METRICS_BASIC_AUTH_PASSWORD = os.environ.get('OTEL_METRICS_BASIC_AUTH_PASSWORD', OTEL_BASIC_AUTH_PASSWORD)
OTEL_LOGS_BASIC_AUTH_USERNAME = os.environ.get('OTEL_LOGS_BASIC_AUTH_USERNAME', OTEL_BASIC_AUTH_USERNAME)
OTEL_LOGS_BASIC_AUTH_PASSWORD = os.environ.get('OTEL_LOGS_BASIC_AUTH_PASSWORD', OTEL_BASIC_AUTH_PASSWORD)

OTEL_OTLP_SPAN_EXPORTER = os.environ.get('OTEL_OTLP_SPAN_EXPORTER', 'grpc').lower()  # grpc or http

OTEL_METRICS_OTLP_SPAN_EXPORTER = os.environ.get(
    'OTEL_METRICS_OTLP_SPAN_EXPORTER', OTEL_OTLP_SPAN_EXPORTER
).lower()  # grpc or http

OTEL_LOGS_OTLP_SPAN_EXPORTER = os.environ.get(
    'OTEL_LOGS_OTLP_SPAN_EXPORTER', OTEL_OTLP_SPAN_EXPORTER
).lower()  # grpc or http

####################################
# TOOLS/FUNCTIONS PIP OPTIONS
####################################

ENABLE_PIP_INSTALL_FRONTMATTER_REQUIREMENTS = (
    os.environ.get('ENABLE_PIP_INSTALL_FRONTMATTER_REQUIREMENTS', 'True').lower() == 'true'
)

PIP_OPTIONS = os.getenv('PIP_OPTIONS', '').split()
PIP_PACKAGE_INDEX_OPTIONS = os.getenv('PIP_PACKAGE_INDEX_OPTIONS', '').split()


####################################
# PROGRESSIVE WEB APP OPTIONS
####################################

EXTERNAL_PWA_MANIFEST_URL = os.environ.get('EXTERNAL_PWA_MANIFEST_URL')

####################################
# GROUP DEFAULTS
####################################

# Controls the default "Who can share to this group" setting for new groups.
# Env var values: "true" (anyone), "false" (no one), "members" (only group members).
_default_group_share = os.environ.get('DEFAULT_GROUP_SHARE_PERMISSION', 'members').strip().lower()
DEFAULT_GROUP_SHARE_PERMISSION = 'members' if _default_group_share == 'members' else _default_group_share == 'true'
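
# Editor's worked example of the expression above:
#     DEFAULT_GROUP_SHARE_PERMISSION='members' -> 'members' (group members only)
#     DEFAULT_GROUP_SHARE_PERMISSION='true'    -> True      (anyone)
#     anything else, including 'false'         -> False     (no one)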
backend/open_webui/functions.py
ADDED
@@ -0,0 +1,348 @@
import logging
import sys
import inspect
import json
import asyncio

from pydantic import BaseModel
from typing import AsyncGenerator, Generator, Iterator
from fastapi import (
    Depends,
    FastAPI,
    File,
    Form,
    HTTPException,
    Request,
    UploadFile,
    status,
)
from starlette.responses import Response, StreamingResponse


from open_webui.constants import ERROR_MESSAGES
from open_webui.socket.main import (
    get_event_call,
    get_event_emitter,
)


from open_webui.models.users import UserModel
from open_webui.models.functions import Functions
from open_webui.models.models import Models

from open_webui.utils.plugin import (
    load_function_module_by_id,
    get_function_module_from_cache,
)
from open_webui.utils.access_control import check_model_access

from open_webui.env import GLOBAL_LOG_LEVEL, BYPASS_MODEL_ACCESS_CONTROL
from open_webui.config import BYPASS_ADMIN_ACCESS_CONTROL

from open_webui.utils.misc import (
    add_or_update_system_message,
    get_last_user_message,
    prepend_to_first_user_message_content,
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_system_prompt_to_body,
)

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
async def get_function_module_by_id(request: Request, pipe_id: str):
    function_module, _, _ = await get_function_module_from_cache(request, pipe_id)

    if hasattr(function_module, 'valves') and hasattr(function_module, 'Valves'):
        Valves = function_module.Valves
        valves = await Functions.get_function_valves_by_id(pipe_id)

        if valves:
            try:
                function_module.valves = Valves(**{k: v for k, v in valves.items() if v is not None})
            except Exception as e:
                log.exception(f'Error loading valves for function {pipe_id}: {e}')
                raise e
        else:
            function_module.valves = Valves()

    return function_module
async def get_function_models(request):
    pipes = await Functions.get_functions_by_type('pipe', active_only=True)
    pipe_models = []

    for pipe in pipes:
        try:
            function_module = await get_function_module_by_id(request, pipe.id)

            has_user_valves = False
            if hasattr(function_module, 'UserValves'):
                has_user_valves = True

            # Check if function is a manifold
            if hasattr(function_module, 'pipes'):
                sub_pipes = []

                # Handle pipes being a list, sync function, or async function
                try:
                    if callable(function_module.pipes):
                        if asyncio.iscoroutinefunction(function_module.pipes):
                            sub_pipes = await function_module.pipes()
                        else:
                            sub_pipes = function_module.pipes()
                    else:
                        sub_pipes = function_module.pipes
                except Exception as e:
                    log.exception(e)
                    sub_pipes = []

                log.debug(f"get_function_models: function '{pipe.id}' is a manifold of {sub_pipes}")

                for p in sub_pipes:
                    sub_pipe_id = f'{pipe.id}.{p["id"]}'
                    sub_pipe_name = p['name']

                    if hasattr(function_module, 'name'):
                        sub_pipe_name = f'{function_module.name}{sub_pipe_name}'

                    pipe_flag = {'type': pipe.type}

                    pipe_models.append(
                        {
                            'id': sub_pipe_id,
                            'name': sub_pipe_name,
                            'object': 'model',
                            'created': pipe.created_at,
                            'owned_by': 'openai',
                            'pipe': pipe_flag,
                            'has_user_valves': has_user_valves,
                        }
                    )
            else:
                pipe_flag = {'type': 'pipe'}

                log.debug(
                    f"get_function_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
                )

                pipe_models.append(
                    {
                        'id': pipe.id,
                        'name': pipe.name,
                        'object': 'model',
                        'created': pipe.created_at,
                        'owned_by': 'openai',
                        'pipe': pipe_flag,
                        'has_user_valves': has_user_valves,
                    }
                )
        except Exception as e:
            log.exception(e)
            continue

    return pipe_models
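
# --- Editor's illustrative sketch (hypothetical plugin, not shipped code): a
# --- minimal manifold pipe that get_function_models() above would expand into
# --- two models, '<function_id>.small' and '<function_id>.large'.
class _ExampleManifoldPipe:
    name = 'Example/'

    def pipes(self):
        return [
            {'id': 'small', 'name': 'small-model'},
            {'id': 'large', 'name': 'large-model'},
        ]

    def pipe(self, body: dict) -> str:
        # Echo sketch; a real pipe would call an upstream model here.
        messages = body.get('messages', [])
        return f'echo: {messages[-1].get("content", "") if messages else ""}'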
async def generate_function_chat_completion(request, form_data, user, models: dict = {}):
    async def execute_pipe(pipe, params):
        if inspect.iscoroutinefunction(pipe):
            return await pipe(**params)
        else:
            return pipe(**params)

    async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
        if isinstance(res, str):
            return res
        if isinstance(res, Generator):
            return ''.join(map(str, res))
        if isinstance(res, AsyncGenerator):
            return ''.join([str(stream) async for stream in res])

    def process_line(form_data: dict, line):
        if isinstance(line, BaseModel):
            line = line.model_dump_json()
            line = f'data: {line}'
        if isinstance(line, dict):
            line = f'data: {json.dumps(line)}'

        try:
            line = line.decode('utf-8')
        except Exception:
            pass

        if line.startswith('data:'):
            return f'{line}\n\n'
        else:
            line = openai_chat_chunk_message_template(form_data['model'], line)
            return f'data: {json.dumps(line)}\n\n'

    def get_pipe_id(form_data: dict) -> str:
        pipe_id = form_data['model']
        if '.' in pipe_id:
            pipe_id, _ = pipe_id.split('.', 1)
        return pipe_id
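
    # Editor's worked example: get_pipe_id({'model': 'manifold.small'})
    # returns 'manifold', stripping the sub-pipe suffix used by manifolds.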
    async def get_function_params(function_module, form_data, user, extra_params=None):
        if extra_params is None:
            extra_params = {}

        pipe_id = get_pipe_id(form_data)

        # Filter extra params down to what the pipe's signature actually accepts
        sig = inspect.signature(function_module.pipe)
        params = {'body': form_data} | {k: v for k, v in extra_params.items() if k in sig.parameters}

        if '__user__' in params and hasattr(function_module, 'UserValves'):
            user_valves = await Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
            try:
                params['__user__']['valves'] = function_module.UserValves(**user_valves)
            except Exception as e:
                log.exception(e)
                params['__user__']['valves'] = function_module.UserValves()

        return params
    model_id = form_data.get('model')
    model_info = await Models.get_model_by_id(model_id)

    metadata = form_data.pop('metadata', {})

    files = metadata.get('files', [])
    tool_ids = metadata.get('tool_ids', [])
    # Guard against tool_ids being explicitly None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ('session_id', 'chat_id', 'message_id')):
            __event_emitter__ = await get_event_emitter(metadata)
            __event_call__ = await get_event_call(metadata)
        __task__ = metadata.get('task', None)
        __task_body__ = metadata.get('task_body', None)

    oauth_token = None
    try:
        oauth_session_id = request.cookies.get('oauth_session_id', None)
        if oauth_session_id:
            oauth_token = await request.app.state.oauth_manager.get_oauth_token(
                user.id,
                oauth_session_id,
            )

        # Fallback: no cookie (automation, API key, etc.) — use the most recent session
        if oauth_token is None:
            from open_webui.models.oauth_sessions import OAuthSessions

            sessions = await OAuthSessions.get_sessions_by_user_id(user.id)
            if sessions:
                best = max(sessions, key=lambda s: s.updated_at)
                oauth_token = await request.app.state.oauth_manager.get_oauth_token(
                    user.id,
                    best.id,
                )
    except Exception as e:
        log.error(f'Error getting OAuth token: {e}')

    extra_params = {
        '__event_emitter__': __event_emitter__,
        '__event_call__': __event_call__,
        '__chat_id__': metadata.get('chat_id', None),
        '__session_id__': metadata.get('session_id', None),
        '__message_id__': metadata.get('message_id', None),
        '__task__': __task__,
        '__task_body__': __task_body__,
        '__files__': files,
        '__user__': user.model_dump() if isinstance(user, UserModel) else {},
        '__metadata__': metadata,
        '__oauth_token__': oauth_token,
        '__request__': request,
    }
    extra_params['__tools__'] = metadata.get('tools', {})

    if model_info:
        if model_info.base_model_id:
            form_data['model'] = model_info.base_model_id

        if not BYPASS_MODEL_ACCESS_CONTROL:
            bypass = isinstance(user, UserModel) and user.role == 'admin' and BYPASS_ADMIN_ACCESS_CONTROL
            await check_model_access(user if isinstance(user, UserModel) else UserModel(**user), model_info, bypass)

        params = model_info.params.model_dump()

        if params:
            system = params.pop('system', None)
            form_data = apply_model_params_to_body_openai(params, form_data)
            form_data = apply_system_prompt_to_body(system, form_data, metadata, user)

    pipe_id = get_pipe_id(form_data)
    function_module = await get_function_module_by_id(request, pipe_id)

    pipe = function_module.pipe
    params = await get_function_params(function_module, form_data, user, extra_params)

    if form_data.get('stream', False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f'data: {json.dumps(res)}\n\n'
                    return

            except Exception as e:
                log.error(f'Error: {e}')
                yield f'data: {json.dumps({"error": {"detail": str(e)}})}\n\n'
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data['model'], res)
                yield f'data: {json.dumps(message)}\n\n'

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(form_data['model'], '')
                finish_message['choices'][0]['finish_reason'] = 'stop'
                yield f'data: {json.dumps(finish_message)}\n\n'
                yield 'data: [DONE]'

        return StreamingResponse(stream_content(), media_type='text/event-stream')
    else:
        try:
            res = await execute_pipe(pipe, params)

        except Exception as e:
            log.error(f'Error: {e}')
            return {'error': {'detail': str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data['model'], message)
backend/open_webui/internal/db.py
ADDED
@@ -0,0 +1,409 @@
import os
import json
import logging
from contextlib import asynccontextmanager, contextmanager
from typing import Any, Optional
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

from open_webui.internal.wrappers import register_connection
from open_webui.env import (
    OPEN_WEBUI_DIR,
    DATABASE_URL,
    DATABASE_SCHEMA,
    DATABASE_POOL_MAX_OVERFLOW,
    DATABASE_POOL_RECYCLE,
    DATABASE_POOL_SIZE,
    DATABASE_POOL_TIMEOUT,
    DATABASE_ENABLE_SQLITE_WAL,
    DATABASE_ENABLE_SESSION_SHARING,
    DATABASE_SQLITE_PRAGMA_SYNCHRONOUS,
    DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT,
    DATABASE_SQLITE_PRAGMA_CACHE_SIZE,
    DATABASE_SQLITE_PRAGMA_TEMP_STORE,
    DATABASE_SQLITE_PRAGMA_MMAP_SIZE,
    DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT,
    ENABLE_DB_MIGRATIONS,
)
from peewee_migrate import Router
from sqlalchemy import Dialect, create_engine, MetaData, event, types
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker, Session
from sqlalchemy.pool import QueuePool, NullPool
from sqlalchemy.sql.type_api import _T
from typing_extensions import Self

log = logging.getLogger(__name__)
# ── SSL URL normalization (used by sync engine & Alembic migrations) ─
|
| 40 |
+
#
|
| 41 |
+
# psycopg2 (sync) needs ``sslmode=`` in the connection string (it does
|
| 42 |
+
# not recognise the bare ``ssl=`` key that some ORMs emit). The helpers
|
| 43 |
+
# below strip all SSL-related query params, normalise them, and
|
| 44 |
+
# reattach them in the canonical libpq form.
|
| 45 |
+
#
|
| 46 |
+
# The **async** engine now uses psycopg (v3), which speaks libpq
|
| 47 |
+
# natively, so it needs no translation at all — the DATABASE_URL is
|
| 48 |
+
# passed through as-is.
|
| 49 |
+
# ─────────────────────────────────────────────────────────────────────
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _pop_first(params: dict[str, list[str]], key: str) -> str | None:
|
| 53 |
+
"""Pop a single-valued query param, returning ``None`` if absent."""
|
| 54 |
+
values = params.pop(key, None)
|
| 55 |
+
return values[0] if values else None
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _is_postgres_url(url: str) -> bool:
|
| 59 |
+
"""Return True if *url* looks like a PostgreSQL connection string."""
|
| 60 |
+
return bool(url) and any(url.startswith(p) for p in ('postgresql://', 'postgresql+', 'postgres://'))
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def extract_ssl_params_from_url(url: str) -> tuple[str, dict[str, str]]:
|
| 64 |
+
"""Strip SSL query-string parameters from a PostgreSQL URL.
|
| 65 |
+
|
| 66 |
+
Returns ``(url_without_ssl, ssl_dict)`` where *ssl_dict* maps
|
| 67 |
+
canonical libpq key names (``sslmode``, ``sslrootcert``, …) to
|
| 68 |
+
their values. Non-PostgreSQL URLs are returned unchanged with an
|
| 69 |
+
empty dict.
|
| 70 |
+
"""
|
| 71 |
+
if not _is_postgres_url(url):
|
| 72 |
+
return url, {}
|
| 73 |
+
|
| 74 |
+
parsed = urlparse(url)
|
| 75 |
+
qp = parse_qs(parsed.query, keep_blank_values=True)
|
| 76 |
+
|
| 77 |
+
# Prefer sslmode (libpq canonical) over the bare ``ssl`` key.
|
| 78 |
+
sslmode_val = _pop_first(qp, 'sslmode')
|
| 79 |
+
ssl_val = _pop_first(qp, 'ssl')
|
| 80 |
+
ssl_mode = sslmode_val or ssl_val
|
| 81 |
+
|
| 82 |
+
ssl_dict: dict[str, str] = {}
|
| 83 |
+
if ssl_mode:
|
| 84 |
+
ssl_dict['sslmode'] = ssl_mode
|
| 85 |
+
for key in ('sslrootcert', 'sslcert', 'sslkey', 'sslcrl'):
|
| 86 |
+
val = _pop_first(qp, key)
|
| 87 |
+
if val:
|
| 88 |
+
ssl_dict[key] = val
|
| 89 |
+
|
| 90 |
+
if not ssl_dict:
|
| 91 |
+
return url, ssl_dict
|
| 92 |
+
|
| 93 |
+
cleaned_query = urlencode(qp, doseq=True)
|
| 94 |
+
return urlunparse(parsed._replace(query=cleaned_query)), ssl_dict
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def reattach_ssl_params_to_url(url_without_ssl: str, ssl_dict: dict[str, str]) -> str:
|
| 98 |
+
"""Re-append SSL query-string parameters to a cleaned PostgreSQL URL.
|
| 99 |
+
|
| 100 |
+
Used for psycopg2/libpq consumers that expect ``sslmode`` and the
|
| 101 |
+
certificate-file keys in the connection string.
|
| 102 |
+
"""
|
| 103 |
+
if not ssl_dict:
|
| 104 |
+
return url_without_ssl
|
| 105 |
+
|
| 106 |
+
parts = [f'{k}={v}' for k, v in ssl_dict.items() if v]
|
| 107 |
+
if not parts:
|
| 108 |
+
return url_without_ssl
|
| 109 |
+
|
| 110 |
+
sep = '&' if '?' in url_without_ssl else '?'
|
| 111 |
+
return f'{url_without_ssl}{sep}{"&".join(parts)}'
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# Backwards-compatible aliases for external callers.
|
| 115 |
+
extract_ssl_mode_from_url = extract_ssl_params_from_url
|
| 116 |
+
reattach_ssl_mode_to_url = reattach_ssl_params_to_url
|
| 117 |
+
|
| 118 |
+
|
| 119 |
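
# Illustrative round-trip through the two helpers above. The URL and its
# credentials are hypothetical; only the helper names come from this module.
#
#     >>> url = 'postgresql://u:p@host/db?ssl=require&sslrootcert=/certs/ca.pem'
#     >>> cleaned, ssl = extract_ssl_params_from_url(url)
#     >>> ssl
#     {'sslmode': 'require', 'sslrootcert': '/certs/ca.pem'}
#     >>> reattach_ssl_params_to_url(cleaned, ssl)
#     'postgresql://u:p@host/db?sslmode=require&sslrootcert=/certs/ca.pem'
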
+
class JSONField(types.TypeDecorator):
|
| 120 |
+
impl = types.Text
|
| 121 |
+
cache_ok = True
|
| 122 |
+
|
| 123 |
+
def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
|
| 124 |
+
return json.dumps(value)
|
| 125 |
+
|
| 126 |
+
def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any:
|
| 127 |
+
if value is not None:
|
| 128 |
+
return json.loads(value)
|
| 129 |
+
|
| 130 |
+
def copy(self, **kw: Any) -> Self:
|
| 131 |
+
return JSONField(self.impl.length)
|
| 132 |
+
|
| 133 |
+
def db_value(self, value):
|
| 134 |
+
return json.dumps(value)
|
| 135 |
+
|
| 136 |
+
def python_value(self, value):
|
| 137 |
+
if value is not None:
|
| 138 |
+
return json.loads(value)
|
| 139 |
+
|
| 140 |
+
|
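
# Illustrative use of JSONField on a declarative model; the ``Doc`` model
# and its columns are hypothetical, not part of this module.
#
#     from sqlalchemy import Column, String
#
#     class Doc(Base):
#         __tablename__ = 'doc'
#         id = Column(String, primary_key=True)
#         meta = Column(JSONField, nullable=True)  # dict <-> JSON text
#
# SQLAlchemy calls process_bind_param on write (json.dumps) and
# process_result_value on read (json.loads), so callers see plain dicts.
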

# Workaround to handle the peewee migration.
# This is required to ensure the peewee migration runs before the alembic migration.
def handle_peewee_migration(DATABASE_URL):
    db = None
    try:
        # Normalize SSL params so psycopg2 always sees `sslmode=` (never `ssl=`)
        # and cert-file params are preserved in the connection string.
        url_without_ssl, ssl_params = extract_ssl_params_from_url(DATABASE_URL)
        normalized_url = reattach_ssl_params_to_url(url_without_ssl, ssl_params)

        # Replace postgresql:// with postgres:// to handle the peewee migration
        db = register_connection(normalized_url.replace('postgresql://', 'postgres://'))
        migrate_dir = OPEN_WEBUI_DIR / 'internal' / 'migrations'
        router = Router(db, logger=log, migrate_dir=migrate_dir)
        router.run()
        db.close()

    except Exception as e:
        log.error(f'Failed to initialize the database connection: {e}')
        log.warning('Hint: If your database password contains special characters, you may need to URL-encode it.')
        raise
    finally:
        # Properly close the database connection
        if db and not db.is_closed():
            db.close()

        # Assert that the db connection has been closed
        if db is not None:
            assert db.is_closed(), 'Database connection is still open.'


if ENABLE_DB_MIGRATIONS:
    handle_peewee_migration(DATABASE_URL)


# Normalize SSL params from the URL once; the sync engine needs them
# reattached in canonical libpq form for psycopg2.
_url_without_ssl, _ssl_dict = extract_ssl_params_from_url(DATABASE_URL)

# For psycopg2 (sync engine), re-append sslmode + cert-file params.
SQLALCHEMY_DATABASE_URL = reattach_ssl_params_to_url(_url_without_ssl, _ssl_dict) if _ssl_dict else DATABASE_URL


def _make_async_url(url: str) -> str:
    """Convert a sync database URL to its async driver equivalent.

    The async engine uses psycopg (v3) which speaks libpq natively,
    so all standard connection-string parameters (``sslmode``,
    ``options``, ``target_session_attrs``, etc.) are passed through
    without any translation.
    """
    if url.startswith('sqlite+sqlcipher://'):
        raise ValueError(
            'sqlite+sqlcipher:// URLs are not supported with the async engine. '
            'Use standard sqlite:// or postgresql:// instead.'
        )
    if url.startswith('sqlite:///') or url.startswith('sqlite://'):
        return url.replace('sqlite://', 'sqlite+aiosqlite://', 1)
    # psycopg v3 — auto-selects async mode with create_async_engine
    if url.startswith('postgresql+psycopg2://'):
        return url.replace('postgresql+psycopg2://', 'postgresql+psycopg://', 1)
    if url.startswith('postgresql://'):
        return url.replace('postgresql://', 'postgresql+psycopg://', 1)
    if url.startswith('postgres://'):
        return url.replace('postgres://', 'postgresql+psycopg://', 1)
    # For other dialects, return as-is and let SQLAlchemy handle it
    return url

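
# Illustrative conversions performed by _make_async_url; the URLs are
# hypothetical examples.
#
#     >>> _make_async_url('sqlite:///data/webui.db')
#     'sqlite+aiosqlite:///data/webui.db'
#     >>> _make_async_url('postgresql://u:p@host/db?sslmode=require')
#     'postgresql+psycopg://u:p@host/db?sslmode=require'
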

# ============================================================
# SYNC ENGINE (used only for: startup migrations, config loading,
# Alembic, peewee migration, health checks)
# ============================================================

# Handle SQLCipher URLs
if SQLALCHEMY_DATABASE_URL.startswith('sqlite+sqlcipher://'):
    database_password = os.environ.get('DATABASE_PASSWORD')
    if not database_password or database_password.strip() == '':
        raise ValueError('DATABASE_PASSWORD is required when using sqlite+sqlcipher:// URLs')

    # Extract database path from SQLCipher URL
    db_path = SQLALCHEMY_DATABASE_URL.replace('sqlite+sqlcipher://', '')

    # Create a custom creator function that uses sqlcipher3
    def create_sqlcipher_connection():
        import sqlcipher3

        conn = sqlcipher3.connect(db_path, check_same_thread=False)
        conn.execute(f"PRAGMA key = '{database_password}'")
        return conn

    # The dummy "sqlite://" URL would cause SQLAlchemy to auto-select
    # SingletonThreadPool, which non-deterministically closes in-use
    # connections when thread count exceeds pool_size, leading to segfaults
    # in the native sqlcipher3 C library. Use NullPool by default for safety,
    # or QueuePool if DATABASE_POOL_SIZE is explicitly configured.
    if isinstance(DATABASE_POOL_SIZE, int) and DATABASE_POOL_SIZE > 0:
        engine = create_engine(
            'sqlite://',
            creator=create_sqlcipher_connection,
            pool_size=DATABASE_POOL_SIZE,
            max_overflow=DATABASE_POOL_MAX_OVERFLOW,
            pool_timeout=DATABASE_POOL_TIMEOUT,
            pool_recycle=DATABASE_POOL_RECYCLE,
            pool_pre_ping=True,
            poolclass=QueuePool,
            echo=False,
        )
    else:
        engine = create_engine(
            'sqlite://',
            creator=create_sqlcipher_connection,
            poolclass=NullPool,
            echo=False,
        )

    log.info('Connected to encrypted SQLite database using SQLCipher')

elif 'sqlite' in SQLALCHEMY_DATABASE_URL:
    engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={'check_same_thread': False})

    def _apply_sqlite_pragmas(dbapi_connection):
        """Apply all configured SQLite PRAGMAs to a raw DBAPI connection."""
        cursor = dbapi_connection.cursor()
        if DATABASE_ENABLE_SQLITE_WAL:
            cursor.execute('PRAGMA journal_mode=WAL')
        else:
            cursor.execute('PRAGMA journal_mode=DELETE')

        # Each PRAGMA is skipped when its env var is empty, allowing opt-out.
        if DATABASE_SQLITE_PRAGMA_SYNCHRONOUS:
            cursor.execute(f'PRAGMA synchronous={DATABASE_SQLITE_PRAGMA_SYNCHRONOUS}')
        if DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT:
            cursor.execute(f'PRAGMA busy_timeout={DATABASE_SQLITE_PRAGMA_BUSY_TIMEOUT}')
        if DATABASE_SQLITE_PRAGMA_CACHE_SIZE:
            cursor.execute(f'PRAGMA cache_size={DATABASE_SQLITE_PRAGMA_CACHE_SIZE}')
        if DATABASE_SQLITE_PRAGMA_TEMP_STORE:
            cursor.execute(f'PRAGMA temp_store={DATABASE_SQLITE_PRAGMA_TEMP_STORE}')
        if DATABASE_SQLITE_PRAGMA_MMAP_SIZE:
            cursor.execute(f'PRAGMA mmap_size={DATABASE_SQLITE_PRAGMA_MMAP_SIZE}')
        if DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT:
            cursor.execute(f'PRAGMA journal_size_limit={DATABASE_SQLITE_PRAGMA_JOURNAL_SIZE_LIMIT}')
        cursor.close()

    def on_connect(dbapi_connection, connection_record):
        _apply_sqlite_pragmas(dbapi_connection)

    event.listen(engine, 'connect', on_connect)
else:
    if isinstance(DATABASE_POOL_SIZE, int):
        if DATABASE_POOL_SIZE > 0:
            engine = create_engine(
                SQLALCHEMY_DATABASE_URL,
                pool_size=DATABASE_POOL_SIZE,
                max_overflow=DATABASE_POOL_MAX_OVERFLOW,
                pool_timeout=DATABASE_POOL_TIMEOUT,
                pool_recycle=DATABASE_POOL_RECYCLE,
                pool_pre_ping=True,
                poolclass=QueuePool,
            )
        else:
            engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True, poolclass=NullPool)
    else:
        engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True)


# Sync session — used ONLY for startup config loading (config.py runs at import time)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, expire_on_commit=False)
metadata_obj = MetaData(schema=DATABASE_SCHEMA)
Base = declarative_base(metadata=metadata_obj)
ScopedSession = scoped_session(SessionLocal)


def get_session():
    """Sync session generator — used ONLY for startup/config operations."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


get_db = contextmanager(get_session)

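
# Illustrative startup-time use of the sync context manager; the SQL text
# and the ``text`` import are hypothetical examples, not module contents.
#
#     from sqlalchemy import text
#
#     with get_db() as db:
#         db.execute(text('SELECT 1'))  # e.g. a health check at import time
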

# ============================================================
# ASYNC ENGINE (used for ALL runtime database operations)
# ============================================================

# psycopg (v3) speaks libpq natively — the full DATABASE_URL is passed
# through as-is. SSL params, ``options``, ``target_session_attrs``, etc.
# all work without any stripping or translation.
ASYNC_SQLALCHEMY_DATABASE_URL = _make_async_url(SQLALCHEMY_DATABASE_URL)

if 'sqlite' in ASYNC_SQLALCHEMY_DATABASE_URL:
    # Generous default — async coroutines + no session sharing = high connection demand.
    _sqlite_pool_size = DATABASE_POOL_SIZE if isinstance(DATABASE_POOL_SIZE, int) and DATABASE_POOL_SIZE > 0 else 512
    async_engine = create_async_engine(
        ASYNC_SQLALCHEMY_DATABASE_URL,
        connect_args={'check_same_thread': False},
        pool_size=_sqlite_pool_size,
        pool_timeout=DATABASE_POOL_TIMEOUT,
        pool_recycle=DATABASE_POOL_RECYCLE,
        pool_pre_ping=True,
    )

    @event.listens_for(async_engine.sync_engine, 'connect')
    def _set_sqlite_pragmas(dbapi_connection, connection_record):
        _apply_sqlite_pragmas(dbapi_connection)
else:
    if isinstance(DATABASE_POOL_SIZE, int):
        if DATABASE_POOL_SIZE > 0:
            async_engine = create_async_engine(
                ASYNC_SQLALCHEMY_DATABASE_URL,
                pool_size=DATABASE_POOL_SIZE,
                max_overflow=DATABASE_POOL_MAX_OVERFLOW,
                pool_timeout=DATABASE_POOL_TIMEOUT,
                pool_recycle=DATABASE_POOL_RECYCLE,
                pool_pre_ping=True,
            )
        else:
            async_engine = create_async_engine(
                ASYNC_SQLALCHEMY_DATABASE_URL,
                pool_pre_ping=True,
                poolclass=NullPool,
            )
    else:
        async_engine = create_async_engine(
            ASYNC_SQLALCHEMY_DATABASE_URL,
            pool_pre_ping=True,
        )


AsyncSessionLocal = async_sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    autocommit=False,
    autoflush=False,
    expire_on_commit=False,
)


async def get_async_session():
    """Async session generator for FastAPI Depends()."""
    async with AsyncSessionLocal() as db:
        try:
            yield db
        finally:
            await db.close()


@asynccontextmanager
async def get_async_db():
    """Async context manager for use outside of FastAPI dependency injection."""
    async with AsyncSessionLocal() as db:
        try:
            yield db
        finally:
            await db.close()


@asynccontextmanager
async def get_async_db_context(db: Optional[AsyncSession] = None):
    """Async context manager that reuses an existing session if provided and session sharing is enabled."""
    if isinstance(db, AsyncSession) and DATABASE_ENABLE_SESSION_SHARING:
        yield db
    else:
        async with get_async_db() as session:
            yield session
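
# Illustrative runtime use of the async helpers above; the route, ``app``,
# ``select``, and ``Item`` are hypothetical, not module contents.
#
#     from fastapi import Depends
#
#     @app.get('/items')
#     async def list_items(db: AsyncSession = Depends(get_async_session)):
#         result = await db.execute(select(Item).limit(10))
#         return result.scalars().all()
#
#     # Outside of FastAPI dependency injection:
#     async def background_task():
#         async with get_async_db() as db:
#             ...
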
backend/open_webui/internal/migrations/001_initial_schema.py
ADDED
@@ -0,0 +1,253 @@
"""Peewee migrations -- 001_initial_schema.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    # We perform different migrations for SQLite and other databases.
    # This is because SQLite is very loose about enforcing its schema, and migrating
    # other databases the way we migrate SQLite would require per-database SQL queries.
    # Instead, because external DB support was added at a later date, it is safe to
    # assume a newer base schema rather than trying to migrate from an older one.
    if isinstance(database, pw.SqliteDatabase):
        migrate_sqlite(migrator, database, fake=fake)
    else:
        migrate_external(migrator, database, fake=fake)


def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
    @migrator.create_model
    class Auth(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        email = pw.CharField(max_length=255)
        password = pw.CharField(max_length=255)
        active = pw.BooleanField()

        class Meta:
            table_name = 'auth'

    @migrator.create_model
    class Chat(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        title = pw.CharField()
        chat = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'chat'

    @migrator.create_model
    class ChatIdTag(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        tag_name = pw.CharField(max_length=255)
        chat_id = pw.CharField(max_length=255)
        user_id = pw.CharField(max_length=255)
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'chatidtag'

    @migrator.create_model
    class Document(pw.Model):
        id = pw.AutoField()
        collection_name = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255, unique=True)
        title = pw.CharField()
        filename = pw.CharField()
        content = pw.TextField(null=True)
        user_id = pw.CharField(max_length=255)
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'document'

    @migrator.create_model
    class Modelfile(pw.Model):
        id = pw.AutoField()
        tag_name = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        modelfile = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'modelfile'

    @migrator.create_model
    class Prompt(pw.Model):
        id = pw.AutoField()
        command = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        title = pw.CharField()
        content = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'prompt'

    @migrator.create_model
    class Tag(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255)
        user_id = pw.CharField(max_length=255)
        data = pw.TextField(null=True)

        class Meta:
            table_name = 'tag'

    @migrator.create_model
    class User(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255)
        email = pw.CharField(max_length=255)
        role = pw.CharField(max_length=255)
        profile_image_url = pw.CharField(max_length=255)
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'user'


def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False):
    @migrator.create_model
    class Auth(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        email = pw.CharField(max_length=255)
        password = pw.TextField()
        active = pw.BooleanField()

        class Meta:
            table_name = 'auth'

    @migrator.create_model
    class Chat(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        title = pw.TextField()
        chat = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'chat'

    @migrator.create_model
    class ChatIdTag(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        tag_name = pw.CharField(max_length=255)
        chat_id = pw.CharField(max_length=255)
        user_id = pw.CharField(max_length=255)
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'chatidtag'

    @migrator.create_model
    class Document(pw.Model):
        id = pw.AutoField()
        collection_name = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255, unique=True)
        title = pw.TextField()
        filename = pw.TextField()
        content = pw.TextField(null=True)
        user_id = pw.CharField(max_length=255)
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'document'

    @migrator.create_model
    class Modelfile(pw.Model):
        id = pw.AutoField()
        tag_name = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        modelfile = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'modelfile'

    @migrator.create_model
    class Prompt(pw.Model):
        id = pw.AutoField()
        command = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        title = pw.TextField()
        content = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'prompt'

    @migrator.create_model
    class Tag(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255)
        user_id = pw.CharField(max_length=255)
        data = pw.TextField(null=True)

        class Meta:
            table_name = 'tag'

    @migrator.create_model
    class User(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        name = pw.CharField(max_length=255)
        email = pw.CharField(max_length=255)
        role = pw.CharField(max_length=255)
        profile_image_url = pw.TextField()
        timestamp = pw.BigIntegerField()

        class Meta:
            table_name = 'user'


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_model('user')

    migrator.remove_model('tag')

    migrator.remove_model('prompt')

    migrator.remove_model('modelfile')

    migrator.remove_model('document')

    migrator.remove_model('chatidtag')

    migrator.remove_model('chat')

    migrator.remove_model('auth')
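
These migration modules are discovered and applied in filename order by the peewee_migrate Router that db.py constructs at startup (see handle_peewee_migration above). A minimal sketch of driving the same machinery by hand, assuming a local SQLite file (the path is hypothetical):

    import peewee as pw
    from peewee_migrate import Router

    db = pw.SqliteDatabase('webui.db')
    router = Router(db, migrate_dir='backend/open_webui/internal/migrations')
    router.run()  # applies any of 001..018 not yet recorded as run
    db.close()
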
backend/open_webui/internal/migrations/002_add_local_sharing.py
ADDED
@@ -0,0 +1,45 @@
"""Peewee migrations -- 002_add_local_sharing.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    migrator.add_fields('chat', share_id=pw.CharField(max_length=255, null=True, unique=True))


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_fields('chat', 'share_id')
backend/open_webui/internal/migrations/003_add_auth_api_key.py
ADDED
@@ -0,0 +1,45 @@
"""Peewee migrations -- 003_add_auth_api_key.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    migrator.add_fields('user', api_key=pw.CharField(max_length=255, null=True, unique=True))


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_fields('user', 'api_key')
backend/open_webui/internal/migrations/004_add_archived.py
ADDED
@@ -0,0 +1,45 @@
"""Peewee migrations -- 004_add_archived.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    migrator.add_fields('chat', archived=pw.BooleanField(default=False))


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_fields('chat', 'archived')
backend/open_webui/internal/migrations/005_add_updated_at.py
ADDED
@@ -0,0 +1,125 @@
"""Peewee migrations -- 005_add_updated_at.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    if isinstance(database, pw.SqliteDatabase):
        migrate_sqlite(migrator, database, fake=fake)
    else:
        migrate_external(migrator, database, fake=fake)


def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
    # Adding fields created_at and updated_at to the 'chat' table
    migrator.add_fields(
        'chat',
        created_at=pw.DateTimeField(null=True),  # Allow null for transition
        updated_at=pw.DateTimeField(null=True),  # Allow null for transition
    )

    # Populate the new fields from the existing 'timestamp' field
    migrator.sql('UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL')

    # Now that the data has been copied, remove the original 'timestamp' field
    migrator.remove_fields('chat', 'timestamp')

    # Update the fields to be not null now that they are populated
    migrator.change_fields(
        'chat',
        created_at=pw.DateTimeField(null=False),
        updated_at=pw.DateTimeField(null=False),
    )


def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False):
    # Adding fields created_at and updated_at to the 'chat' table
    migrator.add_fields(
        'chat',
        created_at=pw.BigIntegerField(null=True),  # Allow null for transition
        updated_at=pw.BigIntegerField(null=True),  # Allow null for transition
    )

    # Populate the new fields from the existing 'timestamp' field
    migrator.sql('UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL')

    # Now that the data has been copied, remove the original 'timestamp' field
    migrator.remove_fields('chat', 'timestamp')

    # Update the fields to be not null now that they are populated
    migrator.change_fields(
        'chat',
        created_at=pw.BigIntegerField(null=False),
        updated_at=pw.BigIntegerField(null=False),
    )


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    if isinstance(database, pw.SqliteDatabase):
        rollback_sqlite(migrator, database, fake=fake)
    else:
        rollback_external(migrator, database, fake=fake)


def rollback_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
    # Recreate the timestamp field, initially allowing null values for a safe transition
    migrator.add_fields('chat', timestamp=pw.DateTimeField(null=True))

    # Copy the created_at date back into the new timestamp field.
    # This assumes created_at was originally a copy of timestamp.
    migrator.sql('UPDATE chat SET timestamp = created_at')

    # Remove the created_at and updated_at fields
    migrator.remove_fields('chat', 'created_at', 'updated_at')

    # Finally, alter the timestamp field to not allow nulls, matching the original setting
    migrator.change_fields('chat', timestamp=pw.DateTimeField(null=False))


def rollback_external(migrator: Migrator, database: pw.Database, *, fake=False):
    # Recreate the timestamp field, initially allowing null values for a safe transition
    migrator.add_fields('chat', timestamp=pw.BigIntegerField(null=True))

    # Copy the created_at date back into the new timestamp field.
    # This assumes created_at was originally a copy of timestamp.
    migrator.sql('UPDATE chat SET timestamp = created_at')

    # Remove the created_at and updated_at fields
    migrator.remove_fields('chat', 'created_at', 'updated_at')

    # Finally, alter the timestamp field to not allow nulls, matching the original setting
    migrator.change_fields('chat', timestamp=pw.BigIntegerField(null=False))
backend/open_webui/internal/migrations/006_migrate_timestamps_and_charfields.py
ADDED
@@ -0,0 +1,129 @@
"""Peewee migrations -- 006_migrate_timestamps_and_charfields.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    # Alter the tables with timestamps
    migrator.change_fields(
        'chatidtag',
        timestamp=pw.BigIntegerField(),
    )
    migrator.change_fields(
        'document',
        timestamp=pw.BigIntegerField(),
    )
    migrator.change_fields(
        'modelfile',
        timestamp=pw.BigIntegerField(),
    )
    migrator.change_fields(
        'prompt',
        timestamp=pw.BigIntegerField(),
    )
    migrator.change_fields(
        'user',
        timestamp=pw.BigIntegerField(),
    )
    # Alter the tables with varchar to text where necessary
    migrator.change_fields(
        'auth',
        password=pw.TextField(),
    )
    migrator.change_fields(
        'chat',
        title=pw.TextField(),
    )
    migrator.change_fields(
        'document',
        title=pw.TextField(),
        filename=pw.TextField(),
    )
    migrator.change_fields(
        'prompt',
        title=pw.TextField(),
    )
    migrator.change_fields(
        'user',
        profile_image_url=pw.TextField(),
    )


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    if isinstance(database, pw.SqliteDatabase):
        # Alter the tables with timestamps
        migrator.change_fields(
            'chatidtag',
            timestamp=pw.DateField(),
        )
        migrator.change_fields(
            'document',
            timestamp=pw.DateField(),
        )
        migrator.change_fields(
            'modelfile',
            timestamp=pw.DateField(),
        )
        migrator.change_fields(
            'prompt',
            timestamp=pw.DateField(),
        )
        migrator.change_fields(
            'user',
            timestamp=pw.DateField(),
        )
        migrator.change_fields(
            'auth',
            password=pw.CharField(max_length=255),
        )
        migrator.change_fields(
            'chat',
            title=pw.CharField(),
        )
        migrator.change_fields(
            'document',
            title=pw.CharField(),
            filename=pw.CharField(),
        )
        migrator.change_fields(
            'prompt',
            title=pw.CharField(),
        )
        migrator.change_fields(
            'user',
            profile_image_url=pw.CharField(),
        )
backend/open_webui/internal/migrations/007_add_user_last_active_at.py
ADDED
@@ -0,0 +1,78 @@
"""Peewee migrations -- 007_add_user_last_active_at.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    # Adding fields created_at, updated_at, and last_active_at to the 'user' table
    migrator.add_fields(
        'user',
        created_at=pw.BigIntegerField(null=True),  # Allow null for transition
        updated_at=pw.BigIntegerField(null=True),  # Allow null for transition
        last_active_at=pw.BigIntegerField(null=True),  # Allow null for transition
    )

    # Populate the new fields from the existing 'timestamp' field
    migrator.sql(
        'UPDATE "user" SET created_at = timestamp, updated_at = timestamp, last_active_at = timestamp WHERE timestamp IS NOT NULL'
    )

    # Now that the data has been copied, remove the original 'timestamp' field
    migrator.remove_fields('user', 'timestamp')

    # Update the fields to be not null now that they are populated
    migrator.change_fields(
        'user',
        created_at=pw.BigIntegerField(null=False),
        updated_at=pw.BigIntegerField(null=False),
        last_active_at=pw.BigIntegerField(null=False),
    )


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    # Recreate the timestamp field, initially allowing null values for a safe transition
    migrator.add_fields('user', timestamp=pw.BigIntegerField(null=True))

    # Copy the created_at date back into the new timestamp field.
    # This assumes created_at was originally a copy of timestamp.
    migrator.sql('UPDATE "user" SET timestamp = created_at')

    # Remove the created_at, updated_at, and last_active_at fields
    migrator.remove_fields('user', 'created_at', 'updated_at', 'last_active_at')

    # Finally, alter the timestamp field to not allow nulls, matching the original setting
    migrator.change_fields('user', timestamp=pw.BigIntegerField(null=False))
backend/open_webui/internal/migrations/008_add_memory.py
ADDED
@@ -0,0 +1,52 @@
"""Peewee migrations -- 008_add_memory.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    @migrator.create_model
    class Memory(pw.Model):
        id = pw.CharField(max_length=255, unique=True)
        user_id = pw.CharField(max_length=255)
        content = pw.TextField(null=False)
        updated_at = pw.BigIntegerField(null=False)
        created_at = pw.BigIntegerField(null=False)

        class Meta:
            table_name = 'memory'


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_model('memory')
backend/open_webui/internal/migrations/009_add_models.py
ADDED
@@ -0,0 +1,60 @@
"""Peewee migrations -- 009_add_models.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['table_name']            # Return model in current state by name
    > Model = migrator.ModelClass                   # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.run(func, *args, **kwargs)           # Run python function with the given args
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.add_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)
    > migrator.add_constraint(model, name, sql)
    > migrator.drop_index(model, *col_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.drop_constraints(model, *constraints)

"""

from contextlib import suppress

import peewee as pw
from peewee_migrate import Migrator

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext


def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your migrations here."""

    @migrator.create_model
    class Model(pw.Model):
        id = pw.TextField(unique=True)
        user_id = pw.TextField()
        base_model_id = pw.TextField(null=True)

        name = pw.TextField()

        meta = pw.TextField()
        params = pw.TextField()

        created_at = pw.BigIntegerField(null=False)
        updated_at = pw.BigIntegerField(null=False)

        class Meta:
            table_name = 'model'


def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Write your rollback migrations here."""

    migrator.remove_model('model')
backend/open_webui/internal/migrations/010_migrate_modelfiles_to_models.py
ADDED
@@ -0,0 +1,130 @@
+"""Peewee migrations -- 010_migrate_modelfiles_to_models.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+import json
+
+from open_webui.utils.misc import parse_ollama_modelfile
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    # Fetch data from the 'modelfile' table and insert it into the 'model' table
+    migrate_modelfile_to_model(migrator, database)
+    # Drop the 'modelfile' table
+    migrator.remove_model('modelfile')
+
+
+def migrate_modelfile_to_model(migrator: Migrator, database: pw.Database):
+    ModelFile = migrator.orm['modelfile']
+    Model = migrator.orm['model']
+
+    modelfiles = ModelFile.select()
+
+    for modelfile in modelfiles:
+        # Extract and transform the data in Python
+        modelfile.modelfile = json.loads(modelfile.modelfile)
+        meta = json.dumps(
+            {
+                'description': modelfile.modelfile.get('desc'),
+                'profile_image_url': modelfile.modelfile.get('imageUrl'),
+                'ollama': {'modelfile': modelfile.modelfile.get('content')},
+                'suggestion_prompts': modelfile.modelfile.get('suggestionPrompts'),
+                'categories': modelfile.modelfile.get('categories'),
+                'user': {**modelfile.modelfile.get('user', {}), 'community': True},
+            }
+        )
+
+        info = parse_ollama_modelfile(modelfile.modelfile.get('content'))
+
+        # Insert the processed data into the 'model' table
+        Model.create(
+            id=f'ollama-{modelfile.tag_name}',
+            user_id=modelfile.user_id,
+            base_model_id=info.get('base_model_id'),
+            name=modelfile.modelfile.get('title'),
+            meta=meta,
+            params=json.dumps(info.get('params', {})),
+            created_at=modelfile.timestamp,
+            updated_at=modelfile.timestamp,
+        )
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    recreate_modelfile_table(migrator, database)
+    move_data_back_to_modelfile(migrator, database)
+    migrator.remove_model('model')
+
+
+def recreate_modelfile_table(migrator: Migrator, database: pw.Database):
+    query = """
+    CREATE TABLE IF NOT EXISTS modelfile (
+        user_id TEXT,
+        tag_name TEXT,
+        modelfile JSON,
+        timestamp BIGINT
+    )
+    """
+    migrator.sql(query)
+
+
+def move_data_back_to_modelfile(migrator: Migrator, database: pw.Database):
+    Model = migrator.orm['model']
+    Modelfile = migrator.orm['modelfile']
+
+    models = Model.select()
+
+    for model in models:
+        # Extract and transform the data in Python
+        meta = json.loads(model.meta)
+
+        modelfile_data = {
+            'title': model.name,
+            'desc': meta.get('description'),
+            'imageUrl': meta.get('profile_image_url'),
+            'content': meta.get('ollama', {}).get('modelfile'),
+            'suggestionPrompts': meta.get('suggestion_prompts'),
+            'categories': meta.get('categories'),
+            'user': {k: v for k, v in meta.get('user', {}).items() if k != 'community'},
+        }
+
+        # Insert the processed data back into the 'modelfile' table
+        Modelfile.create(
+            user_id=model.user_id,
+            tag_name=model.id,
+            modelfile=modelfile_data,
+            timestamp=model.created_at,
+        )
backend/open_webui/internal/migrations/011_add_user_settings.py
ADDED
@@ -0,0 +1,47 @@
+"""Peewee migrations -- 011_add_user_settings.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    # Add the 'settings' field to the 'user' table
+    migrator.add_fields('user', settings=pw.TextField(null=True))
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    # Remove the 'settings' field
+    migrator.remove_fields('user', 'settings')
backend/open_webui/internal/migrations/012_add_tools.py
ADDED
@@ -0,0 +1,60 @@
+"""Peewee migrations -- 012_add_tools.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    @migrator.create_model
+    class Tool(pw.Model):
+        id = pw.TextField(unique=True)
+        user_id = pw.TextField()
+
+        name = pw.TextField()
+        content = pw.TextField()
+        specs = pw.TextField()
+
+        meta = pw.TextField()
+
+        created_at = pw.BigIntegerField(null=False)
+        updated_at = pw.BigIntegerField(null=False)
+
+        class Meta:
+            table_name = 'tool'
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_model('tool')
backend/open_webui/internal/migrations/013_add_user_info.py
ADDED
@@ -0,0 +1,47 @@
+"""Peewee migrations -- 013_add_user_info.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    # Add the 'info' field to the 'user' table
+    migrator.add_fields('user', info=pw.TextField(null=True))
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    # Remove the 'info' field
+    migrator.remove_fields('user', 'info')
backend/open_webui/internal/migrations/014_add_files.py
ADDED
@@ -0,0 +1,54 @@
+"""Peewee migrations -- 014_add_files.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    @migrator.create_model
+    class File(pw.Model):
+        id = pw.TextField(unique=True)
+        user_id = pw.TextField()
+        filename = pw.TextField()
+        meta = pw.TextField()
+        created_at = pw.BigIntegerField(null=False)
+
+        class Meta:
+            table_name = 'file'
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_model('file')
backend/open_webui/internal/migrations/015_add_functions.py
ADDED
@@ -0,0 +1,60 @@
+"""Peewee migrations -- 015_add_functions.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    @migrator.create_model
+    class Function(pw.Model):
+        id = pw.TextField(unique=True)
+        user_id = pw.TextField()
+
+        name = pw.TextField()
+        type = pw.TextField()
+
+        content = pw.TextField()
+        meta = pw.TextField()
+
+        created_at = pw.BigIntegerField(null=False)
+        updated_at = pw.BigIntegerField(null=False)
+
+        class Meta:
+            table_name = 'function'
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_model('function')
backend/open_webui/internal/migrations/016_add_valves_and_is_active.py
ADDED
@@ -0,0 +1,49 @@
+"""Peewee migrations -- 016_add_valves_and_is_active.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    migrator.add_fields('tool', valves=pw.TextField(null=True))
+    migrator.add_fields('function', valves=pw.TextField(null=True))
+    migrator.add_fields('function', is_active=pw.BooleanField(default=False))
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_fields('tool', 'valves')
+    migrator.remove_fields('function', 'valves')
+    migrator.remove_fields('function', 'is_active')
backend/open_webui/internal/migrations/017_add_user_oauth_sub.py
ADDED
@@ -0,0 +1,44 @@
+"""Peewee migrations -- 017_add_user_oauth_sub.py.
+Some examples (model - class or model name)::
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    migrator.add_fields(
+        'user',
+        oauth_sub=pw.TextField(null=True, unique=True),
+    )
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_fields('user', 'oauth_sub')
backend/open_webui/internal/migrations/018_add_function_is_global.py
ADDED
@@ -0,0 +1,48 @@
+"""Peewee migrations -- 018_add_function_is_global.py.
+
+Some examples (model - class or model name)::
+
+> Model = migrator.orm['table_name']  # Return model in current state by name
+> Model = migrator.ModelClass  # Return model in current state by name
+
+> migrator.sql(sql)  # Run custom SQL
+> migrator.run(func, *args, **kwargs)  # Run python function with the given args
+> migrator.create_model(Model)  # Create a model (could be used as decorator)
+> migrator.remove_model(model, cascade=True)  # Remove a model
+> migrator.add_fields(model, **fields)  # Add fields to a model
+> migrator.change_fields(model, **fields)  # Change fields
+> migrator.remove_fields(model, *field_names, cascade=True)
+> migrator.rename_field(model, old_field_name, new_field_name)
+> migrator.rename_table(model, new_table_name)
+> migrator.add_index(model, *col_names, unique=False)
+> migrator.add_not_null(model, *field_names)
+> migrator.add_default(model, field_name, default)
+> migrator.add_constraint(model, name, sql)
+> migrator.drop_index(model, *col_names)
+> migrator.drop_not_null(model, *field_names)
+> migrator.drop_constraints(model, *constraints)
+
+"""
+
+from contextlib import suppress
+
+import peewee as pw
+from peewee_migrate import Migrator
+
+with suppress(ImportError):
+    import playhouse.postgres_ext as pw_pext
+
+
+def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your migrations here."""
+
+    migrator.add_fields(
+        'function',
+        is_global=pw.BooleanField(default=False),
+    )
+
+
+def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
+    """Write your rollback migrations here."""
+
+    migrator.remove_fields('function', 'is_global')
backend/open_webui/internal/wrappers.py
ADDED
@@ -0,0 +1,84 @@
+import logging
+import os
+from contextvars import ContextVar
+
+from peewee import *
+from peewee import InterfaceError as PeeWeeInterfaceError
+from peewee import PostgresqlDatabase
+from playhouse.db_url import connect, parse
+from playhouse.shortcuts import ReconnectMixin
+
+log = logging.getLogger(__name__)
+
+db_state_default = {'closed': None, 'conn': None, 'ctx': None, 'transactions': None}
+db_state = ContextVar('db_state', default=db_state_default.copy())
+
+
+class PeeweeConnectionState(object):
+    def __init__(self, **kwargs):
+        super().__setattr__('_state', db_state)
+        super().__init__(**kwargs)
+
+    def __setattr__(self, name, value):
+        self._state.get()[name] = value
+
+    def __getattr__(self, name):
+        value = self._state.get()[name]
+        return value
+
+
+class CustomReconnectMixin(ReconnectMixin):
+    reconnect_errors = (
+        # psycopg2
+        (OperationalError, 'termin'),
+        (InterfaceError, 'closed'),
+        # peewee
+        (PeeWeeInterfaceError, 'closed'),
+    )
+
+
+class ReconnectingPostgresqlDatabase(CustomReconnectMixin, PostgresqlDatabase):
+    pass
+
+
+def register_connection(db_url):
+    # Check if using the SQLCipher protocol
+    if db_url.startswith('sqlite+sqlcipher://'):
+        database_password = os.environ.get('DATABASE_PASSWORD')
+        if not database_password or database_password.strip() == '':
+            raise ValueError('DATABASE_PASSWORD is required when using sqlite+sqlcipher:// URLs')
+        from playhouse.sqlcipher_ext import SqlCipherDatabase
+
+        # Parse the database path from the SQLCipher URL:
+        # sqlite+sqlcipher:///path/to/db.sqlite becomes /path/to/db.sqlite
+        db_path = db_url.replace('sqlite+sqlcipher://', '')
+
+        # Use Peewee's native SqlCipherDatabase with encryption
+        db = SqlCipherDatabase(db_path, passphrase=database_password)
+        db.autoconnect = True
+        db.reuse_if_open = True
+        log.info('Connected to encrypted SQLite database using SQLCipher')
+
+    else:
+        # Standard database connection (existing logic)
+        db = connect(db_url, unquote_user=True, unquote_password=True)
+        if isinstance(db, PostgresqlDatabase):
+            # Enable autoconnect, managed by Peewee
+            db.autoconnect = True
+            db.reuse_if_open = True
+            log.info('Connected to PostgreSQL database')
+
+            # Get the connection details
+            connection = parse(db_url, unquote_user=True, unquote_password=True)
+
+            # Use our custom database class that supports reconnection
+            db = ReconnectingPostgresqlDatabase(**connection)
+            db.connect(reuse_if_open=True)
+        elif isinstance(db, SqliteDatabase):
+            # Enable autoconnect for SQLite databases, managed by Peewee
+            db.autoconnect = True
+            db.reuse_if_open = True
+            log.info('Connected to SQLite database')
+        else:
+            raise ValueError('Unsupported database connection')
+    return db
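A minimal usage sketch of register_connection above (not part of the commit; the file paths and the secret are hypothetical placeholders):

import os

from open_webui.internal.wrappers import register_connection

# Plain SQLite URL: connect() yields a SqliteDatabase, and
# register_connection enables autoconnect/reuse_if_open on it.
db = register_connection('sqlite:///data/webui.db')

# SQLCipher URL: DATABASE_PASSWORD must be set, otherwise
# register_connection raises ValueError before touching the database.
os.environ['DATABASE_PASSWORD'] = 'change-me'  # hypothetical secret
encrypted_db = register_connection('sqlite+sqlcipher:///data/webui.enc.db')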
backend/open_webui/main.py
ADDED

The diff for this file is too large to render. See raw diff.
backend/open_webui/migrations/README
ADDED
@@ -0,0 +1,4 @@
+Generic single-database configuration.
+
+Create new migrations with
+DATABASE_URL=<replace with actual url> alembic revision --autogenerate -m "a description"
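For example, a concrete invocation of the command above (the URL and message are placeholders, not part of the README):

DATABASE_URL="sqlite:///data/webui.db" alembic revision --autogenerate -m "add example table"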
backend/open_webui/migrations/env.py
ADDED
@@ -0,0 +1,120 @@
+import logging
+from logging.config import fileConfig
+
+from alembic import context
+from open_webui.models.auths import Auth
+from open_webui.models.calendar import Calendar, CalendarEvent, CalendarEventAttendee  # noqa: F401
+from open_webui.env import DATABASE_URL, DATABASE_PASSWORD, LOG_FORMAT
+from open_webui.internal.db import extract_ssl_params_from_url, reattach_ssl_params_to_url
+from sqlalchemy import engine_from_config, pool, create_engine
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name, disable_existing_loggers=False)
+
+# Re-apply JSON formatter after fileConfig replaces handlers.
+if LOG_FORMAT == 'json':
+    from open_webui.env import JSONFormatter
+
+    for handler in logging.root.handlers:
+        handler.setFormatter(JSONFormatter())
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = Auth.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+DB_URL = DATABASE_URL
+
+# Normalize SSL query params for psycopg2 (Alembic uses psycopg2 for sync migrations).
+url_without_ssl, ssl_params = extract_ssl_params_from_url(DB_URL)
+DB_URL = reattach_ssl_params_to_url(url_without_ssl, ssl_params) if ssl_params else DB_URL
+
+if DB_URL:
+    config.set_main_option('sqlalchemy.url', DB_URL.replace('%', '%%'))
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option('sqlalchemy.url')
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={'paramstyle': 'named'},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    # Handle SQLCipher URLs
+    if DB_URL and DB_URL.startswith('sqlite+sqlcipher://'):
+        if not DATABASE_PASSWORD or DATABASE_PASSWORD.strip() == '':
+            raise ValueError('DATABASE_PASSWORD is required when using sqlite+sqlcipher:// URLs')
+
+        # Extract the database path from the SQLCipher URL
+        db_path = DB_URL.replace('sqlite+sqlcipher://', '')
+        if db_path.startswith('/'):
+            db_path = db_path[1:]  # Remove leading slash for relative paths
+
+        # Create a custom creator function that uses sqlcipher3
+        def create_sqlcipher_connection():
+            import sqlcipher3
+
+            conn = sqlcipher3.connect(db_path, check_same_thread=False)
+            conn.execute(f"PRAGMA key = '{DATABASE_PASSWORD}'")
+            return conn
+
+        connectable = create_engine(
+            'sqlite://',  # Dummy URL since we're using creator
+            creator=create_sqlcipher_connection,
+            echo=False,
+        )
+    else:
+        # Standard database connection (existing logic)
+        connectable = engine_from_config(
+            config.get_section(config.config_ini_section, {}),
+            prefix='sqlalchemy.',
+            poolclass=pool.NullPool,
+        )
+
+    with connectable.connect() as connection:
+        context.configure(connection=connection, target_metadata=target_metadata)
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
backend/open_webui/migrations/script.py.mako
ADDED
@@ -0,0 +1,27 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import open_webui.internal.db
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
backend/open_webui/migrations/util.py
ADDED
@@ -0,0 +1,15 @@
+from alembic import op
+from sqlalchemy import Inspector
+
+
+def get_existing_tables():
+    con = op.get_bind()
+    inspector = Inspector.from_engine(con)
+    tables = set(inspector.get_table_names())
+    return tables
+
+
+def get_revision_id():
+    import uuid
+
+    return str(uuid.uuid4()).replace('-', '')[:12]
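A hedged sketch of how get_existing_tables might guard a revision's upgrade() against partial reruns (the 'example' table is hypothetical, and this usage pattern is an assumption, not taken from the diff):

import sqlalchemy as sa
from alembic import op

from open_webui.migrations.util import get_existing_tables


def upgrade() -> None:
    # Skip table creation when a previous (partial) run already created it.
    if 'example' not in get_existing_tables():  # 'example' is hypothetical
        op.create_table(
            'example',
            sa.Column('id', sa.Text(), primary_key=True),
        )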
backend/open_webui/migrations/versions/018012973d35_add_indexes.py
ADDED
@@ -0,0 +1,46 @@
+"""Add indexes
+
+Revision ID: 018012973d35
+Revises: d31026856c01
+Create Date: 2025-08-13 03:00:00.000000
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+revision = '018012973d35'
+down_revision = 'd31026856c01'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Chat table indexes
+    op.create_index('folder_id_idx', 'chat', ['folder_id'])
+    op.create_index('user_id_pinned_idx', 'chat', ['user_id', 'pinned'])
+    op.create_index('user_id_archived_idx', 'chat', ['user_id', 'archived'])
+    op.create_index('updated_at_user_id_idx', 'chat', ['updated_at', 'user_id'])
+    op.create_index('folder_id_user_id_idx', 'chat', ['folder_id', 'user_id'])
+
+    # Tag table index
+    op.create_index('user_id_idx', 'tag', ['user_id'])
+
+    # Function table index
+    op.create_index('is_global_idx', 'function', ['is_global'])
+
+
+def downgrade():
+    # Chat table indexes
+    op.drop_index('folder_id_idx', table_name='chat')
+    op.drop_index('user_id_pinned_idx', table_name='chat')
+    op.drop_index('user_id_archived_idx', table_name='chat')
+    op.drop_index('updated_at_user_id_idx', table_name='chat')
+    op.drop_index('folder_id_user_id_idx', table_name='chat')
+
+    # Tag table index
+    op.drop_index('user_id_idx', table_name='tag')
+
+    # Function table index
+    op.drop_index('is_global_idx', table_name='function')
backend/open_webui/migrations/versions/1af9b942657b_migrate_tags.py
ADDED
@@ -0,0 +1,140 @@
+"""Migrate tags
+
+Revision ID: 1af9b942657b
+Revises: 242a2047eae0
+Create Date: 2024-10-09 21:02:35.241684
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.sql import table, select, update, column
+from sqlalchemy.engine.reflection import Inspector
+
+import json
+
+revision = '1af9b942657b'
+down_revision = '242a2047eae0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Set up an inspection on the existing table to avoid issues
+    conn = op.get_bind()
+    inspector = Inspector.from_engine(conn)
+
+    # Clean up a potential leftover temp table from previous failures
+    conn.execute(sa.text('DROP TABLE IF EXISTS _alembic_tmp_tag'))
+
+    # Check if the 'tag' table exists
+    tables = inspector.get_table_names()
+
+    # Step 1: Modify the tag table, using batch mode for SQLite support
+    if 'tag' in tables:
+        # Get the current columns in the 'tag' table
+        columns = [col['name'] for col in inspector.get_columns('tag')]
+
+        # Get any existing unique constraints on the 'tag' table
+        current_constraints = inspector.get_unique_constraints('tag')
+
+        with op.batch_alter_table('tag', schema=None) as batch_op:
+            # Create the unique constraint if it doesn't already exist
+            if not any(constraint['name'] == 'uq_id_user_id' for constraint in current_constraints):
+                batch_op.create_unique_constraint('uq_id_user_id', ['id', 'user_id'])
+
+            # Drop the 'data' column if it exists
+            if 'data' in columns:
+                batch_op.drop_column('data')
+
+            # Add the 'meta' column if it doesn't already exist
+            if 'meta' not in columns:
+                batch_op.add_column(sa.Column('meta', sa.JSON(), nullable=True))
+
+    tag = table(
+        'tag',
+        column('id', sa.String()),
+        column('name', sa.String()),
+        column('user_id', sa.String()),
+        column('meta', sa.JSON()),
+    )
+
+    # Step 2: Migrate tags
+    conn = op.get_bind()
+    result = conn.execute(sa.select(tag.c.id, tag.c.name, tag.c.user_id))
+
+    tag_updates = {}
+    for row in result:
+        new_id = row.name.replace(' ', '_').lower()
+        tag_updates[row.id] = new_id
+
+    for tag_id, new_tag_id in tag_updates.items():
+        print(f'Updating tag {tag_id} to {new_tag_id}')
+        if new_tag_id == 'pinned':
+            # Delete the tag; 'pinned' is handled as a chat flag below
+            delete_stmt = sa.delete(tag).where(tag.c.id == tag_id)
+            conn.execute(delete_stmt)
+        else:
+            # Check if the new_tag_id already exists in the database
+            existing_tag_query = sa.select(tag.c.id).where(tag.c.id == new_tag_id)
+            existing_tag_result = conn.execute(existing_tag_query).fetchone()
+
+            if existing_tag_result:
+                # Handle the duplicate case: the new_tag_id already exists, so
+                # delete the current tag instead of updating it
+                print(f'Tag {new_tag_id} already exists. Removing current tag with ID {tag_id} to avoid duplicates.')
+                delete_stmt = sa.delete(tag).where(tag.c.id == tag_id)
+                conn.execute(delete_stmt)
+            else:
+                update_stmt = sa.update(tag).where(tag.c.id == tag_id)
+                update_stmt = update_stmt.values(id=new_tag_id)
+                conn.execute(update_stmt)
+
+    # Add columns 'pinned' and 'meta' to 'chat'
+    op.add_column('chat', sa.Column('pinned', sa.Boolean(), nullable=True))
+    op.add_column('chat', sa.Column('meta', sa.JSON(), nullable=False, server_default='{}'))
+
+    chatidtag = table('chatidtag', column('chat_id', sa.String()), column('tag_name', sa.String()))
+    chat = table(
+        'chat',
+        column('id', sa.String()),
+        column('pinned', sa.Boolean()),
+        column('meta', sa.JSON()),
+    )
+
+    # Fetch existing tags
+    conn = op.get_bind()
+    result = conn.execute(sa.select(chatidtag.c.chat_id, chatidtag.c.tag_name))
+
+    chat_updates = {}
+    for row in result:
+        chat_id = row.chat_id
+        tag_name = row.tag_name.replace(' ', '_').lower()
+
+        if tag_name == 'pinned':
+            # Specifically handle the 'pinned' tag
+            if chat_id not in chat_updates:
+                chat_updates[chat_id] = {'pinned': True, 'meta': {}}
+            else:
+                chat_updates[chat_id]['pinned'] = True
+        else:
+            if chat_id not in chat_updates:
+                chat_updates[chat_id] = {'pinned': False, 'meta': {'tags': [tag_name]}}
+            else:
+                tags = chat_updates[chat_id]['meta'].get('tags', [])
+                tags.append(tag_name)
+
+                chat_updates[chat_id]['meta']['tags'] = list(set(tags))
+
+    # Update chats based on the accumulated changes
+    for chat_id, updates in chat_updates.items():
+        update_stmt = sa.update(chat).where(chat.c.id == chat_id)
+        update_stmt = update_stmt.values(meta=updates.get('meta', {}), pinned=updates.get('pinned', False))
+        conn.execute(update_stmt)
+
+
+def downgrade():
+    pass
backend/open_webui/migrations/versions/242a2047eae0_update_chat_table.py
ADDED
@@ -0,0 +1,97 @@
+"""Update chat table
+
+Revision ID: 242a2047eae0
+Revises: 6a39f3d8e55c
+Create Date: 2024-10-09 21:02:35.241684
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.sql import table, select, update
+
+import json
+
+revision = '242a2047eae0'
+down_revision = '6a39f3d8e55c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+
+    columns = inspector.get_columns('chat')
+    column_dict = {col['name']: col for col in columns}
+
+    chat_column = column_dict.get('chat')
+    old_chat_exists = 'old_chat' in column_dict
+
+    if chat_column:
+        if isinstance(chat_column['type'], sa.Text):
+            print("Converting 'chat' column to JSON")
+
+            if old_chat_exists:
+                print("Dropping old 'old_chat' column")
+                op.drop_column('chat', 'old_chat')
+
+            # Step 1: Rename the current 'chat' column to 'old_chat'
+            print("Renaming 'chat' column to 'old_chat'")
+            op.alter_column('chat', 'chat', new_column_name='old_chat', existing_type=sa.Text())
+
+            # Step 2: Add a new 'chat' column of type JSON
+            print("Adding new 'chat' column of type JSON")
+            op.add_column('chat', sa.Column('chat', sa.JSON(), nullable=True))
+        else:
+            # If the column is already JSON, there is nothing to do
+            pass
+
+    # Step 3: Migrate data from 'old_chat' to 'chat'
+    chat_table = table(
+        'chat',
+        sa.Column('id', sa.String(), primary_key=True),
+        sa.Column('old_chat', sa.Text()),
+        sa.Column('chat', sa.JSON()),
+    )
+
+    # Select all data from the table
+    connection = op.get_bind()
+    results = connection.execute(select(chat_table.c.id, chat_table.c.old_chat))
+    for row in results:
+        try:
+            # Convert text JSON to an actual JSON object, assuming the text is in JSON format
+            json_data = json.loads(row.old_chat)
+        except json.JSONDecodeError:
+            json_data = None  # Handle cases where the text cannot be converted to JSON
+
+        connection.execute(sa.update(chat_table).where(chat_table.c.id == row.id).values(chat=json_data))
+
+    # Step 4: Drop the 'old_chat' column
+    print("Dropping 'old_chat' column")
+    op.drop_column('chat', 'old_chat')
+
+
+def downgrade():
+    # Step 1: Add the 'old_chat' column back as Text
+    op.add_column('chat', sa.Column('old_chat', sa.Text(), nullable=True))
+
+    # Step 2: Convert 'chat' JSON data back to text and store it in 'old_chat'
+    chat_table = table(
+        'chat',
+        sa.Column('id', sa.String(), primary_key=True),
+        sa.Column('chat', sa.JSON()),
+        sa.Column('old_chat', sa.Text()),
+    )
+
+    connection = op.get_bind()
+    results = connection.execute(select(chat_table.c.id, chat_table.c.chat))
+    for row in results:
+        text_data = json.dumps(row.chat) if row.chat is not None else None
+        connection.execute(sa.update(chat_table).where(chat_table.c.id == row.id).values(old_chat=text_data))
+
+    # Step 3: Remove the new 'chat' JSON column
+    op.drop_column('chat', 'chat')
+
+    # Step 4: Rename 'old_chat' back to 'chat'
+    op.alter_column('chat', 'old_chat', new_column_name='chat', existing_type=sa.Text())
backend/open_webui/migrations/versions/2f1211949ecc_update_message_and_channel_member_table.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Update message and channel member tables

Revision ID: 2f1211949ecc
Revises: 37f288994c47
Create Date: 2025-11-27 03:07:56.200231

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import open_webui.internal.db

# revision identifiers, used by Alembic.
revision: str = '2f1211949ecc'
down_revision: Union[str, None] = '37f288994c47'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # New columns to be added to the channel_member table
    op.add_column('channel_member', sa.Column('status', sa.Text(), nullable=True))
    op.add_column(
        'channel_member',
        sa.Column(
            'is_active',
            sa.Boolean(),
            nullable=False,
            default=True,
            server_default=sa.sql.expression.true(),
        ),
    )

    op.add_column(
        'channel_member',
        sa.Column(
            'is_channel_muted',
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )
    op.add_column(
        'channel_member',
        sa.Column(
            'is_channel_pinned',
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )

    op.add_column('channel_member', sa.Column('data', sa.JSON(), nullable=True))
    op.add_column('channel_member', sa.Column('meta', sa.JSON(), nullable=True))

    op.add_column('channel_member', sa.Column('joined_at', sa.BigInteger(), nullable=False))
    op.add_column('channel_member', sa.Column('left_at', sa.BigInteger(), nullable=True))

    op.add_column('channel_member', sa.Column('last_read_at', sa.BigInteger(), nullable=True))

    op.add_column('channel_member', sa.Column('updated_at', sa.BigInteger(), nullable=True))

    # New columns to be added to the message table
    op.add_column(
        'message',
        sa.Column(
            'is_pinned',
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )
    op.add_column('message', sa.Column('pinned_at', sa.BigInteger(), nullable=True))
    op.add_column('message', sa.Column('pinned_by', sa.Text(), nullable=True))


def downgrade() -> None:
    op.drop_column('channel_member', 'updated_at')
    op.drop_column('channel_member', 'last_read_at')

    op.drop_column('channel_member', 'joined_at')
    op.drop_column('channel_member', 'left_at')

    op.drop_column('channel_member', 'meta')
    op.drop_column('channel_member', 'data')

    op.drop_column('channel_member', 'is_channel_pinned')
    op.drop_column('channel_member', 'is_channel_muted')
    op.drop_column('channel_member', 'is_active')
    op.drop_column('channel_member', 'status')

    op.drop_column('message', 'pinned_by')
    op.drop_column('message', 'pinned_at')
    op.drop_column('message', 'is_pinned')
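
One caveat worth flagging in this revision: `joined_at` is added as `nullable=False` with no `server_default`, which PostgreSQL rejects when `channel_member` already holds rows (the boolean columns above sidestep this with `server_default`). A minimal sketch of the usual workaround, as a hypothetical helper rather than this migration's own code:

import sqlalchemy as sa
from alembic import op

def add_joined_at_safely():
    # Hypothetical: add the NOT NULL column with a temporary default so existing
    # rows are backfilled, then drop the default so new rows must supply a value.
    op.add_column(
        'channel_member',
        sa.Column('joined_at', sa.BigInteger(), nullable=False, server_default='0'),
    )
    op.alter_column('channel_member', 'joined_at', server_default=None)
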
backend/open_webui/migrations/versions/374d2f66af06_add_prompt_history_table.py
ADDED
@@ -0,0 +1,245 @@
"""Add prompt history table

Revision ID: 374d2f66af06
Revises: c440947495f3
Create Date: 2026-01-23 17:15:00.000000

"""

from typing import Sequence, Union
import uuid

from alembic import op
import sqlalchemy as sa

revision: str = '374d2f66af06'
down_revision: Union[str, None] = 'c440947495f3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    conn = op.get_bind()

    # Step 1: Read existing data from the old table. The old schema used
    # 'command' as the primary key, so instead of altering it in place with
    # batch_alter_table we copy the rows into a new table, assuming the old
    # structure below.
    old_prompt_table = sa.table(
        'prompt',
        sa.column('command', sa.Text()),
        sa.column('user_id', sa.Text()),
        sa.column('title', sa.Text()),
        sa.column('content', sa.Text()),
        sa.column('timestamp', sa.BigInteger()),
        sa.column('access_control', sa.JSON()),
    )

    # Read the data, if the table exists
    try:
        existing_prompts = conn.execute(
            sa.select(
                old_prompt_table.c.command,
                old_prompt_table.c.user_id,
                old_prompt_table.c.title,
                old_prompt_table.c.content,
                old_prompt_table.c.timestamp,
                old_prompt_table.c.access_control,
            )
        ).fetchall()
    except Exception:
        # Fallback if the table doesn't exist (new install)
        existing_prompts = []

    # Step 2: Create the new prompt table with 'id' as the primary key
    op.create_table(
        'prompt_new',
        sa.Column('id', sa.Text(), primary_key=True),
        sa.Column('command', sa.String(), unique=True, index=True),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=True),
        sa.Column('meta', sa.JSON(), nullable=True),
        sa.Column('access_control', sa.JSON(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('version_id', sa.Text(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
    )

    # Step 3: Create the prompt_history table
    op.create_table(
        'prompt_history',
        sa.Column('id', sa.Text(), primary_key=True),
        sa.Column('prompt_id', sa.Text(), nullable=False, index=True),
        sa.Column('parent_id', sa.Text(), nullable=True),
        sa.Column('snapshot', sa.JSON(), nullable=False),
        sa.Column('user_id', sa.Text(), nullable=False),
        sa.Column('commit_message', sa.Text(), nullable=True),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
    )

    # Step 4: Migrate data
    prompt_new_table = sa.table(
        'prompt_new',
        sa.column('id', sa.Text()),
        sa.column('command', sa.String()),
        sa.column('user_id', sa.String()),
        sa.column('name', sa.Text()),
        sa.column('content', sa.Text()),
        sa.column('data', sa.JSON()),
        sa.column('meta', sa.JSON()),
        sa.column('access_control', sa.JSON()),
        sa.column('is_active', sa.Boolean()),
        sa.column('version_id', sa.Text()),
        sa.column('tags', sa.JSON()),
        sa.column('created_at', sa.BigInteger()),
        sa.column('updated_at', sa.BigInteger()),
    )

    prompt_history_table = sa.table(
        'prompt_history',
        sa.column('id', sa.Text()),
        sa.column('prompt_id', sa.Text()),
        sa.column('parent_id', sa.Text()),
        sa.column('snapshot', sa.JSON()),
        sa.column('user_id', sa.Text()),
        sa.column('commit_message', sa.Text()),
        sa.column('created_at', sa.BigInteger()),
    )

    for row in existing_prompts:
        command = row[0]
        user_id = row[1]
        title = row[2]
        content = row[3]
        timestamp = row[4]
        access_control = row[5]

        new_uuid = str(uuid.uuid4())
        history_uuid = str(uuid.uuid4())
        clean_command = command[1:] if command and command.startswith('/') else command

        # Insert into prompt_new
        conn.execute(
            sa.insert(prompt_new_table).values(
                id=new_uuid,
                command=clean_command,
                user_id=user_id,
                name=title,
                content=content,
                data={},
                meta={},
                access_control=access_control,
                is_active=True,
                version_id=history_uuid,
                tags=[],
                created_at=timestamp,
                updated_at=timestamp,
            )
        )

        # Create the initial history entry
        conn.execute(
            sa.insert(prompt_history_table).values(
                id=history_uuid,
                prompt_id=new_uuid,
                parent_id=None,
                snapshot={
                    'name': title,
                    'content': content,
                    'command': clean_command,
                    'data': {},
                    'meta': {},
                    'access_control': access_control,
                },
                user_id=user_id,
                commit_message=None,
                created_at=timestamp,
            )
        )

    # Step 5: Replace the old table with the new one
    op.drop_table('prompt')
    op.rename_table('prompt_new', 'prompt')


def downgrade() -> None:
    conn = op.get_bind()

    # Step 1: Read the current data
    prompt_table = sa.table(
        'prompt',
        sa.column('command', sa.String()),
        sa.column('name', sa.Text()),
        sa.column('created_at', sa.BigInteger()),
        sa.column('user_id', sa.Text()),
        sa.column('content', sa.Text()),
        sa.column('access_control', sa.JSON()),
    )

    try:
        current_data = conn.execute(
            sa.select(
                prompt_table.c.command,
                prompt_table.c.name,
                prompt_table.c.created_at,
                prompt_table.c.user_id,
                prompt_table.c.content,
                prompt_table.c.access_control,
            )
        ).fetchall()
    except Exception:
        current_data = []

    # Step 2: Drop the history table and the current prompt table
    op.drop_table('prompt_history')
    op.drop_table('prompt')

    # Step 3: Recreate the old table, assuming the original schema with
    # 'command' as the primary key
    op.create_table(
        'prompt',
        sa.Column('command', sa.String(), primary_key=True),
        sa.Column('user_id', sa.String()),
        sa.Column('title', sa.Text()),
        sa.Column('content', sa.Text()),
        sa.Column('timestamp', sa.BigInteger()),
        sa.Column('access_control', sa.JSON()),
        sa.Column('id', sa.Integer(), nullable=True),
    )

    # Step 4: Restore the data
    old_prompt_table = sa.table(
        'prompt',
        sa.column('command', sa.String()),
        sa.column('user_id', sa.String()),
        sa.column('title', sa.Text()),
        sa.column('content', sa.Text()),
        sa.column('timestamp', sa.BigInteger()),
        sa.column('access_control', sa.JSON()),
    )

    for row in current_data:
        command = row[0]
        name = row[1]
        created_at = row[2]
        user_id = row[3]
        content = row[4]
        access_control = row[5]

        # Restore the leading /
        old_command = '/' + command if command and not command.startswith('/') else command

        conn.execute(
            sa.insert(old_prompt_table).values(
                command=old_command,
                user_id=user_id,
                title=name,
                content=content,
                timestamp=created_at,
                access_control=access_control,
            )
        )
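
For reference, a sketch of how the two new tables relate once this migration runs: `prompt.version_id` holds the id of the prompt's current `prompt_history` row, and history rows chain to their predecessors through `parent_id`. The engine URL below is a placeholder, not a path the project defines:

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///webui.db')  # placeholder URL
metadata = sa.MetaData()
prompt = sa.Table('prompt', metadata, autoload_with=engine)
history = sa.Table('prompt_history', metadata, autoload_with=engine)

with engine.connect() as conn:
    # Current snapshot of every prompt: prompt.version_id -> prompt_history.id
    rows = conn.execute(
        sa.select(prompt.c.command, history.c.snapshot).join_from(
            prompt, history, prompt.c.version_id == history.c.id
        )
    ).fetchall()
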
backend/open_webui/migrations/versions/3781e22d8b01_update_message_table.py
ADDED
@@ -0,0 +1,58 @@
"""Update message & channel tables

Revision ID: 3781e22d8b01
Revises: 7826ab40b532
Create Date: 2024-12-30 03:00:00.000000

"""

from alembic import op
import sqlalchemy as sa

revision = '3781e22d8b01'
down_revision = '7826ab40b532'
branch_labels = None
depends_on = None


def upgrade():
    # Add 'type' column to the 'channel' table
    op.add_column(
        'channel',
        sa.Column(
            'type',
            sa.Text(),
            nullable=True,
        ),
    )

    # Add 'parent_id' column to the 'message' table for threads
    op.add_column(
        'message',
        sa.Column('parent_id', sa.Text(), nullable=True),
    )

    op.create_table(
        'message_reaction',
        sa.Column('id', sa.Text(), nullable=False, primary_key=True, unique=True),  # Unique reaction ID
        sa.Column('user_id', sa.Text(), nullable=False),  # User who reacted
        sa.Column('message_id', sa.Text(), nullable=False),  # Message that was reacted to
        sa.Column('name', sa.Text(), nullable=False),  # Reaction name (e.g. "thumbs_up")
        sa.Column('created_at', sa.BigInteger(), nullable=True),  # Timestamp of when the reaction was added
    )

    op.create_table(
        'channel_member',
        sa.Column('id', sa.Text(), nullable=False, primary_key=True, unique=True),  # Record ID for the membership row
        sa.Column('channel_id', sa.Text(), nullable=False),  # Associated channel
        sa.Column('user_id', sa.Text(), nullable=False),  # Associated user
        sa.Column('created_at', sa.BigInteger(), nullable=True),  # Timestamp of when the user joined the channel
    )


def downgrade():
    # Revert the column additions and drop the new tables
    op.drop_column('channel', 'type')
    op.drop_column('message', 'parent_id')
    op.drop_table('message_reaction')
    op.drop_table('channel_member')
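
`parent_id` turns threads into a self-reference on `message`: a reply stores the id of the message it hangs off. A minimal sketch of reading one thread, assuming the `message` table's existing `id` and `created_at` columns (placeholder engine URL):

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///webui.db')  # placeholder URL
message = sa.Table('message', sa.MetaData(), autoload_with=engine)

def get_thread(conn, root_id: str):
    # The root message plus its direct replies, oldest first
    return conn.execute(
        sa.select(message)
        .where(sa.or_(message.c.id == root_id, message.c.parent_id == root_id))
        .order_by(message.c.created_at)
    ).fetchall()
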
backend/open_webui/migrations/versions/37f288994c47_add_group_member_table.py
ADDED
@@ -0,0 +1,137 @@
"""add_group_member_table

Revision ID: 37f288994c47
Revises: a5c220713937
Create Date: 2025-11-17 03:45:25.123939

"""

import uuid
import time
import json
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = '37f288994c47'
down_revision: Union[str, None] = 'a5c220713937'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # 1. Create the new table
    op.create_table(
        'group_member',
        sa.Column('id', sa.Text(), primary_key=True, unique=True, nullable=False),
        sa.Column(
            'group_id',
            sa.Text(),
            sa.ForeignKey('group.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column(
            'user_id',
            sa.Text(),
            sa.ForeignKey('user.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column('created_at', sa.BigInteger(), nullable=True),
        sa.Column('updated_at', sa.BigInteger(), nullable=True),
        sa.UniqueConstraint('group_id', 'user_id', name='uq_group_member_group_user'),
    )

    connection = op.get_bind()

    # 2. Read existing groups with the user_ids JSON column
    group_table = sa.Table(
        'group',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('user_ids', sa.JSON()),  # JSON stored as text in SQLite + PG
    )

    results = connection.execute(sa.select(group_table.c.id, group_table.c.user_ids)).fetchall()

    # 3. Insert members into the group_member table
    gm_table = sa.Table(
        'group_member',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('group_id', sa.Text()),
        sa.Column('user_id', sa.Text()),
        sa.Column('created_at', sa.BigInteger()),
        sa.Column('updated_at', sa.BigInteger()),
    )

    now = int(time.time())
    for group_id, user_ids in results:
        if not user_ids:
            continue

        if isinstance(user_ids, str):
            try:
                user_ids = json.loads(user_ids)
            except Exception:
                continue  # skip invalid JSON

        if not isinstance(user_ids, list):
            continue

        rows = [
            {
                'id': str(uuid.uuid4()),
                'group_id': group_id,
                'user_id': uid,
                'created_at': now,
                'updated_at': now,
            }
            for uid in user_ids
        ]

        if rows:
            connection.execute(gm_table.insert(), rows)

    # 4. Drop the old column
    with op.batch_alter_table('group') as batch:
        batch.drop_column('user_ids')


def downgrade():
    # Reverse: restore the user_ids column
    with op.batch_alter_table('group') as batch:
        batch.add_column(sa.Column('user_ids', sa.JSON()))

    connection = op.get_bind()
    gm_table = sa.Table(
        'group_member',
        sa.MetaData(),
        sa.Column('group_id', sa.Text()),
        sa.Column('user_id', sa.Text()),
        sa.Column('created_at', sa.BigInteger()),
        sa.Column('updated_at', sa.BigInteger()),
    )

    group_table = sa.Table(
        'group',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('user_ids', sa.JSON()),
    )

    # Build the JSON arrays again
    results = connection.execute(sa.select(group_table.c.id)).fetchall()

    for (group_id,) in results:
        members = connection.execute(sa.select(gm_table.c.user_id).where(gm_table.c.group_id == group_id)).fetchall()

        member_ids = [m[0] for m in members]

        connection.execute(group_table.update().where(group_table.c.id == group_id).values(user_ids=member_ids))

    # Drop the new table
    op.drop_table('group_member')
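
After the backfill, membership is a row per (group, user) pair instead of a JSON array on `group`, so lookups become ordinary joins. A sketch of the query that replaces scanning `user_ids` (placeholder engine URL and user id):

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///webui.db')  # placeholder URL
metadata = sa.MetaData()
group = sa.Table('group', metadata, autoload_with=engine)
group_member = sa.Table('group_member', metadata, autoload_with=engine)

with engine.connect() as conn:
    # All groups a given user belongs to -- no JSON parsing required
    rows = conn.execute(
        sa.select(group)
        .join_from(group, group_member, group.c.id == group_member.c.group_id)
        .where(group_member.c.user_id == 'some-user-id')
    ).fetchall()
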
backend/open_webui/migrations/versions/38d63c18f30f_add_oauth_session_table.py
ADDED
@@ -0,0 +1,75 @@
"""Add oauth_session table

Revision ID: 38d63c18f30f
Revises: 3af16a1c9fb6
Create Date: 2025-09-08 14:19:59.583921

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = '38d63c18f30f'
down_revision: Union[str, None] = '3af16a1c9fb6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Ensure the 'id' column in the 'user' table is unique and the primary key,
    # so the oauth_session ForeignKey below can reference it
    inspector = sa.inspect(op.get_bind())
    columns = inspector.get_columns('user')

    pk_columns = inspector.get_pk_constraint('user')['constrained_columns']
    id_column = next((col for col in columns if col['name'] == 'id'), None)

    if id_column and not id_column.get('unique', False):
        unique_constraints = inspector.get_unique_constraints('user')
        unique_columns = {tuple(u['column_names']) for u in unique_constraints}

        with op.batch_alter_table('user') as batch_op:
            # If the primary key is wrong, drop it
            if pk_columns and pk_columns != ['id']:
                batch_op.drop_constraint(inspector.get_pk_constraint('user')['name'], type_='primary')

            # Add a unique constraint if missing
            if ('id',) not in unique_columns:
                batch_op.create_unique_constraint('uq_user_id', ['id'])

            # Re-create the correct primary key
            batch_op.create_primary_key('pk_user_id', ['id'])

    # Create the oauth_session table
    op.create_table(
        'oauth_session',
        sa.Column('id', sa.Text(), primary_key=True, nullable=False, unique=True),
        sa.Column(
            'user_id',
            sa.Text(),
            sa.ForeignKey('user.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column('provider', sa.Text(), nullable=False),
        sa.Column('token', sa.Text(), nullable=False),
        sa.Column('expires_at', sa.BigInteger(), nullable=False),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
    )

    # Create indexes for better performance
    op.create_index('idx_oauth_session_user_id', 'oauth_session', ['user_id'])
    op.create_index('idx_oauth_session_expires_at', 'oauth_session', ['expires_at'])
    op.create_index('idx_oauth_session_user_provider', 'oauth_session', ['user_id', 'provider'])


def downgrade() -> None:
    # Drop the indexes first
    op.drop_index('idx_oauth_session_user_provider', table_name='oauth_session')
    op.drop_index('idx_oauth_session_expires_at', table_name='oauth_session')
    op.drop_index('idx_oauth_session_user_id', table_name='oauth_session')

    # Drop the table
    op.drop_table('oauth_session')
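
The index on `expires_at` suggests sessions are meant to be swept by expiry. A minimal sketch of such a cleanup job, assuming `expires_at` holds Unix epoch seconds as elsewhere in the schema (placeholder engine URL):

import time
import sqlalchemy as sa

engine = sa.create_engine('sqlite:///webui.db')  # placeholder URL
oauth_session = sa.Table('oauth_session', sa.MetaData(), autoload_with=engine)

with engine.begin() as conn:
    # Remove every session whose expiry is in the past; this predicate is
    # exactly what idx_oauth_session_expires_at can serve
    conn.execute(
        sa.delete(oauth_session).where(oauth_session.c.expires_at < int(time.time()))
    )
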
backend/open_webui/migrations/versions/3ab32c4b8f59_update_tags.py
ADDED
@@ -0,0 +1,78 @@
"""Update tags

Revision ID: 3ab32c4b8f59
Revises: 1af9b942657b
Create Date: 2024-10-09 21:02:35.241684

"""

from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, select, update, column
from sqlalchemy.engine.reflection import Inspector

import json

revision = '3ab32c4b8f59'
down_revision = '1af9b942657b'
branch_labels = None
depends_on = None


def upgrade():
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)

    # Inspect the 'tag' table constraints and structure
    existing_pk = inspector.get_pk_constraint('tag')
    unique_constraints = inspector.get_unique_constraints('tag')
    existing_indexes = inspector.get_indexes('tag')

    print(f'Primary Key: {existing_pk}')
    print(f'Unique Constraints: {unique_constraints}')
    print(f'Indexes: {existing_indexes}')

    with op.batch_alter_table('tag', schema=None) as batch_op:
        # Drop the existing primary key constraint if it exists
        if existing_pk and existing_pk.get('constrained_columns'):
            pk_name = existing_pk.get('name')
            if pk_name:
                print(f'Dropping primary key constraint: {pk_name}')
                batch_op.drop_constraint(pk_name, type_='primary')

        # Now create the new primary key on the combination of 'id' and 'user_id'
        print("Creating new primary key with 'id' and 'user_id'.")
        batch_op.create_primary_key('pk_id_user_id', ['id', 'user_id'])

        # Drop unique constraints that could conflict with the new primary key
        for constraint in unique_constraints:
            if (
                constraint['name'] == 'uq_id_user_id'
            ):  # Adjust this name according to what is actually returned by the inspector
                print(f'Dropping unique constraint: {constraint["name"]}')
                batch_op.drop_constraint(constraint['name'], type_='unique')

        # Drop unique indexes that are not backed by a unique constraint
        for index in existing_indexes:
            if index['unique']:
                if not any(constraint['name'] == index['name'] for constraint in unique_constraints):
                    print(f'Dropping unique index: {index["name"]}')
                    batch_op.drop_index(index['name'])


def downgrade():
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)

    current_pk = inspector.get_pk_constraint('tag')

    with op.batch_alter_table('tag', schema=None) as batch_op:
        # Drop the current primary key first, if it matches the one we added in upgrade
        if current_pk and 'pk_id_user_id' == current_pk.get('name'):
            batch_op.drop_constraint('pk_id_user_id', type_='primary')

        # Restore the original primary key
        batch_op.create_primary_key('pk_id', ['id'])

        # With the primary key on just 'id' restored, add back the unique constraint
        batch_op.create_unique_constraint('uq_id_user_id', ['id', 'user_id'])
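
The effect of the new composite key is that a tag id only has to be unique per user, not globally. A scratch illustration against an in-memory database (the real `tag` table has more columns than shown here):

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///:memory:')
metadata = sa.MetaData()
tag = sa.Table(
    'tag',
    metadata,
    sa.Column('id', sa.String(), primary_key=True),
    sa.Column('user_id', sa.String(), primary_key=True),  # composite primary key
    sa.Column('name', sa.String()),
)
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(
        tag.insert(),
        [
            {'id': 'work', 'user_id': 'alice', 'name': 'Work'},
            {'id': 'work', 'user_id': 'bob', 'name': 'Work'},  # same id, different user: allowed
        ],
    )
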
backend/open_webui/migrations/versions/3af16a1c9fb6_update_user_table.py
ADDED
@@ -0,0 +1,32 @@
"""update user table

Revision ID: 3af16a1c9fb6
Revises: 018012973d35
Create Date: 2025-08-21 02:07:18.078283

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = '3af16a1c9fb6'
down_revision: Union[str, None] = '018012973d35'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column('user', sa.Column('username', sa.String(length=50), nullable=True))
    op.add_column('user', sa.Column('bio', sa.Text(), nullable=True))
    op.add_column('user', sa.Column('gender', sa.Text(), nullable=True))
    op.add_column('user', sa.Column('date_of_birth', sa.Date(), nullable=True))


def downgrade() -> None:
    op.drop_column('user', 'username')
    op.drop_column('user', 'bio')
    op.drop_column('user', 'gender')
    op.drop_column('user', 'date_of_birth')
backend/open_webui/migrations/versions/3e0e00844bb0_add_knowledge_file_table.py
ADDED
@@ -0,0 +1,161 @@
"""Add knowledge_file table

Revision ID: 3e0e00844bb0
Revises: 90ef40d4714e
Create Date: 2025-12-02 06:54:19.401334

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
import open_webui.internal.db

import time
import json
import uuid

# revision identifiers, used by Alembic.
revision: str = '3e0e00844bb0'
down_revision: Union[str, None] = '90ef40d4714e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # 1. Create the new knowledge_file table
    op.create_table(
        'knowledge_file',
        sa.Column('id', sa.Text(), primary_key=True),
        sa.Column('user_id', sa.Text(), nullable=False),
        sa.Column(
            'knowledge_id',
            sa.Text(),
            sa.ForeignKey('knowledge.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column(
            'file_id',
            sa.Text(),
            sa.ForeignKey('file.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
        # indexes
        sa.Index('ix_knowledge_file_knowledge_id', 'knowledge_id'),
        sa.Index('ix_knowledge_file_file_id', 'file_id'),
        sa.Index('ix_knowledge_file_user_id', 'user_id'),
        # unique constraint to prevent duplicate entries
        sa.UniqueConstraint(
            'knowledge_id', 'file_id', name='uq_knowledge_file_knowledge_file'
        ),
    )

    connection = op.get_bind()

    # 2. Read existing knowledge rows with the data JSON column
    knowledge_table = sa.Table(
        'knowledge',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('user_id', sa.Text()),
        sa.Column('data', sa.JSON()),  # JSON stored as text in SQLite + PG
    )

    results = connection.execute(
        sa.select(knowledge_table.c.id, knowledge_table.c.user_id, knowledge_table.c.data)
    ).fetchall()

    # 3. Insert entries into the knowledge_file table
    kf_table = sa.Table(
        'knowledge_file',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('user_id', sa.Text()),
        sa.Column('knowledge_id', sa.Text()),
        sa.Column('file_id', sa.Text()),
        sa.Column('created_at', sa.BigInteger()),
        sa.Column('updated_at', sa.BigInteger()),
    )

    file_table = sa.Table(
        'file',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
    )

    now = int(time.time())
    for knowledge_id, user_id, data in results:
        if not data:
            continue

        if isinstance(data, str):
            try:
                data = json.loads(data)
            except Exception:
                continue  # skip invalid JSON

        if not isinstance(data, dict):
            continue

        file_ids = data.get('file_ids', [])

        for file_id in file_ids:
            file_exists = connection.execute(sa.select(file_table.c.id).where(file_table.c.id == file_id)).fetchone()

            if not file_exists:
                continue  # skip non-existing files

            row = {
                'id': str(uuid.uuid4()),
                'user_id': user_id,
                'knowledge_id': knowledge_id,
                'file_id': file_id,
                'created_at': now,
                'updated_at': now,
            }
            connection.execute(kf_table.insert().values(**row))

    # 4. Drop the old data column
    with op.batch_alter_table('knowledge') as batch:
        batch.drop_column('data')


def downgrade() -> None:
    # 1. Add back the old data column
    op.add_column('knowledge', sa.Column('data', sa.JSON(), nullable=True))

    connection = op.get_bind()

    # 2. Read knowledge_file entries and reconstruct the data JSON
    knowledge_table = sa.Table(
        'knowledge',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('data', sa.JSON()),
    )

    kf_table = sa.Table(
        'knowledge_file',
        sa.MetaData(),
        sa.Column('id', sa.Text()),
        sa.Column('knowledge_id', sa.Text()),
        sa.Column('file_id', sa.Text()),
    )

    results = connection.execute(sa.select(knowledge_table.c.id)).fetchall()

    for (knowledge_id,) in results:
        file_ids = connection.execute(
            sa.select(kf_table.c.file_id).where(kf_table.c.knowledge_id == knowledge_id)
        ).fetchall()

        file_ids_list = [fid for (fid,) in file_ids]

        data_json = {'file_ids': file_ids_list}

        connection.execute(knowledge_table.update().where(knowledge_table.c.id == knowledge_id).values(data=data_json))

    # 3. Drop the knowledge_file table
    op.drop_table('knowledge_file')
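
One performance note on the backfill above: it issues a SELECT per file id to check existence. On large installs a single pass over `file.id` can replace those per-row probes. A hypothetical variant of the inner check, reusing the `connection`, `file_table`, and `file_ids` names from the migration rather than standing alone:

# Hypothetical: fetch all existing file ids once, then test membership in memory
existing_file_ids = {
    fid for (fid,) in connection.execute(sa.select(file_table.c.id)).fetchall()
}
valid_file_ids = [fid for fid in file_ids if fid in existing_file_ids]
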
backend/open_webui/migrations/versions/4ace53fd72c8_update_folder_table_datetime.py
ADDED
@@ -0,0 +1,67 @@
"""Update folder table and change DateTime to BigInteger for timestamp fields

Revision ID: 4ace53fd72c8
Revises: af906e964978
Create Date: 2024-10-23 03:00:00.000000

"""

from alembic import op
import sqlalchemy as sa

revision = '4ace53fd72c8'
down_revision = 'af906e964978'
branch_labels = None
depends_on = None


def upgrade():
    # Perform safe alterations using batch operation
    with op.batch_alter_table('folder', schema=None) as batch_op:
        # Step 1: Remove server defaults for created_at and updated_at
        batch_op.alter_column(
            'created_at',
            server_default=None,  # Removing server default
        )
        batch_op.alter_column(
            'updated_at',
            server_default=None,  # Removing server default
        )

        # Step 2: Change the column type to BigInteger for created_at
        batch_op.alter_column(
            'created_at',
            type_=sa.BigInteger(),
            existing_type=sa.DateTime(),
            existing_nullable=False,
            postgresql_using='extract(epoch from created_at)::bigint',  # Conversion for PostgreSQL
        )

        # Change the column type to BigInteger for updated_at
        batch_op.alter_column(
            'updated_at',
            type_=sa.BigInteger(),
            existing_type=sa.DateTime(),
            existing_nullable=False,
            postgresql_using='extract(epoch from updated_at)::bigint',  # Conversion for PostgreSQL
        )


def downgrade():
    # Downgrade: convert columns back to DateTime and restore defaults
    with op.batch_alter_table('folder', schema=None) as batch_op:
        batch_op.alter_column(
            'created_at',
            type_=sa.DateTime(),
            existing_type=sa.BigInteger(),
            existing_nullable=False,
            server_default=sa.func.now(),  # Restoring server default on downgrade
        )
        batch_op.alter_column(
            'updated_at',
            type_=sa.DateTime(),
            existing_type=sa.BigInteger(),
            existing_nullable=False,
            server_default=sa.func.now(),  # Restoring server default on downgrade
            onupdate=sa.func.now(),  # Restoring onupdate behavior if it was there
        )
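
`postgresql_using` supplies the cast PostgreSQL needs when a column changes type; SQLite gets there instead by rebuilding the table in batch mode. Either way, the column ends up holding Unix epoch seconds, the same conversion as this Python round trip:

from datetime import datetime, timezone

dt = datetime(2024, 10, 23, 3, 0, 0, tzinfo=timezone.utc)
epoch = int(dt.timestamp())  # what the column holds after the upgrade
restored = datetime.fromtimestamp(epoch, tz=timezone.utc)  # the downgrade direction
assert restored == dt
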
backend/open_webui/migrations/versions/56359461a091_add_calendar_tables.py
ADDED
@@ -0,0 +1,83 @@
"""add calendar tables

Revision ID: 56359461a091
Revises: c1d2e3f4a5b6
Create Date: 2026-04-19 16:20:58.162045

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '56359461a091'
down_revision: Union[str, None] = 'c1d2e3f4a5b6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        'calendar',
        sa.Column('id', sa.Text(), nullable=False),
        sa.Column('user_id', sa.Text(), nullable=False),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('color', sa.Text(), nullable=True),
        sa.Column('is_default', sa.Boolean(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=True),
        sa.Column('meta', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('ix_calendar_user', 'calendar', ['user_id'], unique=False)

    op.create_table(
        'calendar_event',
        sa.Column('id', sa.Text(), nullable=False),
        sa.Column('calendar_id', sa.Text(), nullable=False),
        sa.Column('user_id', sa.Text(), nullable=False),
        sa.Column('title', sa.Text(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('start_at', sa.BigInteger(), nullable=False),
        sa.Column('end_at', sa.BigInteger(), nullable=True),
        sa.Column('all_day', sa.Boolean(), nullable=False),
        sa.Column('rrule', sa.Text(), nullable=True),
        sa.Column('color', sa.Text(), nullable=True),
        sa.Column('location', sa.Text(), nullable=True),
        sa.Column('data', sa.JSON(), nullable=True),
        sa.Column('meta', sa.JSON(), nullable=True),
        sa.Column('is_cancelled', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('ix_calendar_event_calendar', 'calendar_event', ['calendar_id', 'start_at'], unique=False)
    op.create_index('ix_calendar_event_user_date', 'calendar_event', ['user_id', 'start_at'], unique=False)

    op.create_table(
        'calendar_event_attendee',
        sa.Column('id', sa.Text(), nullable=False),
        sa.Column('event_id', sa.Text(), nullable=False),
        sa.Column('user_id', sa.Text(), nullable=False),
        sa.Column('status', sa.Text(), nullable=False),
        sa.Column('meta', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.BigInteger(), nullable=False),
        sa.Column('updated_at', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('event_id', 'user_id', name='uq_event_attendee'),
    )
    op.create_index('ix_calendar_event_attendee_user', 'calendar_event_attendee', ['user_id', 'status'], unique=False)


def downgrade() -> None:
    op.drop_index('ix_calendar_event_attendee_user', table_name='calendar_event_attendee')
    op.drop_table('calendar_event_attendee')
    op.drop_index('ix_calendar_event_user_date', table_name='calendar_event')
    op.drop_index('ix_calendar_event_calendar', table_name='calendar_event')
    op.drop_table('calendar_event')
    op.drop_index('ix_calendar_user', table_name='calendar')
    op.drop_table('calendar')
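
Both `calendar_event` indexes lead with an equality column and end with `start_at`, which is the shape range queries want. A minimal sketch of the lookup they appear designed for (placeholder engine URL):

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///webui.db')  # placeholder URL
event = sa.Table('calendar_event', sa.MetaData(), autoload_with=engine)

def events_in_window(conn, user_id: str, start: int, end: int):
    # Equality on user_id plus a range on start_at matches
    # ix_calendar_event_user_date (user_id, start_at)
    return conn.execute(
        sa.select(event)
        .where(event.c.user_id == user_id)
        .where(event.c.start_at >= start, event.c.start_at < end)
        .order_by(event.c.start_at)
    ).fetchall()
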