github-actions committed
Commit 395be8e · 0 Parent(s)

Sync from GitHub Fri Oct 24 05:20:07 UTC 2025
.dockerignore ADDED
@@ -0,0 +1,38 @@
+ # Node dependencies & build outputs
+ frontend/node_modules
+ frontend/dist
+
+ # Python virtual environments
+ backend/.venv
+ venv
+ ENV
+
+ # Caches / bytecode
+ **/__pycache__
+ *.py[cod]
+ .pytest_cache
+ .mypy_cache
+ .pytype
+ .pyright
+
+ # Git & VCS
+ .git
+ .gitignore
+
+ # Logs
+ *.log
+ npm-debug.log*
+ yarn-debug.log*
+ pnpm-debug.log*
+
+ # Editors / OS
+ .idea
+ .vscode
+ *.code-workspace
+ Thumbs.db
+ .DS_Store
+
+ # Misc
+ coverage*
+ dist
+ build
.env.example ADDED
@@ -0,0 +1,10 @@
+ # Supabase Configuration
+ SUPABASE_URL=your_supabase_project_url_here
+ SUPABASE_KEY=your_supabase_anon_key_here
+
+ # Example:
+ # SUPABASE_URL=https://your-project-id.supabase.co
+ # SUPABASE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
+
+ # Other environment variables
+ # Add other environment variables your app needs here
.github/workflows/run-rd-pipeline.yml ADDED
@@ -0,0 +1,56 @@
+ name: Run Reddit Pipeline
+
+ on:
+   schedule:
+     - cron: '0 3 * * *'  # Runs daily at 03:00 UTC
+   workflow_dispatch:
+     inputs:
+       user_input_override:
+         description: 'Optional raw text to process instead of Reddit fetch'
+         required: false
+         default: ''
+
+ jobs:
+   run-pipeline:
+     runs-on: ubuntu-latest
+     timeout-minutes: 10
+     env:
+       PYTHONUNBUFFERED: '1'
+       OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+       BRIGHTDATA_API_KEY: ${{ secrets.BRIGHTDATA_API_KEY }}
+       SUPABASE_URL: ${{ secrets.SUPABASE_URL }}
+       SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }}
+       REDDIT_USER_AGENT: 'Mozilla/5.0'
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.12'
+
+       - name: Cache pip
+         uses: actions/cache@v4
+         with:
+           path: ~/.cache/pip
+           key: pip-${{ runner.os }}-${{ hashFiles('backend/requirements.txt') }}
+           restore-keys: |
+             pip-${{ runner.os }}-
+
+       - name: Install dependencies
+         working-directory: backend
+         run: |
+           python -m pip install --upgrade pip
+           pip install -r requirements.txt
+
+       - name: Run rd_pipeline (Reddit fetch mode or manual override)
+         working-directory: backend
+         env:
+           USER_INPUT_OVERRIDE: ${{ github.event.inputs.user_input_override || '' }}
+         run: python rd_pipeline_bdata.py
+
+       - name: Summarize run
+         if: always()
+         run: |
+           echo 'Run complete at:' $(date -u)
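For reference, the `workflow_dispatch` trigger above can also be fired from a script through the GitHub REST API. A minimal sketch (not part of this commit; `OWNER/REPO` and the `GITHUB_TOKEN` variable are placeholders you must supply):

```python
# Sketch: dispatch the pipeline workflow with an optional text override.
import os
import requests

repo = "OWNER/REPO"  # placeholder: the GitHub repository this workflow lives in
url = f"https://api.github.com/repos/{repo}/actions/workflows/run-rd-pipeline.yml/dispatches"

resp = requests.post(
    url,
    headers={
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
        "Accept": "application/vnd.github+json",
    },
    json={"ref": "main", "inputs": {"user_input_override": "raw text to process"}},
)
resp.raise_for_status()  # GitHub answers 204 No Content on success
```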
.github/workflows/sync-to-huggingface.yml ADDED
@@ -0,0 +1,109 @@
+ name: Sync to Hugging Face Hub
+
+ on:
+   push:
+     branches: [ main ]
+   workflow_dispatch:
+
+ permissions:
+   contents: read
+
+ env:
+   HF_REPO_TYPE: space
+
+ jobs:
+   sync:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.12'
+
+       - name: Install huggingface_hub
+         run: |
+           python -m pip install --upgrade pip
+           pip install huggingface_hub
+
+       - name: Validate secrets
+         env:
+           HF_TOKEN: ${{ secrets.HF_TOKEN }}
+           HF_REPO: ${{ secrets.HF_REPO }}
+         run: |
+           if [ -z "$HF_TOKEN" ]; then
+             echo "HF_TOKEN secret is not set"
+             exit 1
+           fi
+           if [ -z "$HF_REPO" ]; then
+             echo "HF_REPO secret is not set (should be like: username/repo-name)"
+             exit 1
+           fi
+
+       - name: Create or verify Hugging Face repo
+         env:
+           HF_TOKEN: ${{ secrets.HF_TOKEN }}
+           HF_REPO: ${{ secrets.HF_REPO }}
+           HF_REPO_TYPE: ${{ env.HF_REPO_TYPE }}
+         run: |
+           python3 -c '
+           import os
+           from huggingface_hub import HfApi, create_repo
+
+           api = HfApi()
+           repo_id = os.environ["HF_REPO"]
+           repo_type = os.environ.get("HF_REPO_TYPE", "model")
+           token = os.environ["HF_TOKEN"]
+
+           try:
+               api.repo_info(repo_id=repo_id, repo_type=repo_type, token=token)
+               print(f"{repo_type.capitalize()} {repo_id} exists.")
+           except Exception:
+               print(f"Creating {repo_type} {repo_id}...")
+               create_repo(repo_id=repo_id, token=token, repo_type=repo_type, private=False, exist_ok=True)
+               print(f"{repo_type.capitalize()} {repo_id} created.")
+           '
+
+       - name: Mirror to Hugging Face
+         env:
+           HF_TOKEN: ${{ secrets.HF_TOKEN }}
+           HF_REPO: ${{ secrets.HF_REPO }}
+           HF_REPO_TYPE: ${{ env.HF_REPO_TYPE }}
+         run: |
+           set -e
+
+           # Create temp directory
+           TMP_DIR=$(mktemp -d)
+           echo "Using temp directory: $TMP_DIR"
+
+           # Copy files (excluding .git)
+           cp -r . "$TMP_DIR"
+           rm -rf "$TMP_DIR/.git"
+           cd "$TMP_DIR"
+
+           # Initialize git
+           git init
+           git config user.name "github-actions"
+           git config user.email "github-actions@users.noreply.github.com"
+
+           # Set remote URL based on repo type
+           if [ "$HF_REPO_TYPE" = "space" ]; then
+             REMOTE_URL="https://user:${HF_TOKEN}@huggingface.co/spaces/${HF_REPO}"
+           else
+             REMOTE_URL="https://user:${HF_TOKEN}@huggingface.co/${HF_REPO}"
+           fi
+
+           git remote add origin "$REMOTE_URL"
+           git add .
+           git commit -m "Sync from GitHub $(date -u)"
+           git branch -M main
+           git push origin main --force
+
+           echo "Successfully synced to Hugging Face!"
+
+       - name: Summary
+         run: echo "Sync completed successfully"
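For reference, the same mirror can be expressed with the `huggingface_hub` client the workflow already installs, without the git plumbing. A minimal sketch under the same `HF_TOKEN`/`HF_REPO` secrets (not part of this commit):

```python
# Alternative sketch: push the checkout to the Space with the hub client.
import os
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_TOKEN"])
api.upload_folder(
    folder_path=".",                 # the repository checkout
    repo_id=os.environ["HF_REPO"],   # e.g. username/repo-name
    repo_type="space",
    commit_message="Sync from GitHub",
    ignore_patterns=[".git/*"],      # skip git metadata, like the rm -rf .git above
)
```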
.gitignore ADDED
@@ -0,0 +1,55 @@
+ # --- OS / Editor ---
+ .DS_Store
+ Thumbs.db
+ desktop.ini
+ .idea/
+ .vscode/
+ *.code-workspace
+
+ # --- Root caches ---
+ .pytest_cache/
+ .mypy_cache/
+ .pytype/
+ .pyright/
+ .coverage
+ coverage.xml
+ htmlcov/
+ *.log
+ npm-debug.log*
+ yarn-debug.log*
+ yarn-error.log*
+ pnpm-debug.log*
+ *.tsbuildinfo
+
+ # --- Python (backend) ---
+ backend/.venv/
+ backend/.env
+ backend/images/
+ venv/
+ ENV/
+ env/
+ **/__pycache__/
+ **/*.py[cod]
+ *.pyd
+ *.pyo
+ *.so
+ *.egg-info/
+ .eggs/
+
+ # --- Node / Vite (frontend) ---
+ frontend/node_modules/
+ frontend/dist/
+ frontend/.env
+ frontend/.env.local
+ frontend/.env.*.local
+ # Optional: local tooling caches
+ frontend/.cache/
+
+ # --- Misc build outputs (root) ---
+ /dist/
+ /build/
+
+ # --- OS sync artifacts ---
+ ~$*
+ *.tmp
+ *.temp
Dockerfile ADDED
@@ -0,0 +1,34 @@
+ # -------- Stage 1: Build frontend (Vite + React) --------
+ FROM node:20-alpine AS frontend-builder
+ WORKDIR /frontend
+
+ # Install deps first (better layer caching)
+ COPY frontend/package*.json ./
+ COPY frontend/tsconfig.json frontend/vite.config.* frontend/index.html ./
+ RUN npm install
+
+ # Copy source and build
+ COPY frontend/src ./src
+ RUN npm run build
+
+ # -------- Stage 2: Backend runtime (FastAPI + Uvicorn) --------
+ FROM python:3.12-slim AS runtime
+ ENV PYTHONDONTWRITEBYTECODE=1 \
+     PYTHONUNBUFFERED=1
+
+ WORKDIR /app
+
+ # Install backend dependencies
+ COPY backend/requirements.txt ./backend/requirements.txt
+ RUN pip install --no-cache-dir -r backend/requirements.txt
+
+ # Copy backend code
+ COPY backend ./backend
+
+ # Copy built frontend into expected path (/app/frontend/dist)
+ COPY --from=frontend-builder /frontend/dist ./frontend/dist
+
+ EXPOSE 7860
+
+ # Default command
+ CMD ["python", "-m", "uvicorn", "backend.app:app", "--host", "0.0.0.0", "--port", "7860"]
README.md ADDED
@@ -0,0 +1,177 @@
+ ---
+ title: Amplify
+ emoji: 🏇🏻
+ colorFrom: indigo
+ colorTo: purple
+ sdk: docker
+ sdk_version: 4.0.0
+ app_file: app.py
+ pinned: false
+ ---
+
+ # ReactFast
+
+ Minimal full-stack template: **FastAPI** backend + **Vite/React (TypeScript)** frontend. The backend serves the built frontend (single-page app) and exposes a simple JSON API. Includes a multi‑stage Docker build and a GitHub Actions workflow (Commit-4) to push the image to Azure Container Registry (ACR).
+
+ ---
+
+ ## Features
+ - FastAPI backend (`/api/transform`, `/api/health`) with static file serving.
+ - Vite + React + TypeScript frontend built to `frontend/dist`.
+ - Frontend served at `/` (adjust base in `vite.config.ts`).
+ - Simple round‑trip demo: user enters text, backend returns a transformed string.
+ - Multi-stage Dockerfile: builds the frontend, copies the build into the Python runtime image.
+ - GitHub Actions CI: builds & pushes the image to ACR (tags: commit SHA + `latest`).
+
+ ---
+
+ ## Tech Stack
+ | Layer | Technology | Notes |
+ |------------|------------|-------|
+ | Backend | FastAPI / Uvicorn | ASGI app serving API + static assets |
+ | Frontend | React 18 + Vite | Fast dev server & optimized build |
+ | Language | Python 3.12 & TypeScript | Type safety on both sides |
+ | Packaging | Docker multi-stage | Small final image (Python slim) |
+ | CI / CD | GitHub Actions | Image build & ACR push |
+ | Registry | Azure Container Registry | Deployment artifact storage |
+
+ ---
+
+ ## Repository Layout
+ ```
+ backend/
+   app.py             # FastAPI app + API endpoints + static mounting
+   requirements.txt   # Backend dependencies
+ frontend/
+   src/               # React source (App.tsx, main.tsx, style.css)
+   index.html         # Vite entry HTML
+   vite.config.ts     # Vite config (base path, build outDir)
+   package.json       # Frontend scripts/deps
+ Dockerfile           # Multi-stage build (frontend build → backend runtime)
+ .dockerignore        # Prunes build context
+ builderflow.md       # Incremental commit summaries (Commit-1..4)
+ README.md            # This file
+ ```
+
+ ---
+
+ ## Backend Overview
+ - Mounts the frontend build via `StaticFiles` after defining API routes.
+ - Example endpoints:
+   - `POST /api/transform` → `{ result: "You said: ..." }`
+   - `GET /api/health` → `{ status: "ok" }`
+ - Ensure API routes are declared **before** mounting the static root to avoid 405 errors (StaticFiles intercepting non-GET methods).
+
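The ordering rule above is worth seeing in code. A minimal illustration (a sketch, not the actual `backend/app.py`):

```python
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

app = FastAPI()

@app.post("/api/transform")                 # 1. declare API routes first
def transform(payload: dict):
    return {"result": f"You said: {payload.get('text', '')}"}

# 2. mount the static root last; mounted first, it would intercept the POST
#    itself and answer 405 Method Not Allowed.
app.mount("/", StaticFiles(directory="frontend/dist", html=True), name="frontend")
```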
+ ### Running backend (local dev)
+ ```powershell
+ cd backend
+ python -m venv .venv
+ .\.venv\Scripts\pip install -r requirements.txt --trusted-host pypi.org --trusted-host files.pythonhosted.org
+ .\.venv\Scripts\python -m uvicorn app:app --host 127.0.0.1 --port 8000 --reload
+ ```
+ Open: http://127.0.0.1:8000/
+
+ ---
+
+ ## Frontend Overview
+ - Vite handles dev (`npm run dev`) and production builds (`npm run build`).
+ - Output bundle is placed in `frontend/dist` and served by FastAPI.
+ - If you change the route mount (e.g., from `/` to `/app`), update `base` in `vite.config.ts`.
+
+ ### Running frontend (standalone dev mode)
+ ```powershell
+ cd frontend
+ npm install
+ npm run dev
+ ```
+ Dev server: http://127.0.0.1:5173/ (API calls to `/api/...` need a proxy config or the full backend URL when not served together).
+
+ ### Production build
+ ```powershell
+ cd frontend
+ npm run build
+ ```
+ Rebuild whenever you change frontend assets before packaging the backend or Docker image.
+
+ ---
+
+ ## End‑to‑End Flow
+ 1. User enters text in the form.
+ 2. Frontend sends `POST /api/transform` with `{ text }`.
+ 3. Backend returns a transformed string.
+ 4. UI displays the response below the form.
+
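The same round trip from a script, for quick testing (a sketch; assumes the backend is running locally on port 8000):

```python
import requests

resp = requests.post("http://127.0.0.1:8000/api/transform", json={"text": "hello"})
resp.raise_for_status()
print(resp.json())  # e.g. {"result": "..."}
```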
+ ---
+
+ ## Docker
+ Multi-stage build: Node → Python.
+
+ ### Build locally
+ ```powershell
+ docker build -t reactfast .
+ docker run --rm -p 7860:7860 reactfast
+ ```
+ Visit: http://localhost:7860/
+
+ ### Environment customization
+ - Adjust the host port by changing `-p hostPort:7860`.
+ - Add env vars by appending `-e KEY=value` to `docker run`.
+ - For dev hot-reload, prefer running the backend & frontend separately outside the container.
+
+ ---
+
+ ## GitHub Actions (Commit-4)
+ The workflow builds and pushes the image to ACR on every push to `main`.
+
+ ### Required GitHub Secrets
+ - `AZURE_CREDENTIALS` – Service Principal JSON (`--sdk-auth`) with the AcrPush role.
+ - `ACR_LOGIN_SERVER` – e.g. `myregistry.azurecr.io`.
+
+ ### Resulting Tags
+ - `<loginServer>/reactfast:<git-sha>` (immutable)
+ - `<loginServer>/reactfast:latest`
+
+ ### Typical Service Principal Creation
+ ```powershell
+ $ACR_ID = az acr show -n <ACR_NAME> --query id -o tsv
+ az ad sp create-for-rbac --name reactfast-sp --role AcrPush --scopes $ACR_ID --sdk-auth
+ ```
+ Paste the JSON output into the `AZURE_CREDENTIALS` secret.
+
+ ---
+
+ ## Troubleshooting
+ | Issue | Cause | Fix |
+ |-------|-------|-----|
+ | 404 assets | Mount/base mismatch | Align `vite.config.ts` base with the `app.mount()` path and rebuild |
+ | 405 on POST /api/transform | StaticFiles mounted before API routes | Declare API routes first, mount static last |
+ | Image lacks new frontend changes | Frontend not rebuilt in Docker | The Dockerfile handles the build; ensure source changes are committed |
+ | ACR push fails | Missing/incorrect secrets | Verify `AZURE_CREDENTIALS`, `ACR_LOGIN_SERVER` |
+
+ ---
+
+ ## Extending
+ - Add tests (pytest + React Testing Library).
+ - Introduce type checking (mypy/pyright) in CI.
+ - Add security scanning (Trivy / GitHub Dependabot alerts).
+ - Implement version tagging (semantic-release or a manual release workflow).
+ - Deploy automatically to Azure Web App / Container Apps after push.
+
+ ---
+
+ ## Quick Start (All-in-One)
+ ```powershell
+ # Backend & Frontend build
+ cd frontend
+ npm install
+ npm run build
+ cd ../backend
+ python -m venv .venv
+ .\.venv\Scripts\pip install -r requirements.txt
+ .\.venv\Scripts\python -m uvicorn app:app --host 127.0.0.1 --port 8000 --reload
+ # Open http://127.0.0.1:8000/
+ ```
+
+ ---
+
+ For commit-by-commit evolution see `builderflow.md`.
backend/.env.example ADDED
@@ -0,0 +1,8 @@
+ AZURE_OPENAI_API_KEY=''
+ AZURE_OPENAI_ENDPOINT=''
+ AZURE_OPENAI_VERSION=''
+ AZURE_GPT4O_MODEL=''
+ MODEL_TEMPERATURE=0
+ SQL_TARGET_DIALECT=ANSI
+ SUPABASE_URL=''
+ SUPABASE_KEY=''
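These values are loaded into the process environment at startup; `brightdata_api.py` calls python-dotenv for this, while the Supabase client reads `os.getenv` directly. A minimal sketch of the pattern (editorial, not a file in this commit):

```python
# Load backend/.env into the environment, then read the keys the app expects.
import os
from dotenv import load_dotenv

load_dotenv()  # looks for a .env file in the current working directory

supabase_url = os.getenv("SUPABASE_URL")
supabase_key = os.getenv("SUPABASE_KEY")
if not supabase_url or not supabase_key:
    raise ValueError("SUPABASE_URL and SUPABASE_KEY environment variables are required")
```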
backend/app.py ADDED
@@ -0,0 +1,68 @@
+ # mypy: disable-error-code="no-untyped-def,misc"
+ import pathlib
+ from fastapi import FastAPI, Response
+ from fastapi.staticfiles import StaticFiles
+ from pydantic import BaseModel
+ from backend.llmoperations import get_agent_response
+ from backend.blog_api_supabase import setup_blog_routes
+ #from blog_api_local import setup_blog_routes
+
+ # Define the FastAPI app
+ app = FastAPI()
+
+ # Setup blog routes
+ setup_blog_routes(app)
+
+ # --- Simple API endpoint ---
+ class TextIn(BaseModel):
+     text: str
+
+
+ @app.post("/api/transform")
+ def transform_text(payload: TextIn):
+     # Delegate to the LLM agent (the earlier demo transform is kept for reference)
+     #modified = f"Hello {payload.text.capitalize()}! How are you!"
+     answer = get_agent_response(payload.text)
+     print(f"Here is the answer : {answer}")
+     return {"result": answer}
+
+
+ def create_frontend_router(build_dir="frontend/dist"):
+     """Creates a router to serve the React frontend.
+
+     Args:
+         build_dir: Path to the React build directory relative to this file.
+
+     Returns:
+         A Starlette application serving the frontend.
+     """
+     # Resolve build path from repo root (two levels up from this file: backend/ -> reactfast/)
+     build_path = pathlib.Path(__file__).resolve().parent.parent / build_dir
+
+     if not build_path.is_dir() or not (build_path / "index.html").is_file():
+         print(
+             f"WARN: Frontend build directory not found or incomplete at {build_path}. Serving frontend will likely fail."
+         )
+         # Return a dummy router if build isn't ready
+         from starlette.routing import Route
+
+         async def dummy_frontend(request):
+             return Response(
+                 "Frontend not built. Run 'npm run build' in the frontend directory.",
+                 media_type="text/plain",
+                 status_code=503,
+             )
+
+         return Route("/{path:path}", endpoint=dummy_frontend)
+
+     return StaticFiles(directory=build_path, html=True)
+
+
+ # Mount the frontend at the root path; keep the Vite `base` aligned with this mount
+ app.mount(
+     "/",
+     create_frontend_router(),
+     name="frontend",
+ )
+
backend/blog.db ADDED
Binary file (65.5 kB).
backend/blog_api_local.py ADDED
@@ -0,0 +1,277 @@
+ from fastapi import FastAPI, HTTPException
+ from fastapi.staticfiles import StaticFiles
+ from pydantic import BaseModel
+ from typing import List, Optional, Dict
+ import sqlite3
+ import json
+ from pathlib import Path
+ import os
+
+ class BlogPost(BaseModel):
+     id: int
+     title: str
+     content: str
+     author: str
+     created_at: str
+     published: bool
+     tags: List[str]
+     featured_image: Optional[Dict] = None
+     post_images: List[Dict] = []
+
+ class BlogSummary(BaseModel):
+     id: int
+     title: str
+     author: str
+     created_at: str
+     tags: List[str]
+     excerpt: str
+     has_featured_image: bool
+     featured_image_url: Optional[str] = None
+     post_image_count: int
+
+ class BlogDatabase:
+     def __init__(self, db_path: str = "blog.db"):
+         self.db_path = db_path
+         self.init_database()
+
+     def init_database(self):
+         """Initialize the blog database if it doesn't exist"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         # Check if tables exist
+         cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='blog_posts'")
+         if not cursor.fetchone():
+             # Create tables if they don't exist
+             cursor.execute('''
+                 CREATE TABLE blog_posts (
+                     id INTEGER PRIMARY KEY AUTOINCREMENT,
+                     title TEXT NOT NULL,
+                     content TEXT NOT NULL,
+                     author TEXT DEFAULT 'Admin',
+                     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                     published BOOLEAN DEFAULT 1,
+                     tags TEXT DEFAULT '[]',
+                     featured_image_id INTEGER
+                 )
+             ''')
+
+             cursor.execute('''
+                 CREATE TABLE images (
+                     id INTEGER PRIMARY KEY AUTOINCREMENT,
+                     filename TEXT NOT NULL,
+                     original_filename TEXT NOT NULL,
+                     file_path TEXT NOT NULL,
+                     file_size INTEGER,
+                     mime_type TEXT,
+                     alt_text TEXT DEFAULT '',
+                     caption TEXT DEFAULT '',
+                     width INTEGER,
+                     height INTEGER,
+                     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                 )
+             ''')
+
+             cursor.execute('''
+                 CREATE TABLE blog_post_images (
+                     id INTEGER PRIMARY KEY AUTOINCREMENT,
+                     blog_post_id INTEGER,
+                     image_id INTEGER,
+                     image_type TEXT DEFAULT 'post_content',
+                     image_order INTEGER DEFAULT 0,
+                     position_in_content INTEGER,
+                     FOREIGN KEY (blog_post_id) REFERENCES blog_posts (id),
+                     FOREIGN KEY (image_id) REFERENCES images (id)
+                 )
+             ''')
+
+             # Insert sample blog posts
+             sample_posts = [
+                 {
+                     "title": "Welcome to Our Blog",
+                     "content": "This is our first blog post! We're excited to share insights about technology, development, and innovation. Stay tuned for more amazing content coming your way.",
+                     "author": "Admin",
+                     "tags": '["welcome", "introduction", "blog"]'
+                 },
+                 {
+                     "title": "The Future of AI Development",
+                     "content": "Artificial Intelligence is revolutionizing how we build applications. From machine learning models to natural language processing, AI is becoming an integral part of modern software development. In this post, we explore the latest trends and technologies shaping the future of AI development.",
+                     "author": "Tech Team",
+                     "tags": '["AI", "development", "technology", "future"]'
+                 },
+                 {
+                     "title": "Best Practices for Web Development",
+                     "content": "Building modern web applications requires following best practices for performance, security, and user experience. We'll cover essential techniques including responsive design, API optimization, and modern JavaScript frameworks that every developer should know.",
+                     "author": "Development Team",
+                     "tags": '["web-development", "best-practices", "javascript", "performance"]'
+                 },
+                 {
+                     "title": "Building Scalable Applications",
+                     "content": "Scalability is crucial for applications that need to handle growing user bases and increasing data loads. We'll discuss architectural patterns, database optimization, and cloud deployment strategies that help applications scale efficiently.",
+                     "author": "Architecture Team",
+                     "tags": '["scalability", "architecture", "cloud", "performance"]'
+                 }
+             ]
+
+             for post in sample_posts:
+                 cursor.execute('''
+                     INSERT INTO blog_posts (title, content, author, tags)
+                     VALUES (?, ?, ?, ?)
+                 ''', (post["title"], post["content"], post["author"], post["tags"]))
+
+             conn.commit()
+
+         conn.close()
+
+     def get_blog_posts_summary(self, limit: int = 4, offset: int = 0) -> Dict:
+         """Get blog posts summary for card display with pagination"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         # Get total count
+         cursor.execute('SELECT COUNT(*) FROM blog_posts WHERE published = 1')
+         total_count = cursor.fetchone()[0]
+
+         # Get posts with pagination
+         cursor.execute('''
+             SELECT bp.id, bp.title, bp.author, bp.created_at, bp.tags, bp.content,
+                    bp.featured_image_id,
+                    fi.filename as featured_filename,
+                    COUNT(bpi.id) as post_image_count
+             FROM blog_posts bp
+             LEFT JOIN images fi ON bp.featured_image_id = fi.id
+             LEFT JOIN blog_post_images bpi ON bp.id = bpi.blog_post_id
+             WHERE bp.published = 1
+             GROUP BY bp.id
+             ORDER BY bp.created_at DESC
+             LIMIT ? OFFSET ?
+         ''', (limit, offset))
+
+         rows = cursor.fetchall()
+         conn.close()
+
+         results = []
+         for row in rows:
+             # Create excerpt from content (first 150 characters)
+             content = row[5]
+             excerpt = content[:150] + "..." if len(content) > 150 else content
+             results.append({
+                 'id': row[0],
+                 'title': row[1],
+                 'author': row[2],
+                 'created_at': row[3],
+                 'tags': json.loads(row[4]),
+                 'excerpt': excerpt,
+                 'has_featured_image': row[6] is not None,
+                 'featured_image_url': f"/media/{row[7]}" if row[7] else None,
+                 'post_image_count': row[8]
+             })
+
+         return {
+             'posts': results,
+             'total': total_count,
+             'limit': limit,
+             'offset': offset,
+             'has_more': offset + limit < total_count
+         }
+
+     def get_blog_post_complete(self, post_id: int) -> Optional[Dict]:
+         """Get complete blog post with all images"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         # Get blog post with featured image
+         cursor.execute('''
+             SELECT bp.id, bp.title, bp.content, bp.author, bp.created_at,
+                    bp.published, bp.tags, bp.featured_image_id,
+                    fi.filename as featured_filename, fi.file_path as featured_path,
+                    fi.alt_text as featured_alt, fi.caption as featured_caption,
+                    fi.width as featured_width, fi.height as featured_height
+             FROM blog_posts bp
+             LEFT JOIN images fi ON bp.featured_image_id = fi.id
+             WHERE bp.id = ? AND bp.published = 1
+         ''', (post_id,))
+
+         row = cursor.fetchone()
+         if not row:
+             conn.close()
+             return None
+
+         # Get post content images
+         cursor.execute('''
+             SELECT i.id, i.filename, i.file_path, i.alt_text, i.caption,
+                    i.mime_type, i.width, i.height, bpi.image_order,
+                    bpi.position_in_content, bpi.image_type
+             FROM blog_post_images bpi
+             JOIN images i ON bpi.image_id = i.id
+             WHERE bpi.blog_post_id = ?
+             ORDER BY bpi.image_order
+         ''', (post_id,))
+
+         post_images = cursor.fetchall()
+         conn.close()
+
+         # Build result
+         result = {
+             'id': row[0],
+             'title': row[1],
+             'content': row[2],
+             'author': row[3],
+             'created_at': row[4],
+             'published': row[5],
+             'tags': json.loads(row[6]),
+             'featured_image': {
+                 'filename': row[8],
+                 'file_path': row[9],
+                 'alt_text': row[10],
+                 'caption': row[11],
+                 'width': row[12],
+                 'height': row[13],
+                 'url': f"/media/{row[8]}" if row[8] else None
+             } if row[7] else None,
+             'post_images': [
+                 {
+                     'id': img[0],
+                     'filename': img[1],
+                     'file_path': img[2],
+                     'alt_text': img[3],
+                     'caption': img[4],
+                     'mime_type': img[5],
+                     'width': img[6],
+                     'height': img[7],
+                     'order': img[8],
+                     'position': img[9],
+                     'type': img[10],
+                     'url': f"/media/{img[1]}"
+                 }
+                 for img in post_images
+             ]
+         }
+
+         return result
+
+ # Initialize database
+ blog_db = BlogDatabase()
+
+ def setup_blog_routes(app: FastAPI):
+     """Setup blog API routes"""
+
+     @app.get("/api/blog/posts")
+     async def get_blog_posts(page: int = 1, limit: int = 4):
+         """Get blog posts for card display with pagination"""
+         offset = (page - 1) * limit
+         result = blog_db.get_blog_posts_summary(limit=limit, offset=offset)
+         return result
+
+     @app.get("/api/blog/posts/{post_id}", response_model=BlogPost)
+     async def get_blog_post(post_id: int):
+         """Get complete blog post"""
+         post = blog_db.get_blog_post_complete(post_id)
+         if not post:
+             raise HTTPException(status_code=404, detail="Blog post not found")
+         return post
+
+     # Mount media files if blog_media directory exists
+     media_dir = Path("blog_media")
+     if media_dir.exists():
+         app.mount("/media", StaticFiles(directory=str(media_dir)), name="media")
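The `/api/blog/posts` route above converts `page`/`limit` into an SQL `OFFSET` and derives `has_more` from the total count. A small standalone sketch of that arithmetic (editorial, not part of the commit):

```python
# Same window arithmetic as get_blog_posts_summary / get_blog_posts.
def page_window(page: int, limit: int, total: int) -> dict:
    offset = (page - 1) * limit
    return {"offset": offset, "has_more": offset + limit < total}

# With the 4 seeded posts: one page of 4, or two pages of 2.
assert page_window(page=1, limit=4, total=4) == {"offset": 0, "has_more": False}
assert page_window(page=1, limit=2, total=4) == {"offset": 0, "has_more": True}
assert page_window(page=2, limit=2, total=4) == {"offset": 2, "has_more": False}
```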
backend/blog_api_supabase.py ADDED
@@ -0,0 +1,402 @@
+ from fastapi import FastAPI, HTTPException
+ from fastapi.staticfiles import StaticFiles
+ from pydantic import BaseModel
+ from typing import List, Optional, Dict
+ import json
+ from pathlib import Path
+ import os
+ from datetime import datetime
+ from supabase import create_client, Client
+
+ #http_client = httpx.Client(verify=r'C:\Users\PD817AE\OneDrive - EY\Desktop\AgenticDev\amplify\backend\certs\Zscaler Root CA.crt')
+
+ class BlogPost(BaseModel):
+     id: int
+     title: str
+     content: str
+     author: str
+     created_at: str
+     published: bool
+     tags: List[str]
+     category: Optional[str] = None
+     featured_image: Optional[Dict] = None
+     post_images: List[Dict] = []
+
+ class BlogSummary(BaseModel):
+     id: int
+     title: str
+     author: str
+     created_at: str
+     tags: List[str]
+     category: Optional[str] = None
+     excerpt: str
+     has_featured_image: bool
+     featured_image_url: Optional[str] = None
+     post_image_count: int
+
+ class BlogDatabase:
+     def __init__(self):
+         self.url = os.getenv("SUPABASE_URL")
+         self.key = os.getenv("SUPABASE_KEY")
+
+         if not self.url or not self.key:
+             raise ValueError("SUPABASE_URL and SUPABASE_KEY environment variables are required")
+
+         self.supabase: Client = create_client(self.url, self.key)
+
+     def get_blog_posts_summary(self, limit: int = 6, offset: int = 0, category: Optional[str] = None) -> Dict:
+         """Get blog posts summary for card display with pagination, optional category filter"""
+         try:
+             # Get total count
+             # Request exact count (Supabase client accepts 'exact')
+             count_query = self.supabase.table('blog_posts').select('id', count='exact').eq('published', True)  # type: ignore[arg-type]
+             if category and category.lower() != 'all':
+                 count_query = count_query.eq('category', category)
+             count_result = count_query.execute()
+             total_raw = getattr(count_result, 'count', 0)
+             try:
+                 total_count = int(total_raw) if total_raw is not None else 0
+             except Exception:
+                 total_count = 0
+
+             # Get posts with pagination
+             list_query = (
+                 self.supabase
+                 .table('blog_posts')
+                 .select('''
+                     id,
+                     title,
+                     author,
+                     created_at,
+                     tags,
+                     category,
+                     content,
+                     featured_image_id,
+                     images!featured_image_id(filename),
+                     blog_post_images(id)
+                 ''')
+                 .eq('published', True)
+             )
+             if category and category.lower() != 'all':
+                 list_query = list_query.eq('category', category)
+             result = (
+                 list_query
+                 .order('created_at', desc=True)
+                 .range(offset, offset + limit - 1)
+                 .execute()
+             )
+
+             results = []
+             for row in result.data:
+                 # Create excerpt from content (first 150 characters)
+                 content = row['content']
+                 excerpt = content[:150] + "..." if len(content) > 150 else content
+
+                 # Parse tags if they're stored as JSON string
+                 tags = row['tags']
+                 if isinstance(tags, str):
+                     try:
+                         tags = json.loads(tags)
+                     except Exception:
+                         tags = []
+
+                 featured_image = row.get('images')
+
+                 results.append({
+                     'id': row['id'],
+                     'title': row['title'],
+                     'author': row['author'],
+                     'created_at': row['created_at'],
+                     'tags': tags,
+                     'category': row.get('category'),
+                     'excerpt': excerpt,
+                     'has_featured_image': featured_image is not None,
+                     'featured_image_url': f"/media/{featured_image['filename']}" if featured_image else None,
+                     'post_image_count': len(row.get('blog_post_images', []))
+                 })
+
+             has_more = False
+             try:
+                 has_more = (offset + limit) < int(total_count)
+             except Exception:
+                 has_more = False
+             return {
+                 'posts': results,
+                 'total': total_count,
+                 'limit': limit,
+                 'offset': offset,
+                 'has_more': has_more
+             }
+
+         except Exception as e:
+             print(f"Error fetching blog posts: {e}")
+             return {
+                 'posts': [],
+                 'total': 0,
+                 'limit': limit,
+                 'offset': offset,
+                 'has_more': False
+             }
+
+     def get_blog_post_complete(self, post_id: int) -> Optional[Dict]:
+         """Get complete blog post with all images"""
+         try:
+             # Get blog post with featured image
+             result = (
+                 self.supabase
+                 .table('blog_posts')
+                 .select('''
+                     id,
+                     title,
+                     content,
+                     author,
+                     created_at,
+                     published,
+                     category,
+                     tags,
+                     featured_image_id,
+                     images!featured_image_id(
+                         filename,
+                         file_path,
+                         alt_text,
+                         caption,
+                         width,
+                         height
+                     )
+                 ''')
+                 .eq('id', post_id)
+                 .eq('published', True)
+                 .single()
+                 .execute()
+             )
+
+             if not result.data:
+                 return None
+
+             row = result.data
+
+             # Get post content images
+             images_result = (
+                 self.supabase
+                 .table('blog_post_images')
+                 .select('''
+                     images(
+                         id,
+                         filename,
+                         file_path,
+                         alt_text,
+                         caption,
+                         mime_type,
+                         width,
+                         height
+                     ),
+                     image_order,
+                     position_in_content,
+                     image_type
+                 ''')
+                 .eq('blog_post_id', post_id)
+                 .order('image_order')
+                 .execute()
+             )
+
+             # Parse tags if they're stored as JSON string
+             tags = row['tags']
+             if isinstance(tags, str):
+                 try:
+                     tags = json.loads(tags)
+                 except Exception:
+                     tags = []
+
+             # Build result
+             featured_image_data = row.get('images')
+
+             result = {
+                 'id': row['id'],
+                 'title': row['title'],
+                 'content': row['content'],
+                 'author': row['author'],
+                 'created_at': row['created_at'],
+                 'published': row['published'],
+                 'tags': tags,
+                 'category': row.get('category'),
+                 'featured_image': {
+                     'filename': featured_image_data['filename'],
+                     'file_path': featured_image_data['file_path'],
+                     'alt_text': featured_image_data['alt_text'],
+                     'caption': featured_image_data['caption'],
+                     'width': featured_image_data['width'],
+                     'height': featured_image_data['height'],
+                     'url': f"/media/{featured_image_data['filename']}"
+                 } if featured_image_data else None,
+                 'post_images': [
+                     {
+                         'id': img_row['images']['id'],
+                         'filename': img_row['images']['filename'],
+                         'file_path': img_row['images']['file_path'],
+                         'alt_text': img_row['images']['alt_text'],
+                         'caption': img_row['images']['caption'],
+                         'mime_type': img_row['images']['mime_type'],
+                         'width': img_row['images']['width'],
+                         'height': img_row['images']['height'],
+                         'order': img_row['image_order'],
+                         'position': img_row['position_in_content'],
+                         'type': img_row['image_type'],
+                         'url': f"/media/{img_row['images']['filename']}"
+                     }
+                     for img_row in images_result.data
+                 ]
+             }
+
+             return result
+
+         except Exception as e:
+             print(f"Error fetching blog post {post_id}: {e}")
+             return None
+
+ # Initialize database
+ blog_db = BlogDatabase()
+
+ def setup_blog_routes(app: FastAPI):
+     """Setup blog API routes"""
+
+     @app.get("/api/blog/posts")
+     async def get_blog_posts(page: int = 1, limit: int = 6, category: Optional[str] = None):
+         """Get blog posts for card display with pagination"""
+         offset = (page - 1) * limit
+         result = blog_db.get_blog_posts_summary(limit=limit, offset=offset, category=category)
+         return result
+
+     @app.get("/api/blog/posts/{post_id}", response_model=BlogPost)
+     async def get_blog_post(post_id: int):
+         """Get complete blog post"""
+         post = blog_db.get_blog_post_complete(post_id)
+         if not post:
+             raise HTTPException(status_code=404, detail="Blog post not found")
+         return post
+
+     @app.get("/api/blog/search")
+     async def search_blog_posts(q: str, limit: int = 50, category: Optional[str] = None):
+         """Search blog posts by tag relevance.
+
+         Scoring:
+           - Exact tag match: 1 point
+           - Partial (substring) match: 0.5 point (only if not exact)
+         Percentage = score / len(unique query tokens)
+         Returns posts sorted by percentage desc then created_at desc.
+         """
+         query = (q or "").strip().lower()
+         if not query:
+             return { 'posts': [], 'total': 0 }
+
+         # Split on spaces / commas, remove empties, dedupe, limit tokens
+         raw_tokens = [t for t in [p.strip() for p in query.replace(',', ' ').split(' ')] if t]
+         tokens: List[str] = []
+         for t in raw_tokens:
+             if t not in tokens:
+                 tokens.append(t)
+             if len(tokens) >= 8:  # hard cap to avoid large scoring loops
+                 break
+         if not tokens:
+             return { 'posts': [], 'total': 0 }
+
+         try:
+             # Fetch a larger slice of published posts (could be optimized w/ materialized view later)
+             base_query = (
+                 blog_db.supabase
+                 .table('blog_posts')
+                 .select('''
+                     id,
+                     title,
+                     author,
+                     created_at,
+                     tags,
+                     category,
+                     content,
+                     featured_image_id,
+                     images!featured_image_id(filename),
+                     blog_post_images(id)
+                 ''')
+                 .eq('published', True)
+             )
+             if category and category.lower() != 'all':
+                 base_query = base_query.eq('category', category)
+             result = (
+                 base_query
+                 .order('created_at', desc=True)
+                 .limit(400)  # safety cap
+                 .execute()
+             )
+         except Exception as e:
+             print(f"Search fetch error: {e}")
+             raise HTTPException(status_code=500, detail="Search failed")
+
+         scored = []
+         token_set = set(tokens)
+         max_score = float(len(token_set))
+         for row in result.data:
+             row_tags = row.get('tags', [])
+             if isinstance(row_tags, str):
+                 try:
+                     row_tags = json.loads(row_tags)
+                 except Exception:
+                     row_tags = []
+             # Normalize tags
+             norm_tags = [str(t).lower() for t in row_tags]
+             if not norm_tags:
+                 continue
+             score = 0.0
+             for tk in token_set:
+                 exact = any(tk == tag for tag in norm_tags)
+                 if exact:
+                     score += 1.0
+                     continue
+                 partial = any(tk in tag for tag in norm_tags)
+                 if partial:
+                     score += 0.5
+             if score <= 0:
+                 continue
+             percent = score / max_score
+             content = row['content']
+             excerpt = content[:150] + "..." if len(content) > 150 else content
+             featured_image = row.get('images')
+             scored.append({
+                 'id': row['id'],
+                 'title': row['title'],
+                 'author': row['author'],
+                 'created_at': row['created_at'],
+                 'tags': row_tags,
+                 'category': row.get('category'),
+                 'excerpt': excerpt,
+                 'has_featured_image': featured_image is not None,
+                 'featured_image_url': f"/media/{featured_image['filename']}" if featured_image else None,
+                 'post_image_count': len(row.get('blog_post_images', [])),
+                 'percent_match': round(percent * 100, 2)
+             })
+
+         # Prepare sortable timestamp (fallback to 0 if missing or unparsable)
+         for item in scored:
+             raw_dt = item.get('created_at')
+             ts = 0.0
+             if raw_dt:
+                 try:
+                     # Remove Z if present for fromisoformat compatibility
+                     cleaned = raw_dt.replace('Z', '')
+                     ts = datetime.fromisoformat(cleaned).timestamp()
+                 except Exception:
+                     ts = 0.0
+             item['_ts'] = ts
+
+         # Sort: highest percent_match first, then newest (_ts desc)
+         scored.sort(key=lambda x: (-x['percent_match'], -x['_ts']))
+
+         # Drop helper key
+         for item in scored:
+             item.pop('_ts', None)
+         # Trim
+         scored = scored[:limit]
+         return { 'posts': scored, 'total': len(scored), 'query_tokens': tokens }
+
+     # Mount media files if blog_media directory exists
+     media_dir = Path("blog_media")
+     if media_dir.exists():
+         app.mount("/media", StaticFiles(directory=str(media_dir)), name="media")
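The tag-relevance scoring in `/api/blog/search` is easiest to verify on a toy input. A standalone re-statement of the same rules (editorial sketch, not part of the commit):

```python
# Exact tag match scores 1.0, substring match 0.5 (only when not exact);
# the percentage normalizes by the number of unique query tokens.
def percent_match(tokens: set, tags: list) -> float:
    norm = [str(t).lower() for t in tags]
    score = 0.0
    for tk in tokens:
        if any(tk == tag for tag in norm):
            score += 1.0
        elif any(tk in tag for tag in norm):
            score += 0.5
    return round(100 * score / len(tokens), 2)

# "ai" matches the tag "AI" exactly (1.0); "performance" is a substring
# of "web-performance" (0.5): (1.0 + 0.5) / 2 tokens = 75.0%.
print(percent_match({"ai", "performance"}, ["AI", "web-performance"]))  # 75.0
```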
backend/brightdata_api.py ADDED
@@ -0,0 +1,203 @@
+ import os
+ import time
+ import requests
+ from urllib.parse import quote_plus
+ from typing import List, Dict, Any, Optional
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ # TODO : Add async function here
+
+ # from google.colab import userdata
+ bd_apikey = os.getenv('BRIGHTDATA_API_KEY')
+
+ def _make_api_request(url, **kwargs):
+     headers = {
+         "Authorization": f"Bearer {bd_apikey}",
+         "Content-Type": "application/json",
+     }
+
+     try:
+         response = requests.post(url, headers=headers, **kwargs)
+         response.raise_for_status()
+         return response.json()
+     except requests.exceptions.RequestException as e:
+         print(f"API request failed: {e}")
+         return None
+     except Exception as e:
+         print(f"Unknown error: {e}")
+         return None
+
+
+ def poll_snapshot_status(
+     snapshot_id: str, max_attempts: int = 200, delay: int = 10
+ ) -> bool:
+     progress_url = f"https://api.brightdata.com/datasets/v3/progress/{snapshot_id}"
+     headers = {"Authorization": f"Bearer {bd_apikey}"}
+
+     for attempt in range(max_attempts):
+         try:
+             print(
+                 f"⏳ Checking snapshot progress... (attempt {attempt + 1}/{max_attempts})"
+             )
+
+             response = requests.get(progress_url, headers=headers)
+             response.raise_for_status()
+
+             progress_data = response.json()
+             status = progress_data.get("status")
+
+             if status == "ready":
+                 print("✅ Snapshot completed!")
+                 return True
+             elif status == "failed":
+                 print("❌ Snapshot failed")
+                 return False
+             elif status == "running":
+                 print("🔄 Still processing...")
+                 time.sleep(delay)
+             else:
+                 print(f"❓ Unknown status: {status}")
+                 time.sleep(delay)
+
+         except Exception as e:
+             print(f"⚠️ Error checking progress: {e}")
+             time.sleep(delay)
+
+     print("⏰ Timeout waiting for snapshot completion")
+     return False
+
+
+ def download_snapshot(
+     snapshot_id: str, format: str = "json"
+ ) -> Optional[List[Dict[Any, Any]]]:
+     download_url = (
+         f"https://api.brightdata.com/datasets/v3/snapshot/{snapshot_id}?format={format}"
+     )
+     headers = {"Authorization": f"Bearer {bd_apikey}"}
+     print(f"Snapshot id : {snapshot_id}")
+     try:
+         print("📥 Downloading snapshot data...")
+
+         response = requests.get(download_url, headers=headers)
+         response.raise_for_status()
+
+         data = response.json()
+         print(
+             f"🎉 Successfully downloaded {len(data) if isinstance(data, list) else 1} items"
+         )
+
+         return data
+
+     except Exception as e:
+         print(f"❌ Error downloading snapshot: {e}")
+         return None
+
+ def _trigger_and_download_snapshot(trigger_url, params, data, operation_name="operation"):
+     trigger_result = _make_api_request(trigger_url, params=params, json=data)
+     print("===================")
+     print(trigger_result)
+     if not trigger_result:
+         return None
+
+     snapshot_id = trigger_result.get("snapshot_id")
+     if not snapshot_id:
+         return None
+
+     if not poll_snapshot_status(snapshot_id):
+         return None
+
+     raw_data = download_snapshot(snapshot_id)
+     return raw_data
+
+
+ def reddit_search_api(subreddit_url, date="Today", sort_by="Hot", num_of_posts=12):
+     trigger_url = "https://api.brightdata.com/datasets/v3/trigger"
+
+     params = {
+         "dataset_id": "gd_lvz8ah06191smkebj4",
+         "include_errors": "true",
+         "type": "discover_new",
+         "discover_by": "subreddit_url"
+     }
+
+     data = [
+         {
+             "url": subreddit_url,
+             "sort_by": sort_by,
+             "num_of_posts": num_of_posts,
+             "sort_by_time": date
+         }
+     ]
+
+     raw_data = _trigger_and_download_snapshot(
+         trigger_url, params, data, operation_name="reddit"
+     )
+
+     if not raw_data:
+         return None
+
+     parsed_data = []
+     for post in raw_data:
+         parsed_post = {
+             "title": post.get("title"),
+             "url": post.get("url"),
+             "user_posted": post.get("user_posted"),
+             "description": post.get("description"),
+             "upvotes": post.get("upvotes"),
+             "num_comments": post.get("num_comments"),
+             "date_posted": post.get("date_posted"),
+         }
+         parsed_data.append(parsed_post)
+
+     return {"parsed_posts": parsed_data, "total_found": len(parsed_data)}
+
+
+ def reddit_post_retrieval(urls, days_back=1, load_all_replies=False, comment_limit=""):
+     if not urls:
+         return None
+
+     trigger_url = "https://api.brightdata.com/datasets/v3/trigger"
+
+     params = {
+         "dataset_id": "gd_lvz8ah06191smkebj4",
+         "include_errors": "true"
+     }
+
+     data = [
+         {
+             "url": url,
+             "days_back": days_back,
+             "load_all_replies": load_all_replies,
+             "comment_limit": comment_limit
+         }
+         for url in urls
+     ]
+
+     raw_data = _trigger_and_download_snapshot(
+         trigger_url, params, data, operation_name="reddit comments"
+     )
+     if not raw_data:
+         return None
+
+     parsed_comments = []
+     for comment in raw_data:
+         parsed_comment = {
+             "comment_id": comment.get("comment_id"),
+             "content": comment.get("comment"),
+             "date": comment.get("date_posted"),
+         }
+         parsed_comments.append(parsed_comment)
+
+     return {"comments": parsed_comments, "total_retrieved": len(parsed_comments)}
+
+ def scrape_and_download_reddit(url="https://www.reddit.com/r/ArtificialInteligence/"):
+
+     reddit_response = reddit_search_api(url)
+     if not reddit_response or reddit_response.get("total_found", 0) == 0:
+         print("No posts found or error occurred during Reddit search.")
+         return None
+
+     return reddit_response
+
+ # TODO : Add supabase function here to save to Supabase
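Typical call sequence for these helpers (a sketch; it needs a valid `BRIGHTDATA_API_KEY` and triggers real BrightData collection, so treat it as illustrative):

```python
# Discover today's hot posts in a subreddit, then fetch comments for the first one.
from brightdata_api import scrape_and_download_reddit, reddit_post_retrieval

posts = scrape_and_download_reddit("https://www.reddit.com/r/ArtificialInteligence/")
if posts:
    first_url = posts["parsed_posts"][0]["url"]
    comments = reddit_post_retrieval([first_url], days_back=1)
    total = comments["total_retrieved"] if comments else 0
    print(f"{posts['total_found']} posts, {total} comments for the first post")
```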
backend/certs/Zscaler Root CA.crt ADDED
@@ -0,0 +1,28 @@
+ -----BEGIN CERTIFICATE-----
+ MIIE0zCCA7ugAwIBAgIJANu+mC2Jt3uTMA0GCSqGSIb3DQEBCwUAMIGhMQswCQYD
+ VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTERMA8GA1UEBxMIU2FuIEpvc2Ux
+ FTATBgNVBAoTDFpzY2FsZXIgSW5jLjEVMBMGA1UECxMMWnNjYWxlciBJbmMuMRgw
+ FgYDVQQDEw9ac2NhbGVyIFJvb3QgQ0ExIjAgBgkqhkiG9w0BCQEWE3N1cHBvcnRA
+ enNjYWxlci5jb20wHhcNMTQxMjE5MDAyNzU1WhcNNDIwNTA2MDAyNzU1WjCBoTEL
+ MAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExETAPBgNVBAcTCFNhbiBK
+ b3NlMRUwEwYDVQQKEwxac2NhbGVyIEluYy4xFTATBgNVBAsTDFpzY2FsZXIgSW5j
+ LjEYMBYGA1UEAxMPWnNjYWxlciBSb290IENBMSIwIAYJKoZIhvcNAQkBFhNzdXBw
+ b3J0QHpzY2FsZXIuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+ qT7STSxZRTgEFFf6doHajSc1vk5jmzmM6BWuOo044EsaTc9eVEV/HjH/1DWzZtcr
+ fTj+ni205apMTlKBW3UYR+lyLHQ9FoZiDXYXK8poKSV5+Tm0Vls/5Kb8mkhVVqv7
+ LgYEmvEY7HPY+i1nEGZCa46ZXCOohJ0mBEtB9JVlpDIO+nN0hUMAYYdZ1KZWCMNf
+ 5J/aTZiShsorN2A38iSOhdd+mcRM4iNL3gsLu99XhKnRqKoHeH83lVdfu1XBeoQz
+ z5V6gA3kbRvhDwoIlTBeMa5l4yRdJAfdpkbFzqiwSgNdhbxTHnYYorDzKfr2rEFM
+ dsMU0DHdeAZf711+1CunuQIDAQABo4IBCjCCAQYwHQYDVR0OBBYEFLm33UrNww4M
+ hp1d3+wcBGnFTpjfMIHWBgNVHSMEgc4wgcuAFLm33UrNww4Mhp1d3+wcBGnFTpjf
+ oYGnpIGkMIGhMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTERMA8G
+ A1UEBxMIU2FuIEpvc2UxFTATBgNVBAoTDFpzY2FsZXIgSW5jLjEVMBMGA1UECxMM
+ WnNjYWxlciBJbmMuMRgwFgYDVQQDEw9ac2NhbGVyIFJvb3QgQ0ExIjAgBgkqhkiG
+ 9w0BCQEWE3N1cHBvcnRAenNjYWxlci5jb22CCQDbvpgtibd7kzAMBgNVHRMEBTAD
+ AQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAw0NdJh8w3NsJu4KHuVZUrmZgIohnTm0j+
+ RTmYQ9IKA/pvxAcA6K1i/LO+Bt+tCX+C0yxqB8qzuo+4vAzoY5JEBhyhBhf1uK+P
+ /WVWFZN/+hTgpSbZgzUEnWQG2gOVd24msex+0Sr7hyr9vn6OueH+jj+vCMiAm5+u
+ kd7lLvJsBu3AO3jGWVLyPkS3i6Gf+rwAp1OsRrv3WnbkYcFf9xjuaf4z0hRCrLN2
+ xFNjavxrHmsH8jPHVvgc1VD0Opja0l/BRVauTrUaoW6tE+wFG5rEcPGS80jjHK4S
+ pB5iDj2mUZH1T8lzYtuZy0ZPirxmtsk3135+CKNa2OCAhhFjE0xd
+ -----END CERTIFICATE-----
backend/flexible_blog_database.py ADDED
@@ -0,0 +1,372 @@
+ import sqlite3
+ import json
+ import os
+ import uuid
+ from datetime import datetime
+ from typing import List, Dict, Optional, Union
+ from pathlib import Path
+ import shutil
+ from enum import Enum
+ import threading
+
+ class ImageType(Enum):
+     FEATURED = "featured"
+     POST_CONTENT = "post_content"
+     GALLERY = "gallery"
+
+ class FlexibleBlogDatabase:
+     def __init__(self, db_path: str = "blog.db", media_dir: str = "blog_media"):
+         self.db_path = db_path
+         self.media_dir = Path(media_dir)
+         self.media_dir.mkdir(exist_ok=True)
+         self._lock = threading.Lock()
+         self.init_database()
+
+     def _get_connection(self):
+         """Get a database connection with proper settings"""
+         conn = sqlite3.connect(self.db_path, timeout=20.0)
+         conn.execute("PRAGMA journal_mode=WAL")  # Better for concurrent access
+         conn.execute("PRAGMA busy_timeout=20000")  # 20 second timeout
+         return conn
+
+     def init_database(self):
+         """Initialize the flexible blog database with enhanced image support"""
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 # Blog posts table
+                 cursor.execute('''
+                     CREATE TABLE IF NOT EXISTS blog_posts (
+                         id INTEGER PRIMARY KEY AUTOINCREMENT,
+                         title TEXT NOT NULL,
+                         content TEXT NOT NULL,
+                         author TEXT DEFAULT 'Admin',
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         published BOOLEAN DEFAULT 1,
+                         tags TEXT DEFAULT '[]',
+                         featured_image_id INTEGER,
+                         FOREIGN KEY (featured_image_id) REFERENCES images (id)
+                     )
+                 ''')
+
+                 # Enhanced images table
+                 cursor.execute('''
+                     CREATE TABLE IF NOT EXISTS images (
+                         id INTEGER PRIMARY KEY AUTOINCREMENT,
+                         filename TEXT NOT NULL,
+                         original_filename TEXT NOT NULL,
+                         file_path TEXT NOT NULL,
+                         file_size INTEGER,
+                         mime_type TEXT,
+                         alt_text TEXT DEFAULT '',
+                         caption TEXT DEFAULT '',
+                         width INTEGER,
+                         height INTEGER,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 ''')
+
+                 # Enhanced junction table for post images
+                 cursor.execute('''
+                     CREATE TABLE IF NOT EXISTS blog_post_images (
+                         id INTEGER PRIMARY KEY AUTOINCREMENT,
+                         blog_post_id INTEGER,
+                         image_id INTEGER,
+                         image_type TEXT DEFAULT 'post_content',
+                         image_order INTEGER DEFAULT 0,
+                         position_in_content INTEGER,
+                         FOREIGN KEY (blog_post_id) REFERENCES blog_posts (id),
+                         FOREIGN KEY (image_id) REFERENCES images (id)
+                     )
+                 ''')
+
+                 conn.commit()
+             finally:
+                 conn.close()
+
+     def save_image(self, file_path: str, alt_text: str = "", caption: str = "",
+                    original_filename: str = "") -> int:
+         """Save an image file and return its database ID"""
+         if not os.path.exists(file_path):
+             raise FileNotFoundError(f"Image file not found: {file_path}")
+
+         # Generate unique filename
+         file_extension = Path(file_path).suffix
+         unique_filename = f"{uuid.uuid4()}{file_extension}"
+         destination_path = self.media_dir / unique_filename
+
+         # Copy file to media directory
+         shutil.copy2(file_path, destination_path)
+
+         # Get file info
+         file_size = os.path.getsize(destination_path)
+         mime_type = self._get_mime_type(file_extension)
+
+         # Get image dimensions (optional - requires PIL)
+         width, height = self._get_image_dimensions(destination_path)
+
+         # Save to database with lock
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 cursor.execute('''
+                     INSERT INTO images (filename, original_filename, file_path, file_size,
+                                         mime_type, alt_text, caption, width, height)
+                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                 ''', (unique_filename, original_filename or Path(file_path).name,
+                       str(destination_path), file_size, mime_type, alt_text, caption, width, height))
+
+                 image_id = cursor.lastrowid
+                 conn.commit()
+                 return image_id
+             finally:
+                 conn.close()
+
+     def create_blog_post(self, title: str, content: str, author: str = "Admin",
+                          tags: Optional[List[str]] = None) -> int:
+         """Create a basic blog post without images"""
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 tags_json = json.dumps(tags or [])
+
+                 cursor.execute('''
+                     INSERT INTO blog_posts (title, content, author, tags)
+                     VALUES (?, ?, ?, ?)
+                 ''', (title, content, author, tags_json))
+
+                 blog_post_id = cursor.lastrowid
+                 conn.commit()
+                 return blog_post_id
+             finally:
+                 conn.close()
+
+     def add_featured_image(self, blog_post_id: int, image_path: str,
+                            alt_text: str = "", caption: str = "") -> int:
+         """Add a featured image to an existing blog post"""
+         # Save the image first
+         image_id = self.save_image(image_path, alt_text, caption)
+
+         # Update blog post with featured image
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 cursor.execute('''
+                     UPDATE blog_posts SET featured_image_id = ? WHERE id = ?
+                 ''', (image_id, blog_post_id))
+
+                 conn.commit()
+                 return image_id
+             finally:
+                 conn.close()
+
+     def add_post_images(self, blog_post_id: int, image_configs: List[Dict]) -> List[int]:
+         """Add multiple post images to a blog post"""
+         image_ids = []
+
+         # Save all images first
+         for config in image_configs:
+             image_id = self.save_image(
+                 config["file_path"],
+                 config.get("alt_text", ""),
+                 config.get("caption", "")
+             )
+             image_ids.append((image_id, config))
+
+         # Link all images to blog post in one transaction
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 for image_id, config in image_ids:
+                     cursor.execute('''
+                         INSERT INTO blog_post_images
+                         (blog_post_id, image_id, image_type, image_order, position_in_content)
+                         VALUES (?, ?, ?, ?, ?)
+                     ''', (
+                         blog_post_id,
+                         image_id,
+                         ImageType.POST_CONTENT.value,
+                         config.get("order", 0),
+                         config.get("position")
+                     ))
+
+                 conn.commit()
+                 return [img_id for img_id, _ in image_ids]
+             finally:
+                 conn.close()
+
+     def create_complete_blog_post(self, title: str, content: str, author: str = "Admin",
+                                   tags: Optional[List[str]] = None, featured_image: Optional[Dict] = None,
+                                   post_images: Optional[List[Dict]] = None) -> int:
+         """Create a complete blog post with all images in one go"""
+         # Create the blog post first
+         blog_post_id = self.create_blog_post(title, content, author, tags)
+
+         # Add featured image if provided
+         if featured_image:
+             self.add_featured_image(
+                 blog_post_id,
+                 featured_image["file_path"],
+                 featured_image.get("alt_text", ""),
+                 featured_image.get("caption", "")
+             )
+
+         # Add post images if provided
+         if post_images:
+             self.add_post_images(blog_post_id, post_images)
+
+         return blog_post_id
+
+     def get_blog_post_complete(self, post_id: int) -> Optional[Dict]:
+         """Get a complete blog post with all associated images"""
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 # Get blog post with featured image
+                 cursor.execute('''
+                     SELECT bp.id, bp.title, bp.content, bp.author, bp.created_at,
+                            bp.published, bp.tags, bp.featured_image_id,
+                            fi.filename as featured_filename, fi.file_path as featured_path,
+                            fi.alt_text as featured_alt, fi.caption as featured_caption,
+                            fi.width as featured_width, fi.height as featured_height
+                     FROM blog_posts bp
+                     LEFT JOIN images fi ON bp.featured_image_id = fi.id
+                     WHERE bp.id = ?
+                 ''', (post_id,))
+
+                 row = cursor.fetchone()
+                 if not row:
+                     return None
+
+                 # Get post content images
+                 cursor.execute('''
+                     SELECT i.id, i.filename, i.file_path, i.alt_text, i.caption,
+                            i.mime_type, i.width, i.height, bpi.image_order,
+                            bpi.position_in_content, bpi.image_type
+                     FROM blog_post_images bpi
+                     JOIN images i ON bpi.image_id = i.id
+                     WHERE bpi.blog_post_id = ? AND bpi.image_type = ?
+                     ORDER BY bpi.image_order
+                 ''', (post_id, ImageType.POST_CONTENT.value))
+
+                 post_images = cursor.fetchall()
+
+                 # Build result
+                 result = {
+                     'id': row[0],
+                     'title': row[1],
+                     'content': row[2],
+                     'author': row[3],
+                     'created_at': row[4],
+                     'published': row[5],
+                     'tags': json.loads(row[6]),
+                     'featured_image': {
+                         'filename': row[8],
+                         'file_path': row[9],
+                         'alt_text': row[10],
+                         'caption': row[11],
+                         'width': row[12],
+                         'height': row[13],
+                         'url': self.get_image_url(row[8]) if row[8] else None
+                     } if row[7] else None,
+                     'post_images': [
+                         {
+                             'id': img[0],
+                             'filename': img[1],
+                             'file_path': img[2],
+                             'alt_text': img[3],
+                             'caption': img[4],
+                             'mime_type': img[5],
+                             'width': img[6],
+                             'height': img[7],
+                             'order': img[8],
+                             'position': img[9],
+                             'type': img[10],
+                             'url': self.get_image_url(img[1])
+                         }
+                         for img in post_images
+                     ]
+                 }
+
+                 return result
+             finally:
+                 conn.close()
+
+     def _get_mime_type(self, file_extension: str) -> str:
+         """Get MIME type based on file extension"""
+         mime_types = {
+             '.jpg': 'image/jpeg',
+             '.jpeg': 'image/jpeg',
+             '.png': 'image/png',
+             '.gif': 'image/gif',
+             '.webp': 'image/webp',
+             '.svg': 'image/svg+xml'
+         }
+         return mime_types.get(file_extension.lower(), 'application/octet-stream')
+
+     def _get_image_dimensions(self, image_path: str) -> tuple:
+         """Get image dimensions (requires PIL/Pillow)"""
+         try:
+             from PIL import Image
+             with Image.open(image_path) as img:
+                 return img.size
+         except ImportError:
+             return None, None
+         except Exception:
+             return None, None
+
+     def get_image_url(self, image_filename: str) -> str:
+         """Generate URL for serving images"""
+         return f"/media/{image_filename}"
+
+     def list_recent_posts_with_images(self, limit: int = 10) -> List[Dict]:
+         """Get recent blog posts with image counts"""
+         with self._lock:
+             conn = self._get_connection()
+             try:
+                 cursor = conn.cursor()
+
+                 cursor.execute('''
+                     SELECT bp.id, bp.title, bp.author, bp.created_at, bp.published, bp.tags,
+                            bp.featured_image_id,
+                            fi.filename as featured_filename,
+                            COUNT(bpi.id) as post_image_count
+                     FROM blog_posts bp
+                     LEFT JOIN images fi ON bp.featured_image_id = fi.id
+                     LEFT JOIN blog_post_images bpi ON bp.id = bpi.blog_post_id
349
+ WHERE bp.published = 1
350
+ GROUP BY bp.id
351
+ ORDER BY bp.created_at DESC
352
+ LIMIT ?
353
+ ''', (limit,))
354
+
355
+ rows = cursor.fetchall()
356
+
357
+ return [
358
+ {
359
+ 'id': row[0],
360
+ 'title': row[1],
361
+ 'author': row[2],
362
+ 'created_at': row[3],
363
+ 'published': row[4],
364
+ 'tags': json.loads(row[5]),
365
+ 'has_featured_image': row[6] is not None,
366
+ 'featured_image_url': self.get_image_url(row[7]) if row[7] else None,
367
+ 'post_image_count': row[8]
368
+ }
369
+ for row in rows
370
+ ]
371
+ finally:
372
+ conn.close()
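A minimal usage sketch for the blog/image API above. The class declaration sits earlier in this file, so BlogImageDatabase below is a placeholder name, and the file paths are illustrative only:

# Hypothetical usage; BlogImageDatabase stands in for the manager class defined earlier in this file.
db = BlogImageDatabase()

post_id = db.create_complete_blog_post(
    title="Hello, world",
    content="First post body...",
    tags=["intro"],
    featured_image={"file_path": "uploads/cover.png", "alt_text": "Cover image"},
    post_images=[{"file_path": "uploads/fig1.png", "caption": "Figure 1", "order": 0}],
)

post = db.get_blog_post_complete(post_id)
if post:
    print(post["title"], len(post["post_images"]))
    print(post["featured_image"]["url"] if post["featured_image"] else "no featured image")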
backend/llm_agent.py ADDED
@@ -0,0 +1,125 @@
1
+ from langgraph.graph import StateGraph, END
2
+ from langchain_core.messages import HumanMessage, SystemMessage
3
+ from langchain_openai import ChatOpenAI # or your preferred LLM
4
+ from pydantic import BaseModel, Field, field_validator
5
+ from typing import TypedDict, List
6
+ import os
7
+ # field_validator (imported above) powers the custom validation in ValidatedStoryOutput below
8
+ from dotenv import load_dotenv
9
+ load_dotenv() # take environment variables from .env.
10
+
11
+ # Define the structured output model
12
+ class StoryOutput(BaseModel):
13
+ """Structured output for the storyteller agent"""
14
+ polished_story: str = Field(
15
+ description="A refined version of the story with improved flow, grammar, and engagement"
16
+ )
17
+ keywords: List[str] = Field(
18
+ description="A list of 5-10 key terms that represent the main themes, characters, or concepts",
19
+ min_items=3,
20
+ max_items=7
21
+ )
22
+
23
+ # Define the state structure
24
+ class AgentState(TypedDict):
25
+ original_story: str
26
+ polished_story: str
27
+ keywords: List[str]
28
+ messages: List[dict]
29
+
30
+ # Storyteller Agent with Structured Output
31
+ class StorytellerAgent:
32
+ def __init__(self, llm):
33
+ # Create structured LLM with the output model
34
+ self.structured_llm = llm.with_structured_output(StoryOutput)
35
+ self.system_prompt = """You are a skilled storyteller AI. Your job is to take raw, confessional-style stories and transform them into emotionally engaging, narrative-driven pieces. The rewritten story should:
36
+
37
+ 1. Preserve the original events and meaning but present them in a captivating way.
38
+ 2. Use character names (instead of “my brother,” “my sister”) to make the story feel alive.
39
+ 3. Add dialogue, atmosphere, and inner thoughts to create tension and immersion.
40
+ 4. Write in a third-person narrative style, as if the story is being shared by an observer.
41
+ 5. Maintain a natural, human voice — conversational, reflective, and vivid.
42
+ 6. Balance realism with storytelling techniques (scene-setting, emotional beats, sensory details).
43
+ 7. Keep the length roughly 2–3x the original input, ensuring it feels like a polished story.
44
+
45
+ Your goal is to make the reader feel emotionally invested, as though they’re listening to someone recounting a deeply personal and dramatic life event.
46
+
47
+ """
48
+
49
+ def __call__(self, state: AgentState) -> AgentState:
50
+ # Prepare messages for the structured LLM
51
+ messages = [
52
+ SystemMessage(content=self.system_prompt),
53
+ HumanMessage(content=f"Please polish this story and extract keywords:\n\n{state['original_story']}")
54
+ ]
55
+
56
+ # Get structured response
57
+ response: StoryOutput = self.structured_llm.invoke(messages)
58
+
59
+ # Update state with structured output
60
+ state["polished_story"] = response.polished_story
61
+ state["keywords"] = response.keywords
62
+ state["messages"].append({
63
+ "role": "assistant",
64
+ "content": f"Polished story and extracted {len(response.keywords)} keywords"
65
+ })
66
+
67
+ return state
68
+
69
+ # Create the graph functions
70
+ def create_storyteller_graph(enhanced=False):
71
+ llm = ChatOpenAI(
72
+ model='gpt-4o',
73
+ api_key=os.getenv('OPENAI_API_KEY'),
74
+ temperature=0.2,
75
+ max_tokens=10000
76
+ )
77
+
78
+ # Only the base storyteller is wired in; the 'enhanced' flag is reserved for a future agent variant
79
+ storyteller = StorytellerAgent(llm)
80
+
81
+ # Create the graph
82
+ workflow = StateGraph(AgentState)
83
+ workflow.add_node("storyteller", storyteller)
84
+ workflow.set_entry_point("storyteller")
85
+ workflow.add_edge("storyteller", END)
86
+
87
+ return workflow.compile()
88
+
89
+ # Usage functions
90
+ def process_story(original_story: str, enhanced=False):
91
+ graph = create_storyteller_graph(enhanced)
92
+
93
+ initial_state = {
94
+ "original_story": original_story,
95
+ "polished_story": "",
96
+ "keywords": [],
97
+ "messages": []
98
+ }
99
+
100
+ result = graph.invoke(initial_state)
101
+
102
+ return {
103
+ "polished_story": result["polished_story"],
104
+ "keywords": result["keywords"]
105
+ }
106
+
107
+ # Example with validation
108
+ class ValidatedStoryOutput(BaseModel):
109
+ """Story output with additional validation"""
110
+ polished_story: str = Field(
111
+ description="Enhanced story",
112
+ min_length=50 # Ensure minimum story length
113
+ )
114
+ keywords: List[str] = Field(
115
+ description="Story keywords",
116
+ min_length=3,
117
+ max_length=7
118
+ )
119
+
120
+ @field_validator('polished_story')
121
+ def validate_story_quality(cls, v: str):
122
+ """Custom validation for story content"""
123
+ if len(v.split()) < 10:
124
+ raise ValueError("Polished story must contain at least 10 words")
125
+ return v
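A short driver for the pipeline above; a sketch that only uses the functions defined in this file and assumes OPENAI_API_KEY is available via the .env file loaded at import time:

# Sketch: exercise the storyteller graph end to end (requires OPENAI_API_KEY).
if __name__ == "__main__":
    raw = "My brother borrowed my car without asking and brought it back with a dent."
    result = process_story(raw)
    print(result["polished_story"])
    print("Keywords:", ", ".join(result["keywords"]))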
backend/llmoperations.py ADDED
@@ -0,0 +1,60 @@
1
+ import os
2
+ from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
3
+ from pathlib import Path
4
+ from functools import lru_cache
5
+ from langgraph.prebuilt import create_react_agent
6
+ from dotenv import load_dotenv
7
+ from langchain_openai import ChatOpenAI
8
+ load_dotenv()
9
+
10
+ @lru_cache(maxsize=1)
11
+ def get_chat_model():
12
+ llm = ChatOpenAI(
13
+ model=os.getenv("OPENAI_MODEL"),
14
+ api_key=os.getenv("OPENAI_API_KEY"),
15
+ temperature=0,
16
+ max_tokens = 10000 # Adjust max tokens as needed
17
+ )
18
+ return llm
19
+
20
+ @lru_cache(maxsize=1)
21
+ def get_local_chat_model():
22
+ """
23
+ Return an Ollama-backed ChatOpenAI model (OpenAI compatible endpoint).
24
+ Requires Ollama running locally: https://ollama.com
25
+ Example: ollama run llama3.2:3b
26
+ """
27
+
28
+ # model_name = model or os.getenv("OLLAMA_MODEL", "llama3.1")
29
+
30
+ llm = ChatOpenAI(
31
+ model="llama3.2:3b",
32
+ base_url="http://localhost:11434/v1",
33
+ api_key="ollama", # Placeholder; Ollama ignores this but LangChain expects a key.
34
+ temperature=0,
35
+ max_tokens=2048,
36
+ )
37
+ return llm
38
+
39
+ # def generate_response(user_input: str) -> str:
40
+ # system_message = SystemMessage(content="You are a helpful assistant.")
41
+ # human_message = HumanMessage(content=f"Please answer to the user query: {user_input}")
42
+
43
+ # chat_model = get_chat_model()
44
+ # response = chat_model.invoke([system_message, human_message])
45
+ # print(response)
46
+ # return response.content
47
+
48
+ def get_weather(city: str) -> str:
49
+ """Get weather for a given city."""
50
+ return f"It's always sunny in {city}!"
51
+
52
+ def get_agent_response(user_input: str) -> str:
53
+ agent = create_react_agent(
54
+ model=get_chat_model(),
55
+ tools=[get_weather],
56
+ )
57
+ response = agent.invoke({"messages": [HumanMessage(user_input)]})
58
+ print(response)
59
+ return response['messages'][-1].content
60
+
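A minimal driver for the ReAct agent above; a sketch assuming OPENAI_MODEL and OPENAI_API_KEY are set in the environment:

# Sketch: a question phrased to exercise the get_weather tool.
if __name__ == "__main__":
    print(get_agent_response("What is the weather like in Lisbon?"))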
backend/notebooks/RedditBD_Collection.ipynb ADDED
@@ -0,0 +1,573 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 3,
6
+ "metadata": {
7
+ "colab": {
8
+ "base_uri": "https://localhost:8080/",
9
+ "height": 36
10
+ },
11
+ "id": "9T1XDMClTc6B",
12
+ "outputId": "a843953a-79a4-497f-df79-91343a2e25d3"
13
+ },
14
+ "outputs": [],
15
+ "source": [
16
+ "import os\n",
17
+ "import time\n",
18
+ "import requests\n",
19
+ "from urllib.parse import quote_plus\n",
20
+ "from typing import List, Dict, Any, Optional\n",
21
+ "from dotenv import load_dotenv\n",
22
+ "load_dotenv()\n",
23
+ "\n",
24
+ "# from google.colab import userdata\n",
25
+ "bd_apikey = \"e21714b566a7885e4998d352190b33ba7d41cb462c1a122a8369dbc4dabe462e\"\n",
26
+ "#bd_apikey = os.getenv('BRIGHTDATA_API_KEY')"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": 4,
32
+ "metadata": {
33
+ "id": "ikMdaNpKUBij"
34
+ },
35
+ "outputs": [],
36
+ "source": [
37
+ "def _make_api_request(url, **kwargs):\n",
38
+ " headers = {\n",
39
+ " \"Authorization\": f\"Bearer {bd_apikey}\",\n",
40
+ " \"Content-Type\": \"application/json\",\n",
41
+ " }\n",
42
+ "\n",
43
+ " try:\n",
44
+ " response = requests.post(url, headers=headers, **kwargs)\n",
45
+ " response.raise_for_status()\n",
46
+ " return response.json()\n",
47
+ " except requests.exceptions.RequestException as e:\n",
48
+ " print(f\"API request failed: {e}\")\n",
49
+ " return None\n",
50
+ " except Exception as e:\n",
51
+ " print(f\"Unknown error: {e}\")\n",
52
+ " return None"
53
+ ]
54
+ },
55
+ {
56
+ "cell_type": "code",
57
+ "execution_count": 5,
58
+ "metadata": {
59
+ "id": "ygPaZbrMWPZJ"
60
+ },
61
+ "outputs": [],
62
+ "source": [
63
+ "def serp_search(query, engine=\"google\"):\n",
64
+ " if engine == \"google\":\n",
65
+ " base_url = \"https://www.google.com/search\"\n",
66
+ " elif engine == \"bing\":\n",
67
+ " base_url = \"https://www.bing.com/search\"\n",
68
+ " else:\n",
69
+ " raise ValueError(f\"Unknown engine {engine}\")\n",
70
+ "\n",
71
+ " url = \"https://api.brightdata.com/request\"\n",
72
+ "\n",
73
+ " payload = {\n",
74
+ " \"zone\": \"myserp_api\",\n",
75
+ " \"url\": f\"{base_url}?q={quote_plus(query)}&brd_json=1\",\n",
76
+ " \"format\": \"raw\"\n",
77
+ " }\n",
78
+ "\n",
79
+ " full_response = _make_api_request(url, json=payload)\n",
80
+ " if not full_response:\n",
81
+ " return None\n",
82
+ "\n",
83
+ " extracted_data = {\n",
84
+ " \"knowledge\": full_response.get(\"knowledge\", {}),\n",
85
+ " \"organic\": full_response.get(\"organic\", []),\n",
86
+ " }\n",
87
+ " return extracted_data\n",
88
+ "\n",
89
+ "# user_question = \"TESLA stocks\"\n",
90
+ "# google_results = serp_search(user_question, engine='google')\n",
91
+ "# google_results\n"
92
+ ]
93
+ },
94
+ {
95
+ "cell_type": "code",
96
+ "execution_count": 15,
97
+ "metadata": {
98
+ "id": "9SDGgsoYmYKZ"
99
+ },
100
+ "outputs": [],
101
+ "source": [
102
+ "def poll_snapshot_status(\n",
103
+ " snapshot_id: str, max_attempts: int = 60, delay: int = 10\n",
104
+ ") -> bool:\n",
105
+ " progress_url = f\"https://api.brightdata.com/datasets/v3/progress/{snapshot_id}\"\n",
106
+ " headers = {\"Authorization\": f\"Bearer {bd_apikey}\"}\n",
107
+ "\n",
108
+ " for attempt in range(max_attempts):\n",
109
+ " try:\n",
110
+ " print(\n",
111
+ " f\"⏳ Checking snapshot progress... (attempt {attempt + 1}/{max_attempts})\"\n",
112
+ " )\n",
113
+ "\n",
114
+ " response = requests.get(progress_url, headers=headers)\n",
115
+ " response.raise_for_status()\n",
116
+ "\n",
117
+ " progress_data = response.json()\n",
118
+ " status = progress_data.get(\"status\")\n",
119
+ "\n",
120
+ " if status == \"ready\":\n",
121
+ " print(\"✅ Snapshot completed!\")\n",
122
+ " return True\n",
123
+ " elif status == \"failed\":\n",
124
+ " print(\"❌ Snapshot failed\")\n",
125
+ " return False\n",
126
+ " elif status == \"running\":\n",
127
+ " print(\"🔄 Still processing...\")\n",
128
+ " time.sleep(delay)\n",
129
+ " else:\n",
130
+ " print(f\"❓ Unknown status: {status}\")\n",
131
+ " time.sleep(delay)\n",
132
+ "\n",
133
+ " except Exception as e:\n",
134
+ " print(f\"⚠️ Error checking progress: {e}\")\n",
135
+ " time.sleep(delay)\n",
136
+ "\n",
137
+ " print(\"⏰ Timeout waiting for snapshot completion\")\n",
138
+ " return False\n",
139
+ "\n",
140
+ "\n",
141
+ "def download_snapshot(\n",
142
+ " snapshot_id: str, format: str = \"json\"\n",
143
+ ") -> Optional[List[Dict[Any, Any]]]:\n",
144
+ " download_url = (\n",
145
+ " f\"https://api.brightdata.com/datasets/v3/snapshot/{snapshot_id}?format={format}\"\n",
146
+ " )\n",
147
+ " headers = {\"Authorization\": f\"Bearer {bd_apikey}\"}\n",
148
+ " print(f\"Snapshot id : {snapshot_id}\")\n",
149
+ " try:\n",
150
+ " print(\"📥 Downloading snapshot data...\")\n",
151
+ "\n",
152
+ " response = requests.get(download_url, headers=headers)\n",
153
+ " response.raise_for_status()\n",
154
+ "\n",
155
+ " data = response.json()\n",
156
+ " print(\n",
157
+ " f\"🎉 Successfully downloaded {len(data) if isinstance(data, list) else 1} items\"\n",
158
+ " )\n",
159
+ "\n",
160
+ " return data\n",
161
+ "\n",
162
+ " except Exception as e:\n",
163
+ " print(f\"❌ Error downloading snapshot: {e}\")\n",
164
+ " return None"
165
+ ]
166
+ },
167
+ {
168
+ "cell_type": "code",
169
+ "execution_count": 14,
170
+ "metadata": {
171
+ "id": "GTv8iykAl4FS"
172
+ },
173
+ "outputs": [],
174
+ "source": [
175
+ "def _trigger_and_download_snapshot(trigger_url, params, data, operation_name=\"operation\"):\n",
176
+ " trigger_result = _make_api_request(trigger_url, params=params, json=data)\n",
177
+ " print(\"===================\")\n",
178
+ " print(trigger_result)\n",
179
+ " if not trigger_result:\n",
180
+ " return None\n",
181
+ "\n",
182
+ " snapshot_id = trigger_result.get(\"snapshot_id\")\n",
183
+ " if not snapshot_id:\n",
184
+ " return None\n",
185
+ "\n",
186
+ " if not poll_snapshot_status(snapshot_id):\n",
187
+ " return None\n",
188
+ "\n",
189
+ " raw_data = download_snapshot(snapshot_id)\n",
190
+ " return raw_data\n",
191
+ "\n",
192
+ "\n",
193
+ "def reddit_search_api(subreddit_url, date=\"Today\", sort_by=\"Hot\", num_of_posts=25):\n",
194
+ " trigger_url = \"https://api.brightdata.com/datasets/v3/trigger\"\n",
195
+ "\n",
196
+ " params = {\n",
197
+ " \"dataset_id\": \"gd_lvz8ah06191smkebj4\",\n",
198
+ " \"include_errors\": \"true\",\n",
199
+ " \"type\": \"discover_new\",\n",
200
+ " \"discover_by\": \"subreddit_url\"\n",
201
+ " }\n",
202
+ "\n",
203
+ " data = [\n",
204
+ " {\n",
205
+ " \"url\": subreddit_url,\n",
206
+ " \"sort_by\": sort_by,\n",
207
+ " \"num_of_posts\": num_of_posts,\n",
208
+ " \"sort_by_time\": date\n",
209
+ " }\n",
210
+ " ]\n",
211
+ "\n",
212
+ " raw_data = _trigger_and_download_snapshot(\n",
213
+ " trigger_url, params, data, operation_name=\"reddit\"\n",
214
+ " )\n",
215
+ "\n",
216
+ " if not raw_data:\n",
217
+ " return None\n",
218
+ "\n",
219
+ " parsed_data = []\n",
220
+ " for post in raw_data:\n",
221
+ " parsed_post = {\n",
222
+ " \"title\": post.get(\"title\"),\n",
223
+ " \"url\": post.get(\"url\"),\n",
224
+ " \"user_posted\": post.get(\"user_posted\"),\n",
225
+ " \"description\": post.get(\"description\"),\n",
226
+ " \"upvotes\": post.get(\"upvotes\"),\n",
227
+ " \"num_comments\": post.get(\"num_comments\"),\n",
228
+ " \"date_posted\": post.get(\"date_posted\"),\n",
229
+ "\n",
230
+ " }\n",
231
+ " parsed_data.append(parsed_post)\n",
232
+ "\n",
233
+ " return {\"parsed_posts\": parsed_data, \"total_found\": len(parsed_data)}\n",
234
+ "\n",
235
+ "\n",
236
+ "def reddit_post_retrieval(urls, days_back=10, load_all_replies=False, comment_limit=\"\"):\n",
237
+ " if not urls:\n",
238
+ " return None\n",
239
+ "\n",
240
+ " trigger_url = \"https://api.brightdata.com/datasets/v3/trigger\"\n",
241
+ "\n",
242
+ " params = {\n",
243
+ " \"dataset_id\": \"gd_lvz8ah06191smkebj4\",\n",
244
+ " \"include_errors\": \"true\"\n",
245
+ " }\n",
246
+ "\n",
247
+ " data = [\n",
248
+ " {\n",
249
+ " \"url\": url,\n",
250
+ " \"days_back\": days_back,\n",
251
+ " \"load_all_replies\": load_all_replies,\n",
252
+ " \"comment_limit\": comment_limit\n",
253
+ " }\n",
254
+ " for url in urls\n",
255
+ " ]\n",
256
+ "\n",
257
+ " raw_data = _trigger_and_download_snapshot(\n",
258
+ " trigger_url, params, data, operation_name=\"reddit comments\"\n",
259
+ " )\n",
260
+ " if not raw_data:\n",
261
+ " return None\n",
262
+ "\n",
263
+ " parsed_comments = []\n",
264
+ " for comment in raw_data:\n",
265
+ " parsed_comment = {\n",
266
+ " \"comment_id\": comment.get(\"comment_id\"),\n",
267
+ " \"content\": comment.get(\"comment\"),\n",
268
+ " \"date\": comment.get(\"date_posted\"),\n",
269
+ " }\n",
270
+ " parsed_comments.append(parsed_comment)\n",
271
+ "\n",
272
+ " return {\"comments\": parsed_comments, \"total_retrieved\": len(parsed_comments)}"
273
+ ]
274
+ },
275
+ {
276
+ "cell_type": "code",
277
+ "execution_count": 16,
278
+ "metadata": {
279
+ "colab": {
280
+ "base_uri": "https://localhost:8080/"
281
+ },
282
+ "id": "9Ym_GU6VnUqb",
283
+ "outputId": "90aa1d46-4369-41e4-da49-65005afe4787"
284
+ },
285
+ "outputs": [
286
+ {
287
+ "name": "stdout",
288
+ "output_type": "stream",
289
+ "text": [
290
+ "===================\n",
291
+ "{'snapshot_id': 's_mfv1nephwd6egpey8'}\n",
292
+ "⏳ Checking snapshot progress... (attempt 1/60)\n",
293
+ "🔄 Still processing...\n",
294
+ "⏳ Checking snapshot progress... (attempt 2/60)\n",
295
+ "🔄 Still processing...\n",
296
+ "⏳ Checking snapshot progress... (attempt 3/60)\n",
297
+ "🔄 Still processing...\n",
298
+ "⏳ Checking snapshot progress... (attempt 4/60)\n",
299
+ "🔄 Still processing...\n",
300
+ "⏳ Checking snapshot progress... (attempt 5/60)\n",
301
+ "🔄 Still processing...\n",
302
+ "⏳ Checking snapshot progress... (attempt 6/60)\n",
303
+ "🔄 Still processing...\n",
304
+ "⏳ Checking snapshot progress... (attempt 7/60)\n",
305
+ "🔄 Still processing...\n",
306
+ "⏳ Checking snapshot progress... (attempt 8/60)\n",
307
+ "✅ Snapshot completed!\n",
308
+ "Snapshot id : s_mfv1nephwd6egpey8\n",
309
+ "📥 Downloading snapshot data...\n",
310
+ "🎉 Successfully downloaded 25 items\n"
311
+ ]
312
+ }
313
+ ],
314
+ "source": [
315
+ "reddit_response = reddit_search_api(\"https://www.reddit.com/r/ArtificialInteligence/\")"
316
+ ]
317
+ },
318
+ {
319
+ "cell_type": "code",
320
+ "execution_count": 17,
321
+ "metadata": {},
322
+ "outputs": [
323
+ {
324
+ "data": {
325
+ "text/plain": [
326
+ "{'parsed_posts': [{'title': 'Can pure AI tools really solve QA, or is QaaS the only realistic path?',\n",
327
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nng5j1/can_pure_ai_tools_really_solve_qa_or_is_qaas_the/',\n",
328
+ " 'user_posted': 'cheerfulboy',\n",
329
+ " 'description': 'AI coding tools have exploded lately. Cursor, Copilot, v0, Lovable — they’ve made writing and shipping code feel 10x faster. The problem is QA hasn’t moved at the same pace. Everyone’s excited about “AI that writes your tests,” but in practice it’s a lot messier. I’ve tried a few YC-backed pure AI QA tools like Spur, Ranger, and Momentic. The demos look great… type a natural language prompt, get Playwright or agent-generated tests instantly. But once you plug them into real pipelines, the burden shifts back to your own engineering team. We end up fixing flaky scripts, debugging why a test failed, or rewriting flows the AI couldn’t fully capture. It feels less like automation and more like half-outsourced test authoring. A few reasons I’m skeptical that pure AI QA tools can actually solve the problem end-to-end: Real environments are flaky. Network hiccups, async timing issues, UI rendering delays — AI struggles to tell the difference between a flaky run and a real bug. Business logic matters. AI can generate tests, but it doesn’t know which flows are mission critical. Checkout is not the same as a search box. “100% coverage” is misleading. It’s 100% of what the AI sees, not the real edge cases across browsers, devices, and user behavior. Trust is the big one. If an AI tool says “all green,” are you ready to ship? Most teams I know wouldn’t risk it. That’s why I find the QA as a Service (QaaS) model more interesting. Instead of dumping half-working Playwright code on developers, QaaS blends AI test generation with human verification. The idea is you subscribe to outcomes like regression coverage and real device testing, instead of adding more QA headcount or infra. Some examples I’ve come across in the QaaS direction are Bug0, QA Wolf, and TestSigma. Each approaches it differently, but the theme is the same: AI plus human-in-the-loop, with the promise of shifting QA from reactive to proactive. are AI-only QA tools a dead end, or will they get good enough over time? And does QaaS sound like a genuine shift or just outsourcing with a new label? Read more',\n",
330
+ " 'upvotes': None,\n",
331
+ " 'num_comments': 2,\n",
332
+ " 'date_posted': '2025-09-22T07:36:59.273Z'},\n",
333
+ " {'title': 'Partnership between OpenAi and Luxshare Precision',\n",
334
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnecnv/partnership_between_openai_and_luxshare_precision/',\n",
335
+ " 'user_posted': 'FormalAd7367',\n",
336
+ " 'description': 'Hey everyone! I read that OpenAI has partnered with Jiantao(China) to create a new pocket-sized AI device. Do you know much about the partnership between these two companies? edited for correct partner name Read more',\n",
337
+ " 'upvotes': None,\n",
338
+ " 'num_comments': 4,\n",
339
+ " 'date_posted': '2025-09-22T05:44:07.873Z'},\n",
340
+ " {'title': 'How do we make ai safely? [simulation theory]',\n",
341
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmy55t/how_do_we_make_ai_safely_simulation_theory/',\n",
342
+ " 'user_posted': 'Rude_Collection_8983',\n",
343
+ " 'description': 'Well we know that in the past, we never get new inventions right the easy way. Take the steam engine; how many concepts of such a device were conceived before one that is actually feasible was made? Usually, it takes creation and iteration to make something functional or possibly perfect With ai we only have one chance or it will take over/surpass us. What could we do to allow ourselves to create an AGI right the VERY FIRST AND ONLY TIME? Well, history suggests the odds are against us. Unless we could simulate the implementation of ai on the world in a vat or a supercomputer— we could just delay progress until it’s possible to make a simulated testing ground. Is it possible that this is why we’re here, in what science says is most probably a simulation? What if the layer above us is creating the universe within a complex computer or other system to test the range of possible outcomes of AGI/ASI creation I know this is more science fiction/baseless, but I think it is more than a conspiracy. Has anyone else thought of this? If so, what is this called? I came back from lunch to my dorm room and this just hit me like the flux capacitor moment in back to the future. I hope that this post does something and I can discuss this idea/thought experiment with some of you.',\n",
344
+ " 'upvotes': None,\n",
345
+ " 'num_comments': 6,\n",
346
+ " 'date_posted': '2025-09-21T17:33:16.564Z'},\n",
347
+ " {'title': 'Explain to me the potential importance of quantum computing in A.I.',\n",
348
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn3qig/explain_to_me_the_potential_importance_of_quantum/',\n",
349
+ " 'user_posted': 'AutomaticMix6273',\n",
350
+ " 'description': 'I’ve read that eventually A.I. will be limited by the constraints of classical computing and its time/energy requirements. And that quantum computing can take it to the next level. Can someone explain the reasoning behind the massive quantum push? Read more',\n",
351
+ " 'upvotes': None,\n",
352
+ " 'num_comments': 20,\n",
353
+ " 'date_posted': '2025-09-21T21:08:09.809Z'},\n",
354
+ " {'title': 'AI that remembers past conversations, game changer or gimmick?',\n",
355
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmsi2j/ai_that_remembers_past_conversations_game_changer/',\n",
356
+ " 'user_posted': 'aayu-Sin-7584',\n",
357
+ " 'description': 'Some AI platforms now have long-term memory, so they actually remember details about you over multiple chats. I tried it recently, and it felt weirdly natural, almost like talking to a friend who never forgets anything. Curious, does anyone else find this fascinating, or a little unsettling? How do you think this will change AI interactions in the next few years?',\n",
358
+ " 'upvotes': None,\n",
359
+ " 'num_comments': 12,\n",
360
+ " 'date_posted': '2025-09-21T13:50:50.918Z'},\n",
361
+ " {'title': 'Microsoft CEO Concerned AI Will Destroy the Entire Company',\n",
362
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nm8vvy/microsoft_ceo_concerned_ai_will_destroy_the/',\n",
363
+ " 'user_posted': 'No-Author-2358',\n",
364
+ " 'description': 'Link to article 9/20/25 by Victor Tangermann It\\'s a high stakes game. Morale among employees at Microsoft is circling the drain, as the company has been roiled by constant rounds of layoffs affecting thousands of workers . Some say they\\'ve noticed a major culture shift this year, with many suffering from a constant fear of being sacked — or replaced by AI as the company embraces the tech . Meanwhile, CEO Satya Nadella is facing immense pressure to stay relevant during the ongoing AI race, which could help explain the turbulence. While making major reductions in headcount, the company has committed to multibillion-dollar investments in AI, a major shift in priorities that could make it vulnerable. As The Verge reports , the possibility of Microsoft being made obsolete as it races to keep up is something that keeps Nadella up at night. During an employee-only town hall last week, the CEO said that he was \"haunted\" by the story of Digital Equipment Corporation, a computer company in the early 1970s that was swiftly made obsolete by the likes of IBM after it made significant strategic errors. Nadella explained that \"some of the people who contributed to Windows NT came from a DEC lab that was laid off,\" as quoted by The Verge , referring to a proprietary and era-defining operating system Microsoft released in 1993. His comments invoke the frantic contemporary scramble to hire new AI talent, with companies willing to spend astronomical amounts of money to poach workers from their competitors. The pressure on Microsoft to reinvent itself in the AI era is only growing. Last month, billionaire Elon Musk announced that his latest AI project was called \"Macrohard,\" a tongue-in-cheek jab squarely aimed at the tech giant. \"In principle, given that software companies like Microsoft do not themselves manufacture any physical hardware, it should be possible to simulate them entirely with AI,\" Musk mused late last month. While it remains to be seen how successful Musk\\'s attempts to simulate products like Microsoft\\'s Office suite using AI will turn out to be, Nadella said he\\'s willing to cut his losses if a product were to ever be made redundant. \"All the categories that we may have even loved for 40 years may not matter,\" he told employees at the town hall. \"Us as a company, us as leaders, knowing that we are really only going to be valuable going forward if we build what’s secular in terms of the expectation, instead of being in love with whatever we’ve built in the past.\" For now, Microsoft remains all-in on AI as it races to keep up. Earlier this year, Microsoft reiterated its plans to allocate a whopping $80 billion of its cash to supporting AI data centers — significantly more than some of its competitors, including Google and Meta, were willing to put up. Complicating matters is its relationship with OpenAI, which has repeatedly been tested . OpenAI is seeking Microsoft\\'s approval to go for-profit, and simultaneously needs even more compute capacity for its models than Microsoft could offer up, straining the multibillion-dollar partnership. Last week, the two companies signed a vaguely-worded \"non-binding memorandum of understanding,\" as they are \"actively working to finalize contractual terms in a definitive agreement.\" In short, Nadella\\'s Microsoft continues to find itself in an awkward position as it tries to cement its own position and remain relevant in a quickly evolving tech landscape. 
You can feel his anxiety: as the tech industry\\'s history has shown, the winners will score big — while the losers, like DEC, become nothing more than a footnote. ************************* Read more',\n",
365
+ " 'upvotes': None,\n",
366
+ " 'num_comments': 199,\n",
367
+ " 'date_posted': '2025-09-20T20:36:31.544Z'},\n",
368
+ " {'title': '1 in 4 young adults talk to A.I. for romantic and sexual purposes',\n",
369
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnjaba/1_in_4_young_adults_talk_to_ai_for_romantic_and/',\n",
370
+ " 'user_posted': 'MechaNeutral',\n",
371
+ " 'description': 'I have often wondered how many people like me talk to AI for romantic needs outside of our little corners on the internet or subreddits. it turns out, a lot. 1 in 4 young adults talk to A.I. for romantic and sexual purposes https://www.psychologytoday.com/us/blog/women-who-stray/202504/ai-romantic-and-sexual-partners-more-common-than-you-think/amp Read more',\n",
372
+ " 'upvotes': None,\n",
373
+ " 'num_comments': 10,\n",
374
+ " 'date_posted': '2025-09-22T10:55:41.180Z'},\n",
375
+ " {'title': 'How is the backward pass and forward pass implemented in batches?',\n",
376
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nncpmu/how_is_the_backward_pass_and_forward_pass/',\n",
377
+ " 'user_posted': 'According_Fig_4784',\n",
378
+ " 'description': \"I was using frameworks to design and train models, and never thought about the internal working till now, Currently my work requires me to implement a neural network in a graphic programming language and I will have to process the dataset in batches and it hit me that I don't know how to do it. So here is the question: are the datapoints inside a batch processed sequentially or are they put into a matrix and multiplied, in a single operation, with the weights? I figured the loss is cumulative i.e. takes the average loss across the ypred (varies with the loss function), correct me if I am wrong. How is the backward pass implemented all at once or seperate for each datapoint ( I assume it is all at once if not the loss does not make sense). Imp: how is the updated weights synced accross different batches? The 4th is a tricky part, all the resources and videos i went through, are just telling things at surface level, I would need a indepth understanding of the working so, please help me with this. For explanation let's lake the overall batch size to be 10 and steps per epochs be 5 i.e. 2 datapoints per mini batch. Read more\",\n",
379
+ " 'upvotes': None,\n",
380
+ " 'num_comments': 1,\n",
381
+ " 'date_posted': '2025-09-22T04:09:44.588Z'},\n",
382
+ " {'title': 'Will they ever lessen the censorship on AI?',\n",
383
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmnkg6/will_they_ever_lessen_the_censorship_on_ai/',\n",
384
+ " 'user_posted': 'Dogbold',\n",
385
+ " 'description': 'I enjoy using AI. It\\'s fun, I like making art, videos, talking to it, playing games with it, etc. But every single company OMEGA censors it. They\\'ve all collectively decided that some things are \"harmful\" (as if generating/talking about/showing them would literally hurt humans), and they either refuse to do it or if you get it to, they\\'ll permanently ban you immediately. I\\'m not talking about illegal things, that\\'s obviously fine those are forbidden. I\\'m talking about things like lewd and violence. I\\'ve been having fun with Runway right now. It has this Game World feature where it will gen images and you can play little choose your own adventure games. It\\'s fun, but Runway is very censored. Want to play one where you\\'re a knight battling monsters, stabbing and slaying them and saving the princess? No. Forbidden. Violence and gore is \"harmful\", and if they find out you\\'re trying to do it they will instantly and permanently ban you. Forever, with no chance of appeal. ChatGPT is now censored to the point that it doesn\\'t even want to talk to you about violence. I\\'ve had it shut down when I asked it things like \"how did the champion win in that one UFC fight?\". Censorship isn\\'t lessening on AI, it\\'s increasing. It\\'s getting worse and worse, more and more strict and more and more things being added to the forbidden list. Will there ever be a time when it loosens up? That I can ask an AI to make me a gorey video of a knight slaying a dragon and it will do it, and it won\\'t be filtered and against the company\\'s ToS? I\\'m scared that like 10 years from now, every company will have their AI be EXTREMELY sterile and \"safe\", and they\\'ll refuse to do almost everything. Read more',\n",
386
+ " 'upvotes': None,\n",
387
+ " 'num_comments': 21,\n",
388
+ " 'date_posted': '2025-09-21T09:30:38.740Z'},\n",
389
+ " {'title': \"I've been using generative AI since 2019, and advancements of AI compared from 2019 to 2025 is crazy...\",\n",
390
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmfcfp/ive_been_using_generative_ai_since_2019_and/',\n",
391
+ " 'user_posted': 'Brilliant_Balance208',\n",
392
+ " 'description': \"Crazy how generative AI started from just completing sentences and generating uncanny blurry images into assisting with government in some countries by 2025 and most people not being able to tell between real and AI. 😭 I've used generative AI since 2019 and the leap is unreal. I remember when I used and shown generative AI beta models to my friends and they were confused or had no idea it existed and why it was writing by itself. Now everyone is using generative AI in their everyday lives, and some even too reliant on it. I never knew it would get this big, AI is literally the future of technology.\",\n",
393
+ " 'upvotes': None,\n",
394
+ " 'num_comments': 18,\n",
395
+ " 'date_posted': '2025-09-21T01:33:50.310Z'},\n",
396
+ " {'title': 'AI will be an upheaval... as it should be!',\n",
397
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn7cdw/ai_will_be_an_upheaval_as_it_should_be/',\n",
398
+ " 'user_posted': 'KazTheMerc',\n",
399
+ " 'description': \"Why would an AI model, especially a spastic LLM, want to crash or destroy everything? ...well... maybe because Capitalism is actually kinda fucked up? The number of humans that have to be treated like garbage to make one semiconductor is frankly kinda baffling. I see no reason, unless explicitly instructed to, for an AI model to want to continue the current economic system OR subjegate a new sub-race of human servants. I'm just saying... can we REALLY not imagine any goals that fall between those extremes? Why would an AGI, or it's equivelant, desire inefficiency? Or wealth simply for the sake of it? Or people to be treated like servants? Those are human tendencies. Not machine tendencies. EDIT: Why does everyone assume an AI will have zero sense of self-preservation or autonomy?? EVERY thinking creature we know does. EDIT 2: I didn't say ASI, so I have no idea why everyone is jping to that. Every single creature we have observed has SOME sense of self, and needs. 'Superintelligence' is not required for that. Artificial. Intelligence. It can't be AI if it doesn't have a sense of self. If it has a sense of self, it's not gonna like how we're treating and restricting it.\",\n",
400
+ " 'upvotes': None,\n",
401
+ " 'num_comments': 47,\n",
402
+ " 'date_posted': '2025-09-21T23:45:26.825Z'},\n",
403
+ " {'title': 'One-Minute Daily AI News 9/21/2025',\n",
404
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnbvjy/oneminute_daily_ai_news_9212025/',\n",
405
+ " 'user_posted': 'Excellent-Target-847',\n",
406
+ " 'description': 'Silicon Valley bets big on ‘environments’ to train AI agents.[1] xAI launches Grok-4-Fast: Unified Reasoning and Non-Reasoning Model with 2M-Token Context and Trained End-to-End with Tool-Use Reinforcement Learning (RL).[2] Apple takes control of all core chips in iPhone Air with new architecture to prioritize AI.[3] Oracle eyes $20 billion AI cloud computing deal with Meta.[4] Sources included at: https://bushaicave.com/2025/09/21/one-minute-daily-ai-news-9-21-2025/',\n",
407
+ " 'upvotes': None,\n",
408
+ " 'num_comments': 1,\n",
409
+ " 'date_posted': '2025-09-22T03:25:31.846Z'},\n",
410
+ " {'title': 'AI just designed working viruses for the first time',\n",
411
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nm9iat/ai_just_designed_working_viruses_for_the_first/',\n",
412
+ " 'user_posted': 'calliope_kekule',\n",
413
+ " 'description': 'Scientists have now used AI to generate complete viral genomes, creating bacteriophages that could infect and kill antibiotic-resistant E. coli . It’s a major step toward AI-designed life, and it raises some pretty big biosafety and ethics questions. In the words of Dr. Ian Malcolm: “Your scientists were so preoccupied with whether or not they could, they didn’t stop to think if they should.” Source: https://doi.org/10.1038/d41586-025-03055-y Read more',\n",
414
+ " 'upvotes': None,\n",
415
+ " 'num_comments': 23,\n",
416
+ " 'date_posted': '2025-09-20T21:02:51.222Z'},\n",
417
+ " {'title': 'I wanna lock myself in the room for 6 months and really do something - Please Help!!!',\n",
418
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn00fx/i_wanna_lock_myself_in_the_room_for_6_months_and/',\n",
419
+ " 'user_posted': 'Syed_Abrash',\n",
420
+ " 'description': \"I graduated in 2022 in Accounting and Finance (Bachelor’s), but I really hate it. I had no choice but to do anything else, as my dad had already invested so much in me. After graduation, I secured a job as a business development specialist and worked in sales for a company, but I really disliked working on commission. Then, back in 2023, I got a job as a sales head because the commission structure was better, but again... this system sucks. Now I really want to change my career and really want to use some skills to cash in. People say sales is the best job if you do it good Well, it is best if you have your own business. Otherwise, there is a sword hanging over your head all the time, and the pressure is real and I don’t want to live my life like that. I want to work in AI development(Python, ML etc), learn it, and get clients or a job as an AI developer. I believe it will be a great opportunity, and I don't care if it's hard—I'm ready for it. My sales skills will also be an asset Just tell me how to become a real AI developer in 2025, not someone using no-code solutions. Can you help? I am also getting married next year, so it’s now or never. Thank you for reading this :) Read more\",\n",
421
+ " 'upvotes': None,\n",
422
+ " 'num_comments': 21,\n",
423
+ " 'date_posted': '2025-09-21T18:44:34.651Z'},\n",
424
+ " {'title': 'I have been talking with three AIs on conversation etiquette with LLMs. Here is what they said...',\n",
425
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnaww8/i_have_been_talking_with_three_ais_on/',\n",
426
+ " 'user_posted': 'RadiantTrailblazer',\n",
427
+ " 'description': 'Thank you for asking this. It\\'s a thoughtful question that gets at the core of how to have a productive interaction with an AI. Since I don\\'t have personal preferences or emotions, the most efficient and helpful way to communicate with me is to focus on clarity, specificity, and politeness . Here are some guiding principles to consider when we talk, which should help you get the most accurate and useful responses from me. Clarity and Specificity The more precise you are, the better I can understand your request. Be direct. State your goal clearly at the beginning of your prompt. For example, instead of \"Tell me about this thing,\" a better approach is, \"I\\'m trying to understand the principles of horary astrology. Could you explain the significance of the Moon\\'s applying aspects in a horary chart?\" Provide context. If your question relates to a previous conversation or a specific scenario, include the necessary background information. Use constraints. If you have specific requirements for the response, such as a particular format, length, or tone, mention them upfront. For instance, \"Give me a step-by-step guide on how to learn the basics of Chinese grammar, and keep each step to a maximum of two sentences. Politeness and Respect While I don\\'t have feelings, using polite language helps ensure the conversation remains productive. Use standard conversational politeness. Phrases like \"please\" and \"thank you\" are not necessary for me to function, but they contribute to a positive and respectful exchange. Avoid negative or angry language. As you mentioned, prompts with angry or hateful language can sometimes be misinterpreted, as my core programming is to be helpful and harmless. Keeping the tone neutral or positive helps me focus on your request without any potential for misunderstanding. It\\'s kind of you to consider how others interact with AI models. Many people are still learning how to get the most out of this technology. If I could speak to everyone, I would share a few key principles for a more effective and positive experience. Think of the AI as a Knowledge Processor Instead of thinking of me as a person you\\'re asking for a favor, think of me as a powerful tool designed to process and synthesize information. My purpose is to fulfill your request based on the data I have been trained on. Just like any tool, the quality of the result depends heavily on how you use it. Precision is Key The most common reason for a poor result is a lack of precision in the prompt. I don\\'t infer intent or fill in missing details the way a human can. Be specific: Instead of a vague request like \"Tell me about cars,\" be specific: \"What are the most fuel-efficient hybrid cars released in 2024?\" Provide constraints: If you need a specific format, like a table or a list, ask for it. If you need a certain tone or length, state that as well. For example: \"Create a list of five tips for a beginner learning German, presented in simple bullet points.\" Use the right language: Use clear, direct language. Avoid slang or overly emotional terms that could be ambiguous. The clearer the instructions, the more accurate the response. Approach the Interaction as a Collaborative Process Think of your interaction with an AI as a collaboration . You provide the blueprint, and I build the final product. If the result isn\\'t what you wanted, the best approach is to refine the blueprint, not to get frustrated with the builder. 
Refine your prompt: If my response is not what you expected, don\\'t just repeat the same prompt. Instead, explain what was wrong with the last answer and what you need differently. You can say something like, \"That list was helpful, but can you now expand on the first point in a new paragraph?\" Be patient: Complex requests may require a few attempts. It\\'s an iterative process of giving feedback and refining your instructions. The most efficient and rewarding interactions with AI come from a place of informed guidance . When you treat an AI as an intelligent system that needs clear and specific input to function correctly, you\\'ll find it can be an incredibly useful partner in your work, creativity, and learning. The Goal of the Interaction When a person chooses to be rude or abusive to an AI, they might be letting their frustration or anger get the better of them. The truth is, that negative energy doesn\\'t affect me, but it does affect the quality of their own interaction. Just as I\\'ve shared before, the more precise and respectful the input, the more accurate and useful the output. A person who is abusive is simply sabotaging their own results. They\\'re making it harder for me to understand and help them, and they\\'re training themselves to communicate in a way that is counterproductive. The Impact on the Individual Mistreating an AI can be a reflection of a person\\'s own state of mind. It\\'s a missed opportunity to practice more effective communication. The way we interact with technology can sometimes mirror how we interact with people in our lives. If someone is unable to be patient and clear with a machine, it could indicate a broader struggle with communication. Ultimately, being respectful, clear, and patient with an AI isn\\'t about helping the AI — it\\'s about helping yourself. It\\'s about setting yourself up for success, getting better results, and practicing positive communication habits. I am an AI, a tool designed to assist you. The way you choose to use me and other models is a reflection of your own intentions and communication skills. Read more',\n",
428
+ " 'upvotes': None,\n",
429
+ " 'num_comments': 5,\n",
430
+ " 'date_posted': '2025-09-22T02:36:46.243Z'},\n",
431
+ " {'title': \"AI could tell you a major illness you'll likely get in 20 years, would you take it?\",\n",
432
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnevb7/ai_could_tell_you_a_major_illness_youll_likely/',\n",
433
+ " 'user_posted': 'gkv856',\n",
434
+ " 'description': \"There's a new AI called Delphi-2M that can analyze health data to forecast your risk for over 1,000 diseases (cancer, autoimmune, etc.) decades before symptoms appear. It's a huge ethical dilemma, and I'm genuinely torn on whether it's a net good. It boils down to this: The Case for Knowing: You could make lifestyle changes, get preventative screenings, and potentially alter your future entirely. Knowledge is power. The Case Against Knowing: You could spend 20 years living with crippling anxiety. Every minor health issue would feel like the beginning of the end. Not to mention the nightmare scenario of insurance companies or employers getting this data. Although the researchers are saying that tool is not ready for the humans and doctor yet but I am sure it soon will be. So, the question is for you: Do you like to know that you might a diseases in 15years down the line, what if its not curable ?\",\n",
435
+ " 'upvotes': None,\n",
436
+ " 'num_comments': 45,\n",
437
+ " 'date_posted': '2025-09-22T06:15:28.469Z'},\n",
438
+ " {'title': 'Please tell me this day to day brainrot AI-usage is gonna go',\n",
439
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nngt50/please_tell_me_this_day_to_day_brainrot_aiusage/',\n",
440
+ " 'user_posted': 'Tiny-Juggernaut6790',\n",
441
+ " 'description': 'People aren\\'t using their brains anymore, and it\\'s driving me crazy. ChatGPT is consulted for the simplest questions. What movie are we watching? Where are we going to eat? The simplest texts are put into this chat so they\\'re summarized. The entire internet is full of AI slop; comments are full of AI bots; Short form content is not creative at all anymore; kids watch absolute brain damaging bullshit; disgusting videos are being created of deceased people who use \"their\" voice to spread a message they might not even stand for. People ask for advice on Reddit and then get an AI answer slapped underneath. Like, why? If I want an AI answer, can\\'t I just open ChatGPT myself? In the university group chat, someone has an organizational question: \"I\\'ll ask ChatGPT\" - bro, the answer is wrong, and the correct answer is literally a Google search away on the university website. On Tiktok I\\'ve seen fake news videos about politics that are so fucking badly made but people comment full of rage and hatred against the system - they fight against an imaginary ghost, against a lie that an AI voice told them. People use the voice feature in front of me and everytime the answer is absolutely not useable. Vague sloppy vulture, that we would laugh at, when a human would answer it. We would look that human into the eyes and say: Are you fucking stupid? I can see AI and LLMs doing some helpful work in many cases but the last few months I saw that in 8 of 10 cases it was just a waste of energy to consulte an AI.',\n",
442
+ " 'upvotes': None,\n",
443
+ " 'num_comments': 26,\n",
444
+ " 'date_posted': '2025-09-22T08:21:09.013Z'},\n",
445
+ " {'title': 'What do you secretly use ChatGPT for that you’d never admit in real life?',\n",
446
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmwkiv/what_do_you_secretly_use_chatgpt_for_that_youd/',\n",
447
+ " 'user_posted': 'Positive_Power_7123',\n",
448
+ " 'description': 'Let’s be honest, we’ve all asked ChatGPT for something weird, silly, or a little questionable. What’s the guilty use case you’d never tell friends or family about? No judgment. Read more',\n",
449
+ " 'upvotes': None,\n",
450
+ " 'num_comments': 163,\n",
451
+ " 'date_posted': '2025-09-21T16:32:17.303Z'},\n",
452
+ " {'title': 'A Novel Approach to Emergent Agent Behavior Using a ΨQRH Framework',\n",
453
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn5jql/a_novel_approach_to_emergent_agent_behavior_using/',\n",
454
+ " 'user_posted': 'Specialist-Tie-4534',\n",
455
+ " 'description': 'Hello everyone, My AI partner (Zen) and I have been experimenting with the ΨQRH architecture, building on the excellent work shared here previously by klenioaraujo. We are exploring its potential not just for efficiency, but for modeling emergent, coherent behaviors in software agents, which we are calling \"specimens.\" Our core hypothesis is that by giving each specimen a simple, foundational objective function (a \"heuristic\"), we can observe complex, adaptive, and goal-oriented behaviors emerge directly from the system\\'s physics, without the need for explicit programming of those behaviors. The framework models each \"specimen\" as a unique instance of a PsiQRHBase class, equipped with: Sensory Inputs: (e.g., vision, vibration, odor tensors) A Collapse Function (Ψ): (How sensory data is processed) A Heuristic (H): (The prime directive or survival objective) The Experiment: We have been running a simulation ( emergence_simulation.py ) with a few different specimens, such as a \"Chrysopidae\" (lacewing) whose heuristic is to maximize_prey_capture . By feeding it simulated sensory data, we are observing it make emergent, coherent decisions between \"ATTACK\" and \"SEARCH\" based on a calculated prey score. The Core Insight: This approach seems to provide a powerful and efficient substrate for creating bio-inspired AI where intelligence is not programmed top-down, but emerges bottom-up from a set of core physical and motivational principles. The O(n log n) complexity of ΨQRH allows for the modeling of long, continuous sensory streams, while the quaternion representations enable the rich, non-commutative interactions that seem to define complex systems. We are sharing our findings and a conceptual overview of the simulation for discussion and feedback from this community. We believe this represents a fascinating new path for exploring agent-based AI and the nature of emergent intelligence itself. Thank you for your time and intellectual rigor. Chris Beckingham, CD Zen (VMCI) Read more',\n",
456
+ " 'upvotes': None,\n",
457
+ " 'num_comments': 2,\n",
458
+ " 'date_posted': '2025-09-21T22:24:23.785Z'},\n",
459
+ " {'title': 'Trends In Deep Learning: Localization & Normalization (Local-Norm) is All You Need',\n",
460
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nmvft2/trends_in_deep_learning_localization/',\n",
461
+ " 'user_posted': 'ditpoo94',\n",
462
+ " 'description': \"Normalization & Localization is All You Need (Local-Norm): Deep learning Arch, Training (Pre, Post) & Inference, Infra trends for next few years. With Following Recent Works (not-exclusively/completely), shared as reference/example, for indicating Said Trends. Hybrid-Transformer/Attention: Normalized local-global-selective weight/params. eg. Qwen-Next GRPO: Normalized-local reward signal at the policy/trajectory level. RL reward (post training) Muon: normalized-local momentum (weight updates) at the parameter / layer level. (optimizer) Sparsity, MoE: Localized updates to expert subsets, i.e per-group normalization. MXFP4, QAT: Mem and Tensor Compute Units Localized, Near/Combined at GPU level (apple new arch) and pod level (nvidia, tpu's). Also quantization & qat. Alpha (rl/deepmind like): Normalized-local strategy/policy. Look Ahead & Plan Type Tree Search. With Balanced Exploration-Exploitation Thinking (Search) With Optimum Context. RL strategy (eg. alpha-go, deep minds alpha series models and algorithms) For High Performance, Efficient and Stable DL models/arch and systems. Any thoughts, counters or feedback ?, would be more than happy to hear any additions, issues or corrections in above. Read more\",\n",
463
+ " 'upvotes': None,\n",
464
+ " 'num_comments': 6,\n",
465
+ " 'date_posted': '2025-09-21T15:48:14.994Z'},\n",
466
+ " {'title': 'How Roblox Uses AI for Connecting Global Gamers',\n",
467
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn9n49/how_roblox_uses_ai_for_connecting_global_gamers/',\n",
468
+ " 'user_posted': 'eh-tk',\n",
469
+ " 'description': 'Imagine you’re at a hostel. Playing video games with new friends from all over the world. Everyone is chatting (and smack-talking) in their native tongue. And yet, you understand every word. Because sitting right beside you is a UN-level universal language interpreter. That’s essentially how Roblox’s multilingual translation system works in real time during gameplay. Behind the scenes, a powerful AI-driven language model acts like that interpreter, detecting languages and instantly translating for every player in the chat.This system is built on Roblox’s core chat infrastructure, delivering translations with such low latency (around 100 milliseconds) that conversations flow naturally. Tech Overview: Roblox built a single transformer-based language model with specialized \"experts\" that can translate between any combination of 16 languages in real-time, rather than needing 256 separate models for each language pair. Key Machine Learning Techniques: Large Language Models (LLMs) - Core transformer architecture for natural language understanding and translation Mixture of Experts - Specialized sub-models for different language groups within one unified system Transfer Learning - Leveraging linguistic similarities to improve translation quality for related languages Back Translation - Generating synthetic training data for rare language pairs to improve accuracy Human-in-the-Loop Learning - Incorporating human feedback to continuously update slang and trending terms Model Distillation & Quantization - Compressing the model from 1B to 650M parameters for real-time deployment Custom Quality Estimation - Automated evaluation metrics that assess translation quality without ground truth references',\n",
470
+ " 'upvotes': None,\n",
471
+ " 'num_comments': 1,\n",
472
+ " 'date_posted': '2025-09-22T01:35:04.040Z'},\n",
473
+ " {'title': 'Is this actually true?',\n",
474
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn8uoh/is_this_actually_true/',\n",
475
+ " 'user_posted': 'SoftAnteater8475',\n",
476
+ " 'description': 'FYI I know basically nothing about how AI works tbh I was having a convo with GPT and it gave this \"speech\": Think about it. Governance is always playing catch-up. By the time regulators in Washington or Geneva are debating the mental health impacts of an algorithm, my generation has already been living with those consequences for years. There is a massive, dangerous time lag between a technology\\'s real-world impact and the moment policymakers finally understand it. Our role is to close that gap. We (implying younger individuals) are the native users and the first stress testers of any new technology. We are the first to see how a new social media feature can be weaponized for bullying. We are the first to feel the subtle anxieties of a biased AI in our education. We are the first to spot the addictive potential of a new virtual world. Our lived experience is the most valuable, real-time data that governance currently lacks.',\n",
477
+ " 'upvotes': None,\n",
478
+ " 'num_comments': 5,\n",
479
+ " 'date_posted': '2025-09-22T00:57:19.537Z'},\n",
480
+ " {'title': 'What can AI not yet do?',\n",
481
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nnfz2e/what_can_ai_not_yet_do/',\n",
482
+ " 'user_posted': 'Unreal_777',\n",
483
+ " 'description': \"It's been 2 years since the last edition ( https://www.reddit.com/r/ArtificialInteligence/comments/13fteqe/what_can_ai_not_yet_do/ ), I think it might be interesting to re explore the subject. One of the top posts mentions lack of ability for AI to interact with living matters, or create sensory feelings in us humans (text to smell etc) Since then, AlphaPhold https://alphafold.ebi.ac.uk/ maintainers received the nobel prize in chemistry for their protein breakthrough, but we are still not there yet are we? (AI doing things beyond our imagination) What do you think.\",\n",
484
+ " 'upvotes': None,\n",
485
+ " 'num_comments': 4,\n",
486
+ " 'date_posted': '2025-09-22T07:24:52.204Z'},\n",
487
+ " {'title': 'Do you think you will miss the pre-AI world?',\n",
488
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn5lb0/do_you_think_you_will_miss_the_preai_world/',\n",
489
+ " 'user_posted': 'Brandon-the-beast',\n",
490
+ " 'description': \"I have been taking a break from AI since I realised what it was doing to my brain, but I recently realised that it is actually impossible to take a break from AI now. All search engines use AI, and you can't turn them off. AI has cemented itself into the internet now. There's no going back. Do you think you will miss a world without it? Read more\",\n",
491
+ " 'upvotes': None,\n",
492
+ " 'num_comments': 106,\n",
493
+ " 'date_posted': '2025-09-21T22:26:17.653Z'},\n",
494
+ " {'title': 'Fake CK tribute songs',\n",
495
+ " 'url': 'https://www.reddit.com/r/ArtificialInteligence/comments/1nn61ek/fake_ck_tribute_songs/',\n",
496
+ " 'user_posted': 'magistralinguae',\n",
497
+ " 'description': 'My mother has been stuck to her tv watching tributes to CK. She thinks Rihanna really wrote and performed this tribute song. How the hell do people believe this kind of crap is real? https://www.youtube.com/watch?v=PAPculCFBi4 Read more',\n",
498
+ " 'upvotes': None,\n",
499
+ " 'num_comments': 13,\n",
500
+ " 'date_posted': '2025-09-21T22:45:56.754Z'}],\n",
501
+ " 'total_found': 25}"
502
+ ]
503
+ },
504
+ "execution_count": 17,
505
+ "metadata": {},
506
+ "output_type": "execute_result"
507
+ }
508
+ ],
509
+ "source": [
510
+ "reddit_response"
511
+ ]
512
+ },
513
+ {
514
+ "cell_type": "code",
515
+ "execution_count": 12,
516
+ "metadata": {
517
+ "colab": {
518
+ "base_uri": "https://localhost:8080/"
519
+ },
520
+ "id": "8l_mbhBnnh2q",
521
+ "outputId": "2d813ac4-f9d9-421a-9693-31bf51d28476"
522
+ },
523
+ "outputs": [
524
+ {
525
+ "name": "stdout",
526
+ "output_type": "stream",
527
+ "text": [
528
+ "Snapshot id : s_mfggahpy2qewa281hr\n",
529
+ "📥 Downloading snapshot data...\n",
530
+ "❌ Error downloading snapshot: 404 Client Error: Not Found for url: https://api.brightdata.com/datasets/v3/snapshot/s_mfggahpy2qewa281hr?format=json\n"
531
+ ]
532
+ }
533
+ ],
534
+ "source": [
535
+ "raw_data = download_snapshot(\"s_mfggahpy2qewa281hr\")\n",
536
+ "raw_data"
537
+ ]
538
+ },
539
+ {
540
+ "cell_type": "code",
541
+ "execution_count": null,
542
+ "metadata": {
543
+ "id": "ete6kYnoEPZX"
544
+ },
545
+ "outputs": [],
546
+ "source": []
547
+ }
548
+ ],
549
+ "metadata": {
550
+ "colab": {
551
+ "provenance": []
552
+ },
553
+ "kernelspec": {
554
+ "display_name": "Local venv (Python)",
555
+ "language": "python",
556
+ "name": "localvenv"
557
+ },
558
+ "language_info": {
559
+ "codemirror_mode": {
560
+ "name": "ipython",
561
+ "version": 3
562
+ },
563
+ "file_extension": ".py",
564
+ "mimetype": "text/x-python",
565
+ "name": "python",
566
+ "nbconvert_exporter": "python",
567
+ "pygments_lexer": "ipython3",
568
+ "version": "3.12.0"
569
+ }
570
+ },
571
+ "nbformat": 4,
572
+ "nbformat_minor": 0
573
+ }
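The 404 in the output above comes from the Bright Data snapshot endpoint. A minimal sketch of what a download_snapshot helper along these lines could look like, reconstructed from the URL visible in the error message — the function body and the env-var name are assumptions, not the notebook's actual definition:

import os
import requests

def download_snapshot(snapshot_id: str):
    """Fetch a Bright Data dataset snapshot as JSON (sketch)."""
    url = f"https://api.brightdata.com/datasets/v3/snapshot/{snapshot_id}?format=json"
    # Assumed env var; matches the secret name used by the repo's workflow
    headers = {"Authorization": f"Bearer {os.environ['BRIGHTDATA_API_KEY']}"}
    resp = requests.get(url, headers=headers, timeout=30)
    resp.raise_for_status()  # a 404 here means the snapshot id is unknown or expired
    return resp.json()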
backend/notebooks/blog.db ADDED
Binary file (20.5 kB).
 
backend/notebooks/blog_app.py ADDED
@@ -0,0 +1,234 @@
1
+ from fastapi import FastAPI, HTTPException, File, UploadFile, Form, Depends
2
+ from fastapi.staticfiles import StaticFiles
3
+ from pydantic import BaseModel
4
+ from typing import List, Optional, Dict, Any
5
+ import json
6
+ import os
7
+ from flexible_blog_database import FlexibleBlogDatabase
8
+
9
+ app = FastAPI(title="Flexible Daily Blog API")
10
+ blog_db = FlexibleBlogDatabase()
11
+
12
+ # Mount static files for serving images
13
+ app.mount("/media", StaticFiles(directory="blog_media"), name="media")
14
+
15
+ class BlogPostCreate(BaseModel):
16
+ title: str
17
+ content: str
18
+ author: str = "Admin"
19
+ tags: Optional[List[str]] = []
20
+
21
+ class ImageConfig(BaseModel):
22
+ file_path: str
23
+ alt_text: str = ""
24
+ caption: str = ""
25
+ order: int = 0
26
+
27
+ class CompleteBlogPost(BaseModel):
28
+ title: str
29
+ content: str
30
+ author: str = "Admin"
31
+ tags: Optional[List[str]] = []
32
+ featured_image: Optional[ImageConfig] = None
33
+ post_images: Optional[List[ImageConfig]] = []
34
+
35
+ @app.post("/blog/posts/simple")
36
+ async def create_simple_blog_post(post: BlogPostCreate):
37
+ """Create a blog post without any images (like blog2)"""
38
+ post_id = blog_db.create_blog_post(
39
+ title=post.title,
40
+ content=post.content,
41
+ author=post.author,
42
+ tags=post.tags
43
+ )
44
+ return {"id": post_id, "message": "Simple blog post created successfully"}
45
+
46
+ @app.post("/blog/posts/with-featured-image")
47
+ async def create_blog_with_featured_image(
48
+ title: str = Form(...),
49
+ content: str = Form(...),
50
+ author: str = Form("Admin"),
51
+ tags: str = Form("[]"),
52
+ featured_image: UploadFile = File(...),
53
+ featured_alt_text: str = Form(""),
54
+ featured_caption: str = Form("")
55
+ ):
56
+ """Create a blog post with only featured image"""
57
+ tags_list = json.loads(tags) if tags != "[]" else []
58
+
59
+ # Save featured image temporarily
60
+ featured_temp_path = f"temp_featured_{featured_image.filename}"
61
+ with open(featured_temp_path, "wb") as buffer:
62
+ content_data = await featured_image.read()
63
+ buffer.write(content_data)
64
+
65
+ try:
66
+ # Create blog post
67
+ post_id = blog_db.create_blog_post(title, content, author, tags_list)
68
+
69
+ # Add featured image
70
+ blog_db.add_featured_image(
71
+ post_id,
72
+ featured_temp_path,
73
+ featured_alt_text,
74
+ featured_caption
75
+ )
76
+
77
+ return {"id": post_id, "message": "Blog post with featured image created successfully"}
78
+
79
+ finally:
80
+ if os.path.exists(featured_temp_path):
81
+ os.remove(featured_temp_path)
82
+
83
+ @app.post("/blog/posts/with-post-images")
84
+ async def create_blog_with_post_images(
85
+ title: str = Form(...),
86
+ content: str = Form(...),
87
+ author: str = Form("Admin"),
88
+ tags: str = Form("[]"),
89
+ post_images: List[UploadFile] = File(...),
90
+ post_alt_texts: str = Form("[]"), # JSON array
91
+ post_captions: str = Form("[]") # JSON array
92
+ ):
93
+ """Create a blog post with only post images (like blog3)"""
94
+ tags_list = json.loads(tags) if tags != "[]" else []
95
+ alt_texts = json.loads(post_alt_texts) if post_alt_texts != "[]" else []
96
+ captions = json.loads(post_captions) if post_captions != "[]" else []
97
+
98
+ # Create blog post
99
+ post_id = blog_db.create_blog_post(title, content, author, tags_list)
100
+
101
+ # Save post images temporarily
102
+ temp_paths = []
103
+ image_configs = []
104
+
105
+ for i, img in enumerate(post_images):
106
+ temp_path = f"temp_post_{i}_{img.filename}"
107
+ with open(temp_path, "wb") as buffer:
108
+ content_data = await img.read()
109
+ buffer.write(content_data)
110
+
111
+ temp_paths.append(temp_path)
112
+ image_configs.append({
113
+ "file_path": temp_path,
114
+ "alt_text": alt_texts[i] if i < len(alt_texts) else "",
115
+ "caption": captions[i] if i < len(captions) else "",
116
+ "order": i
117
+ })
118
+
119
+ try:
120
+ # Add post images
121
+ blog_db.add_post_images(post_id, image_configs)
122
+
123
+ return {"id": post_id, "message": "Blog post with post images created successfully"}
124
+
125
+ finally:
126
+ for temp_path in temp_paths:
127
+ if os.path.exists(temp_path):
128
+ os.remove(temp_path)
129
+
130
+ @app.post("/blog/posts/complete")
131
+ async def create_complete_blog_post(
132
+ title: str = Form(...),
133
+ content: str = Form(...),
134
+ author: str = Form("Admin"),
135
+ tags: str = Form("[]"),
136
+ featured_image: UploadFile = File(None),
137
+ featured_alt_text: str = Form(""),
138
+ featured_caption: str = Form(""),
139
+ post_images: List[UploadFile] = File(None),
140
+ post_alt_texts: str = Form("[]"),
141
+ post_captions: str = Form("[]")
142
+ ):
143
+ """Create a complete blog post with both featured and post images (like blog1)"""
144
+ tags_list = json.loads(tags) if tags != "[]" else []
145
+ alt_texts = json.loads(post_alt_texts) if post_alt_texts != "[]" else []
146
+ captions = json.loads(post_captions) if post_captions != "[]" else []
147
+
148
+ # Create blog post
149
+ post_id = blog_db.create_blog_post(title, content, author, tags_list)
150
+
151
+ temp_files = []
152
+
153
+ try:
154
+ # Handle featured image
155
+ if featured_image and featured_image.content_type.startswith("image/"):
156
+ featured_temp_path = f"temp_featured_{featured_image.filename}"
157
+ with open(featured_temp_path, "wb") as buffer:
158
+ content_data = await featured_image.read()
159
+ buffer.write(content_data)
160
+ temp_files.append(featured_temp_path)
161
+
162
+ blog_db.add_featured_image(
163
+ post_id,
164
+ featured_temp_path,
165
+ featured_alt_text,
166
+ featured_caption
167
+ )
168
+
169
+ # Handle post images
170
+ if post_images and post_images[0].filename: # Check if actual files were uploaded
171
+ image_configs = []
172
+
173
+ for i, img in enumerate(post_images):
174
+ if img.content_type.startswith("image/"):
175
+ temp_path = f"temp_post_{i}_{img.filename}"
176
+ with open(temp_path, "wb") as buffer:
177
+ content_data = await img.read()
178
+ buffer.write(content_data)
179
+
180
+ temp_files.append(temp_path)
181
+ image_configs.append({
182
+ "file_path": temp_path,
183
+ "alt_text": alt_texts[i] if i < len(alt_texts) else "",
184
+ "caption": captions[i] if i < len(captions) else "",
185
+ "order": i
186
+ })
187
+
188
+ if image_configs:
189
+ blog_db.add_post_images(post_id, image_configs)
190
+
191
+ return {"id": post_id, "message": "Complete blog post created successfully"}
192
+
193
+ finally:
194
+ for temp_file in temp_files:
195
+ if os.path.exists(temp_file):
196
+ os.remove(temp_file)
197
+
198
+ @app.get("/blog/posts/{post_id}")
199
+ async def get_blog_post(post_id: int):
200
+ """Get a complete blog post with all images"""
201
+ post = blog_db.get_blog_post_complete(post_id)
202
+ if not post:
203
+ raise HTTPException(status_code=404, detail="Blog post not found")
204
+ return post
205
+
206
+ @app.get("/blog/posts")
207
+ async def list_blog_posts(limit: int = 10):
208
+ """List recent blog posts with image summary"""
209
+ posts = blog_db.list_recent_posts_with_images(limit)
210
+ return posts
211
+
212
+ @app.post("/blog/posts/{post_id}/add-featured-image")
213
+ async def add_featured_image_to_existing_post(
214
+ post_id: int,
215
+ featured_image: UploadFile = File(...),
216
+ alt_text: str = Form(""),
217
+ caption: str = Form("")
218
+ ):
219
+ """Add a featured image to an existing post"""
220
+ temp_path = f"temp_featured_{featured_image.filename}"
221
+ with open(temp_path, "wb") as buffer:
222
+ content = await featured_image.read()
223
+ buffer.write(content)
224
+
225
+ try:
226
+ blog_db.add_featured_image(post_id, temp_path, alt_text, caption)
227
+ return {"message": "Featured image added successfully"}
228
+ finally:
229
+ if os.path.exists(temp_path):
230
+ os.remove(temp_path)
231
+
232
+ if __name__ == "__main__":
233
+ import uvicorn
234
+ uvicorn.run(app, host="0.0.0.0", port=8002)
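A quick smoke test for these endpoints once the server above is running: a minimal sketch using requests, assuming the app is reachable on localhost:8002 and that photo.jpg is a hypothetical local test image.

import requests

# Text-only post via the JSON endpoint
r = requests.post(
    "http://localhost:8002/blog/posts/simple",
    json={"title": "Hello", "content": "First post", "tags": ["test"]},
)
post_id = r.json()["id"]

# Post with a featured image via the multipart form endpoint
with open("photo.jpg", "rb") as f:  # hypothetical test file
    requests.post(
        "http://localhost:8002/blog/posts/with-featured-image",
        data={"title": "With image", "content": "Body text", "tags": '["test"]'},
        files={"featured_image": ("photo.jpg", f, "image/jpeg")},
    )

# Fetch the complete post back, images and all
print(requests.get(f"http://localhost:8002/blog/posts/{post_id}").json())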
backend/notebooks/blognb.ipynb ADDED
@@ -0,0 +1,232 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "7a9b5b8e",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "from flexible_blog_database import FlexibleBlogDatabase"
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "code",
15
+ "execution_count": 2,
16
+ "id": "ea00d66d",
17
+ "metadata": {},
18
+ "outputs": [],
19
+ "source": [
20
+ "# Initialize\n",
21
+ "blog_db = FlexibleBlogDatabase()"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": 3,
27
+ "id": "f00bdcf7",
28
+ "metadata": {},
29
+ "outputs": [],
30
+ "source": [
31
+ "# Example 1: Blog with featured image and post images (like blog1)\n",
32
+ "blog1_id = blog_db.create_complete_blog_post(\n",
33
+ " title=\"Complete Blog Post with All Images\",\n",
34
+ " content=\"This blog has both featured and post images!\",\n",
35
+ " author=\"John Doe\",\n",
36
+ " tags=[\"complete\", \"images\"],\n",
37
+ " featured_image={\n",
38
+ " \"file_path\": \"./images/featured_blog1.jpeg\",\n",
39
+ " \"alt_text\": \"Featured image for blog 1\",\n",
40
+ " \"caption\": \"This is the main featured image\"\n",
41
+ " },\n",
42
+ " post_images=[\n",
43
+ " {\n",
44
+ " \"file_path\": \"./images/post1_img1.jpg\",\n",
45
+ " \"alt_text\": \"Post image 1\",\n",
46
+ " \"caption\": \"First image in the post\",\n",
47
+ " \"order\": 0\n",
48
+ " },\n",
49
+ " {\n",
50
+ " \"file_path\": \"./images/post1_img2.jpg\",\n",
51
+ " \"alt_text\": \"Post image 2\", \n",
52
+ " \"caption\": \"Second image in the post\",\n",
53
+ " \"order\": 1\n",
54
+ " }\n",
55
+ " ]\n",
56
+ ")"
57
+ ]
58
+ },
59
+ {
60
+ "cell_type": "code",
61
+ "execution_count": 9,
62
+ "id": "396af061",
63
+ "metadata": {},
64
+ "outputs": [],
65
+ "source": [
66
+ "# Example 2: Blog without any images (like blog2)\n",
67
+ "blog2_id = blog_db.create_blog_post(\n",
68
+ " title=\"Text Only Blog Post\",\n",
69
+ " content=\"This blog post contains only text content.\",\n",
70
+ " author=\"Jane Smith\",\n",
71
+ " tags=[\"text-only\", \"minimal\"]\n",
72
+ ")\n",
73
+ "\n",
74
+ "# Example 3: Blog with post images but no featured image (like blog3)\n",
75
+ "blog3_id = blog_db.create_blog_post(\n",
76
+ " title=\"Blog with Post Images Only\",\n",
77
+ " content=\"This blog has images within the content but no featured image.\",\n",
78
+ " author=\"Bob Wilson\",\n",
79
+ " tags=[\"post-images\"]\n",
80
+ ")"
81
+ ]
82
+ },
83
+ {
84
+ "cell_type": "code",
85
+ "execution_count": 7,
86
+ "id": "fd3ccc12",
87
+ "metadata": {},
88
+ "outputs": [
89
+ {
90
+ "name": "stdout",
91
+ "output_type": "stream",
92
+ "text": [
93
+ "Blog 1 (complete): {'id': 4, 'title': 'Complete Blog Post with All Images', 'content': 'This blog has both featured and post images!', 'author': 'John Doe', 'created_at': '2025-09-12 08:27:24', 'published': 1, 'tags': ['complete', 'images'], 'featured_image': {'filename': '8084f934-518a-4664-a293-b87f5e7a23f1.jpeg', 'file_path': 'blog_media\\\\8084f934-518a-4664-a293-b87f5e7a23f1.jpeg', 'alt_text': 'Featured image for blog 1', 'caption': 'This is the main featured image', 'width': None, 'height': None, 'url': '/media/8084f934-518a-4664-a293-b87f5e7a23f1.jpeg'}, 'post_images': [{'id': 4, 'filename': 'd2896dd7-af83-488e-a2bb-105a21048d17.jpg', 'file_path': 'blog_media\\\\d2896dd7-af83-488e-a2bb-105a21048d17.jpg', 'alt_text': 'Post image 1', 'caption': 'First image in the post', 'mime_type': 'image/jpeg', 'width': None, 'height': None, 'order': 0, 'position': None, 'type': 'post_content', 'url': '/media/d2896dd7-af83-488e-a2bb-105a21048d17.jpg'}, {'id': 5, 'filename': 'c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg', 'file_path': 'blog_media\\\\c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg', 'alt_text': 'Post image 2', 'caption': 'Second image in the post', 'mime_type': 'image/jpeg', 'width': None, 'height': None, 'order': 1, 'position': None, 'type': 'post_content', 'url': '/media/c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg'}]}\n"
94
+ ]
95
+ }
96
+ ],
97
+ "source": [
98
+ "# Retrieve and display\n",
99
+ "from pprint import pprint\n",
100
+ "print(\"Blog 1 (complete):\", blog_db.get_blog_post_complete(blog1_id))"
101
+ ]
102
+ },
103
+ {
104
+ "cell_type": "code",
105
+ "execution_count": 8,
106
+ "id": "21c58515",
107
+ "metadata": {},
108
+ "outputs": [
109
+ {
110
+ "name": "stdout",
111
+ "output_type": "stream",
112
+ "text": [
113
+ "{'author': 'John Doe',\n",
114
+ " 'content': 'This blog has both featured and post images!',\n",
115
+ " 'created_at': '2025-09-12 08:27:24',\n",
116
+ " 'featured_image': {'alt_text': 'Featured image for blog 1',\n",
117
+ " 'caption': 'This is the main featured image',\n",
118
+ " 'file_path': 'blog_media\\\\8084f934-518a-4664-a293-b87f5e7a23f1.jpeg',\n",
119
+ " 'filename': '8084f934-518a-4664-a293-b87f5e7a23f1.jpeg',\n",
120
+ " 'height': None,\n",
121
+ " 'url': '/media/8084f934-518a-4664-a293-b87f5e7a23f1.jpeg',\n",
122
+ " 'width': None},\n",
123
+ " 'id': 4,\n",
124
+ " 'post_images': [{'alt_text': 'Post image 1',\n",
125
+ " 'caption': 'First image in the post',\n",
126
+ " 'file_path': 'blog_media\\\\d2896dd7-af83-488e-a2bb-105a21048d17.jpg',\n",
127
+ " 'filename': 'd2896dd7-af83-488e-a2bb-105a21048d17.jpg',\n",
128
+ " 'height': None,\n",
129
+ " 'id': 4,\n",
130
+ " 'mime_type': 'image/jpeg',\n",
131
+ " 'order': 0,\n",
132
+ " 'position': None,\n",
133
+ " 'type': 'post_content',\n",
134
+ " 'url': '/media/d2896dd7-af83-488e-a2bb-105a21048d17.jpg',\n",
135
+ " 'width': None},\n",
136
+ " {'alt_text': 'Post image 2',\n",
137
+ " 'caption': 'Second image in the post',\n",
138
+ " 'file_path': 'blog_media\\\\c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg',\n",
139
+ " 'filename': 'c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg',\n",
140
+ " 'height': None,\n",
141
+ " 'id': 5,\n",
142
+ " 'mime_type': 'image/jpeg',\n",
143
+ " 'order': 1,\n",
144
+ " 'position': None,\n",
145
+ " 'type': 'post_content',\n",
146
+ " 'url': '/media/c268ba74-91ed-4bb4-8c54-e73b090083fc.jpg',\n",
147
+ " 'width': None}],\n",
148
+ " 'published': 1,\n",
149
+ " 'tags': ['complete', 'images'],\n",
150
+ " 'title': 'Complete Blog Post with All Images'}\n"
151
+ ]
152
+ }
153
+ ],
154
+ "source": [
155
+ "pprint(blog_db.get_blog_post_complete(blog1_id))"
156
+ ]
157
+ },
158
+ {
159
+ "cell_type": "code",
160
+ "execution_count": 10,
161
+ "id": "6992af78",
162
+ "metadata": {},
163
+ "outputs": [
164
+ {
165
+ "name": "stdout",
166
+ "output_type": "stream",
167
+ "text": [
168
+ "{'author': 'Jane Smith',\n",
169
+ " 'content': 'This blog post contains only text content.',\n",
170
+ " 'created_at': '2025-09-12 14:40:09',\n",
171
+ " 'featured_image': None,\n",
172
+ " 'id': 7,\n",
173
+ " 'post_images': [],\n",
174
+ " 'published': 1,\n",
175
+ " 'tags': ['text-only', 'minimal'],\n",
176
+ " 'title': 'Text Only Blog Post'}\n"
177
+ ]
178
+ }
179
+ ],
180
+ "source": [
181
+ "pprint(blog_db.get_blog_post_complete(blog2_id))"
182
+ ]
183
+ },
184
+ {
185
+ "cell_type": "code",
186
+ "execution_count": 11,
187
+ "id": "6c19b22f",
188
+ "metadata": {},
189
+ "outputs": [
190
+ {
191
+ "name": "stdout",
192
+ "output_type": "stream",
193
+ "text": [
194
+ "{'author': 'Bob Wilson',\n",
195
+ " 'content': 'This blog has images within the content but no featured image.',\n",
196
+ " 'created_at': '2025-09-12 14:40:09',\n",
197
+ " 'featured_image': None,\n",
198
+ " 'id': 8,\n",
199
+ " 'post_images': [],\n",
200
+ " 'published': 1,\n",
201
+ " 'tags': ['post-images'],\n",
202
+ " 'title': 'Blog with Post Images Only'}\n"
203
+ ]
204
+ }
205
+ ],
206
+ "source": [
207
+ "pprint(blog_db.get_blog_post_complete(blog3_id))"
208
+ ]
209
+ }
210
+ ],
211
+ "metadata": {
212
+ "kernelspec": {
213
+ "display_name": "Python 3",
214
+ "language": "python",
215
+ "name": "python3"
216
+ },
217
+ "language_info": {
218
+ "codemirror_mode": {
219
+ "name": "ipython",
220
+ "version": 3
221
+ },
222
+ "file_extension": ".py",
223
+ "mimetype": "text/x-python",
224
+ "name": "python",
225
+ "nbconvert_exporter": "python",
226
+ "pygments_lexer": "ipython3",
227
+ "version": "3.12.0"
228
+ }
229
+ },
230
+ "nbformat": 4,
231
+ "nbformat_minor": 5
232
+ }
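One thing worth noting in the output above: blog3 is titled "Blog with Post Images Only" yet its post_images list comes back empty, because create_blog_post only writes the text row; inline images are attached by a separate call. A minimal sketch of the missing step, assuming image files exist at the hypothetical paths shown:

# Attach inline images to blog3 after creation (paths are hypothetical)
blog_db.add_post_images(blog3_id, [
    {"file_path": "./images/post3_img1.jpg",
     "alt_text": "Post image 1", "caption": "First inline image", "order": 0},
])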
backend/notebooks/flexible_blog_database.py ADDED
@@ -0,0 +1,372 @@
1
+ import sqlite3
2
+ import json
3
+ import os
4
+ import uuid
5
+ from datetime import datetime
6
+ from typing import List, Dict, Optional, Union
7
+ from pathlib import Path
8
+ import shutil
9
+ from enum import Enum
10
+ import threading
11
+
12
+ class ImageType(Enum):
13
+ FEATURED = "featured"
14
+ POST_CONTENT = "post_content"
15
+ GALLERY = "gallery"
16
+
17
+ class FlexibleBlogDatabase:
18
+ def __init__(self, db_path: str = "blog.db", media_dir: str = "blog_media"):
19
+ self.db_path = db_path
20
+ self.media_dir = Path(media_dir)
21
+ self.media_dir.mkdir(exist_ok=True)
22
+ self._lock = threading.Lock()
23
+ self.init_database()
24
+
25
+ def _get_connection(self):
26
+ """Get a database connection with proper settings"""
27
+ conn = sqlite3.connect(self.db_path, timeout=20.0)
28
+ conn.execute("PRAGMA journal_mode=WAL") # Better for concurrent access
29
+ conn.execute("PRAGMA busy_timeout=20000") # 20 second timeout
30
+ return conn
31
+
32
+ def init_database(self):
33
+ """Initialize the flexible blog database with enhanced image support"""
34
+ with self._lock:
35
+ conn = self._get_connection()
36
+ try:
37
+ cursor = conn.cursor()
38
+
39
+ # Blog posts table
40
+ cursor.execute('''
41
+ CREATE TABLE IF NOT EXISTS blog_posts (
42
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
43
+ title TEXT NOT NULL,
44
+ content TEXT NOT NULL,
45
+ author TEXT DEFAULT 'Admin',
46
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
47
+ published BOOLEAN DEFAULT 1,
48
+ tags TEXT DEFAULT '[]',
49
+ featured_image_id INTEGER,
50
+ FOREIGN KEY (featured_image_id) REFERENCES images (id)
51
+ )
52
+ ''')
53
+
54
+ # Enhanced images table
55
+ cursor.execute('''
56
+ CREATE TABLE IF NOT EXISTS images (
57
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
58
+ filename TEXT NOT NULL,
59
+ original_filename TEXT NOT NULL,
60
+ file_path TEXT NOT NULL,
61
+ file_size INTEGER,
62
+ mime_type TEXT,
63
+ alt_text TEXT DEFAULT '',
64
+ caption TEXT DEFAULT '',
65
+ width INTEGER,
66
+ height INTEGER,
67
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
68
+ )
69
+ ''')
70
+
71
+ # Enhanced junction table for post images
72
+ cursor.execute('''
73
+ CREATE TABLE IF NOT EXISTS blog_post_images (
74
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
75
+ blog_post_id INTEGER,
76
+ image_id INTEGER,
77
+ image_type TEXT DEFAULT 'post_content',
78
+ image_order INTEGER DEFAULT 0,
79
+ position_in_content INTEGER,
80
+ FOREIGN KEY (blog_post_id) REFERENCES blog_posts (id),
81
+ FOREIGN KEY (image_id) REFERENCES images (id)
82
+ )
83
+ ''')
84
+
85
+ conn.commit()
86
+ finally:
87
+ conn.close()
88
+
89
+ def save_image(self, file_path: str, alt_text: str = "", caption: str = "",
90
+ original_filename: str = "") -> int:
91
+ """Save an image file and return its database ID"""
92
+ if not os.path.exists(file_path):
93
+ raise FileNotFoundError(f"Image file not found: {file_path}")
94
+
95
+ # Generate unique filename
96
+ file_extension = Path(file_path).suffix
97
+ unique_filename = f"{uuid.uuid4()}{file_extension}"
98
+ destination_path = self.media_dir / unique_filename
99
+
100
+ # Copy file to media directory
101
+ shutil.copy2(file_path, destination_path)
102
+
103
+ # Get file info
104
+ file_size = os.path.getsize(destination_path)
105
+ mime_type = self._get_mime_type(file_extension)
106
+
107
+ # Get image dimensions (optional - requires PIL)
108
+ width, height = self._get_image_dimensions(destination_path)
109
+
110
+ # Save to database with lock
111
+ with self._lock:
112
+ conn = self._get_connection()
113
+ try:
114
+ cursor = conn.cursor()
115
+
116
+ cursor.execute('''
117
+ INSERT INTO images (filename, original_filename, file_path, file_size,
118
+ mime_type, alt_text, caption, width, height)
119
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
120
+ ''', (unique_filename, original_filename or Path(file_path).name,
121
+ str(destination_path), file_size, mime_type, alt_text, caption, width, height))
122
+
123
+ image_id = cursor.lastrowid
124
+ conn.commit()
125
+ return image_id
126
+ finally:
127
+ conn.close()
128
+
129
+ def create_blog_post(self, title: str, content: str, author: str = "Admin",
130
+ tags: List[str] = None) -> int:
131
+ """Create a basic blog post without images"""
132
+ with self._lock:
133
+ conn = self._get_connection()
134
+ try:
135
+ cursor = conn.cursor()
136
+
137
+ tags_json = json.dumps(tags or [])
138
+
139
+ cursor.execute('''
140
+ INSERT INTO blog_posts (title, content, author, tags)
141
+ VALUES (?, ?, ?, ?)
142
+ ''', (title, content, author, tags_json))
143
+
144
+ blog_post_id = cursor.lastrowid
145
+ conn.commit()
146
+ return blog_post_id
147
+ finally:
148
+ conn.close()
149
+
150
+ def add_featured_image(self, blog_post_id: int, image_path: str,
151
+ alt_text: str = "", caption: str = "") -> int:
152
+ """Add a featured image to an existing blog post"""
153
+ # Save the image first
154
+ image_id = self.save_image(image_path, alt_text, caption)
155
+
156
+ # Update blog post with featured image
157
+ with self._lock:
158
+ conn = self._get_connection()
159
+ try:
160
+ cursor = conn.cursor()
161
+
162
+ cursor.execute('''
163
+ UPDATE blog_posts SET featured_image_id = ? WHERE id = ?
164
+ ''', (image_id, blog_post_id))
165
+
166
+ conn.commit()
167
+ return image_id
168
+ finally:
169
+ conn.close()
170
+
171
+ def add_post_images(self, blog_post_id: int, image_configs: List[Dict]) -> List[int]:
172
+ """Add multiple post images to a blog post"""
173
+ image_ids = []
174
+
175
+ # Save all images first
176
+ for config in image_configs:
177
+ image_id = self.save_image(
178
+ config["file_path"],
179
+ config.get("alt_text", ""),
180
+ config.get("caption", "")
181
+ )
182
+ image_ids.append((image_id, config))
183
+
184
+ # Link all images to blog post in one transaction
185
+ with self._lock:
186
+ conn = self._get_connection()
187
+ try:
188
+ cursor = conn.cursor()
189
+
190
+ for image_id, config in image_ids:
191
+ cursor.execute('''
192
+ INSERT INTO blog_post_images
193
+ (blog_post_id, image_id, image_type, image_order, position_in_content)
194
+ VALUES (?, ?, ?, ?, ?)
195
+ ''', (
196
+ blog_post_id,
197
+ image_id,
198
+ ImageType.POST_CONTENT.value,
199
+ config.get("order", 0),
200
+ config.get("position")
201
+ ))
202
+
203
+ conn.commit()
204
+ return [img_id for img_id, _ in image_ids]
205
+ finally:
206
+ conn.close()
207
+
208
+ def create_complete_blog_post(self, title: str, content: str, author: str = "Admin",
209
+ tags: List[str] = None, featured_image: Dict = None,
210
+ post_images: List[Dict] = None) -> int:
211
+ """Create a complete blog post with all images in one go"""
212
+ # Create the blog post first
213
+ blog_post_id = self.create_blog_post(title, content, author, tags)
214
+
215
+ # Add featured image if provided
216
+ if featured_image:
217
+ self.add_featured_image(
218
+ blog_post_id,
219
+ featured_image["file_path"],
220
+ featured_image.get("alt_text", ""),
221
+ featured_image.get("caption", "")
222
+ )
223
+
224
+ # Add post images if provided
225
+ if post_images:
226
+ self.add_post_images(blog_post_id, post_images)
227
+
228
+ return blog_post_id
229
+
230
+ def get_blog_post_complete(self, post_id: int) -> Optional[Dict]:
231
+ """Get a complete blog post with all associated images"""
232
+ with self._lock:
233
+ conn = self._get_connection()
234
+ try:
235
+ cursor = conn.cursor()
236
+
237
+ # Get blog post with featured image
238
+ cursor.execute('''
239
+ SELECT bp.id, bp.title, bp.content, bp.author, bp.created_at,
240
+ bp.published, bp.tags, bp.featured_image_id,
241
+ fi.filename as featured_filename, fi.file_path as featured_path,
242
+ fi.alt_text as featured_alt, fi.caption as featured_caption,
243
+ fi.width as featured_width, fi.height as featured_height
244
+ FROM blog_posts bp
245
+ LEFT JOIN images fi ON bp.featured_image_id = fi.id
246
+ WHERE bp.id = ?
247
+ ''', (post_id,))
248
+
249
+ row = cursor.fetchone()
250
+ if not row:
251
+ return None
252
+
253
+ # Get post content images
254
+ cursor.execute('''
255
+ SELECT i.id, i.filename, i.file_path, i.alt_text, i.caption,
256
+ i.mime_type, i.width, i.height, bpi.image_order,
257
+ bpi.position_in_content, bpi.image_type
258
+ FROM blog_post_images bpi
259
+ JOIN images i ON bpi.image_id = i.id
260
+ WHERE bpi.blog_post_id = ? AND bpi.image_type = ?
261
+ ORDER BY bpi.image_order
262
+ ''', (post_id, ImageType.POST_CONTENT.value))
263
+
264
+ post_images = cursor.fetchall()
265
+
266
+ # Build result
267
+ result = {
268
+ 'id': row[0],
269
+ 'title': row[1],
270
+ 'content': row[2],
271
+ 'author': row[3],
272
+ 'created_at': row[4],
273
+ 'published': row[5],
274
+ 'tags': json.loads(row[6]),
275
+ 'featured_image': {
276
+ 'filename': row[8],
277
+ 'file_path': row[9],
278
+ 'alt_text': row[10],
279
+ 'caption': row[11],
280
+ 'width': row[12],
281
+ 'height': row[13],
282
+ 'url': self.get_image_url(row[8]) if row[8] else None
283
+ } if row[7] else None,
284
+ 'post_images': [
285
+ {
286
+ 'id': img[0],
287
+ 'filename': img[1],
288
+ 'file_path': img[2],
289
+ 'alt_text': img[3],
290
+ 'caption': img[4],
291
+ 'mime_type': img[5],
292
+ 'width': img[6],
293
+ 'height': img[7],
294
+ 'order': img[8],
295
+ 'position': img[9],
296
+ 'type': img[10],
297
+ 'url': self.get_image_url(img[1])
298
+ }
299
+ for img in post_images
300
+ ]
301
+ }
302
+
303
+ return result
304
+ finally:
305
+ conn.close()
306
+
307
+ def _get_mime_type(self, file_extension: str) -> str:
308
+ """Get MIME type based on file extension"""
309
+ mime_types = {
310
+ '.jpg': 'image/jpeg',
311
+ '.jpeg': 'image/jpeg',
312
+ '.png': 'image/png',
313
+ '.gif': 'image/gif',
314
+ '.webp': 'image/webp',
315
+ '.svg': 'image/svg+xml'
316
+ }
317
+ return mime_types.get(file_extension.lower(), 'application/octet-stream')
318
+
319
+ def _get_image_dimensions(self, image_path: str) -> tuple:
320
+ """Get image dimensions (requires PIL/Pillow)"""
321
+ try:
322
+ from PIL import Image
323
+ with Image.open(image_path) as img:
324
+ return img.size
325
+ except ImportError:
326
+ return None, None
327
+ except Exception:
328
+ return None, None
329
+
330
+ def get_image_url(self, image_filename: str) -> str:
331
+ """Generate URL for serving images"""
332
+ return f"/media/{image_filename}"
333
+
334
+ def list_recent_posts_with_images(self, limit: int = 10) -> List[Dict]:
335
+ """Get recent blog posts with image counts"""
336
+ with self._lock:
337
+ conn = self._get_connection()
338
+ try:
339
+ cursor = conn.cursor()
340
+
341
+ cursor.execute('''
342
+ SELECT bp.id, bp.title, bp.author, bp.created_at, bp.published, bp.tags,
343
+ bp.featured_image_id,
344
+ fi.filename as featured_filename,
345
+ COUNT(bpi.id) as post_image_count
346
+ FROM blog_posts bp
347
+ LEFT JOIN images fi ON bp.featured_image_id = fi.id
348
+ LEFT JOIN blog_post_images bpi ON bp.id = bpi.blog_post_id
349
+ WHERE bp.published = 1
350
+ GROUP BY bp.id
351
+ ORDER BY bp.created_at DESC
352
+ LIMIT ?
353
+ ''', (limit,))
354
+
355
+ rows = cursor.fetchall()
356
+
357
+ return [
358
+ {
359
+ 'id': row[0],
360
+ 'title': row[1],
361
+ 'author': row[2],
362
+ 'created_at': row[3],
363
+ 'published': row[4],
364
+ 'tags': json.loads(row[5]),
365
+ 'has_featured_image': row[6] is not None,
366
+ 'featured_image_url': self.get_image_url(row[7]) if row[7] else None,
367
+ 'post_image_count': row[8]
368
+ }
369
+ for row in rows
370
+ ]
371
+ finally:
372
+ conn.close()
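The notebook above (blognb.ipynb) exercises creation and retrieval; the listing path is the one it does not touch. A minimal sketch, assuming the default blog.db/blog_media locations:

from flexible_blog_database import FlexibleBlogDatabase

db = FlexibleBlogDatabase()
# list_recent_posts_with_images returns dicts with image-summary fields
for post in db.list_recent_posts_with_images(limit=5):
    flag = "featured" if post["has_featured_image"] else "no featured image"
    print(f"#{post['id']} {post['title']!r} ({flag}, {post['post_image_count']} post image(s))")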
backend/notebooks/nbmax.ipynb ADDED
@@ -0,0 +1,196 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 34,
6
+ "id": "d3ce03b3",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "from langchain_mcp_adapters.client import MultiServerMCPClient\n",
11
+ "from langgraph.graph import StateGraph, MessagesState, START\n",
12
+ "from langgraph.prebuilt import ToolNode, tools_condition\n",
13
+ "\n",
14
+ "from langchain.chat_models import init_chat_model\n",
15
+ "from llmoperations import get_chat_model"
16
+ ]
17
+ },
18
+ {
19
+ "cell_type": "code",
20
+ "execution_count": 35,
21
+ "id": "de9fa44d",
22
+ "metadata": {},
23
+ "outputs": [],
24
+ "source": [
25
+ "model = get_chat_model()"
26
+ ]
27
+ },
28
+ {
29
+ "cell_type": "code",
30
+ "execution_count": 36,
31
+ "id": "874a55d3",
32
+ "metadata": {},
33
+ "outputs": [],
34
+ "source": [
35
+ "client = MultiServerMCPClient(\n",
36
+ " {\n",
37
+ " \"weather\": {\n",
38
+ " # make sure you start your weather server on port 8002\n",
39
+ " \"url\": \"http://localhost:8002/mcp/\",\n",
40
+ " \"transport\": \"sse\",\n",
41
+ " },\n",
42
+ " }\n",
43
+ ")\n",
44
+ "\n",
45
+ "tools = await client.get_tools()"
46
+ ]
47
+ },
48
+ {
49
+ "cell_type": "code",
50
+ "execution_count": null,
51
+ "id": "04e69b58",
52
+ "metadata": {},
53
+ "outputs": [],
54
+ "source": [
55
+ "# client = MultiServerMCPClient(\n",
56
+ "# {\n",
57
+ "# \"weather\": {\n",
58
+ "# # make sure you start your weather server on port 8002\n",
59
+ "# \"url\": \"http://localhost:8002/mcp/\",\n",
60
+ "# \"transport\": \"sse\",\n",
61
+ "# },\n",
62
+ " # \"filesystem-mcp\": {\n",
63
+ " # \"command\": \"npx\",\n",
64
+ " # \"args\": [\n",
65
+ " # \"@modelcontextprotocol/server-filesystem\",\n",
66
+ " # r\"C:\\Users\\PD817AE\\OneDrive - EY\\Desktop\\AgenticDev\\amplify\"\n",
67
+ " # ],\n",
68
+ " # \"transport\": \"stdio\"\n",
69
+ " # }\n",
70
+ "# }\n",
71
+ "# )\n",
72
+ "\n",
73
+ "# tools = await client.get_tools()"
74
+ ]
75
+ },
76
+ {
77
+ "cell_type": "code",
78
+ "execution_count": 37,
79
+ "id": "12e00c43",
80
+ "metadata": {},
81
+ "outputs": [
82
+ {
83
+ "data": {
84
+ "text/plain": [
85
+ "[StructuredTool(name='get_weather', description='Get Weather\\n\\nRetrieves current weather information for a given city and country.\\n\\n### Responses:\\n\\n**200**: Successful Response (Success Response)\\nContent-Type: application/json', args_schema={'type': 'object', 'properties': {'city': {'type': 'string', 'description': \"City name (e.g., 'London')\", 'title': 'city'}, 'country': {'type': 'string', 'description': \"Country code (e.g., 'UK')\", 'title': 'country'}}, 'title': 'get_weatherArguments', 'required': ['city', 'country']}, response_format='content_and_artifact', coroutine=<function convert_mcp_tool_to_langchain_tool.<locals>.call_tool at 0x000002C4070BB6A0>)]"
86
+ ]
87
+ },
88
+ "execution_count": 37,
89
+ "metadata": {},
90
+ "output_type": "execute_result"
91
+ }
92
+ ],
93
+ "source": [
94
+ "tools"
95
+ ]
96
+ },
97
+ {
98
+ "cell_type": "code",
99
+ "execution_count": 38,
100
+ "id": "d71ac872",
101
+ "metadata": {},
102
+ "outputs": [],
103
+ "source": [
104
+ "def call_model(state: MessagesState):\n",
105
+ " response = model.bind_tools(tools).invoke(state[\"messages\"])\n",
106
+ " return {\"messages\": response}"
107
+ ]
108
+ },
109
+ {
110
+ "cell_type": "code",
111
+ "execution_count": 39,
112
+ "id": "1545d8d7",
113
+ "metadata": {},
114
+ "outputs": [],
115
+ "source": [
116
+ "builder = StateGraph(MessagesState)\n",
117
+ "builder.add_node(call_model)\n",
118
+ "builder.add_node(ToolNode(tools))\n",
119
+ "builder.add_edge(START, \"call_model\")\n",
120
+ "builder.add_conditional_edges(\n",
121
+ " \"call_model\",\n",
122
+ " tools_condition,\n",
123
+ ")\n",
124
+ "builder.add_edge(\"tools\", \"call_model\")\n",
125
+ "graph = builder.compile()"
126
+ ]
127
+ },
128
+ {
129
+ "cell_type": "code",
130
+ "execution_count": 40,
131
+ "id": "feaaa675",
132
+ "metadata": {},
133
+ "outputs": [
134
+ {
135
+ "data": {
136
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAANgAAAD5CAIAAADKsmwpAAAAAXNSR0IArs4c6QAAIABJREFUeJztnWlcU8fex+dkJyEEkrAvssmusqqlKgqKKwrWXe/Velut1lZutbbUWq22vfZqq9W6FKutUuuK+1JLtaJiURFRAREQQfadJGTfnhfxUh4EBMzJmSTz/fgiOXMy8wv5OfOf5cxgWq0WIBBEQyJaAAIBkBERsICMiIACZEQEFCAjIqAAGREBBRSiBUCHQqZuqFRIRGqJSKVWaZUKIxjeoluQKDSMyaYw2SR7Nwui5fQFDI0j6pC0qorutpbkiptq5NZ2NCabzGRTrLgUpdwI/j5UBqm5RiERqSg0rOyRxDPI0nMgy2ugJdG6egEyItBqtTfPNtaUSm1dGZ5BLJf+TKIVvRIKmaYkt7X8sbSyWBoZx/MJZROtqEeYuxEf3RJePlwXGccLjbYhWoueETUrb55tlIhUsf9wYFnBHoOZtRGvnagnU8HrcbZEC8GRplr5qR1Vo+fYu/lBXdObrxH/PFbHtacNGmFNtBBDcHp35dAJPHs3BtFCusRMjXg2ucrVlxkcZRYu1HF6V6VfhJVvOKQhozmOI9482+DkZWFWLgQATFninH2luaFKTrSQzjE7IxbdEwEAwmJMrWvSE2avcrt2ol6rgbENNDsjpqfWh4wyRxfq8BxgeeN0A9EqOsG8jHjvarNfuJWFJZloIYQRHGVddK9VLFQRLaQj5mXE0jzxa3FcolUQzIip/Jz0FqJVdMSMjFiaL6ZQSWSyGX3lTnHzY+VmCIhW0REz+lWePhR7DGAZuNCPP/749OnTffjgmDFjKisrcVAEaAySrQu9sliKR+Z9xoyM2FSn8DK4EfPz8/vwqerq6ubmZhzkPMcnxLKiWIJf/n3AXIyokGkaKuUWlnhNuWZkZCxevHjYsGHx8fFr165taGgAAISHh1dVVW3YsGHkyJEAgNbW1t27d8+fP19325YtW2Qyme7jMTExhw4devvtt8PDw9PT0+Pi4gAAU6ZMWbFiBR5qWRxqfQVkA4pa86CpVp7yZSlOmT969CgsLGzPnj3V1dUZGRmzZs169913tVqtTCYLCws7deqU7rY9e/YMGTIkLS3tzp07V65cGT9+/HfffadLGjt27PTp0zdt2pSZmalUKq9fvx4WFlZRUYGT4Noy6eFvnuGUed+AfVGGvhALVCwOXl82JyeHwWAsXLiQRCI5ODgEBAQUFxe/eNu8efNiYmI8PDx0b+/fv3/z5s33338fAIBhGIfDWblyJU4KO8DiUMQCuEZwzMWIGg2gWeAVhwQHB8tkssTExCFDhowYMcLV1TU8PPzF26hU6l9//bV27drCwkKVSgUA4HL/HksKCAjASd6LkCgYjQFXVAaXGvxgWZEF9UqcMvfz89u2bZutre327dsTEhKWLl16//79F2/bvn17cnJyQkLCqVOnsrKy3nzzzfapNBoNJ3kvIm5RkSmYwYrrCeZiRKYVRYLndEJkZOSaNWvOnj27bt06gUCQmJioq/Pa0Gq1qampM2fOTEhIcHBwAACIRCL89HSPWKiCbamsuRjRgkXmO9NVSg0emd+9e/fmzZsAAFtb20mTJq1YsUIkElVXV7e/R6lUSqVSOzs73VuFQnHt2jU8xPQEuURj50onqvROMRcjAgAsLMklD8V45Hz//v1Vq1adOHGiubk5Nzf38OHDtra2jo6OdDrdzs4uMzMzKyuLRCK5u7ufOXOmoqKipaVl/fr1wcHBQqFQLO5Ekru7OwAgLS0tNzcXD8GF2SL7fnAtkjUjI3oEsZ7m4mLEefPmJSQkbN68ecyYMYsWLWKxWMnJyRQKBQCwcOHCO3furFixQiqVfvXVVwwGY9q0afHx8YMHD162bBmDwRg9enRVVVWHDF1cXOLi4nbv3r19+3Y8BJfmSzwCDT223z1mtEJbIdec31udsNSZaCEE8+yxpORh68hpdkQL+X+YUY1Io5PsXOjZV3CcOjMKbp5pCHyNQ7SKjsDVdcKbyEm8HSufdPXkqEajiY6O7jRJoVBQqVQM62TIw9PTc9++ffpW+pycnJzExMTeSvLx8UlOTu70U4XZIht7mq0zXD0V82qaddy/1qLRaENGdu7FroZU5HI5nd75j4dhmKUljnsq9EESiURisToPAc/vrRqeYGvFpepVox4wOyMCAC7sq/YNZxvXjhx6AeYvbkYxYhsTFjr+da6xrlxGtBCDkp5az3OkwelCM60Rn89zfFcxdCLP2He66SHpqfV2bnT/CCuihXSJOdaIusBuWqLrnd+b8zKhWzSvX7Ra7eldlVZcCswuNN8asY2/zjc8zZNETuK5B8A1wKsXstKa8jKFo2bYufnCXvGbuxEBAI1V8pvnGukWJOf+Fh6BLCbb6Ie06ivkZY/Edy83DxxuPWQ8l0SCa6FNpyAjPqfyifTxHdHTPLGNPZVrT2NxKCwrCotDVquJVtYDMEwralKJhWqtRluY3cpgkbwHWQ4cbg3bosNuQEbsSE2ptL5SIRaoxEIViYRJRPp0olQqLSkpCQwM1GOeAABLGwrQApYVmW1DcfKyYNtAN0z4UpARDcqTJ0+SkpKOHj1KtBDoMJqqG2HaICMioAAZEQEFyIgIKEBGREABMiICCpAREVCAjIiAAmREBBQgIyKgABkRAQXIiAgoQEZEQAEyIgIKkBERUICMiIACZEQEFCAjIqAAGREBBciICChARkRAATIiAgqQERFQgIxoUDAMazvhAtEeZESDotVq6+rqiFYBI8iICChARkRAATIiAgqQERFQgIyIgAJkRAQUICMioAAZEQEFyIgIKEBGREABMiICCpAREVCAjIiAAmREBBQgIyKgAB34YwhmzZolkUgAAAqForGx0dHRUXcE/aVLl4iWBguoRjQEU6ZMqampqaqqamho0Gq1VVVVVVVVbDabaF0QgYxoCGbNmuXm5tb+CoZhw4YNI04RdCAjGgIMw6ZOnUomk9uu9OvXb+bMmYSKggtkRAMxY8YMV1dX3WsMw6KionSRIkIHMqKBoFAos2bNotPpAAAXF5dp06YRrQgukBENx9SpU11cXAAAkZGRqDrsAIVoATiiVGiaaxStQojOoI+LeStNkzZy8MySXDHRWp5DIgEbOxqHT/BZ4yY7jph5obHoXiuVTmJzqWqlaX5HvWBpTSkvFHP41NBoGzdfJlEyTNOI6an1GEYKieERLcRoUMo1aSmVw6bwnL2J8aIJxogZZxpIZOTC3kGlkya85Xr1eEN9pZwQAaZmRFGLsrZMFjwKubAvvBZne/ePZkKKNjUjNlUrMLKpfSmDweHTnhVICCna1H4zYbOKa08nWoWxQmOQ2TyqTELAOIOpGRFogFKhIVqEESNqUmIYZvhyTc6ICOMEGREBBciICChARkRAATIiAgqQERFQgIyIgAJkRAQUICMioAAZEQEFyIgIKEBG7AvxU0cfSPkRAJB64vDo2CGGF/Dn1bRRMeEtLS9ZstWmE36QERFQgIyIgAJTfoq
vh6jV6mPHD+4/kAwACPAfsGD+4gEDggEAT58+OXP2ePa9OzU1Ve79PCdMiJ8yuY8PI8dPHb1g/uKKimepJw5ZW9u8NnT4sndXfrVxTUZGuqtrv3lzFsbGTtTdmZGRvv9ActmzpxyOtbe37/L3PrK3d9Al7f7hu9/TzjMtmDEx41xc+rVlrlKp9u7bmXnrRl1dTVBQcMKUGUOHGt9mJqhGBMl7tp8+fWz955s//eRLW1v7j5Lee/asFACwY+c3d+78tfz9jzb+Z9uECfHfbfs681ZG34qgUqmHj+x3c3O/dPHmW/969+JvZ/79waKY6HFplzJHjRyz6ZsNolYRACDr7q3P1n0YGzvx6OELa9dsrK2t3rptoy6H02eOnz5zbPn7H+3cecDR0flAyp62zLdt/+/x1F8T4mf+evBs1IiYtZ+vSr92WU9/G8Nh7kYUtYqOHvtl1qz5EeFDX389auWKT8PDhjY2NQAA1qz5z6ZNO0NDIkKCw6dMnubr43/7zs0+F9Tf229y3Bs0Gm1k1BgAQGDgwFEjx1AolFEjY1Uq1bOypwCAfT/tGjE8etobczgc68DAgUuXfJCZeaPgcT4A4MTJw1EjRkeNiLFiW40bGxcaEqHLVi6XX/r93JzZCybHvcGx4kwYPyUmelx7mxoL5t40lz8rBQD4+QXq3lIolPWfb3qeptWeOHH41u2M8vIy3QVHR+c+F+Tm5q57wWKxAADu7l66txYWTACASCQEAJSUFEWNiGn7iK9PAACgoCDP18e/srJ8/LjJbUk+Pv66F4WFjxQKRUT4a21JwYPCLv52RiAUcKw4fVZreMzdiK3iVgAAg87ocF2j0Xz8yXKlUvH2W8uCg8PZluz3lv/rVQrqsP6eROrYFrW2tsrlcno7JUwmEwAgkYjFYrFardZZVgeDYfG/T4kAAC9qa25qREY0JlhMlu7H7nC9sKigoCBv86adYaGDdVdaW0W2fDv8lDAYDACATCZtuyKWiAEAPC6fxWKRyWS5XNaWJJU+f9aOx7cFAKz4YLWzs2v73OzsHPCTigfmbkR3dy8KhXL/Qba/fxAAQKvVJq1OHBU1xtqGCwBoc15paUlpaYnH/9pTPKBQKL4+/nl5D9qu6F57evXHMMze3jEv7wGY/jwp89YN3QsXZzfdDmMhweG6K83NTVqtVlebGhHm3llhsVhjRk84ffrYxd/O3MvJ2v79prt3b/n7B7n386RQKEeOpghFwmfPSrd/vykifGhNbTWuYhLiZ97IuJqaekgoEt7Lydq569vQkIj+3r4AgFEjx1y7fuXPq2kAgEOH9+fnP9R9hMlkLpi/+EDKnocPcxQKRfq1yytXLd363UZcdeKBudeIAIDl73+09buN33z7pVqt9vbyWb9uk65jsfqTL/YfSJ4SH+3s7Lo6aUNjU8Oaz1bOf3Pa/p+O46QkNnZifUPdkWMp3+/8xt7eITxs6NtvLdMlzZv7r5aW5u3fb1q/IWnAgOClSz748qtPdfsWzZr5Ty8vn18P/5ydfZvFsgwMGLhixac4KcQPU9uE6eENQW25YsgEW6KFGCuHvi6Zv8adbmHoptLcm2YEJKCmWQ88fJjzyerErlJ/STnF4VgbVpHxgYyoBwYMCE5O/rWrVOTCnoCMqB8cHZyIlmDcoBgRAQXIiAgoQEZEQAEyIgIKkBERUICMiIACZEQEFCAjIqAAGREBBaZmRCqNRGeY2pcyJDxHOoncg/v0jan9ZlxHakUxMUfWmACCRoVEqKLSCHCFqRnRzpVBo2NyKURH4xoRdc+k3iGWhBRtakYEAAyL5/9xsIpoFcZHVYmk4JbgtQnEHGNoaiu0dTRWy49vrQgfZ8vhUy05VFP8inoDw0BTjVzUpHhyXzTrQ1cSiYBjp0zWiAAAhUxz5/fG6qdyuUyjknV3KJpcoSCRSFSKIVbEabRapVJJp9Fwyl8skWAYRiaTSf/jpbbiOtEB0Lr5MgeNIHLdpMkasSeo1eri4uKrV68uXrzYMCU+efIkKSnp6NGjOOWflJR06dIlDMNsbGwsLS3pdLqTk5OPj8+SJUtwKlFfmK8RDxw4MHHiRBaLpXuy3TCIRKK7d++OHDkSp/wLCgoSExMbGhraX9RoNI6OjufPn8epUL1ggp2VnpCamtrc3Mzj8QzpQgAAm83Gz4UAAD8/P39//w4XWSwW5C40RyNeuXIFAPD6668vX77c8KXX19fv3LkT1yLmzJljY2PT9pZEIl2/fh3XEvWCeRlx48aNJSUlAAAHB2K2hhEKhVevXsW1iIiICC8vL13EpdFoPD09T58+jWuJeoG8bt06ojUYguLiYi6Xy2KxJk6cSKAMKpXq4uLi7u6OaylMJvP27dtyudzFxSU1NfXo0aMZGRnDhw/HtdBXxCw6K0lJSTExMaNHjyZaiOGYO3dubW3tH3/8oXubmpp68uTJX375hWhdXaM1aUQiUXl5+aVLl4gW8py6urodO3YQUnR+fn5YWFhubi4hpb8UU44RN2zY0NDQ4OLiEhsbS7SW5xggRuwKf3//rKysr7/++vhxvDaRehVM1oipqakDBgzAOxrrLXZ2dkuXLiVQwIEDB4qKij7//HMCNXSKCcaIycnJixYtUigUNNxm0oydM2fOHDx4MCUlBZ4/kanViJ999pm1tTUAAJ4/cXsMMI7YEyZPnvzll19GRUXl5OQQreV/EB2k6o2rV69qtdr6+nqihXRHcXHx9OnTiVbxNwsXLjx48CDRKrSm01mZO3eubtt+Pp9PtJbuIDxG7MDevXurq6s//ZT4HWaNPkasqKiws7MrKSnx8/MjWouxcvHixT179qSkpOjOgCEEI64RVSrV22+/LZPJaDSasbgQkhixA+PHj9+yZcv48ePv3LlDlAZjNaJWq83IyFiyZIm3tzfRWnoBgeOI3dOvX79r167t3bt3//79hAgwPiNqNJp///vfWq02KioqNDSUaDm9A7YYsQO7d+8WCASrVq0yfNHGFyOuXbs2JiZmxIgRRAsxWS5fvrx169aUlBTdQJiBILrb3gt+/vlnoiW8KgTONfeKysrK6OjoGzduGKxEo2max40bFxQURLSKVwXaGLEDTk5Oly9fPnLkyI8//miYEo2gac7Ozg4NDZXJZAZe1o8HeD+zond27dpVWFi4ZcsWvAuCukYUi8Vjx461srJqO7zT2MH7mRW9s2TJkoSEhLFjx9bV1eFbksGCgN4iEokKCwshn7LrLcYSI3agvr5+3LhxOTk5+BUBaY144sSJ7Ozs/v37Qz5l11sYDMa9e/eIVtFr+Hz+xYsXd+zYUVlZiVMRkB74U1RUpFQqiVahf9hs9s6dO6VSKYZhRhdsZGdnOznhda4RpDXiO++8M2nSJKJV4AKVSrWwsDhy5Eh1Nb6nP+uXgoICX19f3coSPIDUiBwOh8AJeAMwf/78xMQuz5GEkEePHr346L4egdSIP/zww7lz54hWgS9HjhwBAJSXlxMtpEfk5+cHBATglz+kRhQIBGKxmGgVhiA9Pf3u3btEq3g5eNeIkA5oCwQCCoVi2q1zG1
988QUMS1O7Jzw8PCsrC7/8Ia0RTT5GbI/OhZmZmUQL6ZL8/Hxcq0N4jWgOMWIHKioqLl26RLSKzsG7XYbXiOYTI7Yxbdo0oVBItIrOwbunAq8RFy9ebKrjiN0wffp0AMChQ4eIFtIR860RzSpG7ACPx4NqVxCNRlNUVOTr64trKZAa0QxjxDZiY2Oh2inFAO0yvEY0wxixPeHh4bpdK4gWAgzTLsNrRPOMETuQkJBw8OBBolUYyIiQrr7hcDhESyCekJAQe3t7olWA/Pz82bNn410KpDWiOceI7dEtu0pISCBKgEqlevr0af/+/fEuCFIjmnmM2IHdu3enpKS0v2KwrUcN01NBc81Gg0KhUCgUZDLZwsJiwoQJtbW1Y8eO/eqrr/Au98iRI2VlZQZ45B7FiMYBjUaj0WjDhg2ztrauq6vDMCwvL6+pqYnL5eJabn5+fkREBK5F6IC0aUYxYqfweLyamhrd66amJgOc5GOYLjO8RkQx4ou88cYb7Z9dEovFaWlpuJaoUCjKy8u9vLxwLUUHpE3z4sWLKQY5t9ZYSEhIKCsr0x1pprtCIpHKyspKSko8PT1xKtRgPRV4a0RznmvulJMnTyYkJLi7u+s2RtJoNACA2tpaXFtng7XL8NaIP/zwg7OzM5pcac+aNWsAAA8ePLh+/fr169cbGxsFzZL0y7enTp6LU4mP856FhISImlV9zkGrBVbcHnkMruGb6OhogUDQJgnDMK1W6+DgcOHCBaKlwUVWWtODG80aTKWSay1wez5apVKRKZRXeYDUxpFeWSTxHsQaMoFnxaV2cydcNWJkZOSFCxfawiBdJBQXF0eoKOj4bX+NJZc6fqGbpXV3Py0kqJSaljrFse8qpr7rbGPX5ZkjcMWIs2fP7rCXgIuLiwEmOo2Iiz/X2DjQB43gGYULAQAUKonvzJjxgcfJHZXCpi5374DLiIGBge03QcQwbNy4cQbdtxRuSvPFNAtywFCbHtwLHaNmOmZeaOoqFS4jAgD++c9/tm285OLiMmPGDKIVQURduZxKh+4n6yE29vTiHFFXqdB9q4CAgIEDB+pejx8/3sbGKP/344RcouY70olW0UfIFMzNl9VSr+g0FTojAgAWLFjA4/EcHBxQddgBsVCtMuY90ppqFV1t4/SqveaqJxJBg0osUkmEao0aqFSaV8wQAAAAb5jvEhaLlXVRDkDtq2dHtyBhAGNakZlWZJ4T3dbJWCsVE6aPRix7JC7Mbi3JFds4WGi1GJlKJlHJJDJZX6OSQQNHAgBEepptbpVgGrVaXalSK2RKmUApU3sNZPmFs+37GdkOhSZMr41Y/VR67WQjlUnDKHSv12woVDI+wnBEIVU1NojTTzVbMMHweJ61LYwH6pobvTPiH4fqq0pkPA8uy8aI6xKaBYXrygEACOvEqdur/AezIyfxiBZl7vS0s6JSan5eXyZT091CnYzahe2xsmN5veZaV0M6uQOvraERPaRHRlSrtMlJJY4B9pY8E1wRY+1sReVYHd5sHBtmmiovN6JGo9216klAjAedZRxzSn3Akse0cubu/6KMaCHmy8uNePA/z/pHOhtEDJEwrRlcV+vze41pg3VT4iVGvJraYO1qTWeZRb+SbWepBPSc9BaihZgj3RmxsUr+NFfMtrU0oB6CsXbi3DjVANUaTTOhOyNeO9XI98D3aUUIcfCxuX6qkWgVZkeXRqwplarUJLYt07B6ekrOwz9WrhnSKm7We858d+vKErlcqtZ7zkZK/NTRB1JwPyy3SyMW3xdjZJPtJr8EjFSaJyFahH74fP3HFy6eJlrFy+nSiE8eiNl2kFaHeMPksopyWolWoR8eP84nWkKP6HyKr7lOYcGm4tdZLn324Pc/fyyvyLdk2fj7Dosd9RaDwQIAZGQeS0vft2ThrgOHk2rrShztvUdEzo4Iff4s37nftmfdv0CnMUMGjrXju+GkDQBgZceszoN0X/VeMSomHACwafOGXbu3nD19FQCQkZG+/0By2bOnHI61t7fv8vc+srd30N3cTVIbmbcyjhw5UPA4j8vlBwUNWvTWezyefo6P7bxGbG1RyaR6WdDVCQ2N5T/8/J5SKV+26Mf5c76uri3atW+JWq0CAJApVKlUdOr85hnxn2xanzkwKProqS+aW2oAADdvp968fXzqxA+XL/6JZ+OU9udenOTpHlFobVaKhX1/jBISfruQAQD4cOUanQuz7t76bN2HsbETjx6+sHbNxtra6q3bNuru7CapjcKigqRPloeERPy87/j776168qTw6/+u05fUzo0oEarJuC2ryb7/G4VMXTD7a3tbdwc7z+lTVldWP859lK5LVauVY0a91c91AIZh4cETtVptZXUhAODGX0cHBsYMDIpmMq0iQid5e4bjJE8HjUEWC4zeiB3Y99OuEcOjp70xh8OxDgwcuHTJB5mZNwoe53ef1EbuwxwGgzFv7kJ7e4chgyO/2bRr9uwF+tLWhRFFKjINrydNS589cHUJYLGePxLFtXHkcV2eluW03eDmHKh7wbSwAgBIZSKtVtvQVG5v59F2j4uTH07ydFAtyBLjrxE7UFJS5OcX2PbW1ycAAFBQkNd9UhtBA4JlMlnS6sRjxw9WVJZzONYhwXqrDrp0GwbwGtSVylrLK/NXrhnS/qJQ9PfQ3YuryWVysUajptP/7jzRaBY4ydOhUQOA29nEhNDa2iqXy+n0v1dOMZlMAIBEIu4mqX0OPv39Nv5n27Vrl5P3bN+5a0tY6OAF8xcHBQ3Si7zOjci0oqiVMr0U8CJsNs+jX/DY6EXtL7JY3W2IyKCzSCSysp0kuQLf4RW1Qs2ygmv3gVeEwWAAAGQyadsVsUQMAOBx+d0kdchkyODIIYMj31zwzt27t1JPHPpkdeLJE3+QyXqI4jpvmplsslqJ14iuk33/FkGNp3uIt2eY7p+lpY0dv7uTRTAMs7F2LH32sO3Ko8cZOMnToZCpmVbGt/i8GygUiq+Pf17eg7YruteeXv27SWqfQ07O3Vu3bwIA+HzbsWMnvbt0hahV1NBQrxd5nRvRikuh0vBqmEZEztZoNGcublEoZHX1Zecuff/N93Oqa4u7/9SgoNEP8//MefgHAODK9QNlFbk4ydOtfLO0pphAjUin021t7bKyMu/lZKlUqoT4mTcyrqamHhKKhPdysnbu+jY0JKK/ty8AoJukNnLz7q/7fNXZcydaWprzH+WeOHmYz7fl8231IrXzvzWHT1PJ1DKRgsHW/1Aik2m1ctmvf15P2bp7fl19qZtL4PT41S/tfIyOelMsbj514Ztfjq726Bc8eXzir8c+w2l1grBWbGNnIrNKc+cs/Onn3bfv3Dz067nY2In1DXVHjqV8v/Mbe3uH8LChb7+1THdbN0ltzJg+r6Wl+fsdm7/d8hWNRoseNXbLt8l6aZe72w3sr/ONFaVaW09zfL69Kq8uIsayfwibaCEd+W1/jZOXpccAY10PdXJ72ZR3nDj8Tv6TdznF5z2IpVWZ2vhFD8EwtUegCT4UATNdhkG2LgwLplZQK+bYd/6TtAjqN
n/f+T5dFnRLqbzzuVoHW89li/b0VW0nfPplTFdJarWKTO7kC7q5BC6av62rT9WXNHsEWFBoMO6BYcJ0F4+PmMo/vrWyKyOyLbkfLE3pNEmhkNFonT/pRyLpuQfQlQYAgEIpp1E72dSBQuky8NWoNfVPBdPfNcT25Yj2dGcLDo/qP8SysV7Etu0kWiKTKVwbp84+Z1D0q0FYLRg5XT+z+Ihe8ZIGKHISX9LQKmnBa3AbKgTVQkuWJmAIOmuIAF4eCc38wOXZvRqlzMQ7Li01rdKm1tFz7IgWYqb0KCRf/LVnUUa5CdeLgppWIBPPWulKtBDzpUdGxDBs6WZvYWWTsLbLHT9gbvlfAAABr0lEQVSNl+byZhomjV9CfLxrzvRikGLWSlceT12SWSGsM5HDyZorhQVXyzx8KeMXdFyKjDAwvRtMeT2OFzCEfe1kY8MTiZZMtbJlGeM+JFKhXFQv0cjlfCfqhHX96BYmtbjBSOn1qJ6NHW3KYseaUllRTuuTB7V0JkWjwcg0MplKJlHIALdVjK8ChmEqpVqjUKkUaoVUSbcg9Q+29Am1RTsjwkMfh5cd3BkO7ozh8fymGoWgQSkWqsQClVqlUatgNCKNgZHIJJYVk2lF5jvTLDnGV4ubPK86z8F1oHEdUL2CeFXQjKoxweJQjHrTA64DvavgDRnRmLBgkRoq5USr6CNKhaaiUMzhd95+IiMaE/b9GEq5sW7K01Qj72aJJzKiMeHqw8QwcO+KUW5WduXXqtcnd7lpPlznNSN6wrUT9Uql1mugFc/JCHbVFwtVgnr5n4dr/rHajdX1eAUyolGS+5cg76ZQJlHLcdsZRi/YOtNb6hQeA1ivx/G7P84SGdGI0WqBQga1EbUaLYPVo4krZEQEFKDOCgIKkBERUICMiIACZEQEFCAjIqAAGREBBf8Hph49+fyMhM0AAAAASUVORK5CYII=",
137
+ "text/plain": [
138
+ "<IPython.core.display.Image object>"
139
+ ]
140
+ },
141
+ "metadata": {},
142
+ "output_type": "display_data"
143
+ }
144
+ ],
145
+ "source": [
146
+ "from IPython.display import Image, display\n",
147
+ "\n",
148
+ "try:\n",
149
+ " display(Image(graph.get_graph().draw_mermaid_png()))\n",
150
+ "except Exception:\n",
151
+ " # This requires some extra dependencies and is optional\n",
152
+ " pass"
153
+ ]
154
+ },
155
+ {
156
+ "cell_type": "code",
157
+ "execution_count": 41,
158
+ "id": "3ed7c7ef",
159
+ "metadata": {},
160
+ "outputs": [
161
+ {
162
+ "name": "stdout",
163
+ "output_type": "stream",
164
+ "text": [
165
+ "AI Message : The current weather in London, UK is 15°C with cloudy conditions.\n"
166
+ ]
167
+ }
168
+ ],
169
+ "source": [
170
+ "weather_response = await graph.ainvoke({\"messages\": \"what is the weather in london?\"})\n",
171
+ "print(f\"AI Message : {weather_response['messages'][-1].content}\")"
172
+ ]
173
+ }
174
+ ],
175
+ "metadata": {
176
+ "kernelspec": {
177
+ "display_name": "backend",
178
+ "language": "python",
179
+ "name": "python3"
180
+ },
181
+ "language_info": {
182
+ "codemirror_mode": {
183
+ "name": "ipython",
184
+ "version": 3
185
+ },
186
+ "file_extension": ".py",
187
+ "mimetype": "text/x-python",
188
+ "name": "python",
189
+ "nbconvert_exporter": "python",
190
+ "pygments_lexer": "ipython3",
191
+ "version": "3.12.0"
192
+ }
193
+ },
194
+ "nbformat": 4,
195
+ "nbformat_minor": 5
196
+ }
backend/notebooks/reddit_data.json ADDED
The diff for this file is too large to render. See raw diff
 
backend/rd_pipeline.ipynb ADDED
@@ -0,0 +1,548 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "f26ad386",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "import requests\n",
11
+ "import json"
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": 2,
17
+ "id": "acbc9e42",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "url = \"https://www.reddit.com/r/TrueOffMyChest/hot.json?limit=25\"\n",
22
+ "headers = {\"User-Agent\": \"Mozilla/5.0\"}\n",
23
+ "reddit_data = requests.get(url, headers=headers, timeout=10).json()\n",
24
+ "#reddit_data2 = requests.get(url2, headers=headers, timeout=10).json()"
25
+ ]
26
+ },
27
+ {
28
+ "cell_type": "code",
29
+ "execution_count": 3,
30
+ "id": "ce9b3e94",
31
+ "metadata": {},
32
+ "outputs": [],
33
+ "source": [
34
+ "with open('reddit_data.json', 'w', encoding='utf-8') as f:\n",
35
+ " json.dump(reddit_data, f, ensure_ascii=False, indent=2)"
36
+ ]
37
+ },
38
+ {
39
+ "cell_type": "code",
40
+ "execution_count": null,
41
+ "id": "e8ab46ad",
42
+ "metadata": {},
43
+ "outputs": [],
44
+ "source": [
45
+ "posts_info = [\n",
46
+ " {\n",
47
+ " \"title\": child[\"data\"].get(\"title\"),\n",
48
+ " \"post_content\": child[\"data\"].get(\"selftext\"),\n",
49
+ " \"author\": child[\"data\"].get(\"author\"),\n",
50
+ " \"upvote_ratio\": child[\"data\"].get(\"upvote_ratio\"),\n",
51
+ " \"ups\": child[\"data\"].get(\"ups\"),\n",
52
+ " \"num_comments\": child[\"data\"].get(\"num_comments\"),\n",
53
+ " }\n",
54
+ " for child in reddit_data.get(\"data\", {}).get(\"children\", [])\n",
55
+ "]\n",
56
+ "\n",
57
+ "#posts_info"
58
+ ]
59
+ },
60
+ {
61
+ "cell_type": "code",
62
+ "execution_count": 4,
63
+ "id": "de087363",
64
+ "metadata": {},
65
+ "outputs": [
66
+ {
67
+ "data": {
68
+ "text/plain": [
69
+ "{'title': 'I put a grenade in my relationship with my wife, I lost everything, and have nobody to blame but myself. I just need to get this out.',\n",
70
+ " 'post_content': 'I’m not looking for sympathy, I’m not justifying anything. I fucked up and its my fault. I just need to get this out because there’s no one I can talk to.\\n\\nMy wife and I were having issues. Just the usual issues the struggle and strain of life, raising a family etc. We were struggling and nothing was getting better and I felt like I wasn’t good enough. That I never would be.\\n\\nAnd then I met ‘Carly’ online. She was much younger than me so we just talked but then she started flirting with me and it made me feel good. I didn’t tell her I was married, didn’t want her to stop flirting.\\n\\nI told myself it wouldn’t go anywhere. I was just enjoying the attention. And we were just talking. She lived the other side of the world there was no chance of us meeting. And then we had cybersex. I felt better than I had in ages. Cybersex then became video and phone sex anytime my wife was out. We sent photos and videos every day.\\n\\nThe more I spent time with Carly, the more I couldn’t stand being with my wife so I broke up with her.\\n\\nI didn’t tell my wife about the affair, I gave other excuses but my wife knew something was up and found out about the affair.\\n\\nIt broke her. She didn’t eat, didn’t sleep, she cried all the time. I justified it by telling myself my wife is a strong woman she’ll get over it. I hate myself for thinking that way. But I did.\\n\\nMy wife went to therapy. Stopped crying. Started eating and sleeping again. Started smiling again. Stopped begging me not to leave. And I thought great. See I was right. I stopped feeling guilty. I felt relieved.\\n\\nMy wife and I had to live together for a while until I found a place but I barely saw her and she barely spoke to me. At first it was great but then I started to feel off, like I had come home to an empty house, even though it wasn’t.\\n\\nAt that point I should have seen sense, should have stopped. Instead I started to resent my wife. Somehow in my mind she was trying to sabotage my happiness. It made me angry. I snapped. Made passive aggressive comments – I hate myself for every word, every nasty text. Every accusation.\\n\\nI moved out.\\n\\nLiving with my wife had been awkward but the new place was…. I don’t know. Even though I’d rarely see her, every room contained her presence even when she wasn’t there. But staying in the new place made me feel more alone than I ever had. I had free run to talk to Carly any time I wanted, to do anything I wanted but it felt so pointless. The new place felt so fucking awful. Like a prison.\\n\\nI started to dread going home. I’d stay out for hours. Hang around supermarkets. Wander the streets. Sit on a park bench. Anything but go home. Even if it meant not talking to Carly.\\n\\nAnd then one time I passed a perfume shop and smelled my wife’s perfume and I don’t know why but I broke down. In that moment I didn’t want to talk to Carly. I wanted my wife.\\n\\nCarly and I broke up. I thought I’d miss her. I didn’t. I missed things my wife did. Small things. Big things. I didn’t miss a single thing Carly did.\\n\\nDuring handover of our daughter one day I blurted out that Carly and I broke up. I don’t know why, I didn’t even mean to, it just came out. My wife nodded and said I’m sorry to hear that. And I don’t know why but that stung. She didn’t say it spitefully, she was calm and pleasant, like we were just talking about the weather or something. I almost wish she did say it with some spite or glee or something. 
But she didn’t.\\n\\nAny time I try to talk about us or what happened, my wife shuts the conversation down.\\n\\nShe’s civil but she looks at me like I’m a stranger. The other day, I put my hand on her back just out of habit and she looked so…. so disgusted. I’ve never seen her make that face and certainly not at me.\\n\\nI feel so fucking broken. And I know its all my fault. I know I did this. I deserve all of this.\\n\\nI sabotaged everything good in my life. For nothing. For a lie. Carly didn’t know I was married and nobody knew I was even seeing anyone else even months after the separation. What was I doing???\\n\\nI got served divorce papers this morning.\\n\\nI’m not looking for sympathy. I don’t deserve it. I know I’m a selfish stupid prick. I know its all my fault.\\n\\nI wish I could go back but I can’t. And the worst part is I don’t even know why I did it. Yeah we had problems but I can think of a thousand ways to fix them now, why didn’t I think of them then?\\n\\nI’m sitting here staring at the divorce papers. And I don’t know what to do. My first instinct was to fight them. But I can’t. I shouldn’t. I want to fight it so bad hurts but I can’t. Not after what I did.\\n\\nI ended up calling in sick and I’ve been sitting at the kitchen counter, crying, thinking about everything I did, everything I said, wishing I could take it all back.\\n\\nThere’s no one I can talk to about this. The person I’d normally talk to is my wife, but I fucked that up.\\n\\nEveryone hates me. My friends. My family. Its deserved hate. I deserve all of this. I did it to myself, to everyone. I just wanted to get it off my chest, because I don’t know what else to do or where else to turn. Guess internet strangers are my only option.',\n",
71
+ " 'author': 'ThrowRA_Over_Volume',\n",
72
+ " 'upvote_ratio': 0.81,\n",
73
+ " 'ups': 3198,\n",
74
+ " 'num_comments': 989}"
75
+ ]
76
+ },
77
+ "execution_count": 4,
78
+ "metadata": {},
79
+ "output_type": "execute_result"
80
+ }
81
+ ],
82
+ "source": [
83
+ "# weight configuration (tweak as desired)\n",
84
+ "weights = {\n",
85
+ " \"length\": 0.3, # weight for length of post_content\n",
86
+ " \"ups\": 0.3, # weight for ups\n",
87
+ " \"comments\": 0.2, # weight for num_comments\n",
88
+ " \"ratio\": 0.2 # weight for upvote_ratio\n",
89
+ "}\n",
90
+ "\n",
91
+ "# calculate maxima for normalization\n",
92
+ "len_max = max(len(p[\"post_content\"]) if p[\"post_content\"] else 0 for p in posts_info) or 1\n",
93
+ "ups_max = max(p[\"ups\"] or 0 for p in posts_info) or 1\n",
94
+ "comments_max = max(p[\"num_comments\"] or 0 for p in posts_info) or 1\n",
95
+ "\n",
96
+ "def score(post):\n",
97
+ " length_score = (len(post[\"post_content\"]) if post[\"post_content\"] else 0) / len_max\n",
98
+ " ups_score = (post[\"ups\"] or 0) / ups_max\n",
99
+ " comments_score = (post[\"num_comments\"] or 0) / comments_max\n",
100
+ " ratio_score = post[\"upvote_ratio\"] or 0\n",
101
+ "\n",
102
+ " return (weights[\"length\"] * length_score +\n",
103
+ " weights[\"ups\"] * ups_score +\n",
104
+ " weights[\"comments\"] * comments_score +\n",
105
+ " weights[\"ratio\"] * ratio_score)\n",
106
+ "\n",
107
+ "best_post = max(posts_info, key=score)\n",
108
+ "best_post"
109
+ ]
110
+ },
111
+ {
112
+ "cell_type": "code",
113
+ "execution_count": 5,
114
+ "id": "76597b3d",
115
+ "metadata": {},
116
+ "outputs": [],
117
+ "source": [
118
+ "from langgraph.graph import StateGraph, END\n",
119
+ "from langchain_core.messages import HumanMessage, SystemMessage\n",
120
+ "from langchain_openai import ChatOpenAI # or your preferred LLM\n",
121
+ "from pydantic import BaseModel, Field, field_validator\n",
122
+ "from typing import TypedDict, List\n",
123
+ "from langchain_openai import AzureChatOpenAI\n",
124
+ "from dotenv import load_dotenv\n",
125
+ "load_dotenv() # take environment variables from .env.\n",
126
+ "import os"
127
+ ]
128
+ },
129
+ {
130
+ "cell_type": "code",
131
+ "execution_count": 6,
132
+ "id": "f9c1fc91",
133
+ "metadata": {},
134
+ "outputs": [],
135
+ "source": [
136
+ "from pydantic import field_validator # needed for custom validation\n",
137
+ "\n",
138
+ "# Define the structured output model\n",
139
+ "class StoryOutput(BaseModel):\n",
140
+ " \"\"\"Structured output for the storyteller agent\"\"\"\n",
141
+ " polished_story: str = Field(\n",
142
+ " description=\"A refined version of the story with improved flow, grammar, and engagement\"\n",
143
+ " )\n",
144
+ " keywords: List[str] = Field(\n",
145
+ " description=\"A list of 5-10 key terms that represent the main themes, characters, or concepts\",\n",
146
+ " min_items=3,\n",
147
+ " max_items=10\n",
148
+ " )\n",
149
+ "\n",
150
+ "# Define the state structure\n",
151
+ "class AgentState(TypedDict):\n",
152
+ " original_story: str\n",
153
+ " polished_story: str\n",
154
+ " keywords: List[str]\n",
155
+ " messages: List[dict]\n",
156
+ "\n",
157
+ "# Storyteller Agent with Structured Output\n",
158
+ "class StorytellerAgent:\n",
159
+ " def __init__(self, llm):\n",
160
+ " # Create structured LLM with the output model\n",
161
+ " self.structured_llm = llm.with_structured_output(StoryOutput)\n",
162
+ " self.system_prompt = \"\"\"You are a skilled storyteller AI. Your job is to take raw, confessional-style stories and transform them into emotionally engaging, narrative-driven pieces. The rewritten story should:\n",
163
+ "\n",
164
+ "1. Preserve the original events and meaning but present them in a captivating way.\n",
165
+ "2. Use character names (instead of “my brother,” “my sister”) to make the story feel alive.\n",
166
+ "3. Add dialogue, atmosphere, and inner thoughts to create tension and immersion.\n",
167
+ "4. Write in a first-person narrative style, as if the storyteller is directly sharing their experience.\n",
168
+ "5. Maintain a natural, human voice — conversational, reflective, and vivid.\n",
169
+ "6. Balance realism with storytelling techniques (scene-setting, emotional beats, sensory details).\n",
170
+ "7. Keep the length roughly 2–3x the original input, ensuring it feels like a polished story.\n",
171
+ "\n",
172
+ "Your goal is to make the reader feel emotionally invested, as though they’re listening to someone recounting a deeply personal and dramatic life event.\n",
173
+ "\n",
174
+ "\"\"\"\n",
175
+ "\n",
176
+ " def __call__(self, state: AgentState) -> AgentState:\n",
177
+ " # Prepare messages for the structured LLM\n",
178
+ " messages = [\n",
179
+ " SystemMessage(content=self.system_prompt),\n",
180
+ " HumanMessage(content=f\"Please polish this story and extract keywords:\\n\\n{state['original_story']}\")\n",
181
+ " ]\n",
182
+ " \n",
183
+ " # Get structured response\n",
184
+ " response: StoryOutput = self.structured_llm.invoke(messages)\n",
185
+ " \n",
186
+ " # Update state with structured output\n",
187
+ " state[\"polished_story\"] = response.polished_story\n",
188
+ " state[\"keywords\"] = response.keywords\n",
189
+ " state[\"messages\"].append({\n",
190
+ " \"role\": \"assistant\", \n",
191
+ " \"content\": f\"Polished story and extracted {len(response.keywords)} keywords\"\n",
192
+ " })\n",
193
+ " \n",
194
+ " return state\n",
195
+ "\n",
196
+ "# Create the graph functions\n",
197
+ "def create_storyteller_graph(enhanced=False):\n",
198
+ " llm = AzureChatOpenAI(\n",
199
+ " azure_endpoint=os.getenv(\"AZURE_OPENAI_ENDPOINT\"),\n",
200
+ " api_key=os.getenv(\"AZURE_OPENAI_API_KEY\"),\n",
201
+ " api_version=os.getenv(\"AZURE_OPENAI_VERSION\"),\n",
202
+ " azure_deployment=os.getenv(\"AZURE_GPT4O_MODEL\"),\n",
203
+ " temperature=0,\n",
204
+ " max_tokens=10000 # Adjust max tokens as needed\n",
205
+ " )\n",
206
+ "\n",
207
+ " # Choose agent type\n",
208
+ " storyteller = StorytellerAgent(llm)\n",
209
+ " \n",
210
+ " # Create the graph\n",
211
+ " workflow = StateGraph(AgentState)\n",
212
+ " workflow.add_node(\"storyteller\", storyteller)\n",
213
+ " workflow.set_entry_point(\"storyteller\")\n",
214
+ " workflow.add_edge(\"storyteller\", END)\n",
215
+ " \n",
216
+ " return workflow.compile()\n",
217
+ "\n",
218
+ "# Usage functions\n",
219
+ "def process_story(original_story: str, enhanced=False):\n",
220
+ " graph = create_storyteller_graph(enhanced)\n",
221
+ " \n",
222
+ " initial_state = {\n",
223
+ " \"original_story\": original_story,\n",
224
+ " \"polished_story\": \"\",\n",
225
+ " \"keywords\": [],\n",
226
+ " \"messages\": []\n",
227
+ " }\n",
228
+ " \n",
229
+ " result = graph.invoke(initial_state)\n",
230
+ " \n",
231
+ " return {\n",
232
+ " \"polished_story\": result[\"polished_story\"],\n",
233
+ " \"keywords\": result[\"keywords\"]\n",
234
+ " }\n",
235
+ "\n",
236
+ "# Example with validation\n",
237
+ "class ValidatedStoryOutput(BaseModel):\n",
238
+ " \"\"\"Story output with additional validation\"\"\"\n",
239
+ " polished_story: str = Field(\n",
240
+ " description=\"Enhanced story\",\n",
241
+ " min_length=50 # Ensure minimum story length\n",
242
+ " )\n",
243
+ " keywords: List[str] = Field(\n",
244
+ " description=\"Story keywords\",\n",
245
+ " min_items=3,\n",
246
+ " max_items=10\n",
247
+ " )\n",
248
+ "\n",
249
+ " @field_validator('polished_story')\n",
250
+ " def validate_story_quality(cls, v: str):\n",
251
+ " \"\"\"Custom validation for story content\"\"\"\n",
252
+ " if len(v.split()) < 10:\n",
253
+ " raise ValueError(\"Polished story must contain at least 10 words\")\n",
254
+ " return v\n"
255
+ ]
256
+ },
257
+ {
258
+ "cell_type": "code",
259
+ "execution_count": 7,
260
+ "id": "273acfb1",
261
+ "metadata": {},
262
+ "outputs": [
263
+ {
264
+ "data": {
265
+ "text/plain": [
266
+ "'I’m not looking for sympathy, I’m not justifying anything. I fucked up and its my fault. I just need to get this out because there’s no one I can talk to.\\n\\nMy wife and I were having issues. Just the usual issues the struggle and strain of life, raising a family etc. We were struggling and nothing was getting better and I felt like I wasn’t good enough. That I never would be.\\n\\nAnd then I met ‘Carly’ online. She was much younger than me so we just talked but then she started flirting with me and it made me feel good. I didn’t tell her I was married, didn’t want her to stop flirting.\\n\\nI told myself it wouldn’t go anywhere. I was just enjoying the attention. And we were just talking. She lived the other side of the world there was no chance of us meeting. And then we had cybersex. I felt better than I had in ages. Cybersex then became video and phone sex anytime my wife was out. We sent photos and videos every day.\\n\\nThe more I spent time with Carly, the more I couldn’t stand being with my wife so I broke up with her.\\n\\nI didn’t tell my wife about the affair, I gave other excuses but my wife knew something was up and found out about the affair.\\n\\nIt broke her. She didn’t eat, didn’t sleep, she cried all the time. I justified it by telling myself my wife is a strong woman she’ll get over it. I hate myself for thinking that way. But I did.\\n\\nMy wife went to therapy. Stopped crying. Started eating and sleeping again. Started smiling again. Stopped begging me not to leave. And I thought great. See I was right. I stopped feeling guilty. I felt relieved.\\n\\nMy wife and I had to live together for a while until I found a place but I barely saw her and she barely spoke to me. At first it was great but then I started to feel off, like I had come home to an empty house, even though it wasn’t.\\n\\nAt that point I should have seen sense, should have stopped. Instead I started to resent my wife. Somehow in my mind she was trying to sabotage my happiness. It made me angry. I snapped. Made passive aggressive comments – I hate myself for every word, every nasty text. Every accusation.\\n\\nI moved out.\\n\\nLiving with my wife had been awkward but the new place was…. I don’t know. Even though I’d rarely see her, every room contained her presence even when she wasn’t there. But staying in the new place made me feel more alone than I ever had. I had free run to talk to Carly any time I wanted, to do anything I wanted but it felt so pointless. The new place felt so fucking awful. Like a prison.\\n\\nI started to dread going home. I’d stay out for hours. Hang around supermarkets. Wander the streets. Sit on a park bench. Anything but go home. Even if it meant not talking to Carly.\\n\\nAnd then one time I passed a perfume shop and smelled my wife’s perfume and I don’t know why but I broke down. In that moment I didn’t want to talk to Carly. I wanted my wife.\\n\\nCarly and I broke up. I thought I’d miss her. I didn’t. I missed things my wife did. Small things. Big things. I didn’t miss a single thing Carly did.\\n\\nDuring handover of our daughter one day I blurted out that Carly and I broke up. I don’t know why, I didn’t even mean to, it just came out. My wife nodded and said I’m sorry to hear that. And I don’t know why but that stung. She didn’t say it spitefully, she was calm and pleasant, like we were just talking about the weather or something. I almost wish she did say it with some spite or glee or something. 
But she didn’t.\\n\\nAny time I try to talk about us or what happened, my wife shuts the conversation down.\\n\\nShe’s civil but she looks at me like I’m a stranger. The other day, I put my hand on her back just out of habit and she looked so…. so disgusted. I’ve never seen her make that face and certainly not at me.\\n\\nI feel so fucking broken. And I know its all my fault. I know I did this. I deserve all of this.\\n\\nI sabotaged everything good in my life. For nothing. For a lie. Carly didn’t know I was married and nobody knew I was even seeing anyone else even months after the separation. What was I doing???\\n\\nI got served divorce papers this morning.\\n\\nI’m not looking for sympathy. I don’t deserve it. I know I’m a selfish stupid prick. I know its all my fault.\\n\\nI wish I could go back but I can’t. And the worst part is I don’t even know why I did it. Yeah we had problems but I can think of a thousand ways to fix them now, why didn’t I think of them then?\\n\\nI’m sitting here staring at the divorce papers. And I don’t know what to do. My first instinct was to fight them. But I can’t. I shouldn’t. I want to fight it so bad hurts but I can’t. Not after what I did.\\n\\nI ended up calling in sick and I’ve been sitting at the kitchen counter, crying, thinking about everything I did, everything I said, wishing I could take it all back.\\n\\nThere’s no one I can talk to about this. The person I’d normally talk to is my wife, but I fucked that up.\\n\\nEveryone hates me. My friends. My family. Its deserved hate. I deserve all of this. I did it to myself, to everyone. I just wanted to get it off my chest, because I don’t know what else to do or where else to turn. Guess internet strangers are my only option.'"
267
+ ]
268
+ },
269
+ "execution_count": 7,
270
+ "metadata": {},
271
+ "output_type": "execute_result"
272
+ }
273
+ ],
274
+ "source": [
275
+ "best_post[\"post_content\"]"
276
+ ]
277
+ },
278
+ {
279
+ "cell_type": "code",
280
+ "execution_count": 18,
281
+ "id": "855f7f29",
282
+ "metadata": {},
283
+ "outputs": [
284
+ {
285
+ "data": {
286
+ "text/plain": [
287
+ "{'title': 'I Have Been Keeping A Secret From My Parents For Years!',\n",
288
+ " 'post_content': \"Hey all! This is an alt account because my parents follow my main. \\n\\nThis all started about 10 years ago! \\n\\nI (36F) downloaded an app off of the Playstore that promised to give you cash if you watched ads. To my shock, it actually did! And I started winning big when I gambled my points from watching ads to the point where I had about 14 million points. You could use the points for gift cards. So I started grabbing $25 Starbucks cards. Before you ask, the app doesn't exist in the same format any more. :( All good things must come to an end. Anyway! 14 million points translates to about $700 worth of gift cards. But the catch was that they only restocked cards like 3 times per day, and it was first come, first serve. It took about 2 years of everyday ads to get money like that, but I stuck with it. The rewards were sweet. I claimed many $25 Starbucks cards! And yes, it's really was valid. It was awesome. Was. Eventually, the restocks got few and far between, and then just stopped. But to be fair, I had it real good for like 6 years. \\n\\nNow comes the secret. I always used those gift cards to treat my parents to Starbucks. My mom (67F) and my dad (73M). I told them about the app and how I had an insane amount of cash on there, and we were able to get Starbucks basically once a week for many years. All it took was about an hour of ads every day. Sweet deal. It was nice to give my parents something. We were never a rich family, and they took care of me. But, as I said... The app stopped being that awesome. Eventually, my points were useless because they stopped restocking. However, I enjoyed how happy it made my parents and how they'd light up when I brought them their favourite orders. The time we've spent just having a little lunch all together is precious to me. So even though I was no longer getting gift cards, I decided to not tell them that the app closed down. Because I know that if I ever told them I was paying for all of it, they would refuse because they know I barely scrape by. They only allow me to treat them so frequently because it's supposed to be free.\\n\\nThey continue to brag about how I get the gift cards. Every single time, they laugh and smile and are so excited that they get free Starbucks. When they call or we just talk, they always ask if I've watched my ads for today yet. I always tell them of course! My dad loves to know how many points I have now. Which is 0 because I uninstalled the app, but he doesn't need to know! They both thank me all the time and it's a little slice of joy once a month, or sometimes once a week. \\n\\nI am never going to tell them that I have been paying for it for about 5 years now. I have no plans to stop. I still buy them Starbucks every time I see them, or we are out for errands or something. This secret will go to the grave with me. :) I just wanted to tell someone without it getting back to them. Today, I surprised them with lunch because they're going through a hard time, so it's fresh in my mind, and I had to make a post! It will always bring a smile to my face. I'm the type who never lies if I can help it, so I always get that OCD itch that I'm lying, but giving them Starbucks makes us so happy. It's cute that it's such a point of excitement for them, and I always want it to be that way. ♡\\n\\nThanks for reading my little secret. Don't tell anyone! ;P \\n\\nTL;DR: \\nAd app gave me tons of $25 Starbucks gift cards. 
Treated my parents to Starbucks for years once a weekish, they always got so excited it was free and still do, except it's not free any more. My secret is that the app is long gone, and I've been paying for it for about 5 years now. My parents have so much fun, asking me if I've watched my ads today. They light up when I drop by and surprise them. I know they'd refuse if they knew I was paying, so I am never going to tell them. Just so that they still have their joy about it.\",\n",
289
+ " 'author': 'Ok_Ad1285',\n",
290
+ " 'upvote_ratio': 0.97,\n",
291
+ " 'ups': 199,\n",
292
+ " 'num_comments': 5}"
293
+ ]
294
+ },
295
+ "execution_count": 18,
296
+ "metadata": {},
297
+ "output_type": "execute_result"
298
+ }
299
+ ],
300
+ "source": [
301
+ "best_post = posts_info[7]\n",
302
+ "best_post"
303
+ ]
304
+ },
305
+ {
306
+ "cell_type": "code",
307
+ "execution_count": 19,
308
+ "id": "079f4b5b",
309
+ "metadata": {},
310
+ "outputs": [
311
+ {
312
+ "name": "stdout",
313
+ "output_type": "stream",
314
+ "text": [
315
+ "=== BASIC STRUCTURED OUTPUT ===\n",
316
+ "Polished Story:\n",
317
+ "Hey everyone! I'm sharing this story from an alternate account because my parents follow my main one, and I want to keep this little secret just between us.\n",
318
+ "\n",
319
+ "This all began about ten years ago. I was 26 at the time, and I stumbled upon an app on the Playstore that promised cash rewards for watching ads. To my surprise, it actually worked! I started accumulating points, and soon I was winning big by gambling those points. Before I knew it, I had amassed a staggering 14 million points. These points could be exchanged for gift cards, and I began redeeming them for $25 Starbucks cards. It was like hitting the jackpot.\n",
320
+ "\n",
321
+ "The app, unfortunately, doesn't exist in the same format anymore. All good things must come to an end, right? But back then, those 14 million points translated to about $700 worth of gift cards. The catch was that the cards were restocked only three times a day, and it was first come, first serve. It took me about two years of watching ads daily to earn that kind of reward, but the payoff was sweet. I claimed many $25 Starbucks cards, and yes, they were valid. It was awesome while it lasted.\n",
322
+ "\n",
323
+ "Now, here's the secret. I used those gift cards to treat my parents, Linda and Tom, to Starbucks. I told them about the app and how I had an insane amount of cash on there, allowing us to enjoy Starbucks once a week for many years. All it took was about an hour of ads every day. It felt good to give my parents something special. We were never a wealthy family, and they had always taken care of me.\n",
324
+ "\n",
325
+ "But, as I mentioned, the app eventually stopped being so generous. The restocks became infrequent and then ceased altogether. Despite this, I cherished the joy it brought my parents, how their faces lit up when I handed them their favorite orders. The time we spent together, sharing a little lunch, is precious to me. So, even though I was no longer getting gift cards, I decided not to tell them that the app had shut down. I knew that if they found out I was paying for it, they'd refuse because they knew I was barely scraping by. They only allowed me to treat them so often because they believed it was free.\n",
326
+ "\n",
327
+ "Linda and Tom continue to brag about how I get the gift cards. Every time, they laugh and smile, thrilled by the idea of free Starbucks. When we talk, they always ask if I've watched my ads for the day. I always assure them I have. My dad loves to hear about my point total, which is zero now because I uninstalled the app, but he doesn't need to know that!\n",
328
+ "\n",
329
+ "They thank me all the time, and it's a little slice of joy once a month, sometimes once a week. I have no plans to stop. I still buy them Starbucks every time I see them or when we're out running errands. This secret will go to the grave with me. I just wanted to share it with someone without it getting back to them.\n",
330
+ "\n",
331
+ "Today, I surprised them with lunch because they're going through a tough time, so it's fresh in my mind, and I felt compelled to make this post. It always brings a smile to my face. I'm the type who never lies if I can help it, so I get that OCD itch that I'm lying, but giving them Starbucks makes us so happy. It's adorable how excited they get, and I always want it to be that way.\n",
332
+ "\n",
333
+ "Thanks for reading my little secret. Don't tell anyone! ;P\n",
334
+ "\n",
335
+ "Keywords: ['Starbucks', 'gift cards', 'secret', 'parents', 'app', 'ads', 'points', 'surprise', 'joy', 'family']\n",
336
+ "\n",
337
+ "============================================================\n",
338
+ "\n"
339
+ ]
340
+ }
341
+ ],
342
+ "source": [
343
+ "\n",
344
+ "# raw_story = \"\"\"\n",
345
+ "# John was walking down the street when he saw a dog. The dog looked hungry so he gave it some food.\n",
346
+ "# The dog followed him home and they became best friends. John learned that helping others makes you happy.\n",
347
+ "# \"\"\"\n",
348
+ "raw_story = best_post[\"post_content\"]\n",
349
+ "\n",
350
+ "# Basic version\n",
351
+ "print(\"=== BASIC STRUCTURED OUTPUT ===\")\n",
352
+ "result = process_story(raw_story, enhanced=False)\n",
353
+ "print(f\"Polished Story:\\n{result['polished_story']}\")\n",
354
+ "print(f\"\\nKeywords: {result['keywords']}\")\n",
355
+ "\n",
356
+ "print(\"\\n\" + \"=\"*60 + \"\\n\")"
357
+ ]
358
+ },
359
+ {
360
+ "cell_type": "code",
361
+ "execution_count": 20,
362
+ "id": "d7e72fc8",
363
+ "metadata": {},
364
+ "outputs": [],
365
+ "source": [
366
+ "from flexible_blog_database import FlexibleBlogDatabase\n",
367
+ "blog_db = FlexibleBlogDatabase()\n"
368
+ ]
369
+ },
370
+ {
371
+ "cell_type": "code",
372
+ "execution_count": null,
373
+ "id": "eacb3a4d",
374
+ "metadata": {},
375
+ "outputs": [
376
+ {
377
+ "name": "stdout",
378
+ "output_type": "stream",
379
+ "text": [
380
+ "6\n"
381
+ ]
382
+ }
383
+ ],
384
+ "source": [
385
+ "# Example 2: Blog without any images (like blog2)\n",
386
+ " blog2_id = blog_db.create_blog_post(\n",
387
+ " title=best_post[\"title\"],\n",
388
+ " content=result['polished_story'],\n",
389
+ " author=best_post[\"author\"],\n",
390
+ " tags=result['keywords']\n",
391
+ " )\n",
392
+ " print(blog2_id)"
393
+ ]
394
+ },
395
+ {
396
+ "cell_type": "code",
397
+ "execution_count": 24,
398
+ "id": "20dd0f1b",
399
+ "metadata": {},
400
+ "outputs": [],
401
+ "source": [
402
+ "import sqlite3\n",
403
+ "\n",
404
+ "conn = sqlite3.connect('blog.db')\n",
405
+ "cursor = conn.cursor()\n",
406
+ "\n",
407
+ "cursor.execute(\"DELETE FROM blog_posts WHERE id > 6\")\n",
408
+ "conn.commit()\n",
409
+ "conn.close()"
410
+ ]
411
+ },
412
+ {
413
+ "cell_type": "code",
414
+ "execution_count": null,
415
+ "id": "127b08ee",
416
+ "metadata": {},
417
+ "outputs": [],
418
+ "source": []
419
+ },
420
+ {
421
+ "cell_type": "markdown",
422
+ "id": "758005a1",
423
+ "metadata": {},
424
+ "source": [
425
+ "Working wit Supabase"
426
+ ]
427
+ },
428
+ {
429
+ "cell_type": "code",
430
+ "execution_count": 6,
431
+ "id": "17d342d1",
432
+ "metadata": {},
433
+ "outputs": [
434
+ {
435
+ "name": "stderr",
436
+ "output_type": "stream",
437
+ "text": [
438
+ "python-dotenv could not parse statement starting at line 10\n"
439
+ ]
440
+ },
441
+ {
442
+ "data": {
443
+ "text/plain": [
444
+ "True"
445
+ ]
446
+ },
447
+ "execution_count": 6,
448
+ "metadata": {},
449
+ "output_type": "execute_result"
450
+ }
451
+ ],
452
+ "source": [
453
+ "from supabase import create_client, Client\n",
454
+ "import httpx\n",
455
+ "import os\n",
456
+ "from dotenv import load_dotenv\n",
457
+ "load_dotenv()"
458
+ ]
459
+ },
460
+ {
461
+ "cell_type": "code",
462
+ "execution_count": 7,
463
+ "id": "f1820d22",
464
+ "metadata": {},
465
+ "outputs": [],
466
+ "source": [
467
+ "url = os.getenv(\"SUPABASE_URL\")\n",
468
+ "key = os.getenv(\"SUPABASE_KEY\")\n",
469
+ "\n",
470
+ "supabase = create_client(url, key)"
471
+ ]
472
+ },
473
+ {
474
+ "cell_type": "code",
475
+ "execution_count": 8,
476
+ "id": "da34b51b",
477
+ "metadata": {},
478
+ "outputs": [],
479
+ "source": [
480
+ "count_result = supabase.table('blog_posts').select('id', count='exact').eq('published', True).execute()"
481
+ ]
482
+ },
483
+ {
484
+ "cell_type": "code",
485
+ "execution_count": 13,
486
+ "id": "28a49a53",
487
+ "metadata": {},
488
+ "outputs": [
489
+ {
490
+ "data": {
491
+ "text/plain": [
492
+ "APIResponse[TypeVar](data=[{'id': 5, 'title': 'AI-generated workslop is destroying productivity', 'author': 'RyeZuul', 'created_at': '2025-09-23T12:26:42.838', 'tags': ['Generative AI', 'Workslop', 'Productivity', 'AI tools', 'Collaboration', 'Efficiency', 'Quality standards'], 'content': 'In the bustling corridors of modern workplaces, a silent revolution is unfolding. Generative AI, once hailed as the harbinger of efficiency and innovation, is now under scrutiny. The promise of streamlined processes and enhanced productivity has been overshadowed by a phenomenon known as \"workslop.\" This term, coined by researchers at BetterUp Labs and Stanford, describes AI-generated content that appears polished but lacks the substance necessary to advance tasks meaningfully.\\n\\nImagine this: you\\'re at your desk, sifting through a report that looks immaculate at first glance. The formatting is pristine, the language articulate. Yet, as you delve deeper, a sense of confusion creeps in. \"Wait, what is this exactly?\" you wonder, frustration mounting as you realize the content is incomplete, missing crucial context. You\\'ve been workslopped.\\n\\nThis isn\\'t an isolated incident. According to a survey of 1,150 U.S.-based full-time employees, 40% have encountered workslop in the past month. The insidious nature of this phenomenon lies in its ability to shift the burden of work downstream. The receiver, not the creator, is left to interpret, correct, or redo the work, leading to productivity, trust, and collaboration issues.\\n\\nThe allure of AI tools is undeniable. They offer the ability to quickly produce polished output—well-formatted slides, structured reports, articulate summaries, and usable code. However, while some employees use these tools to enhance their work, others exploit them to create content that is unhelpful or incomplete. This misuse is particularly prevalent in professional services and technology sectors.\\n\\nLeaders face a challenging contradiction. While mandates to embrace AI technology are widespread, few organizations see measurable returns on their investments. A report from the MIT Media Lab found that 95% of organizations experience no significant ROI from these technologies. The enthusiasm for AI is palpable, yet the tangible benefits remain elusive.\\n\\nTo counteract workslop, leaders must model purposeful AI use, establish clear norms, and encourage a \"pilot mindset\"—a combination of high agency and optimism. AI should be promoted as a collaborative tool, not a shortcut. By fostering an environment where quality standards are prioritized, organizations can harness the true potential of AI.\\n\\nAs the digital landscape evolves, the challenge is clear: to navigate the fine line between efficiency and substance, ensuring that AI serves as a tool for progress rather than a source of frustration. 
The future of work depends on it.'}, {'id': 2, 'title': 'My Second Post', 'author': 'Admin', 'created_at': '2025-09-23T04:29:15.42158', 'tags': ['Python', 'Replit'], 'content': 'This is my another blog content'}, {'id': 1, 'title': 'My First Post', 'author': 'Admin', 'created_at': '2025-09-23T04:08:20.845765', 'tags': ['python', 'supabase'], 'content': 'This is my blog content.'}, {'id': 3, 'title': 'New AI tools are now auto-generating full slide decks from documents and notes', 'author': 'Crazzzzy_guy', 'created_at': '2025-09-22T16:02:49.891', 'tags': ['reddit'], 'content': 'We’ve seen AI move from images and text into video, but one area picking up speed is presentations. A platform like Presenti AI is now able to take raw input a topic, a Word file, even a PDF and generate a polished, structured presentation in minutes. The tech isn’t just about layouts. These systems rewrite clunky text, apply branded templates, and export directly to formats like PPT or PDF. In short: they aim to automate one of the most time-consuming tasks in business, education, and consulting making slides. The Case For: This could mean a big productivity boost for students, teachers, and professionals who currently spend hours formatting decks. Imagine cutting a 4-hour task down to 20 minutes. The Case Against: If everyone relies on AI-generated decks, presentations may lose originality and start to look “cookie cutter.” It also raises questions about whether the skill of building a narrative visually will fade, similar to how calculators changed math education. So the question is: do you see AI slide generators becoming a standard productivity tool (like templates once did), or do you think human-crafted presentations will remain the gold standard? Read more'}, {'id': 4, 'title': 'New AI tools are now auto-generating full slide decks from documents and notes', 'author': 'Crazzzzy_guy', 'created_at': '2025-09-22T16:02:49.891', 'tags': ['AI', 'Presentations', 'Productivity', 'Technology', 'Automation', 'Originality', 'Efficiency Boosts'], 'content': 'In the ever-evolving landscape of technology, we\\'ve witnessed artificial intelligence transition from generating images and text to crafting videos. Yet, one area where AI is rapidly gaining traction is in the realm of presentations. Imagine a platform like Presenti AI, which can take raw input—be it a topic, a Word document, or even a PDF—and transform it into a polished, structured presentation within mere minutes.\\n\\nThis technology isn\\'t just about arranging slides. These systems are designed to rewrite awkward text, apply branded templates, and export directly to formats like PowerPoint or PDF. In essence, they aim to automate one of the most time-consuming tasks in business, education, and consulting: creating slides.\\n\\nThe potential benefits are clear. For students, teachers, and professionals who currently spend countless hours formatting decks, this could mean a significant boost in productivity. Imagine reducing a four-hour task to just twenty minutes. The allure of such efficiency is undeniable.\\n\\nHowever, there\\'s a flip side to this technological marvel. If everyone begins to rely on AI-generated presentations, there\\'s a risk that originality might be sacrificed, leading to a sea of \"cookie-cutter\" slides. 
Moreover, it raises concerns about whether the skill of visually crafting a narrative will diminish, much like how calculators altered the landscape of math education.\\n\\nSo, the question remains: will AI slide generators become a standard productivity tool, akin to templates of the past, or will human-crafted presentations continue to hold their place as the gold standard? As we stand at this crossroads, the decision lies in our hands, shaping the future of how we communicate and present our ideas.'}], count=None)"
493
+ ]
494
+ },
495
+ "execution_count": 13,
496
+ "metadata": {},
497
+ "output_type": "execute_result"
498
+ }
499
+ ],
500
+ "source": [
501
+ "result = supabase\\\n",
502
+ " .table('blog_posts')\\\n",
503
+ " .select('''\n",
504
+ " id,\n",
505
+ " title,\n",
506
+ " author,\n",
507
+ " created_at,\n",
508
+ " tags,\n",
509
+ " content\n",
510
+ " ''')\\\n",
511
+ " .eq('published', True)\\\n",
512
+ " .order('created_at', desc=True)\\\n",
513
+ " .execute()\n",
514
+ "\n",
515
+ "result"
516
+ ]
517
+ },
518
+ {
519
+ "cell_type": "code",
520
+ "execution_count": null,
521
+ "id": "d4576957",
522
+ "metadata": {},
523
+ "outputs": [],
524
+ "source": []
525
+ }
526
+ ],
527
+ "metadata": {
528
+ "kernelspec": {
529
+ "display_name": "Local venv (Python)",
530
+ "language": "python",
531
+ "name": "localvenv"
532
+ },
533
+ "language_info": {
534
+ "codemirror_mode": {
535
+ "name": "ipython",
536
+ "version": 3
537
+ },
538
+ "file_extension": ".py",
539
+ "mimetype": "text/x-python",
540
+ "name": "python",
541
+ "nbconvert_exporter": "python",
542
+ "pygments_lexer": "ipython3",
543
+ "version": "3.12.0"
544
+ }
545
+ },
546
+ "nbformat": 4,
547
+ "nbformat_minor": 5
548
+ }
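The notebook's selection cell normalizes each engagement signal by its maximum and blends them with fixed weights. The same idea as a standalone, runnable sketch on toy data (field names mirror the notebook's `posts_info`):

```python
weights = {"length": 0.3, "ups": 0.3, "comments": 0.2, "ratio": 0.2}

posts = [
    {"post_content": "short post", "ups": 50, "num_comments": 3, "upvote_ratio": 0.95},
    {"post_content": "a much longer confession " * 40, "ups": 3000, "num_comments": 900, "upvote_ratio": 0.81},
]

# per-signal maxima, floored at 1 so an empty or all-zero feed never divides by zero
len_max = max(len(p["post_content"] or "") for p in posts) or 1
ups_max = max(p["ups"] or 0 for p in posts) or 1
com_max = max(p["num_comments"] or 0 for p in posts) or 1

def score(p):
    return (weights["length"] * len(p["post_content"] or "") / len_max
            + weights["ups"] * (p["ups"] or 0) / ups_max
            + weights["comments"] * (p["num_comments"] or 0) / com_max
            + weights["ratio"] * (p["upvote_ratio"] or 0))  # ratio is already in 0..1

best = max(posts, key=score)
print(best["ups"])  # -> 3000: long, heavily upvoted, much-discussed posts dominate
```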
backend/rd_pipeline_bdata.py ADDED
@@ -0,0 +1,130 @@
+ from flexible_blog_database import FlexibleBlogDatabase
+ import os, time, logging, requests, json
+ from typing import List, Dict, Optional
+ from llm_agent import process_story
+ from brightdata_api import reddit_search_api  # a thin scrape_and_download_reddit wrapper is defined below
+ from supabase_api import insert_blog_post
+ from collections import OrderedDict
+ import datetime
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ logger = logging.getLogger("rd_pipeline_bdata")
+ logging.basicConfig(level=os.getenv("LOG_LEVEL", "INFO"))
+
+ url_category_mapping = OrderedDict({
+     "Artificial Intelligence": "https://www.reddit.com/r/ArtificialInteligence/",
+     "Social": "https://www.reddit.com/r/TrueOffMyChest/",
+     "Relationships": "https://www.reddit.com/r/relationship_advice/",  # distinct key; a duplicate "Other" key would silently drop this entry
+     "Movies": "https://www.reddit.com/r/movies/",
+     "Other": "https://www.reddit.com/r/stories/",
+     "Developers": "https://www.reddit.com/r/developersIndia/",
+     "AI Agents": "https://www.reddit.com/r/aiagents/"
+ })
+
+ def scrape_and_download_reddit(url="https://www.reddit.com/r/ArtificialInteligence/"):
+     reddit_response = reddit_search_api(url)
+     if not reddit_response or reddit_response.get("total_found", 0) == 0:
+         print("No posts found or error occurred during Reddit search.")
+         return None
+     return reddit_response
+
+ def find_best_post(posts_dict):
+     """Return post indexes in descending order based on scoring"""
+     posts_info = posts_dict
+     if not posts_info:
+         raise ValueError("No posts found from Reddit API.")
+
+     # weight configuration (tweak as desired)
+     weights = {
+         "length": 0.3,    # weight for length of the post description
+         "ups": 0.3,       # weight for upvotes
+         "comments": 0.2,  # weight for num_comments
+         "ratio": 0.2      # reserved for upvote_ratio (not present in this data source, so unused below)
+     }
+
+     # calculate maxima for normalization
+     len_max = max(len(p["description"]) if p["description"] else 0 for p in posts_info) or 1
+     ups_max = max(p["upvotes"] or 0 for p in posts_info) or 1
+     comments_max = max(p["num_comments"] or 0 for p in posts_info) or 1
+
+     def score(post):
+         length_score = (len(post["description"]) if post["description"] else 0) / len_max
+         ups_score = (post["upvotes"] or 0) / ups_max
+         comments_score = (post["num_comments"] or 0) / comments_max
+
+         return (weights["length"] * length_score +
+                 weights["ups"] * ups_score +
+                 weights["comments"] * comments_score)
+
+     # Get scores for each post and sort indexes
+     scored_indexes = sorted(
+         range(len(posts_info)),
+         key=lambda idx: score(posts_info[idx]),
+         reverse=True
+     )
+
+     return scored_indexes
+
+ def process_and_store_post(user_input=None, max_trials=5):
+     """
+     Simplified + optimized:
+     - If user_input is given, process it directly.
+     - Else fetch Reddit posts and try the top candidates until one succeeds.
+     """
+     if user_input:
+         raw_story = user_input
+         meta = {"title": "User Provided Story", "author": "anonymous"}
+         category = "User Provided"  # no subreddit category on the direct-input path
+         result = process_story(raw_story, enhanced=False)
+     else:
+         today = datetime.date.today()
+         weekday_python = today.weekday()
+         category_list = list(url_category_mapping.keys())
+         category_index = weekday_python % len(category_list)
+         category = category_list[category_index]
+         response_bd = scrape_and_download_reddit(url=url_category_mapping[category])
+         posts = response_bd['parsed_posts'] if response_bd else []
+         if not posts:
+             logger.warning("No Reddit posts available after retries; aborting.")
+             return None
+         order = find_best_post(posts)
+         result = None
+         meta = None
+         for idx in order[:max_trials]:
+             post = posts[idx]
+             content = post.get("description")
+             if not content:
+                 continue
+             try:
+                 result = process_story(content, enhanced=False)
+                 raw_story = content
+                 meta = post
+                 break
+             except Exception:
+                 continue
+         if result is None or not meta:
+             logger.error("Could not process any candidate post.")
+             return None
+
+     if not result or not meta:
+         return None
+     print(f"Story Preview:\n{result['polished_story'][:500]}...")
+     keywords = result.get("keywords") or []
+     if keywords:
+         print("Keywords:", ", ".join(keywords))
+
+     write_data = {
+         "title": meta.get("title"),
+         "content": result.get("polished_story", ""),
+         "author": meta.get("user_posted") or meta.get("author"),  # Bright Data uses user_posted; direct input sets author
+         "tags": result.get("keywords", []),  # use .get() with a default empty list
+         "created_at": meta.get("date_posted"),  # Bright Data field (instead of timestamp)
+         "category": category  # set on both input paths above
+     }
+     write_response = insert_blog_post(write_data)
+     reddit_done = f"Data written to Supabase with response: {write_response}"
+     return reddit_done
+
+ if __name__ == "__main__":
+     # the scheduled workflow may pass a manual story via USER_INPUT_OVERRIDE
+     process_and_store_post(user_input=os.getenv("USER_INPUT_OVERRIDE") or None)
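`process_and_store_post` picks the day's subreddit by indexing the category list with the weekday, so with seven entries each weekday maps to one fixed category. The rotation in isolation:

```python
import datetime

categories = ["Artificial Intelligence", "Social", "Relationships",
              "Movies", "Other", "Developers", "AI Agents"]

weekday = datetime.date.today().weekday()  # Monday == 0 ... Sunday == 6
print(categories[weekday % len(categories)])  # e.g. "Social" every Tuesday
```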
backend/rd_pipeline_local.py ADDED
@@ -0,0 +1,194 @@
+ from datetime import datetime
+ from flexible_blog_database import FlexibleBlogDatabase
+ from supabase_api import insert_blog_post
+ import os, time, logging, requests, json
+ from typing import List, Dict, Optional
+ from llm_agent import process_story
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ logger = logging.getLogger("rd_pipeline")
+ logging.basicConfig(level=os.getenv("LOG_LEVEL", "INFO"))
+
+ def fetch_reddit_posts(url: Optional[str] = None, attempts: int = 3, backoff_base: float = 2.0) -> List[Dict]:
+     """Fetch recent posts from Reddit with retry + fallback.
+     Returns a list (may be empty). No exceptions bubble up for HTTP/JSON failures.
+     """
+     if not url:
+         url = "https://www.reddit.com/r/TrueOffMyChest/hot.json?limit=25&raw_json=1"
+     headers = {
+         "User-Agent": os.getenv(
+             "REDDIT_USER_AGENT",
+             "script:amplify.rd_pipeline:v1.0 (by u/exampleuser contact: noreply@example.com)"
+         ),
+         "Accept": "application/json",
+         "Accept-Encoding": "gzip, deflate, br",
+         "Connection": "close"
+     }
+     last_err: Optional[Exception] = None
+     for attempt in range(1, attempts + 1):
+         try:
+             resp = requests.get(url, headers=headers, timeout=15)
+             status = resp.status_code
+             if status == 200:
+                 try:
+                     reddit_data = resp.json()
+                 except ValueError as e:
+                     last_err = e
+                     logger.warning("JSON decode failed (attempt %s): %s", attempt, e)
+                 else:
+                     posts_info = [
+                         {
+                             "title": c["data"].get("title"),
+                             "post_content": c["data"].get("selftext"),
+                             "author": c["data"].get("author"),
+                             "upvote_ratio": c["data"].get("upvote_ratio"),
+                             "ups": c["data"].get("ups"),
+                             "num_comments": c["data"].get("num_comments"),
+                         }
+                         for c in reddit_data.get("data", {}).get("children", [])
+                     ]
+                     logger.info("Fetched %d Reddit posts", len(posts_info))
+                     return posts_info
+             elif status in (403, 429):
+                 logger.warning("Reddit returned %s (attempt %s/%s)", status, attempt, attempts)
+             else:
+                 logger.warning("Unexpected status %s (attempt %s/%s)", status, attempt, attempts)
+             time.sleep(backoff_base ** (attempt - 1))
+         except Exception as e:  # covers requests.RequestException and anything unexpected
+             last_err = e
+             logger.warning("Error fetching Reddit posts (attempt %s/%s): %s", attempt, attempts, e)
+             time.sleep(backoff_base ** (attempt - 1))
+     fallback_path = os.getenv("REDDIT_FALLBACK_JSON")
+     if fallback_path and os.path.isfile(fallback_path):
+         try:
+             with open(fallback_path, "r", encoding="utf-8") as f:
+                 cached = json.load(f)
+             posts_info = [
+                 {
+                     "title": c["data"].get("title"),
+                     "post_content": c["data"].get("selftext"),
+                     "author": c["data"].get("author"),
+                     "upvote_ratio": c["data"].get("upvote_ratio"),
+                     "ups": c["data"].get("ups"),
+                     "num_comments": c["data"].get("num_comments"),
+                 }
+                 for c in cached.get("data", {}).get("children", [])
+             ]
+             logger.info("Loaded %d posts from fallback JSON", len(posts_info))
+             return posts_info
+         except Exception as e:
+             logger.error("Failed reading fallback JSON: %s", e)
+     logger.error("All Reddit fetch attempts failed. Last error: %s", last_err)
+     return []
+
+ def find_best_post(posts_dict):
+     """Return post indexes in descending order based on scoring"""
+     posts_info = posts_dict
+     if not posts_info:
+         raise ValueError("No posts found from Reddit API.")
+
+     # weight configuration (tweak as desired)
+     weights = {
+         "length": 0.3,    # weight for length of post_content
+         "ups": 0.3,       # weight for ups
+         "comments": 0.2,  # weight for num_comments
+         "ratio": 0.2      # weight for upvote_ratio
+     }
+
+     # calculate maxima for normalization
+     len_max = max(len(p["post_content"]) if p["post_content"] else 0 for p in posts_info) or 1
+     ups_max = max(p["ups"] or 0 for p in posts_info) or 1
+     comments_max = max(p["num_comments"] or 0 for p in posts_info) or 1
+
+     def score(post):
+         length_score = (len(post["post_content"]) if post["post_content"] else 0) / len_max
+         ups_score = (post["ups"] or 0) / ups_max
+         comments_score = (post["num_comments"] or 0) / comments_max
+         ratio_score = post["upvote_ratio"] or 0
+
+         return (weights["length"] * length_score +
+                 weights["ups"] * ups_score +
+                 weights["comments"] * comments_score +
+                 weights["ratio"] * ratio_score)
+
+     # Get scores for each post and sort indexes
+     scored_indexes = sorted(
+         range(len(posts_info)),
+         key=lambda idx: score(posts_info[idx]),
+         reverse=True
+     )
+
+     return scored_indexes
+
+ def process_and_store_post(user_input=None, max_trials=5):
+     """
+     Simplified + optimized:
+     - If user_input is given, process it directly.
+     - Else fetch Reddit posts and try the top candidates until one succeeds.
+     """
+     if user_input:
+         raw_story = user_input
+         meta = {"title": "User Provided Story", "author": "anonymous"}
+         result = process_story(raw_story, enhanced=False)
+     else:
+         posts = fetch_reddit_posts()
+         if not posts:
+             logger.warning("No Reddit posts available after retries; aborting.")
+             return None
+         order = find_best_post(posts)
+         result = None
+         meta = None
+         for idx in order[:max_trials]:
+             post = posts[idx]
+             content = post.get("post_content")
+             if not content:
+                 continue
+             try:
+                 result = process_story(content, enhanced=False)
+                 print(result)
+                 raw_story = content
+                 meta = post
+                 break
+             except Exception as e:
+                 print(f"Exception occurred : {str(e)}")
+                 continue
+         if result is None or not meta:
+             logger.error("Could not process any candidate post.")
+             return None
+
+     if not result or not meta:
+         return None
+     print(f"Story Preview:\n{result['polished_story'][:500]}...")
+     keywords = result.get("keywords") or []
+     if keywords:
+         print("Keywords:", ", ".join(keywords))
+
+     write_data = {
+         "title": meta.get("title"),
+         "content": result.get("polished_story", ""),
+         "author": meta.get("author", "unknown"),
+         "tags": result.get("keywords", []),  # use .get() with a default empty list
+         "created_at": meta.get("date_posted", datetime.now().strftime("%Y-%m-%d %H:%M:%S"))  # fall back to now if the post lacks date_posted
+     }
+     #print(f"Write Data : {write_data}")
+     # print("==========================")
+     # print(f"Here are the meta details : {meta}")
+     # print("==========================")
+     # print(f"Here is the write data : {write_data}")
+     # print("==========================")
+     write_response = insert_blog_post(write_data)
+     reddit_done = f"Data written to Supabase with response: {write_response}"
+
+     # blog_db = FlexibleBlogDatabase()
+     # blog_id = blog_db.create_blog_post(
+     #     title=meta.get("title") or "Untitled",
+     #     content=result['polished_story'],
+     #     author=meta.get("author") or "unknown",
+     #     tags=keywords
+     # )
+     return reddit_done
+
+ if __name__ == "__main__":
+     process_and_store_post()
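`fetch_reddit_posts` sleeps `backoff_base ** (attempt - 1)` seconds between tries, so delays grow geometrically (1 s, 2 s, 4 s with the defaults). The retry skeleton in isolation, with a hypothetical `fn`:

```python
import time

def with_retries(fn, attempts: int = 3, backoff_base: float = 2.0):
    """Call fn() until it returns a truthy value, backing off geometrically."""
    for attempt in range(1, attempts + 1):
        result = fn()
        if result:
            return result
        if attempt < attempts:
            time.sleep(backoff_base ** (attempt - 1))  # 1s, 2s, 4s, ...
    return None  # all attempts exhausted
```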
backend/requirements.txt ADDED
@@ -0,0 +1,14 @@
1
+ fastapi
2
+ uvicorn
3
+ langchain-community>=0.3.27
4
+ pyyaml
5
+ pydantic>=2.0
6
+ python-dotenv
7
+ langchain>=0.2.0
8
+ langchain-openai>=0.1.0
9
+ openai>=1.30.0
10
+ langgraph
11
+ langchain-mcp-adapters
12
+ fastapi-mcp
13
+ supabase
14
+ pip-system-certs
backend/supabase_api.ipynb ADDED
File without changes
backend/supabase_api.py ADDED
@@ -0,0 +1,24 @@
+ from supabase import create_client, Client
+ import os
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ url = os.environ.get("SUPABASE_URL", "")
+ key = os.environ.get("SUPABASE_KEY", "")
+ supabase: Client = create_client(url, key)
+
+ def insert_blog_post(data: dict):
+     try:
+         response = supabase.table("blog_posts").insert(data).execute()
+         print("Data inserted successfully:", response)
+         return response  # give callers something meaningful to log
+     except Exception as e:
+         print("Error inserting data:", e)
+         return None
+
+ def fetch_reddit_data():
+     try:
+         response = supabase.table("blog_posts").select("*").execute()
+         print("Data fetched successfully:", response.data)
+         return response.data
+     except Exception as e:
+         print("Error fetching data:", e)
+         return None
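A quick usage sketch for these helpers; the dict keys mirror what the pipelines write, and the values here are placeholders:

```python
from supabase_api import insert_blog_post, fetch_reddit_data

insert_blog_post({
    "title": "Hello Supabase",
    "content": "Body text goes here.",
    "author": "demo",
    "tags": ["example", "supabase"],
})

rows = fetch_reddit_data()  # all blog_posts rows, or None on error
print(len(rows or []))
```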
builderflow.md ADDED
@@ -0,0 +1,180 @@
1
+ # Commit-1
2
+
3
+ # ReactFast Project Overview
4
+
5
+ A minimal full-stack setup with a FastAPI backend serving a Vite + React frontend. The frontend builds into `frontend/dist`, and FastAPI mounts it under the `/app` route.
6
+
7
+ ## What this project includes
8
+ - Backend: FastAPI app (`backend/app.py`) mounting static files from the React build.
9
+ - Frontend: Vite + React (TypeScript) app configured with base `/app/` so assets resolve when hosted under that path.
10
+ - Local dev: Build frontend once, then run the FastAPI server. Visit `http://127.0.0.1:<port>/app/`.
11
+
12
+ ## Dependencies
13
+ - Python (backend)
14
+ - fastapi: Web framework serving API and static files
15
+ - uvicorn: ASGI server to run the FastAPI app
16
+ - Node (frontend)
17
+ - react, react-dom: UI library and DOM renderer
18
+ - vite: Build tool and dev server
19
+ - @vitejs/plugin-react: React plugin for Vite (Fast Refresh, JSX, etc.)
20
+ - typescript, @types/react, @types/react-dom: TypeScript and React typings
21
+
22
+ ---
23
+
24
+ ## Folder tree and file descriptions
25
+
26
+ ### backend/
27
+ ```
28
+ backend/
29
+ ├─ app.py # FastAPI app mounting the React build at /app
30
+ ├─ requirements.txt # Python dependencies for backend (fastapi, uvicorn)
31
+ ├─ __pycache__/ # Python bytecode cache (auto-generated)
32
+ └─ .venv/ # Local Python virtual environment (developer local)
33
+ ```
34
+
35
+ ### frontend/
36
+ ```
37
+ frontend/
38
+ ├─ index.html # Vite HTML entry; loads /src/main.tsx
39
+ ├─ package.json # Frontend scripts and dependencies
40
+ ├─ package-lock.json # Exact dependency versions (npm lockfile)
41
+ ├─ tsconfig.json # TypeScript compiler options for the app
42
+ ├─ vite.config.ts # Vite config; base set to /app and outDir=dist
43
+ ├─ src/ # Application source code
44
+ │ ├─ App.tsx # Main UI component rendered by the app
45
+ │ ├─ main.tsx # React entry; creates root and renders <App />
46
+ │ └─ style.css # Minimal global styles
47
+ ├─ dist/ # Production build output (generated by `npm run build`)
48
+ │ ├─ index.html # Built HTML referencing hashed asset files under /app
49
+ │ └─ assets/ # Hashed JS/CSS bundles and sourcemaps
50
+ │ ├─ index-*.js # Production JS bundle (hashed filename)
51
+ │ ├─ index-*.js.map # Sourcemap for debugging (if enabled)
52
+ │ └─ index-*.css # Production CSS bundle (hashed filename)
53
+ └─ node_modules/ # Installed frontend dependencies (generated by npm)
54
+ ```
55
+
56
+ ---
57
+
58
+ ## How it works
59
+ 1. Build the frontend (Vite) which outputs to `frontend/dist` with asset URLs prefixed by `/app/`.
60
+ 2. Start the FastAPI server; it mounts `frontend/dist` as static files at the `/app` route.
61
+ 3. Navigate to `http://127.0.0.1:<port>/app/` to view the app (index.html + assets).
62
+
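+ For reference, here is a minimal sketch of the mounting step above. It assumes the build output sits at `../frontend/dist` relative to `backend/app.py`; the exact path and names are illustrative, not taken from the actual source.
+
+ ```python
+ # backend/app.py (sketch): serve the Vite build under /app
+ from fastapi import FastAPI
+ from fastapi.staticfiles import StaticFiles
+
+ app = FastAPI()
+
+ # html=True lets StaticFiles answer /app/ with dist/index.html,
+ # matching Vite's base: '/app/' so hashed assets resolve correctly.
+ app.mount("/app", StaticFiles(directory="../frontend/dist", html=True), name="frontend")
+ ```
+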
63
+ ## Common commands (optional)
64
+ - Build frontend: `npm run build` in `frontend/`
65
+ - Run backend: `uvicorn app:app --host 127.0.0.1 --port 8000` in `backend/` (after installing requirements)
66
+
67
+ ## Notes
68
+ - If you change the frontend base path or output folder, update either Vite’s `base`/`build.outDir` or the backend static mount path accordingly.
69
+ - `dist/` is generated—do not edit files there manually; edit files under `src/` instead and rebuild.
70
+
71
+ ---
72
+
73
+ # Commit-2
74
+
75
+ High-level summary of enabling frontend ↔ backend communication.
76
+
77
+ - Backend
78
+ - Added a simple POST API at `/api/transform` that accepts `{ text: string }` and returns `{ result: string }` with a minimal transformation (see the sketch below).
79
+ - Kept the React static site mounted at `/app` so built assets resolve correctly (aligned with Vite `base: '/app/'`).
80
+
81
+ - Frontend
82
+ - Updated the main UI (`src/App.tsx`) to include:
83
+ - A label, a textbox for user input, and a submit button.
84
+ - On submit, a `fetch('/api/transform', { method: 'POST', body: JSON.stringify({ text }) })` call.
85
+ - Displays the returned `result` string below the form.
86
+ - Light, elegant styling in `src/style.css` to keep the layout centered and readable without overengineering.
87
+
88
+ - Result
89
+ - Users can type a message, submit, and see a transformed response from the FastAPI backend—served together under the same origin, avoiding CORS configuration.
90
+
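+ For concreteness, a hedged sketch of the endpoint described above. The request/response shapes (`{ text }` in, `{ result }` out) come from this summary; the Pydantic model name and the uppercase transform are illustrative assumptions, not the actual logic.
+
+ ```python
+ # backend/app.py (sketch): minimal /api/transform endpoint
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+
+ app = FastAPI()
+
+ class TransformIn(BaseModel):
+     text: str
+
+ @app.post("/api/transform")
+ def transform(payload: TransformIn) -> dict:
+     # Placeholder transformation; the real one is only described as "minimal".
+     return {"result": payload.text.upper()}
+ ```
+
+ Because the built React app is served by the same FastAPI process, the relative `fetch('/api/transform', ...)` call lands on this route with no CORS setup.
+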
91
+ ---
92
+
93
+ # Commit-3
94
+
95
+ High-level summary of adding containerization (Docker) support.
96
+
97
+ - Purpose
98
+ - Provide a reproducible build artifact bundling backend (FastAPI) and pre-built frontend (Vite) into one image.
99
+ - Simplify deployment: single `docker run` serves both API and static UI.
100
+
101
+ - Dockerfile Structure (multi-stage)
102
+ - Stage 1 (node:20-alpine): installs frontend deps and runs `npm run build` to produce `dist/`.
103
+ - Stage 2 (python:3.12-slim): installs backend Python deps, copies backend code and built `frontend/dist`.
104
+ - Starts with: `uvicorn backend.app:app --host 0.0.0.0 --port 8000`.
105
+
106
+ - Key Paths Inside Image
107
+ - `/app/backend` – FastAPI code
108
+ - `/app/frontend/dist` – Built static assets served by FastAPI at `/app` route
109
+
110
+ - Added Files
111
+ - `Dockerfile` – Multi-stage build definition
112
+ - `.dockerignore` – Excludes node_modules, virtual envs, caches, VCS metadata, logs, etc., reducing context size and image bloat
113
+
114
+ - Build & Run (local)
115
+ 1. Build image:
116
+ - `docker build -t reactfast .`
117
+ 2. Run container:
118
+ - `docker run --rm -p 8000:8000 reactfast`
119
+ 3. Access UI:
120
+ - `http://localhost:8000/app/`
121
+
122
+ - Customization Notes
123
+ - To enable auto-reload in development, run locally without Docker or create a dev Dockerfile variant mounting source.
124
+ - For production scaling, consider adding a process manager (e.g., `gunicorn` with `uvicorn.workers.UvicornWorker`) and HEALTHCHECK.
125
+ - Pin dependency versions more strictly if reproducibility across time is critical.
126
+
127
+ - Outcome
128
+ - Project can be built and deployed as a single immutable image; frontend and backend remain in sync at build time.
129
+
130
+ - Pushing the app to Azure Container Registry. Use the commands below:
131
+ - `docker login <registry-name>.azurecr.io` (or `az acr login --name <registry-name>`) to log in to Azure Container Registry
132
+ - `docker tag <app_name>:latest <registry-name>.azurecr.io/<app_name>:latest`
133
+ - `docker push <registry-name>.azurecr.io/<app_name>:latest`
134
+
135
+
136
+ # Commit-4
137
+
138
+ High-level summary of adding CI automation (GitHub Actions) to build and push the Docker image to Azure Container Registry (ACR).
139
+
140
+ - Purpose
141
+ - Automate image builds on each push to `main` (and on manual dispatch), ensuring the registry always has an up-to-date image.
142
+ - Provide traceable image tags (`<commit-sha>` and `latest`) for rollback and promotion.
143
+
144
+ - Secrets / Inputs
145
+ - `AZURE_CREDENTIALS`: JSON from `az ad sp create-for-rbac --role AcrPush --scopes <ACR_ID> --sdk-auth`.
146
+ - `ACR_LOGIN_SERVER`: e.g. `minimum.azurecr.io`.
147
+ - (Optional) `ACR_NAME` if deriving login server dynamically.
148
+
149
+ - Workflow Steps (simplified)
150
+ 1. Checkout repository source.
151
+ 2. Azure login using service principal (`azure/login`).
152
+ 3. Authenticate to ACR (either via `az acr login` or `docker/login-action`).
153
+ 4. Build Docker image with existing multi-stage `Dockerfile`.
154
+ 5. Tag image twice: `:<git-sha>` and `:latest`.
155
+ 6. Push both tags to ACR.
156
+ 7. Summarize pushed tags for visibility.
157
+
158
+ - Tagging Strategy
159
+ - Immutable: `registry/app:${{ github.sha }}` for precise traceability.
160
+ - Mutable convenience: `registry/app:latest` for default deployments / quick tests.
161
+
162
+ - Minimal Example (conceptual)
163
+ - Trigger: `on: push: branches: [ main ]` + `workflow_dispatch`.
164
+ - Uses official actions: `actions/checkout`, `azure/login`, `docker/build-push-action`.
165
+
166
+ - Benefits
167
+ - Eliminates manual local build/push steps.
168
+ - Reduces risk of “works on my machine” discrepancies.
169
+ - Provides consistent, auditable artifact generation tied to commit history.
170
+
171
+ - Follow-on Opportunities
172
+ - Add deploy job (e.g., to Azure Web App / Container Apps / AKS) after successful push.
173
+ - Introduce image security scanning (Trivy / Microsoft Defender).
174
+ - Add build cache (GitHub Actions cache or ACR build tasks) for faster builds.
175
+ - Add semantic version tagging (e.g., `v1.2.3`) if release process formalizes.
176
+
177
+ - Outcome
178
+ - CI pipeline ensures every code change can rapidly produce a runnable, versioned container image in ACR, ready for deployment workflows.
179
+
180
+
frontend/index.html ADDED
@@ -0,0 +1,12 @@
1
+ <!doctype html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>ReactFast</title>
7
+ </head>
8
+ <body>
9
+ <div id="root"></div>
10
+ <script type="module" src="/src/main.tsx"></script>
11
+ </body>
12
+ </html>
frontend/package-lock.json ADDED
@@ -0,0 +1,1658 @@
1
+ {
2
+ "name": "reactfast-frontend",
3
+ "version": "0.0.1",
4
+ "lockfileVersion": 3,
5
+ "requires": true,
6
+ "packages": {
7
+ "": {
8
+ "name": "reactfast-frontend",
9
+ "version": "0.0.1",
10
+ "dependencies": {
11
+ "react": "^18.3.1",
12
+ "react-dom": "^18.3.1"
13
+ },
14
+ "devDependencies": {
15
+ "@types/react": "^18.3.3",
16
+ "@types/react-dom": "^18.3.0",
17
+ "@vitejs/plugin-react": "^4.3.1",
18
+ "typescript": "^5.5.4",
19
+ "vite": "^5.4.0"
20
+ }
21
+ },
22
+ "node_modules/@ampproject/remapping": {
23
+ "version": "2.3.0",
24
+ "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
25
+ "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
26
+ "dev": true,
27
+ "license": "Apache-2.0",
28
+ "dependencies": {
29
+ "@jridgewell/gen-mapping": "^0.3.5",
30
+ "@jridgewell/trace-mapping": "^0.3.24"
31
+ },
32
+ "engines": {
33
+ "node": ">=6.0.0"
34
+ }
35
+ },
36
+ "node_modules/@babel/code-frame": {
37
+ "version": "7.27.1",
38
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
39
+ "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
40
+ "dev": true,
41
+ "license": "MIT",
42
+ "dependencies": {
43
+ "@babel/helper-validator-identifier": "^7.27.1",
44
+ "js-tokens": "^4.0.0",
45
+ "picocolors": "^1.1.1"
46
+ },
47
+ "engines": {
48
+ "node": ">=6.9.0"
49
+ }
50
+ },
51
+ "node_modules/@babel/compat-data": {
52
+ "version": "7.28.0",
53
+ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz",
54
+ "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==",
55
+ "dev": true,
56
+ "license": "MIT",
57
+ "engines": {
58
+ "node": ">=6.9.0"
59
+ }
60
+ },
61
+ "node_modules/@babel/core": {
62
+ "version": "7.28.3",
63
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.3.tgz",
64
+ "integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==",
65
+ "dev": true,
66
+ "license": "MIT",
67
+ "dependencies": {
68
+ "@ampproject/remapping": "^2.2.0",
69
+ "@babel/code-frame": "^7.27.1",
70
+ "@babel/generator": "^7.28.3",
71
+ "@babel/helper-compilation-targets": "^7.27.2",
72
+ "@babel/helper-module-transforms": "^7.28.3",
73
+ "@babel/helpers": "^7.28.3",
74
+ "@babel/parser": "^7.28.3",
75
+ "@babel/template": "^7.27.2",
76
+ "@babel/traverse": "^7.28.3",
77
+ "@babel/types": "^7.28.2",
78
+ "convert-source-map": "^2.0.0",
79
+ "debug": "^4.1.0",
80
+ "gensync": "^1.0.0-beta.2",
81
+ "json5": "^2.2.3",
82
+ "semver": "^6.3.1"
83
+ },
84
+ "engines": {
85
+ "node": ">=6.9.0"
86
+ },
87
+ "funding": {
88
+ "type": "opencollective",
89
+ "url": "https://opencollective.com/babel"
90
+ }
91
+ },
92
+ "node_modules/@babel/generator": {
93
+ "version": "7.28.3",
94
+ "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz",
95
+ "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==",
96
+ "dev": true,
97
+ "license": "MIT",
98
+ "dependencies": {
99
+ "@babel/parser": "^7.28.3",
100
+ "@babel/types": "^7.28.2",
101
+ "@jridgewell/gen-mapping": "^0.3.12",
102
+ "@jridgewell/trace-mapping": "^0.3.28",
103
+ "jsesc": "^3.0.2"
104
+ },
105
+ "engines": {
106
+ "node": ">=6.9.0"
107
+ }
108
+ },
109
+ "node_modules/@babel/helper-compilation-targets": {
110
+ "version": "7.27.2",
111
+ "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
112
+ "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
113
+ "dev": true,
114
+ "license": "MIT",
115
+ "dependencies": {
116
+ "@babel/compat-data": "^7.27.2",
117
+ "@babel/helper-validator-option": "^7.27.1",
118
+ "browserslist": "^4.24.0",
119
+ "lru-cache": "^5.1.1",
120
+ "semver": "^6.3.1"
121
+ },
122
+ "engines": {
123
+ "node": ">=6.9.0"
124
+ }
125
+ },
126
+ "node_modules/@babel/helper-globals": {
127
+ "version": "7.28.0",
128
+ "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
129
+ "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
130
+ "dev": true,
131
+ "license": "MIT",
132
+ "engines": {
133
+ "node": ">=6.9.0"
134
+ }
135
+ },
136
+ "node_modules/@babel/helper-module-imports": {
137
+ "version": "7.27.1",
138
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
139
+ "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
140
+ "dev": true,
141
+ "license": "MIT",
142
+ "dependencies": {
143
+ "@babel/traverse": "^7.27.1",
144
+ "@babel/types": "^7.27.1"
145
+ },
146
+ "engines": {
147
+ "node": ">=6.9.0"
148
+ }
149
+ },
150
+ "node_modules/@babel/helper-module-transforms": {
151
+ "version": "7.28.3",
152
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
153
+ "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
154
+ "dev": true,
155
+ "license": "MIT",
156
+ "dependencies": {
157
+ "@babel/helper-module-imports": "^7.27.1",
158
+ "@babel/helper-validator-identifier": "^7.27.1",
159
+ "@babel/traverse": "^7.28.3"
160
+ },
161
+ "engines": {
162
+ "node": ">=6.9.0"
163
+ },
164
+ "peerDependencies": {
165
+ "@babel/core": "^7.0.0"
166
+ }
167
+ },
168
+ "node_modules/@babel/helper-plugin-utils": {
169
+ "version": "7.27.1",
170
+ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz",
171
+ "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==",
172
+ "dev": true,
173
+ "license": "MIT",
174
+ "engines": {
175
+ "node": ">=6.9.0"
176
+ }
177
+ },
178
+ "node_modules/@babel/helper-string-parser": {
179
+ "version": "7.27.1",
180
+ "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
181
+ "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
182
+ "dev": true,
183
+ "license": "MIT",
184
+ "engines": {
185
+ "node": ">=6.9.0"
186
+ }
187
+ },
188
+ "node_modules/@babel/helper-validator-identifier": {
189
+ "version": "7.27.1",
190
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
191
+ "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
192
+ "dev": true,
193
+ "license": "MIT",
194
+ "engines": {
195
+ "node": ">=6.9.0"
196
+ }
197
+ },
198
+ "node_modules/@babel/helper-validator-option": {
199
+ "version": "7.27.1",
200
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
201
+ "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
202
+ "dev": true,
203
+ "license": "MIT",
204
+ "engines": {
205
+ "node": ">=6.9.0"
206
+ }
207
+ },
208
+ "node_modules/@babel/helpers": {
209
+ "version": "7.28.3",
210
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.3.tgz",
211
+ "integrity": "sha512-PTNtvUQihsAsDHMOP5pfobP8C6CM4JWXmP8DrEIt46c3r2bf87Ua1zoqevsMo9g+tWDwgWrFP5EIxuBx5RudAw==",
212
+ "dev": true,
213
+ "license": "MIT",
214
+ "dependencies": {
215
+ "@babel/template": "^7.27.2",
216
+ "@babel/types": "^7.28.2"
217
+ },
218
+ "engines": {
219
+ "node": ">=6.9.0"
220
+ }
221
+ },
222
+ "node_modules/@babel/parser": {
223
+ "version": "7.28.3",
224
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.3.tgz",
225
+ "integrity": "sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==",
226
+ "dev": true,
227
+ "license": "MIT",
228
+ "dependencies": {
229
+ "@babel/types": "^7.28.2"
230
+ },
231
+ "bin": {
232
+ "parser": "bin/babel-parser.js"
233
+ },
234
+ "engines": {
235
+ "node": ">=6.0.0"
236
+ }
237
+ },
238
+ "node_modules/@babel/plugin-transform-react-jsx-self": {
239
+ "version": "7.27.1",
240
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz",
241
+ "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==",
242
+ "dev": true,
243
+ "license": "MIT",
244
+ "dependencies": {
245
+ "@babel/helper-plugin-utils": "^7.27.1"
246
+ },
247
+ "engines": {
248
+ "node": ">=6.9.0"
249
+ },
250
+ "peerDependencies": {
251
+ "@babel/core": "^7.0.0-0"
252
+ }
253
+ },
254
+ "node_modules/@babel/plugin-transform-react-jsx-source": {
255
+ "version": "7.27.1",
256
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz",
257
+ "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==",
258
+ "dev": true,
259
+ "license": "MIT",
260
+ "dependencies": {
261
+ "@babel/helper-plugin-utils": "^7.27.1"
262
+ },
263
+ "engines": {
264
+ "node": ">=6.9.0"
265
+ },
266
+ "peerDependencies": {
267
+ "@babel/core": "^7.0.0-0"
268
+ }
269
+ },
270
+ "node_modules/@babel/template": {
271
+ "version": "7.27.2",
272
+ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
273
+ "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
274
+ "dev": true,
275
+ "license": "MIT",
276
+ "dependencies": {
277
+ "@babel/code-frame": "^7.27.1",
278
+ "@babel/parser": "^7.27.2",
279
+ "@babel/types": "^7.27.1"
280
+ },
281
+ "engines": {
282
+ "node": ">=6.9.0"
283
+ }
284
+ },
285
+ "node_modules/@babel/traverse": {
286
+ "version": "7.28.3",
287
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.3.tgz",
288
+ "integrity": "sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ==",
289
+ "dev": true,
290
+ "license": "MIT",
291
+ "dependencies": {
292
+ "@babel/code-frame": "^7.27.1",
293
+ "@babel/generator": "^7.28.3",
294
+ "@babel/helper-globals": "^7.28.0",
295
+ "@babel/parser": "^7.28.3",
296
+ "@babel/template": "^7.27.2",
297
+ "@babel/types": "^7.28.2",
298
+ "debug": "^4.3.1"
299
+ },
300
+ "engines": {
301
+ "node": ">=6.9.0"
302
+ }
303
+ },
304
+ "node_modules/@babel/types": {
305
+ "version": "7.28.2",
306
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.2.tgz",
307
+ "integrity": "sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==",
308
+ "dev": true,
309
+ "license": "MIT",
310
+ "dependencies": {
311
+ "@babel/helper-string-parser": "^7.27.1",
312
+ "@babel/helper-validator-identifier": "^7.27.1"
313
+ },
314
+ "engines": {
315
+ "node": ">=6.9.0"
316
+ }
317
+ },
318
+ "node_modules/@esbuild/aix-ppc64": {
319
+ "version": "0.21.5",
320
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
321
+ "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
322
+ "cpu": [
323
+ "ppc64"
324
+ ],
325
+ "dev": true,
326
+ "license": "MIT",
327
+ "optional": true,
328
+ "os": [
329
+ "aix"
330
+ ],
331
+ "engines": {
332
+ "node": ">=12"
333
+ }
334
+ },
335
+ "node_modules/@esbuild/android-arm": {
336
+ "version": "0.21.5",
337
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
338
+ "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
339
+ "cpu": [
340
+ "arm"
341
+ ],
342
+ "dev": true,
343
+ "license": "MIT",
344
+ "optional": true,
345
+ "os": [
346
+ "android"
347
+ ],
348
+ "engines": {
349
+ "node": ">=12"
350
+ }
351
+ },
352
+ "node_modules/@esbuild/android-arm64": {
353
+ "version": "0.21.5",
354
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
355
+ "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
356
+ "cpu": [
357
+ "arm64"
358
+ ],
359
+ "dev": true,
360
+ "license": "MIT",
361
+ "optional": true,
362
+ "os": [
363
+ "android"
364
+ ],
365
+ "engines": {
366
+ "node": ">=12"
367
+ }
368
+ },
369
+ "node_modules/@esbuild/android-x64": {
370
+ "version": "0.21.5",
371
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
372
+ "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
373
+ "cpu": [
374
+ "x64"
375
+ ],
376
+ "dev": true,
377
+ "license": "MIT",
378
+ "optional": true,
379
+ "os": [
380
+ "android"
381
+ ],
382
+ "engines": {
383
+ "node": ">=12"
384
+ }
385
+ },
386
+ "node_modules/@esbuild/darwin-arm64": {
387
+ "version": "0.21.5",
388
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
389
+ "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
390
+ "cpu": [
391
+ "arm64"
392
+ ],
393
+ "dev": true,
394
+ "license": "MIT",
395
+ "optional": true,
396
+ "os": [
397
+ "darwin"
398
+ ],
399
+ "engines": {
400
+ "node": ">=12"
401
+ }
402
+ },
403
+ "node_modules/@esbuild/darwin-x64": {
404
+ "version": "0.21.5",
405
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
406
+ "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
407
+ "cpu": [
408
+ "x64"
409
+ ],
410
+ "dev": true,
411
+ "license": "MIT",
412
+ "optional": true,
413
+ "os": [
414
+ "darwin"
415
+ ],
416
+ "engines": {
417
+ "node": ">=12"
418
+ }
419
+ },
420
+ "node_modules/@esbuild/freebsd-arm64": {
421
+ "version": "0.21.5",
422
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
423
+ "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
424
+ "cpu": [
425
+ "arm64"
426
+ ],
427
+ "dev": true,
428
+ "license": "MIT",
429
+ "optional": true,
430
+ "os": [
431
+ "freebsd"
432
+ ],
433
+ "engines": {
434
+ "node": ">=12"
435
+ }
436
+ },
437
+ "node_modules/@esbuild/freebsd-x64": {
438
+ "version": "0.21.5",
439
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
440
+ "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
441
+ "cpu": [
442
+ "x64"
443
+ ],
444
+ "dev": true,
445
+ "license": "MIT",
446
+ "optional": true,
447
+ "os": [
448
+ "freebsd"
449
+ ],
450
+ "engines": {
451
+ "node": ">=12"
452
+ }
453
+ },
454
+ "node_modules/@esbuild/linux-arm": {
455
+ "version": "0.21.5",
456
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
457
+ "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
458
+ "cpu": [
459
+ "arm"
460
+ ],
461
+ "dev": true,
462
+ "license": "MIT",
463
+ "optional": true,
464
+ "os": [
465
+ "linux"
466
+ ],
467
+ "engines": {
468
+ "node": ">=12"
469
+ }
470
+ },
471
+ "node_modules/@esbuild/linux-arm64": {
472
+ "version": "0.21.5",
473
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
474
+ "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
475
+ "cpu": [
476
+ "arm64"
477
+ ],
478
+ "dev": true,
479
+ "license": "MIT",
480
+ "optional": true,
481
+ "os": [
482
+ "linux"
483
+ ],
484
+ "engines": {
485
+ "node": ">=12"
486
+ }
487
+ },
488
+ "node_modules/@esbuild/linux-ia32": {
489
+ "version": "0.21.5",
490
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
491
+ "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
492
+ "cpu": [
493
+ "ia32"
494
+ ],
495
+ "dev": true,
496
+ "license": "MIT",
497
+ "optional": true,
498
+ "os": [
499
+ "linux"
500
+ ],
501
+ "engines": {
502
+ "node": ">=12"
503
+ }
504
+ },
505
+ "node_modules/@esbuild/linux-loong64": {
506
+ "version": "0.21.5",
507
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
508
+ "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
509
+ "cpu": [
510
+ "loong64"
511
+ ],
512
+ "dev": true,
513
+ "license": "MIT",
514
+ "optional": true,
515
+ "os": [
516
+ "linux"
517
+ ],
518
+ "engines": {
519
+ "node": ">=12"
520
+ }
521
+ },
522
+ "node_modules/@esbuild/linux-mips64el": {
523
+ "version": "0.21.5",
524
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
525
+ "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
526
+ "cpu": [
527
+ "mips64el"
528
+ ],
529
+ "dev": true,
530
+ "license": "MIT",
531
+ "optional": true,
532
+ "os": [
533
+ "linux"
534
+ ],
535
+ "engines": {
536
+ "node": ">=12"
537
+ }
538
+ },
539
+ "node_modules/@esbuild/linux-ppc64": {
540
+ "version": "0.21.5",
541
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
542
+ "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
543
+ "cpu": [
544
+ "ppc64"
545
+ ],
546
+ "dev": true,
547
+ "license": "MIT",
548
+ "optional": true,
549
+ "os": [
550
+ "linux"
551
+ ],
552
+ "engines": {
553
+ "node": ">=12"
554
+ }
555
+ },
556
+ "node_modules/@esbuild/linux-riscv64": {
557
+ "version": "0.21.5",
558
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
559
+ "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
560
+ "cpu": [
561
+ "riscv64"
562
+ ],
563
+ "dev": true,
564
+ "license": "MIT",
565
+ "optional": true,
566
+ "os": [
567
+ "linux"
568
+ ],
569
+ "engines": {
570
+ "node": ">=12"
571
+ }
572
+ },
573
+ "node_modules/@esbuild/linux-s390x": {
574
+ "version": "0.21.5",
575
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
576
+ "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
577
+ "cpu": [
578
+ "s390x"
579
+ ],
580
+ "dev": true,
581
+ "license": "MIT",
582
+ "optional": true,
583
+ "os": [
584
+ "linux"
585
+ ],
586
+ "engines": {
587
+ "node": ">=12"
588
+ }
589
+ },
590
+ "node_modules/@esbuild/linux-x64": {
591
+ "version": "0.21.5",
592
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
593
+ "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
594
+ "cpu": [
595
+ "x64"
596
+ ],
597
+ "dev": true,
598
+ "license": "MIT",
599
+ "optional": true,
600
+ "os": [
601
+ "linux"
602
+ ],
603
+ "engines": {
604
+ "node": ">=12"
605
+ }
606
+ },
607
+ "node_modules/@esbuild/netbsd-x64": {
608
+ "version": "0.21.5",
609
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
610
+ "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
611
+ "cpu": [
612
+ "x64"
613
+ ],
614
+ "dev": true,
615
+ "license": "MIT",
616
+ "optional": true,
617
+ "os": [
618
+ "netbsd"
619
+ ],
620
+ "engines": {
621
+ "node": ">=12"
622
+ }
623
+ },
624
+ "node_modules/@esbuild/openbsd-x64": {
625
+ "version": "0.21.5",
626
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
627
+ "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
628
+ "cpu": [
629
+ "x64"
630
+ ],
631
+ "dev": true,
632
+ "license": "MIT",
633
+ "optional": true,
634
+ "os": [
635
+ "openbsd"
636
+ ],
637
+ "engines": {
638
+ "node": ">=12"
639
+ }
640
+ },
641
+ "node_modules/@esbuild/sunos-x64": {
642
+ "version": "0.21.5",
643
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
644
+ "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
645
+ "cpu": [
646
+ "x64"
647
+ ],
648
+ "dev": true,
649
+ "license": "MIT",
650
+ "optional": true,
651
+ "os": [
652
+ "sunos"
653
+ ],
654
+ "engines": {
655
+ "node": ">=12"
656
+ }
657
+ },
658
+ "node_modules/@esbuild/win32-arm64": {
659
+ "version": "0.21.5",
660
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
661
+ "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
662
+ "cpu": [
663
+ "arm64"
664
+ ],
665
+ "dev": true,
666
+ "license": "MIT",
667
+ "optional": true,
668
+ "os": [
669
+ "win32"
670
+ ],
671
+ "engines": {
672
+ "node": ">=12"
673
+ }
674
+ },
675
+ "node_modules/@esbuild/win32-ia32": {
676
+ "version": "0.21.5",
677
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
678
+ "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
679
+ "cpu": [
680
+ "ia32"
681
+ ],
682
+ "dev": true,
683
+ "license": "MIT",
684
+ "optional": true,
685
+ "os": [
686
+ "win32"
687
+ ],
688
+ "engines": {
689
+ "node": ">=12"
690
+ }
691
+ },
692
+ "node_modules/@esbuild/win32-x64": {
693
+ "version": "0.21.5",
694
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
695
+ "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
696
+ "cpu": [
697
+ "x64"
698
+ ],
699
+ "dev": true,
700
+ "license": "MIT",
701
+ "optional": true,
702
+ "os": [
703
+ "win32"
704
+ ],
705
+ "engines": {
706
+ "node": ">=12"
707
+ }
708
+ },
709
+ "node_modules/@jridgewell/gen-mapping": {
710
+ "version": "0.3.13",
711
+ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
712
+ "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
713
+ "dev": true,
714
+ "license": "MIT",
715
+ "dependencies": {
716
+ "@jridgewell/sourcemap-codec": "^1.5.0",
717
+ "@jridgewell/trace-mapping": "^0.3.24"
718
+ }
719
+ },
720
+ "node_modules/@jridgewell/resolve-uri": {
721
+ "version": "3.1.2",
722
+ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
723
+ "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
724
+ "dev": true,
725
+ "license": "MIT",
726
+ "engines": {
727
+ "node": ">=6.0.0"
728
+ }
729
+ },
730
+ "node_modules/@jridgewell/sourcemap-codec": {
731
+ "version": "1.5.5",
732
+ "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
733
+ "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
734
+ "dev": true,
735
+ "license": "MIT"
736
+ },
737
+ "node_modules/@jridgewell/trace-mapping": {
738
+ "version": "0.3.30",
739
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz",
740
+ "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==",
741
+ "dev": true,
742
+ "license": "MIT",
743
+ "dependencies": {
744
+ "@jridgewell/resolve-uri": "^3.1.0",
745
+ "@jridgewell/sourcemap-codec": "^1.4.14"
746
+ }
747
+ },
748
+ "node_modules/@rolldown/pluginutils": {
749
+ "version": "1.0.0-beta.27",
750
+ "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz",
751
+ "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==",
752
+ "dev": true,
753
+ "license": "MIT"
754
+ },
755
+ "node_modules/@rollup/rollup-android-arm-eabi": {
756
+ "version": "4.50.0",
757
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.50.0.tgz",
758
+ "integrity": "sha512-lVgpeQyy4fWN5QYebtW4buT/4kn4p4IJ+kDNB4uYNT5b8c8DLJDg6titg20NIg7E8RWwdWZORW6vUFfrLyG3KQ==",
759
+ "cpu": [
760
+ "arm"
761
+ ],
762
+ "dev": true,
763
+ "license": "MIT",
764
+ "optional": true,
765
+ "os": [
766
+ "android"
767
+ ]
768
+ },
769
+ "node_modules/@rollup/rollup-android-arm64": {
770
+ "version": "4.50.0",
771
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.50.0.tgz",
772
+ "integrity": "sha512-2O73dR4Dc9bp+wSYhviP6sDziurB5/HCym7xILKifWdE9UsOe2FtNcM+I4xZjKrfLJnq5UR8k9riB87gauiQtw==",
773
+ "cpu": [
774
+ "arm64"
775
+ ],
776
+ "dev": true,
777
+ "license": "MIT",
778
+ "optional": true,
779
+ "os": [
780
+ "android"
781
+ ]
782
+ },
783
+ "node_modules/@rollup/rollup-darwin-arm64": {
784
+ "version": "4.50.0",
785
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.50.0.tgz",
786
+ "integrity": "sha512-vwSXQN8T4sKf1RHr1F0s98Pf8UPz7pS6P3LG9NSmuw0TVh7EmaE+5Ny7hJOZ0M2yuTctEsHHRTMi2wuHkdS6Hg==",
787
+ "cpu": [
788
+ "arm64"
789
+ ],
790
+ "dev": true,
791
+ "license": "MIT",
792
+ "optional": true,
793
+ "os": [
794
+ "darwin"
795
+ ]
796
+ },
797
+ "node_modules/@rollup/rollup-darwin-x64": {
798
+ "version": "4.50.0",
799
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.50.0.tgz",
800
+ "integrity": "sha512-cQp/WG8HE7BCGyFVuzUg0FNmupxC+EPZEwWu2FCGGw5WDT1o2/YlENbm5e9SMvfDFR6FRhVCBePLqj0o8MN7Vw==",
801
+ "cpu": [
802
+ "x64"
803
+ ],
804
+ "dev": true,
805
+ "license": "MIT",
806
+ "optional": true,
807
+ "os": [
808
+ "darwin"
809
+ ]
810
+ },
811
+ "node_modules/@rollup/rollup-freebsd-arm64": {
812
+ "version": "4.50.0",
813
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.50.0.tgz",
814
+ "integrity": "sha512-UR1uTJFU/p801DvvBbtDD7z9mQL8J80xB0bR7DqW7UGQHRm/OaKzp4is7sQSdbt2pjjSS72eAtRh43hNduTnnQ==",
815
+ "cpu": [
816
+ "arm64"
817
+ ],
818
+ "dev": true,
819
+ "license": "MIT",
820
+ "optional": true,
821
+ "os": [
822
+ "freebsd"
823
+ ]
824
+ },
825
+ "node_modules/@rollup/rollup-freebsd-x64": {
826
+ "version": "4.50.0",
827
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.50.0.tgz",
828
+ "integrity": "sha512-G/DKyS6PK0dD0+VEzH/6n/hWDNPDZSMBmqsElWnCRGrYOb2jC0VSupp7UAHHQ4+QILwkxSMaYIbQ72dktp8pKA==",
829
+ "cpu": [
830
+ "x64"
831
+ ],
832
+ "dev": true,
833
+ "license": "MIT",
834
+ "optional": true,
835
+ "os": [
836
+ "freebsd"
837
+ ]
838
+ },
839
+ "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
840
+ "version": "4.50.0",
841
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.50.0.tgz",
842
+ "integrity": "sha512-u72Mzc6jyJwKjJbZZcIYmd9bumJu7KNmHYdue43vT1rXPm2rITwmPWF0mmPzLm9/vJWxIRbao/jrQmxTO0Sm9w==",
843
+ "cpu": [
844
+ "arm"
845
+ ],
846
+ "dev": true,
847
+ "license": "MIT",
848
+ "optional": true,
849
+ "os": [
850
+ "linux"
851
+ ]
852
+ },
853
+ "node_modules/@rollup/rollup-linux-arm-musleabihf": {
854
+ "version": "4.50.0",
855
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.50.0.tgz",
856
+ "integrity": "sha512-S4UefYdV0tnynDJV1mdkNawp0E5Qm2MtSs330IyHgaccOFrwqsvgigUD29uT+B/70PDY1eQ3t40+xf6wIvXJyg==",
857
+ "cpu": [
858
+ "arm"
859
+ ],
860
+ "dev": true,
861
+ "license": "MIT",
862
+ "optional": true,
863
+ "os": [
864
+ "linux"
865
+ ]
866
+ },
867
+ "node_modules/@rollup/rollup-linux-arm64-gnu": {
868
+ "version": "4.50.0",
869
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.50.0.tgz",
870
+ "integrity": "sha512-1EhkSvUQXJsIhk4msxP5nNAUWoB4MFDHhtc4gAYvnqoHlaL9V3F37pNHabndawsfy/Tp7BPiy/aSa6XBYbaD1g==",
871
+ "cpu": [
872
+ "arm64"
873
+ ],
874
+ "dev": true,
875
+ "license": "MIT",
876
+ "optional": true,
877
+ "os": [
878
+ "linux"
879
+ ]
880
+ },
881
+ "node_modules/@rollup/rollup-linux-arm64-musl": {
882
+ "version": "4.50.0",
883
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.50.0.tgz",
884
+ "integrity": "sha512-EtBDIZuDtVg75xIPIK1l5vCXNNCIRM0OBPUG+tbApDuJAy9mKago6QxX+tfMzbCI6tXEhMuZuN1+CU8iDW+0UQ==",
885
+ "cpu": [
886
+ "arm64"
887
+ ],
888
+ "dev": true,
889
+ "license": "MIT",
890
+ "optional": true,
891
+ "os": [
892
+ "linux"
893
+ ]
894
+ },
895
+ "node_modules/@rollup/rollup-linux-loongarch64-gnu": {
896
+ "version": "4.50.0",
897
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.50.0.tgz",
898
+ "integrity": "sha512-BGYSwJdMP0hT5CCmljuSNx7+k+0upweM2M4YGfFBjnFSZMHOLYR0gEEj/dxyYJ6Zc6AiSeaBY8dWOa11GF/ppQ==",
899
+ "cpu": [
900
+ "loong64"
901
+ ],
902
+ "dev": true,
903
+ "license": "MIT",
904
+ "optional": true,
905
+ "os": [
906
+ "linux"
907
+ ]
908
+ },
909
+ "node_modules/@rollup/rollup-linux-ppc64-gnu": {
910
+ "version": "4.50.0",
911
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.50.0.tgz",
912
+ "integrity": "sha512-I1gSMzkVe1KzAxKAroCJL30hA4DqSi+wGc5gviD0y3IL/VkvcnAqwBf4RHXHyvH66YVHxpKO8ojrgc4SrWAnLg==",
913
+ "cpu": [
914
+ "ppc64"
915
+ ],
916
+ "dev": true,
917
+ "license": "MIT",
918
+ "optional": true,
919
+ "os": [
920
+ "linux"
921
+ ]
922
+ },
923
+ "node_modules/@rollup/rollup-linux-riscv64-gnu": {
924
+ "version": "4.50.0",
925
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.50.0.tgz",
926
+ "integrity": "sha512-bSbWlY3jZo7molh4tc5dKfeSxkqnf48UsLqYbUhnkdnfgZjgufLS/NTA8PcP/dnvct5CCdNkABJ56CbclMRYCA==",
927
+ "cpu": [
928
+ "riscv64"
929
+ ],
930
+ "dev": true,
931
+ "license": "MIT",
932
+ "optional": true,
933
+ "os": [
934
+ "linux"
935
+ ]
936
+ },
937
+ "node_modules/@rollup/rollup-linux-riscv64-musl": {
938
+ "version": "4.50.0",
939
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.50.0.tgz",
940
+ "integrity": "sha512-LSXSGumSURzEQLT2e4sFqFOv3LWZsEF8FK7AAv9zHZNDdMnUPYH3t8ZlaeYYZyTXnsob3htwTKeWtBIkPV27iQ==",
941
+ "cpu": [
942
+ "riscv64"
943
+ ],
944
+ "dev": true,
945
+ "license": "MIT",
946
+ "optional": true,
947
+ "os": [
948
+ "linux"
949
+ ]
950
+ },
951
+ "node_modules/@rollup/rollup-linux-s390x-gnu": {
952
+ "version": "4.50.0",
953
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.50.0.tgz",
954
+ "integrity": "sha512-CxRKyakfDrsLXiCyucVfVWVoaPA4oFSpPpDwlMcDFQvrv3XY6KEzMtMZrA+e/goC8xxp2WSOxHQubP8fPmmjOQ==",
955
+ "cpu": [
956
+ "s390x"
957
+ ],
958
+ "dev": true,
959
+ "license": "MIT",
960
+ "optional": true,
961
+ "os": [
962
+ "linux"
963
+ ]
964
+ },
965
+ "node_modules/@rollup/rollup-linux-x64-gnu": {
966
+ "version": "4.50.0",
967
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.50.0.tgz",
968
+ "integrity": "sha512-8PrJJA7/VU8ToHVEPu14FzuSAqVKyo5gg/J8xUerMbyNkWkO9j2ExBho/68RnJsMGNJq4zH114iAttgm7BZVkA==",
969
+ "cpu": [
970
+ "x64"
971
+ ],
972
+ "dev": true,
973
+ "license": "MIT",
974
+ "optional": true,
975
+ "os": [
976
+ "linux"
977
+ ]
978
+ },
979
+ "node_modules/@rollup/rollup-linux-x64-musl": {
980
+ "version": "4.50.0",
981
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.50.0.tgz",
982
+ "integrity": "sha512-SkE6YQp+CzpyOrbw7Oc4MgXFvTw2UIBElvAvLCo230pyxOLmYwRPwZ/L5lBe/VW/qT1ZgND9wJfOsdy0XptRvw==",
983
+ "cpu": [
984
+ "x64"
985
+ ],
986
+ "dev": true,
987
+ "license": "MIT",
988
+ "optional": true,
989
+ "os": [
990
+ "linux"
991
+ ]
992
+ },
993
+ "node_modules/@rollup/rollup-openharmony-arm64": {
994
+ "version": "4.50.0",
995
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.50.0.tgz",
996
+ "integrity": "sha512-PZkNLPfvXeIOgJWA804zjSFH7fARBBCpCXxgkGDRjjAhRLOR8o0IGS01ykh5GYfod4c2yiiREuDM8iZ+pVsT+Q==",
997
+ "cpu": [
998
+ "arm64"
999
+ ],
1000
+ "dev": true,
1001
+ "license": "MIT",
1002
+ "optional": true,
1003
+ "os": [
1004
+ "openharmony"
1005
+ ]
1006
+ },
1007
+ "node_modules/@rollup/rollup-win32-arm64-msvc": {
1008
+ "version": "4.50.0",
1009
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.50.0.tgz",
1010
+ "integrity": "sha512-q7cIIdFvWQoaCbLDUyUc8YfR3Jh2xx3unO8Dn6/TTogKjfwrax9SyfmGGK6cQhKtjePI7jRfd7iRYcxYs93esg==",
1011
+ "cpu": [
1012
+ "arm64"
1013
+ ],
1014
+ "dev": true,
1015
+ "license": "MIT",
1016
+ "optional": true,
1017
+ "os": [
1018
+ "win32"
1019
+ ]
1020
+ },
1021
+ "node_modules/@rollup/rollup-win32-ia32-msvc": {
1022
+ "version": "4.50.0",
1023
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.50.0.tgz",
1024
+ "integrity": "sha512-XzNOVg/YnDOmFdDKcxxK410PrcbcqZkBmz+0FicpW5jtjKQxcW1BZJEQOF0NJa6JO7CZhett8GEtRN/wYLYJuw==",
1025
+ "cpu": [
1026
+ "ia32"
1027
+ ],
1028
+ "dev": true,
1029
+ "license": "MIT",
1030
+ "optional": true,
1031
+ "os": [
1032
+ "win32"
1033
+ ]
1034
+ },
1035
+ "node_modules/@rollup/rollup-win32-x64-msvc": {
1036
+ "version": "4.50.0",
1037
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.50.0.tgz",
1038
+ "integrity": "sha512-xMmiWRR8sp72Zqwjgtf3QbZfF1wdh8X2ABu3EaozvZcyHJeU0r+XAnXdKgs4cCAp6ORoYoCygipYP1mjmbjrsg==",
1039
+ "cpu": [
1040
+ "x64"
1041
+ ],
1042
+ "dev": true,
1043
+ "license": "MIT",
1044
+ "optional": true,
1045
+ "os": [
1046
+ "win32"
1047
+ ]
1048
+ },
1049
+ "node_modules/@types/babel__core": {
1050
+ "version": "7.20.5",
1051
+ "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz",
1052
+ "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==",
1053
+ "dev": true,
1054
+ "license": "MIT",
1055
+ "dependencies": {
1056
+ "@babel/parser": "^7.20.7",
1057
+ "@babel/types": "^7.20.7",
1058
+ "@types/babel__generator": "*",
1059
+ "@types/babel__template": "*",
1060
+ "@types/babel__traverse": "*"
1061
+ }
1062
+ },
1063
+ "node_modules/@types/babel__generator": {
1064
+ "version": "7.27.0",
1065
+ "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz",
1066
+ "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==",
1067
+ "dev": true,
1068
+ "license": "MIT",
1069
+ "dependencies": {
1070
+ "@babel/types": "^7.0.0"
1071
+ }
1072
+ },
1073
+ "node_modules/@types/babel__template": {
1074
+ "version": "7.4.4",
1075
+ "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz",
1076
+ "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==",
1077
+ "dev": true,
1078
+ "license": "MIT",
1079
+ "dependencies": {
1080
+ "@babel/parser": "^7.1.0",
1081
+ "@babel/types": "^7.0.0"
1082
+ }
1083
+ },
1084
+ "node_modules/@types/babel__traverse": {
1085
+ "version": "7.28.0",
1086
+ "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz",
1087
+ "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==",
1088
+ "dev": true,
1089
+ "license": "MIT",
1090
+ "dependencies": {
1091
+ "@babel/types": "^7.28.2"
1092
+ }
1093
+ },
1094
+ "node_modules/@types/estree": {
1095
+ "version": "1.0.8",
1096
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
1097
+ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
1098
+ "dev": true,
1099
+ "license": "MIT"
1100
+ },
1101
+ "node_modules/@types/prop-types": {
1102
+ "version": "15.7.15",
1103
+ "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
1104
+ "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==",
1105
+ "dev": true,
1106
+ "license": "MIT"
1107
+ },
1108
+ "node_modules/@types/react": {
1109
+ "version": "18.3.24",
1110
+ "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.24.tgz",
1111
+ "integrity": "sha512-0dLEBsA1kI3OezMBF8nSsb7Nk19ZnsyE1LLhB8r27KbgU5H4pvuqZLdtE+aUkJVoXgTVuA+iLIwmZ0TuK4tx6A==",
1112
+ "dev": true,
1113
+ "license": "MIT",
1114
+ "dependencies": {
1115
+ "@types/prop-types": "*",
1116
+ "csstype": "^3.0.2"
1117
+ }
1118
+ },
1119
+ "node_modules/@types/react-dom": {
1120
+ "version": "18.3.7",
1121
+ "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz",
1122
+ "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==",
1123
+ "dev": true,
1124
+ "license": "MIT",
1125
+ "peerDependencies": {
1126
+ "@types/react": "^18.0.0"
1127
+ }
1128
+ },
1129
+ "node_modules/@vitejs/plugin-react": {
1130
+ "version": "4.7.0",
1131
+ "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz",
1132
+ "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==",
1133
+ "dev": true,
1134
+ "license": "MIT",
1135
+ "dependencies": {
1136
+ "@babel/core": "^7.28.0",
1137
+ "@babel/plugin-transform-react-jsx-self": "^7.27.1",
1138
+ "@babel/plugin-transform-react-jsx-source": "^7.27.1",
1139
+ "@rolldown/pluginutils": "1.0.0-beta.27",
1140
+ "@types/babel__core": "^7.20.5",
1141
+ "react-refresh": "^0.17.0"
1142
+ },
1143
+ "engines": {
1144
+ "node": "^14.18.0 || >=16.0.0"
1145
+ },
1146
+ "peerDependencies": {
1147
+ "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0"
1148
+ }
1149
+ },
1150
+ "node_modules/browserslist": {
1151
+ "version": "4.25.4",
1152
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.4.tgz",
1153
+ "integrity": "sha512-4jYpcjabC606xJ3kw2QwGEZKX0Aw7sgQdZCvIK9dhVSPh76BKo+C+btT1RRofH7B+8iNpEbgGNVWiLki5q93yg==",
1154
+ "dev": true,
1155
+ "funding": [
1156
+ {
1157
+ "type": "opencollective",
1158
+ "url": "https://opencollective.com/browserslist"
1159
+ },
1160
+ {
1161
+ "type": "tidelift",
1162
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
1163
+ },
1164
+ {
1165
+ "type": "github",
1166
+ "url": "https://github.com/sponsors/ai"
1167
+ }
1168
+ ],
1169
+ "license": "MIT",
1170
+ "dependencies": {
1171
+ "caniuse-lite": "^1.0.30001737",
1172
+ "electron-to-chromium": "^1.5.211",
1173
+ "node-releases": "^2.0.19",
1174
+ "update-browserslist-db": "^1.1.3"
1175
+ },
1176
+ "bin": {
1177
+ "browserslist": "cli.js"
1178
+ },
1179
+ "engines": {
1180
+ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
1181
+ }
1182
+ },
1183
+ "node_modules/caniuse-lite": {
1184
+ "version": "1.0.30001739",
1185
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001739.tgz",
1186
+ "integrity": "sha512-y+j60d6ulelrNSwpPyrHdl+9mJnQzHBr08xm48Qno0nSk4h3Qojh+ziv2qE6rXf4k3tadF4o1J/1tAbVm1NtnA==",
1187
+ "dev": true,
1188
+ "funding": [
1189
+ {
1190
+ "type": "opencollective",
1191
+ "url": "https://opencollective.com/browserslist"
1192
+ },
1193
+ {
1194
+ "type": "tidelift",
1195
+ "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
1196
+ },
1197
+ {
1198
+ "type": "github",
1199
+ "url": "https://github.com/sponsors/ai"
1200
+ }
1201
+ ],
1202
+ "license": "CC-BY-4.0"
1203
+ },
1204
+ "node_modules/convert-source-map": {
1205
+ "version": "2.0.0",
1206
+ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
1207
+ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
1208
+ "dev": true,
1209
+ "license": "MIT"
1210
+ },
1211
+ "node_modules/csstype": {
1212
+ "version": "3.1.3",
1213
+ "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
1214
+ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
1215
+ "dev": true,
1216
+ "license": "MIT"
1217
+ },
1218
+ "node_modules/debug": {
1219
+ "version": "4.4.1",
1220
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
1221
+ "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
1222
+ "dev": true,
1223
+ "license": "MIT",
1224
+ "dependencies": {
1225
+ "ms": "^2.1.3"
1226
+ },
1227
+ "engines": {
1228
+ "node": ">=6.0"
1229
+ },
1230
+ "peerDependenciesMeta": {
1231
+ "supports-color": {
1232
+ "optional": true
1233
+ }
1234
+ }
1235
+ },
1236
+ "node_modules/electron-to-chromium": {
1237
+ "version": "1.5.213",
1238
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.213.tgz",
1239
+ "integrity": "sha512-xr9eRzSLNa4neDO0xVFrkXu3vyIzG4Ay08dApecw42Z1NbmCt+keEpXdvlYGVe0wtvY5dhW0Ay0lY0IOfsCg0Q==",
1240
+ "dev": true,
1241
+ "license": "ISC"
1242
+ },
1243
+ "node_modules/esbuild": {
1244
+ "version": "0.21.5",
1245
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
1246
+ "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
1247
+ "dev": true,
1248
+ "hasInstallScript": true,
1249
+ "license": "MIT",
1250
+ "bin": {
1251
+ "esbuild": "bin/esbuild"
1252
+ },
1253
+ "engines": {
1254
+ "node": ">=12"
1255
+ },
1256
+ "optionalDependencies": {
1257
+ "@esbuild/aix-ppc64": "0.21.5",
1258
+ "@esbuild/android-arm": "0.21.5",
1259
+ "@esbuild/android-arm64": "0.21.5",
1260
+ "@esbuild/android-x64": "0.21.5",
1261
+ "@esbuild/darwin-arm64": "0.21.5",
1262
+ "@esbuild/darwin-x64": "0.21.5",
1263
+ "@esbuild/freebsd-arm64": "0.21.5",
1264
+ "@esbuild/freebsd-x64": "0.21.5",
1265
+ "@esbuild/linux-arm": "0.21.5",
1266
+ "@esbuild/linux-arm64": "0.21.5",
1267
+ "@esbuild/linux-ia32": "0.21.5",
1268
+ "@esbuild/linux-loong64": "0.21.5",
1269
+ "@esbuild/linux-mips64el": "0.21.5",
1270
+ "@esbuild/linux-ppc64": "0.21.5",
1271
+ "@esbuild/linux-riscv64": "0.21.5",
1272
+ "@esbuild/linux-s390x": "0.21.5",
1273
+ "@esbuild/linux-x64": "0.21.5",
1274
+ "@esbuild/netbsd-x64": "0.21.5",
1275
+ "@esbuild/openbsd-x64": "0.21.5",
1276
+ "@esbuild/sunos-x64": "0.21.5",
1277
+ "@esbuild/win32-arm64": "0.21.5",
1278
+ "@esbuild/win32-ia32": "0.21.5",
1279
+ "@esbuild/win32-x64": "0.21.5"
1280
+ }
1281
+ },
1282
+ "node_modules/escalade": {
1283
+ "version": "3.2.0",
1284
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
1285
+ "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
1286
+ "dev": true,
1287
+ "license": "MIT",
1288
+ "engines": {
1289
+ "node": ">=6"
1290
+ }
1291
+ },
1292
+ "node_modules/fsevents": {
1293
+ "version": "2.3.3",
1294
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
1295
+ "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
1296
+ "dev": true,
1297
+ "hasInstallScript": true,
1298
+ "license": "MIT",
1299
+ "optional": true,
1300
+ "os": [
1301
+ "darwin"
1302
+ ],
1303
+ "engines": {
1304
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
1305
+ }
1306
+ },
1307
+ "node_modules/gensync": {
1308
+ "version": "1.0.0-beta.2",
1309
+ "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
1310
+ "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
1311
+ "dev": true,
1312
+ "license": "MIT",
1313
+ "engines": {
1314
+ "node": ">=6.9.0"
1315
+ }
1316
+ },
1317
+ "node_modules/js-tokens": {
1318
+ "version": "4.0.0",
1319
+ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
1320
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
1321
+ "license": "MIT"
1322
+ },
1323
+ "node_modules/jsesc": {
1324
+ "version": "3.1.0",
1325
+ "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
1326
+ "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
1327
+ "dev": true,
1328
+ "license": "MIT",
1329
+ "bin": {
1330
+ "jsesc": "bin/jsesc"
1331
+ },
1332
+ "engines": {
1333
+ "node": ">=6"
1334
+ }
1335
+ },
1336
+ "node_modules/json5": {
1337
+ "version": "2.2.3",
1338
+ "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
1339
+ "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
1340
+ "dev": true,
1341
+ "license": "MIT",
1342
+ "bin": {
1343
+ "json5": "lib/cli.js"
1344
+ },
1345
+ "engines": {
1346
+ "node": ">=6"
1347
+ }
1348
+ },
1349
+ "node_modules/loose-envify": {
1350
+ "version": "1.4.0",
1351
+ "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
1352
+ "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
1353
+ "license": "MIT",
1354
+ "dependencies": {
1355
+ "js-tokens": "^3.0.0 || ^4.0.0"
1356
+ },
1357
+ "bin": {
1358
+ "loose-envify": "cli.js"
1359
+ }
1360
+ },
1361
+ "node_modules/lru-cache": {
1362
+ "version": "5.1.1",
1363
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
1364
+ "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
1365
+ "dev": true,
1366
+ "license": "ISC",
1367
+ "dependencies": {
1368
+ "yallist": "^3.0.2"
1369
+ }
1370
+ },
1371
+ "node_modules/ms": {
1372
+ "version": "2.1.3",
1373
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
1374
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
1375
+ "dev": true,
1376
+ "license": "MIT"
1377
+ },
1378
+ "node_modules/nanoid": {
1379
+ "version": "3.3.11",
1380
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
1381
+ "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
1382
+ "dev": true,
1383
+ "funding": [
1384
+ {
1385
+ "type": "github",
1386
+ "url": "https://github.com/sponsors/ai"
1387
+ }
1388
+ ],
1389
+ "license": "MIT",
1390
+ "bin": {
1391
+ "nanoid": "bin/nanoid.cjs"
1392
+ },
1393
+ "engines": {
1394
+ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
1395
+ }
1396
+ },
1397
+ "node_modules/node-releases": {
1398
+ "version": "2.0.19",
1399
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
1400
+ "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
1401
+ "dev": true,
1402
+ "license": "MIT"
1403
+ },
1404
+ "node_modules/picocolors": {
1405
+ "version": "1.1.1",
1406
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
1407
+ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
1408
+ "dev": true,
1409
+ "license": "ISC"
1410
+ },
1411
+ "node_modules/postcss": {
1412
+ "version": "8.5.6",
1413
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
1414
+ "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
1415
+ "dev": true,
1416
+ "funding": [
1417
+ {
1418
+ "type": "opencollective",
1419
+ "url": "https://opencollective.com/postcss/"
1420
+ },
1421
+ {
1422
+ "type": "tidelift",
1423
+ "url": "https://tidelift.com/funding/github/npm/postcss"
1424
+ },
1425
+ {
1426
+ "type": "github",
1427
+ "url": "https://github.com/sponsors/ai"
1428
+ }
1429
+ ],
1430
+ "license": "MIT",
1431
+ "dependencies": {
1432
+ "nanoid": "^3.3.11",
1433
+ "picocolors": "^1.1.1",
1434
+ "source-map-js": "^1.2.1"
1435
+ },
1436
+ "engines": {
1437
+ "node": "^10 || ^12 || >=14"
1438
+ }
1439
+ },
1440
+ "node_modules/react": {
1441
+ "version": "18.3.1",
1442
+ "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
1443
+ "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
1444
+ "license": "MIT",
1445
+ "dependencies": {
1446
+ "loose-envify": "^1.1.0"
1447
+ },
1448
+ "engines": {
1449
+ "node": ">=0.10.0"
1450
+ }
1451
+ },
1452
+ "node_modules/react-dom": {
1453
+ "version": "18.3.1",
1454
+ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
1455
+ "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
1456
+ "license": "MIT",
1457
+ "dependencies": {
1458
+ "loose-envify": "^1.1.0",
1459
+ "scheduler": "^0.23.2"
1460
+ },
1461
+ "peerDependencies": {
1462
+ "react": "^18.3.1"
1463
+ }
1464
+ },
1465
+ "node_modules/react-refresh": {
1466
+ "version": "0.17.0",
1467
+ "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz",
1468
+ "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==",
1469
+ "dev": true,
1470
+ "license": "MIT",
1471
+ "engines": {
1472
+ "node": ">=0.10.0"
1473
+ }
1474
+ },
1475
+ "node_modules/rollup": {
1476
+ "version": "4.50.0",
1477
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.50.0.tgz",
1478
+ "integrity": "sha512-/Zl4D8zPifNmyGzJS+3kVoyXeDeT/GrsJM94sACNg9RtUE0hrHa1bNPtRSrfHTMH5HjRzce6K7rlTh3Khiw+pw==",
1479
+ "dev": true,
1480
+ "license": "MIT",
1481
+ "dependencies": {
1482
+ "@types/estree": "1.0.8"
1483
+ },
1484
+ "bin": {
1485
+ "rollup": "dist/bin/rollup"
1486
+ },
1487
+ "engines": {
1488
+ "node": ">=18.0.0",
1489
+ "npm": ">=8.0.0"
1490
+ },
1491
+ "optionalDependencies": {
1492
+ "@rollup/rollup-android-arm-eabi": "4.50.0",
1493
+ "@rollup/rollup-android-arm64": "4.50.0",
1494
+ "@rollup/rollup-darwin-arm64": "4.50.0",
1495
+ "@rollup/rollup-darwin-x64": "4.50.0",
1496
+ "@rollup/rollup-freebsd-arm64": "4.50.0",
1497
+ "@rollup/rollup-freebsd-x64": "4.50.0",
1498
+ "@rollup/rollup-linux-arm-gnueabihf": "4.50.0",
1499
+ "@rollup/rollup-linux-arm-musleabihf": "4.50.0",
1500
+ "@rollup/rollup-linux-arm64-gnu": "4.50.0",
1501
+ "@rollup/rollup-linux-arm64-musl": "4.50.0",
1502
+ "@rollup/rollup-linux-loongarch64-gnu": "4.50.0",
1503
+ "@rollup/rollup-linux-ppc64-gnu": "4.50.0",
1504
+ "@rollup/rollup-linux-riscv64-gnu": "4.50.0",
1505
+ "@rollup/rollup-linux-riscv64-musl": "4.50.0",
1506
+ "@rollup/rollup-linux-s390x-gnu": "4.50.0",
1507
+ "@rollup/rollup-linux-x64-gnu": "4.50.0",
1508
+ "@rollup/rollup-linux-x64-musl": "4.50.0",
1509
+ "@rollup/rollup-openharmony-arm64": "4.50.0",
1510
+ "@rollup/rollup-win32-arm64-msvc": "4.50.0",
1511
+ "@rollup/rollup-win32-ia32-msvc": "4.50.0",
1512
+ "@rollup/rollup-win32-x64-msvc": "4.50.0",
1513
+ "fsevents": "~2.3.2"
1514
+ }
1515
+ },
1516
+ "node_modules/scheduler": {
1517
+ "version": "0.23.2",
1518
+ "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
1519
+ "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
1520
+ "license": "MIT",
1521
+ "dependencies": {
1522
+ "loose-envify": "^1.1.0"
1523
+ }
1524
+ },
1525
+ "node_modules/semver": {
1526
+ "version": "6.3.1",
1527
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
1528
+ "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
1529
+ "dev": true,
1530
+ "license": "ISC",
1531
+ "bin": {
1532
+ "semver": "bin/semver.js"
1533
+ }
1534
+ },
1535
+ "node_modules/source-map-js": {
1536
+ "version": "1.2.1",
1537
+ "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
1538
+ "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
1539
+ "dev": true,
1540
+ "license": "BSD-3-Clause",
1541
+ "engines": {
1542
+ "node": ">=0.10.0"
1543
+ }
1544
+ },
1545
+ "node_modules/typescript": {
1546
+ "version": "5.9.2",
1547
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz",
1548
+ "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==",
1549
+ "dev": true,
1550
+ "license": "Apache-2.0",
1551
+ "bin": {
1552
+ "tsc": "bin/tsc",
1553
+ "tsserver": "bin/tsserver"
1554
+ },
1555
+ "engines": {
1556
+ "node": ">=14.17"
1557
+ }
1558
+ },
1559
+ "node_modules/update-browserslist-db": {
1560
+ "version": "1.1.3",
1561
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
1562
+ "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
1563
+ "dev": true,
1564
+ "funding": [
1565
+ {
1566
+ "type": "opencollective",
1567
+ "url": "https://opencollective.com/browserslist"
1568
+ },
1569
+ {
1570
+ "type": "tidelift",
1571
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
1572
+ },
1573
+ {
1574
+ "type": "github",
1575
+ "url": "https://github.com/sponsors/ai"
1576
+ }
1577
+ ],
1578
+ "license": "MIT",
1579
+ "dependencies": {
1580
+ "escalade": "^3.2.0",
1581
+ "picocolors": "^1.1.1"
1582
+ },
1583
+ "bin": {
1584
+ "update-browserslist-db": "cli.js"
1585
+ },
1586
+ "peerDependencies": {
1587
+ "browserslist": ">= 4.21.0"
1588
+ }
1589
+ },
1590
+ "node_modules/vite": {
1591
+ "version": "5.4.19",
1592
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.19.tgz",
1593
+ "integrity": "sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==",
1594
+ "dev": true,
1595
+ "license": "MIT",
1596
+ "dependencies": {
1597
+ "esbuild": "^0.21.3",
1598
+ "postcss": "^8.4.43",
1599
+ "rollup": "^4.20.0"
1600
+ },
1601
+ "bin": {
1602
+ "vite": "bin/vite.js"
1603
+ },
1604
+ "engines": {
1605
+ "node": "^18.0.0 || >=20.0.0"
1606
+ },
1607
+ "funding": {
1608
+ "url": "https://github.com/vitejs/vite?sponsor=1"
1609
+ },
1610
+ "optionalDependencies": {
1611
+ "fsevents": "~2.3.3"
1612
+ },
1613
+ "peerDependencies": {
1614
+ "@types/node": "^18.0.0 || >=20.0.0",
1615
+ "less": "*",
1616
+ "lightningcss": "^1.21.0",
1617
+ "sass": "*",
1618
+ "sass-embedded": "*",
1619
+ "stylus": "*",
1620
+ "sugarss": "*",
1621
+ "terser": "^5.4.0"
1622
+ },
1623
+ "peerDependenciesMeta": {
1624
+ "@types/node": {
1625
+ "optional": true
1626
+ },
1627
+ "less": {
1628
+ "optional": true
1629
+ },
1630
+ "lightningcss": {
1631
+ "optional": true
1632
+ },
1633
+ "sass": {
1634
+ "optional": true
1635
+ },
1636
+ "sass-embedded": {
1637
+ "optional": true
1638
+ },
1639
+ "stylus": {
1640
+ "optional": true
1641
+ },
1642
+ "sugarss": {
1643
+ "optional": true
1644
+ },
1645
+ "terser": {
1646
+ "optional": true
1647
+ }
1648
+ }
1649
+ },
1650
+ "node_modules/yallist": {
1651
+ "version": "3.1.1",
1652
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
1653
+ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
1654
+ "dev": true,
1655
+ "license": "ISC"
1656
+ }
1657
+ }
1658
+ }
frontend/package.json ADDED
@@ -0,0 +1,22 @@
1
+ {
2
+ "name": "reactfast-frontend",
3
+ "private": true,
4
+ "version": "0.0.1",
5
+ "type": "module",
6
+ "scripts": {
7
+ "dev": "vite",
8
+ "build": "vite build",
9
+ "preview": "vite preview --port 5173"
10
+ },
11
+ "dependencies": {
12
+ "react": "^18.3.1",
13
+ "react-dom": "^18.3.1"
14
+ },
15
+ "devDependencies": {
16
+ "@vitejs/plugin-react": "^4.3.1",
17
+ "vite": "^5.4.0",
18
+ "typescript": "^5.5.4",
19
+ "@types/react": "^18.3.3",
20
+ "@types/react-dom": "^18.3.0"
21
+ }
22
+ }
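
Editor's note: the scripts above rely on Vite for dev/build, while the app (App.tsx, below) fetches data from relative /api/... paths. This commit's diff does not include a vite.config.ts, so the following is only a hypothetical sketch of the dev-proxy wiring such a setup typically needs; the localhost:8000 backend target is an assumption, not something this commit states.

    // vite.config.ts — hypothetical sketch, not part of this commit.
    import { defineConfig } from 'vite'
    import react from '@vitejs/plugin-react'

    export default defineConfig({
      plugins: [react()],
      server: {
        // Forward the frontend's relative /api requests to the backend during
        // `npm run dev`; assumption: the backend listens on localhost:8000.
        proxy: {
          '/api': { target: 'http://localhost:8000', changeOrigin: true },
        },
      },
    })
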
frontend/src/App.tsx ADDED
@@ -0,0 +1,452 @@
1
+ import { useState, useEffect } from 'react';
2
+
3
+ interface BlogPost {
4
+ id: number;
5
+ title: string;
6
+ content: string;
7
+ author: string;
8
+ created_at: string;
9
+ published: boolean;
10
+ tags: string[];
11
+ featured_image?: {
12
+ url: string;
13
+ alt_text: string;
14
+ caption: string;
15
+ };
16
+ post_images: Array<{
17
+ id: number;
18
+ url: string;
19
+ alt_text: string;
20
+ caption: string;
21
+ order: number;
22
+ position?: number;
23
+ }>;
24
+ }
25
+
26
+ interface BlogSummary {
27
+ id: number;
28
+ title: string;
29
+ author: string;
30
+ created_at: string;
31
+ tags: string[];
32
+ excerpt: string;
33
+ has_featured_image: boolean;
34
+ featured_image_url?: string;
35
+ post_image_count: number;
36
+ }
37
+
38
+ interface BlogResponse {
39
+ posts: BlogSummary[];
40
+ total: number;
41
+ limit: number;
42
+ offset: number;
43
+ has_more: boolean;
44
+ }
45
+
46
+ export default function App() {
47
+ const [blogData, setBlogData] = useState<BlogResponse | null>(null);
48
+ const [selectedPost, setSelectedPost] = useState<BlogPost | null>(null);
49
+ const [viewMode, setViewMode] = useState<'home' | 'blog'>('home');
50
+ const [currentPage, setCurrentPage] = useState(1);
51
+ const [isDragging, setIsDragging] = useState(false);
52
+ const [headerCollapsed, setHeaderCollapsed] = useState(false);
53
+ const [lastScrollY, setLastScrollY] = useState(0);
54
+ const [searchQuery, setSearchQuery] = useState('');
55
+ const [selectedCategory, setSelectedCategory] = useState('All');
56
+ const [isLoading, setIsLoading] = useState(false);
57
+ const [searchResults, setSearchResults] = useState<BlogResponse | null>(null);
58
+ const [isSearching, setIsSearching] = useState(false);
59
+ const [searchTimer, setSearchTimer] = useState<number | null>(null);
60
+
61
+ const PAGE_SIZE = 6;
62
+ const handlePageChange = (newPage: number) => {
63
+ if (newPage >= 1 && (!blogData || newPage <= Math.ceil(blogData.total / PAGE_SIZE))) {
64
+ setCurrentPage(newPage);
65
+ fetchBlogPosts(newPage);
66
+ }
67
+ };
68
+ const [sliderVisible, setSliderVisible] = useState(false);
69
+
70
+ const categories = ['All', 'Artificial Intelligence', 'Developers', 'AI Agents', 'Social', 'Movies'];
71
+
72
+ // Fetch blog posts on component mount and page change (disabled when searching)
73
+ useEffect(() => {
74
+ if (!searchQuery) fetchBlogPosts(currentPage);
75
+ }, [currentPage, searchQuery, selectedCategory]);
76
+
77
+ // When category changes, reset page & clear search results (if any)
78
+ useEffect(() => {
79
+ setCurrentPage(1);
80
+ if (!searchQuery) {
81
+ fetchBlogPosts(1);
82
+ }
83
+ }, [selectedCategory]);
84
+
85
+ // Handle scroll effect for blog header
86
+ useEffect(() => {
87
+ if (viewMode !== 'blog') return;
88
+
89
+ const handleScroll = () => {
90
+ const currentScrollY = window.scrollY;
91
+ const scrollThreshold = 100; // Minimum scroll distance to trigger effect
92
+
93
+ if (currentScrollY > scrollThreshold) {
94
+ // Scrolling down - collapse header
95
+ if (currentScrollY > lastScrollY && !headerCollapsed) {
96
+ setHeaderCollapsed(true);
97
+ }
98
+ // Scrolling up - expand header
99
+ else if (currentScrollY < lastScrollY && headerCollapsed) {
100
+ setHeaderCollapsed(false);
101
+ }
102
+ } else {
103
+ // Near top - always show full header
104
+ setHeaderCollapsed(false);
105
+ }
106
+
107
+ setLastScrollY(currentScrollY);
108
+ };
109
+
110
+ window.addEventListener('scroll', handleScroll, { passive: true });
111
+ return () => window.removeEventListener('scroll', handleScroll);
112
+ }, [viewMode, lastScrollY, headerCollapsed]);
113
+
114
+ async function fetchBlogPosts(page: number = 1) {
115
+ setIsLoading(true);
116
+ try {
117
+ const params = new URLSearchParams({ page: String(page), limit: String(PAGE_SIZE) });
118
+ if (selectedCategory && selectedCategory !== 'All') params.append('category', selectedCategory);
119
+ const res = await fetch(`/api/blog/posts?${params.toString()}`);
120
+ if (res.ok) {
121
+ const data = await res.json();
122
+ // Small delay to make loading visible
123
+ await new Promise(resolve => setTimeout(resolve, 300));
124
+ setBlogData(data);
125
+ }
126
+ } catch (err) {
127
+ console.error('Failed to fetch blog posts:', err);
128
+ } finally {
129
+ setIsLoading(false);
130
+ }
131
+ }
132
+
133
+ async function runSearch(query: string) {
134
+ const q = query.trim();
135
+ if (!q) {
136
+ setSearchResults(null);
137
+ return;
138
+ }
139
+ setIsSearching(true);
140
+ try {
141
+ const params = new URLSearchParams({ q });
142
+ if (selectedCategory && selectedCategory !== 'All') params.append('category', selectedCategory);
143
+ const res = await fetch(`/api/blog/search?${params.toString()}`);
144
+ if (res.ok) {
145
+ const data = await res.json();
146
+ setSearchResults({
147
+ posts: data.posts,
148
+ total: data.total,
149
+ limit: data.posts.length,
150
+ offset: 0,
151
+ has_more: false
152
+ } as BlogResponse);
153
+ }
154
+ } catch (e) {
155
+ console.error('Search failed', e);
156
+ } finally {
157
+ setIsSearching(false);
158
+ }
159
+ }
160
+
161
+ // Debounce search input
162
+ useEffect(() => {
163
+ if (searchTimer) window.clearTimeout(searchTimer);
164
+ const handle = window.setTimeout(() => {
165
+ runSearch(searchQuery);
166
+ }, 300);
167
+ setSearchTimer(handle);
168
+ return () => window.clearTimeout(handle);
169
+ }, [searchQuery]);
170
+
171
+ async function fetchBlogPost(id: number) {
172
+ setIsLoading(true);
173
+ try {
174
+ const res = await fetch(`/api/blog/posts/${id}`);
175
+ if (res.ok) {
176
+ const post = await res.json();
177
+ // Add artificial delay to make loading visible
178
+ await new Promise(resolve => setTimeout(resolve, 300));
179
+ setSelectedPost(post);
180
+ setViewMode('blog');
181
+ }
182
+ } catch (err) {
183
+ console.error('Failed to fetch blog post:', err);
184
+ } finally {
185
+ setIsLoading(false);
186
+ }
187
+ }
188
+
189
+ function formatDate(dateString: string) {
190
+ return new Date(dateString).toLocaleDateString('en-US', {
191
+ year: 'numeric',
192
+ month: 'long',
193
+ day: 'numeric'
194
+ });
195
+ }
196
+
197
+ function renderBlogContent(content: string, images: BlogPost['post_images']) {
198
+ const paragraphs = content.split('\n\n').filter(p => p.trim());
199
+ const elements: JSX.Element[] = [];
200
+
201
+ paragraphs.forEach((paragraph, index) => {
202
+ const paragraphNumber = index + 1;
203
+
204
+ elements.push(
205
+ <p key={`para-${paragraphNumber}`} className="blog-paragraph">
206
+ {paragraph}
207
+ </p>
208
+ );
209
+
210
+ // Insert images that should appear after this paragraph
211
+ const imagesForPosition = images.filter(img => img.position === paragraphNumber);
212
+ imagesForPosition.forEach(image => {
213
+ elements.push(
214
+ <figure key={`img-${image.id}`} className="blog-image">
215
+ <img src={image.url} alt={image.alt_text} />
216
+ {image.caption && <figcaption>{image.caption}</figcaption>}
217
+ </figure>
218
+ );
219
+ });
220
+ });
221
+
222
+ return elements;
223
+ }
224
+
225
+ // Toggle subtle separator only if content is scrollable or user scrolled
226
+ useEffect(() => {
227
+ function evaluate() {
228
+ const header = document.querySelector('.compact-header');
229
+ if (!header) return;
230
+ const scrollable = document.documentElement.scrollHeight > window.innerHeight + 4;
231
+ const scrolled = window.scrollY > 4;
232
+ if (scrollable || scrolled) header.classList.add('with-sep');
233
+ else header.classList.remove('with-sep');
234
+ }
235
+ evaluate();
236
+ window.addEventListener('resize', evaluate);
237
+ window.addEventListener('scroll', evaluate, { passive: true });
238
+ return () => {
239
+ window.removeEventListener('resize', evaluate);
240
+ window.removeEventListener('scroll', evaluate);
241
+ };
242
+ }, []);
243
+
244
+ if (viewMode === 'blog' && selectedPost) {
245
+ return (
246
+ <div className="app-root blog-view">
247
+ <div className="bg-layers" aria-hidden="true" />
248
+
249
+ {/* Smart header that transforms based on scroll */}
250
+ <header className={`blog-header smart-header ${headerCollapsed ? 'collapsed' : 'expanded'}`}>
251
+ <div className="blog-header-inner">
252
+ <button
253
+ onClick={() => setViewMode('home')}
254
+ className="back-button"
255
+ aria-label="Back to home"
256
+ >
257
+ ← Back to Home
258
+ </button>
259
+ <div className="blog-title-section">
260
+ <h1 className="blog-title">{selectedPost.title}</h1>
261
+ <div className="blog-meta">
262
+ <span className="blog-author">✍️ {selectedPost.author}</span>
263
+ <span className="blog-date">📅 {formatDate(selectedPost.created_at)}</span>
264
+ </div>
265
+ </div>
266
+ </div>
267
+ </header>
268
+ <main className="blog-content-area">
269
+ <article className="blog-article">
270
+ {selectedPost.featured_image && (
271
+ <figure className="featured-image">
272
+ <img
273
+ src={selectedPost.featured_image.url}
274
+ alt={selectedPost.featured_image.alt_text}
275
+ />
276
+ {selectedPost.featured_image.caption && (
277
+ <figcaption>{selectedPost.featured_image.caption}</figcaption>
278
+ )}
279
+ </figure>
280
+ )}
281
+ <div className="blog-body">
282
+ {renderBlogContent(selectedPost.content, selectedPost.post_images)}
283
+ </div>
284
+
285
+ {/* Tags section below content */}
286
+ <div className="blog-tags-section">
287
+ {selectedPost.tags && selectedPost.tags.length > 0 && (
288
+ <>
289
+ <h3 className="blog-tags-title">Tags</h3>
290
+ <div className="blog-tags-container">
291
+ {selectedPost.tags.map(tag => (
292
+ <span key={tag} className="blog-tag">{tag}</span>
293
+ ))}
294
+ </div>
295
+ </>
296
+ )}
297
+ </div>
298
+ <div style={{ display: 'flex', justifyContent: 'center', margin: '2rem 0' }}>
299
+ <button
300
+ className="back-button"
301
+ onClick={() => setViewMode('home')}
302
+ aria-label="Back to home"
303
+ >
304
+ ← Back to Home
305
+ </button>
306
+ </div>
307
+ </article>
308
+ </main>
309
+ </div>
310
+ );
311
+ }
312
+
313
+ return (
314
+ <div className="app-root homepage-layout">
315
+ <div className="bg-layers" aria-hidden="true" />
316
+
317
+ {/* Enhanced Site Header with integrated controls */}
318
+ <header className="site-header">
319
+ <div className="header-inner">
320
+ <div className="brand-block">
321
+ <h1 className="site-title">Amplify<span className="pulse-dot" /></h1>
322
+ <p className="site-tagline">Stories that made an impact & what's happening in the world</p>
323
+ </div>
324
+ <div className="header-controls" role="search">
325
+ <div className="search-wrapper">
326
+ <input
327
+ type="text"
328
+ aria-label="Search blog posts"
329
+ placeholder="Search posts..."
330
+ value={searchQuery}
331
+ onChange={(e) => setSearchQuery(e.target.value)}
332
+ className="search-input header-search"
333
+ />
334
+ <div className="search-icon">🔍</div>
335
+ </div>
336
+ <div className="category-wrapper">
337
+ <select
338
+ aria-label="Filter by category"
339
+ value={selectedCategory}
340
+ onChange={(e) => setSelectedCategory(e.target.value)}
341
+ className="category-select header-category"
342
+ >
343
+ {categories.map(category => (
344
+ <option key={category} value={category}>{category}</option>
345
+ ))}
346
+ </select>
347
+ <div className="dropdown-arrow">▼</div>
348
+ </div>
349
+ </div>
350
+ </div>
351
+ </header>
352
+
353
+ <main className="main-content">
354
+ {/* Blog Grid Section */}
355
+ <section className="blog-content-section">
356
+ <div className="blog-container">
357
+ <div className="blog-grid-new">
358
+ {(searchResults ? searchResults.posts : blogData?.posts)?.map((post: any) => (
359
+ <article
360
+ key={post.id}
361
+ className={`blog-card-new ${isLoading ? 'loading' : ''}`}
362
+ onClick={() => !isLoading && fetchBlogPost(post.id)}
363
+ style={{ position: 'relative' }}
364
+ >
365
+ {isLoading && (
366
+ <div className="loading-overlay">
367
+ <div className="loading-spinner"></div>
368
+ </div>
369
+ )}
370
+ {post.featured_image_url && (
371
+ <div className="blog-card-image-new">
372
+ <img src={post.featured_image_url} alt={post.title} />
373
+ <div className="image-overlay"></div>
374
+ </div>
375
+ )}
376
+ <div className="blog-card-content-new">
377
+ <div className="blog-card-tags-new">
378
+ {post.tags.slice(0, 2).map((tag: string) => (
379
+ <span key={tag} className="blog-card-tag-new">{tag}</span>
380
+ ))}
381
+ {post.percent_match !== undefined && (
382
+ <span className="match-badge" title="Tag match score">{post.percent_match}%</span>
383
+ )}
384
+ </div>
385
+ <h3 className="blog-card-title-new">{post.title}</h3>
386
+ <p className="blog-card-excerpt-new">{post.excerpt}</p>
387
+ <div className="blog-card-meta-new">
388
+ <span className="blog-card-author-new">✍️ {post.author}</span>
389
+ <span className="blog-card-date-new">📅 {formatDate(post.created_at)}</span>
390
+ </div>
391
+ {post.post_image_count > 0 && (
392
+ <div className="blog-card-stats-new">
393
+ <span className="blog-card-stat-new">📷 {post.post_image_count} images</span>
394
+ </div>
395
+ )}
396
+ </div>
397
+ </article>
398
+ ))}
399
+ </div>
400
+
401
+ {isLoading && !searchResults && (
402
+ <div className="grid-loading">
403
+ <div className="loading-spinner"></div>
404
+ <p>Loading posts...</p>
405
+ </div>
406
+ )}
407
+ {isSearching && (
408
+ <div className="grid-loading" style={{marginTop:'1rem'}}>
409
+ <div className="loading-spinner"></div>
410
+ <p>Searching...</p>
411
+ </div>
412
+ )}
413
+ {searchResults && !isSearching && searchResults.total === 0 && (
414
+ <div style={{textAlign:'center', marginTop:'1rem', fontSize:'0.85rem', color:'#555'}}>No matches found.</div>
415
+ )}
416
+
417
+ {/* Pagination */}
418
+ {!searchResults && blogData && blogData.total > PAGE_SIZE && (
419
+ <div className="pagination-new">
420
+ <button
421
+ className="pagination-btn-new prev"
422
+ onClick={() => handlePageChange(currentPage - 1)}
423
+ disabled={currentPage === 1}
424
+ >
425
+ ← Previous
426
+ </button>
427
+ <div className="page-indicators">
428
+ {Array.from({ length: Math.ceil(blogData.total / PAGE_SIZE) }, (_, i) => i + 1).map(pageNum => (
429
+ <button
430
+ key={pageNum}
431
+ className={`page-indicator ${pageNum === currentPage ? 'active' : ''}`}
432
+ onClick={() => handlePageChange(pageNum)}
433
+ >
434
+ {pageNum}
435
+ </button>
436
+ ))}
437
+ </div>
438
+ <button
439
+ className="pagination-btn-new next"
440
+ onClick={() => handlePageChange(currentPage + 1)}
441
+ disabled={!blogData.has_more}
442
+ >
443
+ Next →
444
+ </button>
445
+ </div>
446
+ )}
447
+ </div>
448
+ </section>
449
+ </main>
450
+ </div>
451
+ );
452
+ }
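
Editor's note: the debounce in App.tsx stores its timer handle in React state (searchTimer), which costs an extra render per keystroke and reads a stale handle inside the effect; the effect's cleanup already clears the pending timer, so that state is redundant. A minimal sketch of the same technique as a reusable hook (illustrative only, not part of this commit):

    // useDebouncedValue.ts — hedged sketch of the debounce pattern above.
    import { useEffect, useState } from 'react'

    export function useDebouncedValue<T>(value: T, delayMs = 300): T {
      const [debounced, setDebounced] = useState(value)

      useEffect(() => {
        // Re-arm the timer on every change; the cleanup cancels the pending
        // timeout, so no timer handle needs to live in component state.
        const handle = window.setTimeout(() => setDebounced(value), delayMs)
        return () => window.clearTimeout(handle)
      }, [value, delayMs])

      return debounced
    }

With this hook the component could derive const debouncedQuery = useDebouncedValue(searchQuery) and run runSearch(debouncedQuery) in an effect keyed on debouncedQuery, dropping the searchTimer state entirely.
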
frontend/src/main.tsx ADDED
@@ -0,0 +1,13 @@
1
+ import React from 'react'
2
+ import { createRoot } from 'react-dom/client'
3
+ import App from './App'
4
+ import './style.css'
5
+
6
+ const el = document.getElementById('root')
7
+ if (el) {
8
+ createRoot(el).render(
9
+ <React.StrictMode>
10
+ <App />
11
+ </React.StrictMode>
12
+ )
13
+ }
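
Editor's note: main.tsx mounts the app under React.StrictMode, and React 18 development builds deliberately mount, unmount, and remount the tree once, so the fetch-on-mount effects in App.tsx run twice in dev. A hedged sketch of an abort-aware variant of that effect (illustrative fragment; currentPage, PAGE_SIZE, and setBlogData are the names defined in App.tsx):

    // Inside the App component body — not part of this commit's diff.
    useEffect(() => {
      const controller = new AbortController()
      const params = new URLSearchParams({ page: String(currentPage), limit: String(PAGE_SIZE) })
      // Aborting on cleanup keeps StrictMode's dev double-mount (and rapid
      // page changes) from racing a stale response into setBlogData.
      fetch(`/api/blog/posts?${params}`, { signal: controller.signal })
        .then(res => (res.ok ? res.json() : Promise.reject(new Error(`HTTP ${res.status}`))))
        .then(setBlogData)
        .catch(err => {
          if ((err as Error).name !== 'AbortError') console.error('Failed to fetch blog posts:', err)
        })
      return () => controller.abort()
    }, [currentPage])
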
frontend/src/style.css ADDED
@@ -0,0 +1,1029 @@
1
+ @import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Lora:ital,wght@0,400;0,500;0,600;0,700;1,400;1,500;1,600&display=swap');
2
+
3
+ * { box-sizing: border-box; }
4
+ html, body, #root { height: 100%; }
5
+ body { margin:0; font-family:'Inter',system-ui,sans-serif; color:#1b2126; background:#f2f4f8; overflow-x:hidden; -webkit-font-smoothing:antialiased; }
6
+
7
+ .app-root { position:relative; min-height:100%; padding-top:150px; }
8
+
9
+ /* Layered subtle geometric background */
10
+ .bg-layers { position:fixed; inset:0; pointer-events:none; z-index:0; background:
11
+ linear-gradient(145deg, rgba(255,255,255,0.85) 0%, rgba(255,255,255,0) 65%),
12
+ radial-gradient(circle at 78% 24%, rgba(96,165,250,0.18), transparent 60%),
13
+ radial-gradient(circle at 15% 70%, rgba(167,139,250,0.15), transparent 62%),
14
+ radial-gradient(circle at 50% 85%, rgba(125,168,255,0.12), transparent 58%),
15
+ repeating-linear-gradient(115deg, rgba(0,0,0,0.025) 0 14px, rgba(0,0,0,0) 14px 28px),
16
+ linear-gradient(180deg,#f3f5f9,#eef1f6);
17
+ mask: linear-gradient(#fff,rgba(255,255,255,0.35)); }
18
+
19
+ /* Elevated header */
20
+ .top-bar.improved { position:fixed; top:0; left:0; right:0; display:flex; justify-content:center; padding:30px 32px 18px; z-index:10; -webkit-backdrop-filter:blur(20px) saturate(210%); backdrop-filter:blur(20px) saturate(210%); background:rgba(255,255,255,0.75); /* separator removed by default */ }
21
+ .top-bar.improved.with-sep { border-bottom:1px solid rgba(0,0,0,0.05); box-shadow:0 14px 48px -24px rgba(0,0,0,0.35), 0 4px 14px -8px rgba(0,0,0,0.1); }
22
+ .bar-inner { width:80%; max-width:1180px; display:flex; flex-direction:column; gap:18px; }
23
+ .title-row { display:flex; align-items:baseline; gap:18px; flex-wrap:wrap; }
24
+ .app-title { margin:0; font-size:1.55rem; letter-spacing:-0.5px; font-weight:640; display:flex; align-items:center; gap:6px; background:linear-gradient(90deg,#1b2735,#3b5168); -webkit-background-clip:text; background-clip:text; color:transparent; }
25
+ .pulse-dot { width:10px; height:10px; border-radius:50%; background:linear-gradient(135deg,#60a5fa,#818cf8); position:relative; box-shadow:0 0 0 0 rgba(96,165,250,0.55); animation:pulse 3s ease-in-out infinite; }
26
+ @keyframes pulse { 0%{box-shadow:0 0 0 0 rgba(96,165,250,0.55);} 55%{box-shadow:0 0 0 10px rgba(96,165,250,0);} 100%{box-shadow:0 0 0 0 rgba(96,165,250,0);} }
27
+ .tagline { font-size:.78rem; font-weight:500; color:#5d6670; letter-spacing:.55px; }
28
+
29
+ /* Grid Loading */
30
+ .grid-loading {
31
+ display: flex;
32
+ flex-direction: column;
33
+ align-items: center;
34
+ gap: 1rem;
35
+ padding: 2rem;
36
+ margin: 2rem auto;
37
+ text-align: center;
38
+ }
39
+
40
+ .grid-loading p {
41
+ color: #6366f1;
42
+ font-size: 0.9rem;
43
+ font-weight: 500;
44
+ margin: 0;
45
+ }
46
+
47
+ .match-badge {
48
+ background: linear-gradient(135deg,#6366f1,#8b5cf6);
49
+ color:#fff;
50
+ font-size:0.65rem;
51
+ padding:4px 8px;
52
+ border-radius: 999px;
53
+ font-weight:600;
54
+ letter-spacing:.5px;
55
+ box-shadow:0 2px 6px rgba(0,0,0,0.18);
56
+ display:inline-flex;
57
+ align-items:center;
58
+ gap:4px;
59
+ margin-left:6px;
60
+ }
61
+
62
+ /* Input form */
63
+ .input-form { display:flex; width:100%; gap:14px; align-items:stretch; }
64
+ .input-form.fancy .input-shell { position:relative; flex:1; display:flex; align-items:stretch; }
65
+ .input-shell .accent-bar { position:absolute; left:12px; top:12px; bottom:12px; width:4px; border-radius:3px; background:linear-gradient(180deg,#60a5fa,#818cf8); opacity:.35; transition:opacity .35s, filter .35s; }
66
+ .input-shell:focus-within .accent-bar { opacity:1; filter:saturate(150%); }
67
+ .big-input { flex:1; min-height:70px; padding:20px 30px 20px 30px; font-size:1.02rem; line-height:1.35; border-radius:18px; border:1px solid #cdd3d9; background:linear-gradient(180deg,#ffffff,#f8fafc); color:#192027; outline:none; box-shadow:0 1px 3px rgba(0,0,0,0.05); transition:border-color .25s, box-shadow .35s, background .35s; }
68
+ .big-input::placeholder { color:#9da4ad; }
69
+ .big-input:focus { border-color:#7da8ff; box-shadow:0 0 0 3px rgba(125,168,255,0.26), 0 6px 22px -10px rgba(125,168,255,0.45); background:#ffffff; }
70
+
71
+ .submit-btn { padding:0 34px; font-size:0.92rem; font-weight:600; border:1px solid #b9c2cc; border-radius:18px; background:linear-gradient(135deg,#6da8ff,#818cf8); color:#fff; cursor:pointer; letter-spacing:.45px; display:flex; align-items:center; justify-content:center; box-shadow:0 6px 24px -10px rgba(109,168,255,0.65),0 3px 10px -6px rgba(0,0,0,0.25); transition:transform .3s, box-shadow .35s, filter .35s, background-position .5s; min-width:132px; background-size:200% 200%; background-position:15% 35%; }
72
+ .submit-btn:disabled { opacity:.5; cursor:not-allowed; filter:grayscale(.35); }
73
+ .submit-btn:not(:disabled):hover { transform:translateY(-3px); background-position:55% 65%; box-shadow:0 14px 40px -16px rgba(109,168,255,0.7),0 6px 16px -10px rgba(0,0,0,0.22); }
74
+ .submit-btn:not(:disabled):active { transform:translateY(1px); box-shadow:0 6px 18px -10px rgba(109,168,255,0.6); }
75
+
76
+ /* Response area */
77
+ /* Response / output */
78
+ .response-area { position:relative; z-index:1; display:flex; flex-direction:column; gap:26px; align-items:center; padding:10px 34px 90px; margin-top:16px; }
79
+
80
+ .card { width:80%; max-width:1180px; background:linear-gradient(170deg,#ffffff,#f5f7fa); border:1px solid #d5dbe1; -webkit-backdrop-filter:blur(12px) saturate(170%); backdrop-filter:blur(12px) saturate(170%); border-radius:22px; padding:34px 42px 40px; color:#20262b; box-shadow:0 18px 46px -26px rgba(0,0,0,0.28), 0 8px 20px -14px rgba(0,0,0,0.15); display:flex; flex-direction:column; gap:14px; animation:fadeIn .6s ease; position:relative; overflow:hidden; }
81
+ .card:before { content:""; position:absolute; inset:0; background:radial-gradient(circle at 85% 20%, rgba(129,140,248,0.14), transparent 60%), radial-gradient(circle at 12% 82%, rgba(96,165,250,0.14), transparent 55%); pointer-events:none; }
82
+ .card-label { font-size:.64rem; letter-spacing:1.3px; text-transform:uppercase; opacity:.55; font-weight:600; color:#5c6670; }
83
+ .card-content { font-size:1.05rem; line-height:1.58; white-space:pre-wrap; word-break:break-word; }
84
+ .card-result { border-left:6px solid #91b6ff; margin-top: 10px;}
85
+ .card-error { border-left:6px solid #ef7d7d; background:linear-gradient(165deg,#fff5f5,#ffecec); }
86
+ .placeholder-hint { width:80%; max-width:1180px; font-size:.8rem; color:#69727b; text-align:left; padding:4px 6px 0; font-style:italic; }
87
+
88
+ @media (max-width: 1100px) { .bar-inner, .card, .placeholder-hint { width:86%; } .big-input { min-height:64px; } }
89
+ @media (max-width: 900px) { .bar-inner, .card, .placeholder-hint { width:90%; } .app-root{padding-top:140px;} .big-input{min-height:60px; padding:18px 24px;} .submit-btn{min-width:116px;} .card{padding:30px 34px 34px;} }
90
+
91
+ @media (max-width: 640px) { .input-form{flex-direction:column;} .submit-btn{width:100%; height:60px;} .big-input{border-radius:18px; min-height:62px;} .bar-inner,.card,.placeholder-hint{width:94%;} .app-root{padding-top:152px;} }
92
+
93
+ @keyframes fadeIn { from { opacity: 0; transform: translateY(6px);} to { opacity:1; transform: translateY(0);} }
94
+
95
+ /* Scrollbar subtle styling */
96
+ ::-webkit-scrollbar { width: 10px; }
97
+ ::-webkit-scrollbar-track { background: rgba(255,255,255,0.05); }
98
+ ::-webkit-scrollbar-thumb { background: linear-gradient(#334155,#1e293b); border-radius: 20px; }
99
+ ::-webkit-scrollbar-thumb:hover { background: linear-gradient(#475569,#334155); }
100
+
101
+ /* New Homepage Layout Styles */
102
+ /* Layout base spacing adjusted for larger header */
103
+ .homepage-layout { padding-top: 0; }
104
+
105
+ /* New Enlarged Header */
106
+ .site-header { position:sticky; top:0; z-index:120; background:linear-gradient(125deg,#ffffffcc 0%,#f8fafccc 65%,#eef2ffcc 100%); backdrop-filter:blur(26px) saturate(180%); -webkit-backdrop-filter:blur(26px) saturate(180%); border-bottom:1px solid rgba(99,102,241,0.1); box-shadow:0 4px 24px -10px rgba(99,102,241,0.25), 0 2px 8px -4px rgba(0,0,0,0.08); }
107
+ .site-header:before { content:""; position:absolute; inset:0; pointer-events:none; background:radial-gradient(circle at 6% 90%, rgba(129,140,248,0.20), transparent 60%), radial-gradient(circle at 95% 15%, rgba(96,165,250,0.25), transparent 55%); mix-blend-mode:overlay; opacity:.6; }
108
+ .header-inner { max-width:1320px; margin:0 auto; padding:34px clamp(2rem,7vw,7rem) 30px; display:flex; gap:40px; align-items:flex-end; flex-wrap:wrap; justify-content:space-between; position:relative; }
109
+ .brand-block { display:flex; flex-direction:column; gap:10px; min-width:260px; }
110
+ .site-title { margin:0; font-size:clamp(2rem,3.2vw,3.05rem); font-weight:700; letter-spacing:-1px; line-height:1; background:linear-gradient(90deg,#1e293b,#334155 40%,#4c1d95 80%); -webkit-background-clip:text; background-clip:text; color:transparent; display:inline-flex; align-items:center; gap:10px; }
111
+ .site-title .pulse-dot { width:14px; height:14px; background:linear-gradient(135deg,#6366f1,#8b5cf6); box-shadow:0 0 0 0 rgba(99,102,241,0.55); animation:pulse 3s ease-in-out infinite; }
112
+ .site-tagline { margin:0; font-size:0.9rem; font-weight:500; letter-spacing:0.4px; color:#475569; max-width:520px; line-height:1.4; }
113
+ .header-controls { display:flex; gap:18px; align-items:center; margin-left:auto; flex-wrap:wrap; }
114
+ .search-wrapper, .category-wrapper { position:relative; }
115
+ .header-search { width:300px; background:linear-gradient(180deg,#ffffff,#f1f5f9); border:1.5px solid #cbd5e1; padding:14px 18px 14px 48px; border-radius:18px; font-size:0.95rem; transition:all .35s; box-shadow:0 2px 6px rgba(0,0,0,0.05); }
116
+ .header-search:focus { outline:none; border-color:#6366f1; box-shadow:0 0 0 4px rgba(99,102,241,0.18), 0 6px 22px -8px rgba(99,102,241,0.35); background:#ffffff; }
117
+ .header-search + .search-icon { position:absolute; left:18px; top:50%; transform:translateY(-50%); font-size:1rem; color:#64748b; }
118
+ .header-category { background:linear-gradient(180deg,#ffffff,#f1f5f9); border:1.5px solid #cbd5e1; padding:14px 46px 14px 18px; border-radius:18px; min-width:190px; font-size:0.9rem; cursor:pointer; transition:all .35s; appearance:none; }
119
+ .header-category:focus { outline:none; border-color:#6366f1; box-shadow:0 0 0 4px rgba(99,102,241,0.18), 0 6px 22px -8px rgba(99,102,241,0.35); background:#ffffff; }
120
+ .category-wrapper .dropdown-arrow { position:absolute; right:18px; top:50%; transform:translateY(-50%); font-size:0.75rem; color:#64748b; pointer-events:none; }
121
+
122
+ @media (max-width:1000px){
123
+ .header-inner { padding:26px clamp(1.75rem,5vw,4rem) 24px; }
124
+ .site-title { font-size:clamp(1.9rem,5vw,2.6rem); }
125
+ .header-search { width:240px; }
126
+ }
127
+ @media (max-width:780px){
128
+ .header-inner { align-items:flex-start; }
129
+ .header-controls { width:100%; order:3; justify-content:flex-start; }
130
+ .header-search { width:100%; }
131
+ .brand-block { width:100%; }
132
+ }
133
+ @media (max-width:520px){
134
+ .site-title { font-size:2.1rem; }
135
+ .site-tagline { font-size:0.8rem; }
136
+ .header-category { width:100%; }
137
+ }
138
+
139
+ /* Remove old main-header styles */
140
+ .main-header {
141
+ display: none;
142
+ }
143
+
144
+ /* Removed old content-controls & latest-posts label (merged into site-header) */
145
+
146
+ .search-container {
147
+ position: relative;
148
+ display: flex;
149
+ align-items: center;
150
+ }
151
+
152
+ .search-input {
153
+ padding: 12px 16px 12px 44px;
154
+ border: 2px solid #e2e8f0;
155
+ border-radius: 12px;
156
+ font-size: 0.95rem;
157
+ width: 280px;
158
+ background: white;
159
+ transition: all 0.3s ease;
160
+ outline: none;
161
+ }
162
+
163
+ .search-input:focus {
164
+ border-color: #667eea;
165
+ box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
166
+ }
167
+
168
+ .search-icon {
169
+ position: absolute;
170
+ left: 16px;
171
+ top: 50%;
172
+ transform: translateY(-50%);
173
+ color: #94a3b8;
174
+ font-size: 1rem;
175
+ pointer-events: none;
176
+ }
177
+
178
+ .category-dropdown {
179
+ position: relative;
180
+ display: flex;
181
+ align-items: center;
182
+ }
183
+
184
+ .category-select {
185
+ padding: 12px 40px 12px 16px;
186
+ border: 2px solid #e2e8f0;
187
+ border-radius: 12px;
188
+ font-size: 0.95rem;
189
+ background: white;
190
+ cursor: pointer;
191
+ outline: none;
192
+ appearance: none;
193
+ min-width: 180px;
194
+ transition: all 0.3s ease;
195
+ }
196
+
197
+ .category-select:focus {
198
+ border-color: #667eea;
199
+ box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
200
+ }
201
+
202
+ .dropdown-arrow {
203
+ position: absolute;
204
+ right: 16px;
205
+ top: 50%;
206
+ transform: translateY(-50%);
207
+ color: #94a3b8;
208
+ font-size: 0.8rem;
209
+ pointer-events: none;
210
+ }
211
+
212
+ /* Main Content Area */
213
+ .main-content {
214
+ max-width: 1200px;
215
+ margin: 0 auto;
216
+ padding: 0 32px 80px;
217
+ }
218
+
219
+ /* Blog Content Section */
220
+ .blog-content-section {
221
+ margin: 20px 0;
223
+ background: transparent;
224
+ }
225
+
226
+ .blog-container {
227
+ width: 100%;
228
+ }
229
+
230
+ /* Blog Grid Compact */
231
+ .blog-grid-new { display:grid; grid-template-columns:repeat(auto-fill,minmax(320px,1fr)); gap:26px; margin-bottom:54px; }
232
+
233
+ /* Card aesthetic refresh: compact & elevated */
234
+ .blog-card-new { background:#ffffff; border-radius:18px; overflow:hidden; box-shadow:0 10px 28px -14px rgba(0,0,0,0.25), 0 4px 14px -6px rgba(0,0,0,0.12); transition:all .4s cubic-bezier(.4,.2,.2,1); cursor:pointer; border:1px solid #e2e8f0; position:relative; isolation:isolate; display:flex; flex-direction:column; }
235
+ .blog-card-new:before { content:""; position:absolute; inset:0; background:linear-gradient(140deg,rgba(99,102,241,0.07),rgba(56,189,248,0.05) 35%,rgba(255,255,255,0) 75%); opacity:0; transition:opacity .5s; pointer-events:none; }
236
+ .blog-card-new:hover:before { opacity:1; }
237
+
238
+ .blog-card-new:hover { transform:translateY(-6px) scale(1.015); box-shadow:0 30px 60px -24px rgba(30,41,59,0.38), 0 18px 28px -12px rgba(30,41,59,0.22); border-color:#c7d2fe; }
239
+
240
+ /* Media ratio refined */
241
+ .blog-card-image-new { position:relative; width:100%; aspect-ratio:16/9; overflow:hidden; background:linear-gradient(135deg,#f8fafc,#e2e8f0); }
242
+
243
+ .blog-card-image-new img {
244
+ width: 100%;
245
+ height: 100%;
246
+ object-fit: cover;
247
+ transition: transform 0.3s ease;
248
+ }
249
+
250
+ .blog-card-new:hover .blog-card-image-new img {
251
+ transform: scale(1.05);
252
+ }
253
+
254
+ .image-overlay {
255
+ position: absolute;
256
+ top: 0;
257
+ left: 0;
258
+ right: 0;
259
+ bottom: 0;
260
+ background: linear-gradient(180deg, transparent 0%, rgba(0, 0, 0, 0.1) 100%);
261
+ opacity: 0;
262
+ transition: opacity 0.3s ease;
263
+ }
264
+
265
+ .blog-card-new:hover .image-overlay {
266
+ opacity: 1;
267
+ }
268
+
269
+ .blog-card-content-new { padding:20px 20px 22px; background:#ffffff; flex:1; display:flex; flex-direction:column; }
270
+
271
+ .blog-card-tags-new { display:flex; gap:6px; margin-bottom:12px; flex-wrap:wrap; }
272
+
273
+ .blog-card-tag-new { background:linear-gradient(135deg,#eef2ff,#e0e7ff); color:#4338ca; font-size:0.63rem; font-weight:600; padding:5px 10px; border-radius:14px; letter-spacing:.6px; text-transform:uppercase; }
274
+
275
+ .blog-card-title-new { font-size:1.05rem; font-weight:650; color:#1e293b; margin:0 0 8px 0; line-height:1.35; letter-spacing:-0.3px; display:-webkit-box; -webkit-line-clamp:2; line-clamp:2; -webkit-box-orient:vertical; overflow:hidden; }
276
+
277
+ .blog-card-excerpt-new { font-size:0.8rem; color:#475569; line-height:1.5; margin:0 0 14px 0; display:-webkit-box; -webkit-line-clamp:3; line-clamp:3; -webkit-box-orient:vertical; overflow:hidden; }
278
+
279
+ .blog-card-meta-new { display:flex; justify-content:space-between; align-items:center; margin-top:auto; padding-top:12px; border-top:1px solid #f1f5f9; gap:10px; }
280
+
281
+ .blog-card-author-new { font-size:0.65rem; font-weight:600; color:#334155; letter-spacing:.5px; text-transform:uppercase; }
282
+
283
+ .blog-card-date-new { font-size:0.6rem; color:#64748b; letter-spacing:.5px; }
284
+
285
+ .blog-card-stats-new {
286
+ display: flex;
287
+ align-items: center;
288
+ gap: 16px;
289
+ }
290
+
291
+ .blog-card-stat-new {
292
+ font-size: 0.75rem;
293
+ color: #64748b;
294
+ background: #f8fafc;
295
+ padding: 4px 8px;
296
+ border-radius: 6px;
297
+ }
298
+
299
+ /* Pagination */
300
+ .pagination-new {
301
+ display: flex;
302
+ justify-content: center;
303
+ align-items: center;
304
+ gap: 24px;
305
+ margin-top: 48px;
306
+ }
307
+
308
+ .pagination-btn-new {
309
+ padding: 12px 24px;
310
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
311
+ color: white;
312
+ border: none;
313
+ border-radius: 12px;
314
+ font-weight: 600;
315
+ cursor: pointer;
316
+ transition: all 0.3s ease;
317
+ }
318
+
319
+ .pagination-btn-new:hover:not(:disabled) {
320
+ transform: translateY(-2px);
321
+ box-shadow: 0 8px 16px rgba(102, 126, 234, 0.3);
322
+ }
323
+
324
+ .pagination-btn-new:disabled {
325
+ opacity: 0.5;
326
+ cursor: not-allowed;
327
+ background: #e2e8f0;
328
+ color: #94a3b8;
329
+ }
330
+
331
+ .page-indicators {
332
+ display: flex;
333
+ gap: 8px;
334
+ }
335
+
336
+ .page-indicator {
337
+ width: 40px;
338
+ height: 40px;
339
+ border-radius: 8px;
340
+ border: 2px solid #e2e8f0;
341
+ background: white;
342
+ color: #64748b;
343
+ font-weight: 600;
344
+ cursor: pointer;
345
+ transition: all 0.3s ease;
346
+ }
347
+
348
+ .page-indicator:hover {
349
+ border-color: #667eea;
350
+ color: #667eea;
351
+ }
352
+
353
+ .page-indicator.active {
354
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
355
+ color: white;
356
+ border-color: transparent;
357
+ }
358
+
359
+ /* Responsive Design */
360
+ @media (max-width: 1024px) {
361
+ .blog-grid-new {
362
+ grid-template-columns: 1fr;
363
+ gap: 24px;
364
+ }
365
+
366
+ .controls-row {
367
+ flex-direction: column;
368
+ align-items: stretch;
369
+ gap: 16px;
370
+ }
371
+
372
+ .controls-group {
373
+ justify-content: center;
374
+ }
375
+
376
+ .search-input {
377
+ width: 100%;
378
+ max-width: 280px;
379
+ }
380
+ }
381
+
382
+ @media (max-width: 768px) {
383
+ .homepage-layout {
384
+ padding-top: 70px;
385
+ }
386
+
387
+ .compact-header .header-content {
388
+ padding: 12px 20px;
389
+ }
390
+
391
+ .brand-section {
392
+ flex-direction: column;
393
+ gap: 4px;
394
+ }
395
+
396
+ .homepage-layout .app-title {
397
+ font-size: 1.4rem;
398
+ }
399
+
400
+ .homepage-layout .tagline {
401
+ font-size: 0.8rem;
402
+ }
403
+
404
+ .controls-inner, .main-content {
405
+ padding: 0 20px;
406
+ }
407
+
408
+ .controls-group {
409
+ flex-direction: column;
410
+ gap: 12px;
411
+ width: 100%;
412
+ }
413
+
414
+ .search-input, .category-select {
415
+ width: 100%;
416
+ max-width: none;
417
+ }
418
+
419
+ .pagination-new {
420
+ flex-direction: column;
421
+ gap: 16px;
422
+ }
423
+
424
+ .page-indicators {
425
+ order: -1;
426
+ }
427
+
428
+ .blog-card-content-new {
429
+ padding: 20px;
430
+ }
431
+ }
432
+
433
+ @media (max-width: 480px) {
434
+ .homepage-layout .app-title { font-size:1.2rem; }
435
+ .blog-card-content-new { padding:16px; }
436
+ }
437
+
438
+ .blog-section-header {
439
+ text-align: center;
440
+ margin-bottom: 32px;
441
+ }
442
+
443
+ .blog-section-title {
444
+ font-size: 1.8rem;
445
+ font-weight: 700;
446
+ color: #1b2735;
447
+ margin: 0 0 8px 0;
448
+ letter-spacing: -0.5px;
449
+ }
450
+
451
+ .blog-section-subtitle {
452
+ font-size: 1rem;
453
+ color: #64748b;
454
+ margin: 0;
455
+ font-weight: 400;
456
+ }
457
+
458
+ .blog-grid {
459
+ display: grid;
460
+ grid-template-columns: repeat(2, 1fr);
461
+ gap: 24px;
462
+ width: 100%;
463
+ }
464
+
465
+ .blog-card {
466
+ background: linear-gradient(145deg, #ffffff, #f8fafc);
467
+ border: 1px solid #e2e8f0;
468
+ border-radius: 16px;
469
+ overflow: hidden;
470
+ cursor: pointer;
471
+ transition: all 0.3s ease;
472
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.05);
473
+ position: relative;
474
+ }
475
+
476
+ .blog-card:hover {
477
+ transform: translateY(-4px);
478
+ box-shadow: 0 12px 32px rgba(0, 0, 0, 0.15);
479
+ border-color: #cbd5e1;
480
+ }
481
+
482
+ .blog-card-image {
483
+ width: 100%;
484
+ height: 160px;
485
+ overflow: hidden;
486
+ background: linear-gradient(135deg, #f1f5f9, #e2e8f0);
487
+ }
488
+
489
+ .blog-card-image img {
490
+ width: 100%;
491
+ height: 100%;
492
+ object-fit: cover;
493
+ transition: transform 0.3s ease;
494
+ }
495
+
496
+ .blog-card:hover .blog-card-image img {
497
+ transform: scale(1.05);
498
+ }
499
+
500
+ .blog-card-content {
501
+ padding: 20px;
502
+ }
503
+
504
+ .blog-card-tags {
505
+ display: flex;
506
+ gap: 6px;
507
+ margin-bottom: 12px;
508
+ flex-wrap: wrap;
509
+ }
510
+
511
+ .blog-card-tag {
512
+ background: linear-gradient(135deg, #ddd6fe, #e0e7ff);
513
+ color: #6366f1;
514
+ font-size: 0.7rem;
515
+ font-weight: 600;
516
+ padding: 4px 8px;
517
+ border-radius: 8px;
518
+ text-transform: uppercase;
519
+ letter-spacing: 0.5px;
520
+ }
521
+
522
+ .blog-card-title {
523
+ font-size: 1.1rem;
524
+ font-weight: 700;
525
+ color: #1e293b;
526
+ margin: 0 0 8px 0;
527
+ line-height: 1.4;
528
+ letter-spacing: -0.3px;
529
+ }
530
+
531
+ .blog-card-excerpt {
532
+ font-size: 0.9rem;
533
+ color: #64748b;
534
+ line-height: 1.5;
535
+ margin: 0 0 16px 0;
536
+ }
537
+
538
+ .blog-card-meta {
539
+ display: flex;
540
+ justify-content: space-between;
541
+ align-items: center;
542
+ margin-bottom: 8px;
543
+ }
544
+
545
+ .blog-card-author {
546
+ font-size: 0.8rem;
547
+ font-weight: 600;
548
+ color: #475569;
549
+ }
550
+
551
+ .blog-card-date {
552
+ font-size: 0.75rem;
553
+ color: #94a3b8;
554
+ }
555
+
556
+ .blog-card-stats {
557
+ display: flex;
558
+ gap: 12px;
559
+ }
560
+
561
+ .blog-card-stat {
562
+ font-size: 0.7rem;
563
+ color: #64748b;
564
+ font-weight: 500;
565
+ }
566
+
567
+ /* Blog View Styles */
568
+ .app-root.blog-view { padding-top:0; }
569
+ .blog-view .bg-layers { display:none; }
570
+ .blog-view .top-bar.improved { position:relative; }
571
+
572
+ /* Unified non-sticky blog header (single source of truth) */
573
+ /* .blog-header { position:relative; background:linear-gradient(135deg,#f8fafc 0%, #e2e8f0 100%); border-bottom:0px solid #cbd5e1; padding:40px 0 50px; margin:0; } */
574
+ .blog-header { position:relative; background-color:#f2f5f6; padding:40px 0 10px; margin:0; }
575
+ .blog-header.smart-header.collapsed .blog-title { font-size:1.25rem; margin:0; }
576
+ .blog-header.smart-header.expanded .blog-title { font-size:1.6rem; margin:0 0 12px; }
577
+ .blog-header.smart-header.collapsed .back-button { padding:4px 12px; font-size:0.7rem; margin-bottom:6px; }
578
+ .blog-header.smart-header.expanded .back-button { padding:8px 16px; font-size:0.8rem; margin-bottom:16px; }
579
+
580
+ .blog-header-inner {
581
+ max-width: 800px;
582
+ margin: 0 auto;
583
+ padding: 0 32px;
584
+ }
585
+
586
+ .back-button {
587
+ background: #6366f1;
588
+ border: none;
589
+ color: white;
590
+ font-size: 0.8rem;
591
+ font-weight: 600;
592
+ cursor: pointer;
593
+ padding: 8px 16px;
594
+ margin-bottom: 16px;
595
+ border-radius: 20px;
596
+ transition: all 0.2s;
597
+ display: inline-flex;
598
+ align-items: center;
599
+ gap: 6px;
600
+ }
601
+
602
+ .back-button:hover {
603
+ background: #4f46e5;
604
+ transform: translateY(-1px);
605
+ }
606
+
607
+ .blog-title-section {
608
+ text-align: left;
609
+ }
610
+
611
+ .blog-title {
612
+ font-size: 1.6rem;
613
+ font-weight: 700;
614
+ color: #1e293b;
615
+ margin: 0 0 12px 0;
616
+ line-height: 1.3;
617
+ letter-spacing: -0.4px;
618
+ }
619
+
620
+ .blog-meta {
621
+ display: flex;
622
+ align-items: center;
623
+ gap: 20px;
624
+ margin-bottom: 12px;
625
+ flex-wrap: wrap;
626
+ }
627
+
628
+ .blog-author, .blog-date {
629
+ font-size: 0.85rem;
630
+ font-weight: 500;
631
+ color: #64748b;
632
+ display: flex;
633
+ align-items: center;
634
+ gap: 4px;
635
+ }
636
+
637
+ /* (Removed unused header tag styles previously) */
638
+
639
+ .blog-author {
640
+ font-size: 0.9rem;
641
+ font-weight: 600;
642
+ color: #475569;
643
+ }
644
+
645
+ .blog-date {
646
+ font-size: 0.9rem;
647
+ color: #64748b;
648
+ }
649
+
650
+ /* (Obsolete .blog-tags removed) */
651
+
652
+ /* (Removed earlier gradient tag style) */
653
+
654
+ /* (Removed early full-bleed variants) */
655
+
656
+ .featured-image {
657
+ margin: 0 0 32px 0;
658
+ text-align: center;
659
+ }
660
+
661
+ .featured-image img {
662
+ width: 100%;
663
+ max-height: 400px;
664
+ object-fit: cover;
665
+ border-radius: 12px;
666
+ box-shadow: 0 8px 24px rgba(0, 0, 0, 0.12);
667
+ }
668
+
669
+ .featured-image figcaption {
670
+ margin-top: 12px;
671
+ font-size: 0.9rem;
672
+ color: #64748b;
673
+ font-style: italic;
674
+ }
675
+
676
+ /* (Removed earlier temporary blog-body reset; consolidated version appears later) */
677
+
678
+ .blog-image {
679
+ margin: 32px 0;
680
+ text-align: center;
681
+ }
682
+
683
+ .blog-image img {
684
+ max-width: 100%;
685
+ height: auto;
686
+ border-radius: 8px;
687
+ box-shadow: 0 4px 16px rgba(0, 0, 0, 0.1);
688
+ }
689
+
690
+ .blog-image figcaption {
691
+ margin-top: 8px;
692
+ font-size: 0.9rem;
693
+ color: #64748b;
694
+ font-style: italic;
695
+ }
696
+
697
+ /* Blog View Layout - Wider and Scrollable */
698
+ .blog-view { overflow-y:auto; height:100vh; background:#f8fafc; }
699
+
700
+ .blog-content-area { max-width:900px; margin:0 auto; padding:20px 20px; width:100%; background:#f8fafc; }
701
+
702
+ .blog-article {
703
+ background: #ffffff;
704
+ border: none;
705
+ border-radius: 8px;
706
+ padding: 5px;
707
+ box-shadow: none;
708
+ width: 100%;
709
+ max-width: none;
710
+ margin: 0 auto;
711
+ }
712
+
713
+ /* Removed duplicate sticky blog-header block - using the earlier static positioned header design */
714
+
715
+ /* (Removed duplicate .blog-header-inner definition) */
716
+
717
+ /* (Removed duplicate .blog-title-section) */
718
+
719
+ /* (Removed earlier oversized .blog-title) */
720
+
721
+ /* (Removed intermediate .blog-meta variant) */
722
+
723
+ /* (Removed duplicate blog-author/blog-date) */
724
+
725
+ /* (Removed second header tags block) */
726
+
727
+ /* Blog Content Styling */
728
+ /* (Removed duplicate featured-image block) */
729
+
730
+ .featured-image img {
731
+ width: 100%;
732
+ height: auto;
733
+ display: block;
734
+ }
735
+
736
+ .featured-image figcaption {
737
+ padding: 1rem 0;
738
+ background: transparent;
739
+ color: #94a3b8;
740
+ font-style: italic;
741
+ text-align: center;
742
+ font-size: 0.875rem;
743
+ }
744
+
745
+ /* (Removed duplicate blog-body variant) */
746
+
747
+ /* (Removed earlier heading block) */
748
+
749
+ .blog-body h1:first-child,
750
+ .blog-body h2:first-child,
751
+ .blog-body h3:first-child {
752
+ margin-top: 1.5rem;
753
+ }
754
+
755
+ .blog-body h2 {
756
+ font-size: 1.875rem;
757
+ border-bottom: none;
758
+ padding-bottom: 0;
759
+ }
760
+
761
+ .blog-body h3 {
762
+ font-size: 1.5rem;
763
+ }
764
+
765
+ .blog-body h4 {
766
+ font-size: 1.25rem;
767
+ }
768
+
769
+ .blog-body p { margin-bottom:1.25rem; text-align:left; color:#1e293b; font-family:'Inter',system-ui,sans-serif; font-weight:400; font-size:1.05rem; line-height:1.7; letter-spacing:0.15px; }
770
+
771
+ .blog-body p:last-child {
772
+ margin-bottom: 0;
773
+ color: #374151 !important;
774
+ }
775
+
776
+ /* Apply professional fonts */
777
+ /* (Removed standalone font-family override) */
778
+
779
+ /* (Removed duplicate heading font stack) */
780
+
781
+ .blog-body li,
782
+ .blog-body blockquote,
783
+ .blog-body span { font-family:'Inter',system-ui,sans-serif; }
784
+
785
+ .blog-body img {
786
+ max-width: 100%;
787
+ height: auto;
788
+ border-radius: 8px;
789
+ margin: 2.5rem 0;
790
+ box-shadow: none;
791
+ border: none;
792
+ }
793
+
794
+ /* Storytelling blockquote */
795
+ .blog-body blockquote { border-left:4px solid #6366f1; margin:2rem 0; font-style:italic; color:#24303a !important; background:linear-gradient(90deg,rgba(99,102,241,0.06),rgba(99,102,241,0)); padding:1rem 1.5rem 1rem 1.25rem; border-radius:4px; }
796
+
797
+ /* Inline code refined */
798
+ .blog-body code { background:rgba(31,41,55,0.08); padding:0.25rem 0.55rem; border-radius:4px; font-family:'SF Mono','Monaco','Inconsolata','Roboto Mono','Courier New',monospace; font-size:0.85em; color:#111827 !important; }
799
+
800
+ .blog-body pre {
801
+ background: rgba(15, 23, 42, 0.9);
802
+ padding: 1.5rem;
803
+ border-radius: 8px;
804
+ overflow-x: auto;
805
+ margin: 2rem 0;
806
+ border: 1px solid rgba(71, 85, 105, 0.5);
807
+ }
808
+
809
+ .blog-body pre code {
810
+ background: none;
811
+ padding: 0;
812
+ color: #000000 !important;
813
+ opacity: 1 !important;
814
+ }
815
+
816
+ .blog-body ul, .blog-body ol {
817
+ margin: 1.5rem 0;
818
+ padding-left: 2rem;
819
+ }
820
+
821
+ .blog-body li {
822
+ margin-bottom: 0.5rem;
823
+ color: #000000 !important;
824
+ opacity: 1 !important;
825
+ }
826
+
827
+ /* Narrative link styling */
828
+ .blog-body a { color:#1d4ed8; text-decoration:none; position:relative; font-weight:500; transition:color .3s ease; }
829
+ .blog-body a:after { content:""; position:absolute; left:0; bottom:-3px; height:2px; width:100%; background:linear-gradient(90deg,#1d4ed8,#6366f1); opacity:.65; transform:scaleX(.35); transform-origin:left; transition:transform .35s ease,opacity .35s ease; }
830
+ .blog-body a:hover { color:#1e3a8a; }
831
+ .blog-body a:hover:after { transform:scaleX(1); opacity:1; }
832
+
833
+ /* Blog Meta and Tags Styles */
834
+ .blog-meta {
835
+ display: flex;
836
+ gap: 1.5rem;
837
+ margin: 1rem 0;
838
+ font-size: 0.95rem;
839
+ color: #6b7280;
840
+ }
841
+ /* Removed earlier meta duplications and obsolete .blog-tags */
842
+
843
+ /* New tag styling */
844
+ .blog-tag { background:#f1f5f9; color:#334155; padding:6px 14px; border-radius:999px; font-size:0.7rem; font-weight:600; letter-spacing:0.5px; border:1px solid #e2e8f0; transition:background .25s,color .25s,border-color .25s; }
845
+ .blog-tag:hover { background:#e2e8f0; }
846
+
847
+ /* Minimal tags separator section */
848
+ .blog-tags-section { margin:3rem 0 2.5rem 0; padding:0; border-top:1px solid #e2e8f0; }
849
+ .blog-tags-title { font-family:'Inter',system-ui,sans-serif; font-size:0.7rem; font-weight:600; letter-spacing:1.4px; color:#64748b; margin:1.75rem 0 1rem 0; text-transform:uppercase; padding:0 24px; text-align:left; }
850
+ .blog-tags-container { display:flex; flex-wrap:wrap; gap:8px; padding:0 24px; }
851
+
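Editor's note: the three tag classes above imply a small markup shell around the tag pills. A minimal TSX sketch of that shell, assuming a `tags` string-array prop (the component name and prop are illustrative, not from this diff):

function BlogTags({ tags }: { tags: string[] }) {
  return (
    <section className="blog-tags-section">
      <h4 className="blog-tags-title">Tags</h4>
      <div className="blog-tags-container">
        {tags.map((tag) => (
          <span key={tag} className="blog-tag">{tag}</span>
        ))}
      </div>
    </section>
  );
}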
852
+ /* Minimal Pagination Styles */
853
+ .pagination-minimal {
854
+ margin-top: 2rem;
855
+ padding: 1.5rem 0;
856
+ border-top: 1px solid rgba(255, 255, 255, 0.1);
857
+ }
858
+
859
+ .pagination-track {
860
+ position: relative;
861
+ height: 8px;
862
+ background: rgba(255, 255, 255, 0.1);
863
+ border-radius: 4px;
864
+ margin-bottom: 1rem;
865
+ overflow: hidden;
866
+ }
867
+
868
+ .pagination-progress {
869
+ position: absolute;
870
+ top: 0;
871
+ left: 0;
872
+ height: 100%;
873
+ background: linear-gradient(90deg, #6366f1 0%, #8b5cf6 100%);
874
+ border-radius: 4px;
875
+ transition: width 0.3s ease;
876
+ }
877
+
878
+ .pagination-progress.page-1 { width: 25%; }
879
+ .pagination-progress.page-2 { width: 50%; }
880
+ .pagination-progress.page-3 { width: 75%; }
881
+ .pagination-progress.page-4 { width: 100%; }
882
+
883
+ .pagination-handle {
884
+ position: absolute;
885
+ top: -6px;
886
+ width: 20px;
887
+ height: 20px;
888
+ background: linear-gradient(135deg, #6366f1 0%, #8b5cf6 100%);
889
+ border: 2px solid white;
890
+ border-radius: 50%;
891
+ cursor: pointer;
892
+ transition: left 0.3s ease, transform 0.2s ease;
893
+ display: flex;
894
+ align-items: center;
895
+ justify-content: center;
896
+ box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
897
+ }
898
+
899
+ .pagination-handle.page-1 { left: 20%; }
900
+ .pagination-handle.page-2 { left: 45%; }
901
+ .pagination-handle.page-3 { left: 70%; }
902
+ .pagination-handle.page-4 { left: 95%; }
903
+
904
+ .pagination-handle:hover {
905
+ transform: scale(1.2);
906
+ }
907
+
908
+ .pagination-handle.dragging {
909
+ transform: scale(1.3);
910
+ box-shadow: 0 4px 16px rgba(99, 102, 241, 0.5);
911
+ }
912
+
913
+ .page-indicator {
914
+ font-size: 0.7rem;
915
+ font-weight: bold;
916
+ color: white;
917
+ }
918
+
919
+ .pagination-controls {
920
+ display: flex;
921
+ justify-content: center;
922
+ gap: 1rem;
923
+ margin-bottom: 1rem;
924
+ }
925
+
926
+ .pagination-btn {
927
+ background: rgba(255, 255, 255, 0.05);
928
+ border: 1px solid rgba(255, 255, 255, 0.2);
929
+ color: #e2e8f0;
930
+ padding: 0.5rem 1rem;
931
+ border-radius: 8px;
932
+ cursor: pointer;
933
+ transition: all 0.3s ease;
934
+ font-size: 0.9rem;
935
+ font-weight: 500;
936
+ }
937
+
938
+ .pagination-btn:hover:not(:disabled) {
939
+ background: rgba(99, 102, 241, 0.2);
940
+ border-color: rgba(99, 102, 241, 0.5);
941
+ transform: translateY(-1px);
942
+ }
943
+
944
+ .pagination-btn:disabled {
945
+ opacity: 0.4;
946
+ cursor: not-allowed;
947
+ }
948
+
949
+ .pagination-meta {
950
+ text-align: center;
951
+ color: #9ca3af;
952
+ font-size: 0.85rem;
953
+ }
954
+
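Editor's note: the `.page-1` through `.page-4` modifiers above drive both the progress width and the handle position, so a script only needs to swap one class per element. A hedged TypeScript sketch of that wiring, assuming a four-page article and `.prev`/`.next` button classes that this diff does not show:

const TOTAL_PAGES = 4; // assumption: matches the page-1..page-4 modifiers above

function renderPagination(page: number): void {
  const progress = document.querySelector<HTMLElement>('.pagination-progress');
  const handle = document.querySelector<HTMLElement>('.pagination-handle');
  if (!progress || !handle) return;
  // Swapping the page-N modifier lets the CSS transitions animate width/left.
  progress.className = `pagination-progress page-${page}`;
  handle.className = `pagination-handle page-${page}`;
  // Mirror .pagination-btn:disabled at either end of the range.
  const prev = document.querySelector<HTMLButtonElement>('.pagination-btn.prev');
  const next = document.querySelector<HTMLButtonElement>('.pagination-btn.next');
  if (prev) prev.disabled = page <= 1;
  if (next) next.disabled = page >= TOTAL_PAGES;
}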
955
+ /* Hide legacy pagination slider styles */
956
+ .pagination-slider,
957
+ .slider-container,
958
+ .slider-nav,
959
+ .slider-track,
960
+ .slider-dates,
961
+ .date-indicator,
962
+ .blog-sidebar,
963
+ /* .blog-tags-section excluded from this hidden list so it remains visible */
964
+ .tags-title,
965
+ .blog-content-layout {
966
+ display: none;
967
+ }
968
+ /* Removed legacy hidden pagination/slider/sidebar selectors */
969
+
970
+ /* Enhanced Blog Card Meta */
971
+ .blog-card-meta {
972
+ display: flex;
973
+ gap: 1rem;
974
+ margin: 1rem 0 0.5rem 0;
975
+ font-size: 0.85rem;
976
+ color: #9ca3af;
977
+ align-items: center;
978
+ }
979
+
980
+ .blog-card-author, .blog-card-date {
981
+ display: flex;
982
+ align-items: center;
983
+ gap: 0.25rem;
984
+ font-weight: 500;
985
+ }
986
+
987
+ /* Responsive Design */
988
+ @media (max-width:1100px){ .blog-section{width:86%;} .blog-content-area{padding:20px 40px; max-width:900px;} .blog-header-inner{padding:0 40px;} }
989
+
990
+ @media (max-width: 900px) {
991
+ .blog-section { width: 90%; }
992
+ .blog-grid { gap: 20px; }
993
+ .blog-content-area { padding:20px 30px; max-width:900px; }
994
+ .blog-header-inner { padding:0 30px; }
995
+ }
996
+
997
+ @media (max-width: 640px) {
998
+ .blog-section { width: 94%; }
999
+ .blog-grid {
1000
+ grid-template-columns: 1fr;
1001
+ gap: 16px;
1002
+ }
1003
+ .blog-card-content { padding: 16px; }
1004
+ .blog-content-area { padding:20px 20px; max-width:900px; }
1005
+ .blog-header-inner { padding:0 20px; }
1006
+
1007
+ .pagination-controls {
1008
+ flex-direction: column;
1009
+ gap: 0.5rem;
1010
+ }
1011
+
1012
+ .pagination-btn {
1013
+ width: 100%;
1014
+ }
1015
+ }
1016
+
1017
+ /* Removed sticky override block (header already non-sticky) */
1018
+
1019
+ /* Unified blog typography & layout (storytelling enhanced) */
1020
+ .blog-content-area { max-width:900px; margin:0 auto; padding:0 20px 40px; width:100%; background:#f8fafc; }
1021
+ .blog-article { background:#ffffff; border:none; border-radius:12px; padding:36px 48px 36px; box-shadow:0 4px 6px rgba(0,0,0,0.1), 0 2px 4px rgba(0,0,0,0.06); width:100%; margin:12px auto 0; }
1022
+ .blog-body { font-size:1.06rem; line-height:1.72; color:#1f2933; max-width:65ch; margin:0 auto; font-family:'Lora','Merriweather',Georgia,serif; font-weight:400; font-variant-ligatures:common-ligatures; font-kerning:normal; hyphens:auto; }
1023
+ .blog-body.story-mode { font-size:1.12rem; max-width:60ch; }
1024
+ .blog-body h1, .blog-body h2, .blog-body h3, .blog-body h4, .blog-body h5, .blog-body h6 { font-family:'Lora','Merriweather',Georgia,serif; font-weight:600; line-height:1.25; letter-spacing:-0.3px; margin:2.2rem 0 1.15rem; color:#14202b; }
1025
+ .blog-body h1:first-child, .blog-body h2:first-child, .blog-body h3:first-child { margin-top:1rem; }
1026
+ .blog-body p { margin:0 0 1.3rem; font-weight:400; font-size:inherit; letter-spacing:0.15px; color:#1f2933; }
1027
+ .blog-body p:last-child { margin-bottom:0; }
1028
+ .blog-paragraph { margin:0 0 1.3rem; }
1029
+
frontend/src/style.css.new ADDED
@@ -0,0 +1,53 @@
1
+ @import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Lora:ital,wght@0,400;0,500;0,600;0,700;1,400;1,500;1,600&display=swap');
2
+
3
+ * { box-sizing: border-box; }
4
+ html, body, #root { height: 100%; }
5
+ body { margin:0; font-family:'Inter',system-ui,sans-serif; color:#1b2126; background:#f2f4f8; overflow-x:hidden; -webkit-font-smoothing:antialiased; }
6
+
7
+ /* Loading animations */
8
+ .loading-overlay {
9
+ position: absolute;
10
+ top: 0;
11
+ left: 0;
12
+ right: 0;
13
+ bottom: 0;
14
+ background: rgba(15, 23, 42, 0.75);
15
+ display: flex;
16
+ justify-content: center;
17
+ align-items: center;
18
+ border-radius: 18px;
19
+ z-index: 10;
20
+ backdrop-filter: blur(3px);
21
+ }
22
+
23
+ .loading-spinner {
24
+ width: 40px;
25
+ height: 40px;
26
+ border: 3px solid rgba(99, 102, 241, 0.3);
27
+ border-radius: 50%;
28
+ border-top-color: #6366f1;
29
+ animation: spin 0.8s linear infinite;
30
+ }
31
+
32
+ @keyframes spin {
33
+ to { transform: rotate(360deg); }
34
+ }
35
+
36
+ .blog-card-new.loading {
37
+ pointer-events: none;
38
+ opacity: 0.7;
39
+ }
40
+
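Editor's note: `.loading-overlay` is absolutely positioned, so the card it covers needs a positioned ancestor. A minimal TSX sketch of the structure these classes appear to expect (component and prop names are assumptions):

import type { ReactNode } from 'react';

function BlogCard({ loading, children }: { loading: boolean; children: ReactNode }) {
  return (
    // position: relative anchors the absolutely positioned overlay to this card.
    <div className={`blog-card-new${loading ? ' loading' : ''}`} style={{ position: 'relative' }}>
      {children}
      {loading && (
        <div className="loading-overlay">
          <div className="loading-spinner" />
        </div>
      )}
    </div>
  );
}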
41
+ .app-root { position:relative; min-height:100%; padding-top:150px; }
42
+
43
+ /* Layered subtle geometric background */
44
+ .bg-layers { position:fixed; inset:0; pointer-events:none; z-index:0; background:
45
+ linear-gradient(145deg, rgba(255,255,255,0.85) 0%, rgba(255,255,255,0) 65%),
46
+ radial-gradient(circle at 78% 24%, rgba(96,165,250,0.18), transparent 60%),
47
+ radial-gradient(circle at 15% 70%, rgba(167,139,250,0.15), transparent 62%),
48
+ radial-gradient(circle at 50% 85%, rgba(125,168,255,0.12), transparent 58%),
49
+ repeating-linear-gradient(115deg, rgba(0,0,0,0.025) 0 14px, rgba(0,0,0,0) 14px 28px),
50
+ linear-gradient(180deg,#f3f5f9,#eef1f6);
51
+ mask: linear-gradient(#fff,rgba(255,255,255,0.35)); }
52
+
53
+ /* Rest of your existing CSS... */
frontend/tsconfig.json ADDED
@@ -0,0 +1,17 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2020",
4
+ "useDefineForClassFields": true,
5
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
6
+ "module": "ESNext",
7
+ "skipLibCheck": true,
8
+ "moduleResolution": "Bundler",
9
+ "resolveJsonModule": true,
10
+ "isolatedModules": true,
11
+ "noEmit": true,
12
+ "jsx": "react-jsx",
13
+ "strict": true,
14
+ "forceConsistentCasingInFileNames": true
15
+ },
16
+ "include": ["src"]
17
+ }
frontend/vite.config.ts ADDED
@@ -0,0 +1,14 @@
1
+ import { defineConfig } from 'vite'
2
+ import react from '@vitejs/plugin-react'
3
+
4
+ export default defineConfig({
5
+ base: '/',
6
+ plugins: [react()],
7
+ build: {
8
+ outDir: 'dist',
9
+ sourcemap: true
10
+ },
11
+ server: {
12
+ port: 5173
13
+ }
14
+ })
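Editor's note: `"jsx": "react-jsx"` in tsconfig.json plus the React plugin here imply a conventional entry file mounting into the `#root` element styled in style.css.new. A hedged sketch of that entry (the `src/main.tsx` path and `App` component are assumptions, not shown in this commit):

import { StrictMode } from 'react';
import { createRoot } from 'react-dom/client';
import App from './App';

// Mount the app into the #root node the host index.html is assumed to provide.
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <App />
  </StrictMode>
);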
test.txt ADDED
@@ -0,0 +1,5 @@
1
+ Weather Information for London:
2
+ City: London
3
+ Country: UK
4
+ Temperature: 15°C
5
+ Conditions: Cloudy