byoung-hf commited on
Commit
c855984
·
verified ·
1 Parent(s): 63256f9

Upload folder using huggingface_hub

Browse files
.github/copilot-instructions.md CHANGED
@@ -27,6 +27,7 @@ The constitution covers:
27
  - Use `uv` for all code execution (never `pip` directly, never manually activate venv)
28
  - Use `uv run` to execute scripts and commands, never bare `python` or shell activation
29
  - **NEVER use `tail`, `head`, `grep`, or similar output filters** — show full output always so you can see everything that's happening
 
30
  - See `TESTING.md` for detailed test setup
31
 
32
  ## Common Gotchas & Reminders
@@ -35,3 +36,4 @@ The constitution covers:
35
  2. **VPN blocks Groq API**: If tests/app fail with 403 errors, disconnect from VPN
36
  3. **ChromaDB is ephemeral**: Using EphemeralClient means vectorstore rebuilds on each restart (stateless by design)
37
  4. **Package manager**: Use `uv` exclusively for this project; don't use `pip` directly
 
 
27
  - Use `uv` for all code execution (never `pip` directly, never manually activate venv)
28
  - Use `uv run` to execute scripts and commands, never bare `python` or shell activation
29
  - **NEVER use `tail`, `head`, `grep`, or similar output filters** — show full output always so you can see everything that's happening
30
+ - **NEVER use `timeout` command** — macOS doesn't have it, user will kill hung processes manually
31
  - See `TESTING.md` for detailed test setup
32
 
33
  ## Common Gotchas & Reminders
 
36
  2. **VPN blocks Groq API**: If tests/app fail with 403 errors, disconnect from VPN
37
  3. **ChromaDB is ephemeral**: Using EphemeralClient means vectorstore rebuilds on each restart (stateless by design)
38
  4. **Package manager**: Use `uv` exclusively for this project; don't use `pip` directly
39
+ 5. **User is on macOS**: Never use GNU-specific commands (timeout, sed, etc.) without checking for macOS alternatives
.github/workflows/update_space.yml CHANGED
@@ -14,27 +14,37 @@ jobs:
14
  - name: Checkout
15
  uses: actions/checkout@v4
16
 
17
- - name: Build Docker image
18
- run: docker build -t ai-me:test .
19
 
20
- - name: Run tests in Docker container
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  env:
22
- OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
23
  GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
24
  GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25
- run: |
26
- rm -rf ${{ github.workspace }}/htmlcov
27
- docker run --rm \
28
- -e OPENAI_API_KEY="${OPENAI_API_KEY}" \
29
- -e GROQ_API_KEY="${GROQ_API_KEY}" \
30
- -e GITHUB_PERSONAL_ACCESS_TOKEN="${GITHUB_PERSONAL_ACCESS_TOKEN}" \
31
- -v "${{ github.workspace }}/htmlcov:/app/htmlcov" \
32
- --user 0 \
33
- --entrypoint uv \
34
- ai-me:test \
35
- run pytest tests/ -v --cov=src --cov-report=html --cov-report=term-missing
36
-
37
- - name: Upload coverage reports as artifact
38
  uses: actions/upload-artifact@v4
39
  if: always()
40
  with:
 
14
  - name: Checkout
15
  uses: actions/checkout@v4
16
 
17
+ - name: Free disk space (hosted toolcache)
18
+ run: rm -rf /opt/hostedtoolcache
19
 
20
+ - name: Check disk space (initial)
21
+ run: df -h
22
+
23
+ - name: Build test image (multi-stage build)
24
+ run: docker compose build test
25
+
26
+ - name: Check disk space (after build)
27
+ run: df -h
28
+
29
+ - name: Run unit + integration tests in Docker
30
+ env:
31
+ GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
32
+ GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
33
+ run: docker compose run --rm test
34
+
35
+ - name: Check disk space (after unit/integration tests)
36
+ run: df -h
37
+
38
+ - name: Run E2E tests in Docker
39
  env:
 
40
  GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
41
  GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
42
+ run: docker compose run --rm test uv run pytest tests/e2e/ -v --log-cli-level=INFO
43
+
44
+ - name: Check disk space (after E2E tests)
45
+ run: df -h
46
+
47
+ - name: Upload coverage report
 
 
 
 
 
 
 
48
  uses: actions/upload-artifact@v4
49
  if: always()
50
  with:
Dockerfile CHANGED
@@ -1,42 +1,116 @@
1
- FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim
2
-
3
- # Python runtime behavior
4
- ENV PYTHONDONTWRITEBYTECODE=1
5
- ENV PYTHONUNBUFFERED=1
6
-
7
- # Install system dependencies: git (for GitPython) and Node.js (for MCP servers via npx)
8
- RUN apt-get update \
9
- && apt-get install -y --no-install-recommends ca-certificates curl gnupg git git-lfs \
10
- && install -m 0755 -d /etc/apt/keyrings \
11
- && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \
12
- && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" > /etc/apt/sources.list.d/nodesource.list \
13
- && apt-get update \
14
- && apt-get install -y --no-install-recommends nodejs \
 
15
  && rm -rf /var/lib/apt/lists/*
16
 
17
- # Download official GitHub MCP server binary
18
- RUN mkdir -p /app/bin \
19
- && curl -L https://github.com/github/github-mcp-server/releases/download/v0.19.0/github-mcp-server_Linux_x86_64.tar.gz \
20
- | tar -xz -C /app/bin \
21
- && chmod +x /app/bin/github-mcp-server
22
 
23
- WORKDIR /app
24
 
25
- # Copy only dependency specifications for layer caching
26
  COPY pyproject.toml uv.lock ./
27
 
28
- # Create virtual environment and sync dependencies from lock file
29
- # --no-install-project defers building the local package until source is copied
30
- RUN uv venv && uv sync --locked --no-install-project
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
32
- # Now copy the complete source code
33
- COPY . /app
34
 
35
- # Sync again to install the local package (now that source is present)
36
- RUN uv sync --locked
37
 
38
- # Non-root user with access to /app
39
- RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser /app
 
 
 
 
 
 
 
 
 
40
  USER appuser
41
 
42
- ENTRYPOINT ["uv", "run", "src/app.py"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # =============================================================================
2
+ # Builder Stage: Install dependencies and build wheels
3
+ # =============================================================================
4
+ FROM python:3.12-slim AS builder
5
+
6
+ ENV PYTHONDONTWRITEBYTECODE=1 \
7
+ PYTHONUNBUFFERED=1 \
8
+ PIP_NO_CACHE_DIR=1 \
9
+ PIP_DISABLE_PIP_VERSION_CHECK=1
10
+
11
+ # Install build dependencies
12
+ RUN apt-get update && apt-get install -y --no-install-recommends \
13
+ build-essential \
14
+ curl \
15
+ git \
16
  && rm -rf /var/lib/apt/lists/*
17
 
18
+ # Install uv
19
+ RUN curl -LsSf https://astral.sh/uv/install.sh | sh
20
+ ENV PATH="/root/.local/bin:$PATH"
 
 
21
 
22
+ WORKDIR /build
23
 
24
+ # Copy dependency specifications
25
  COPY pyproject.toml uv.lock ./
26
 
27
+ # Create venv and install ONLY production dependencies in builder
28
+ RUN uv venv /opt/venv && \
29
+ . /opt/venv/bin/activate && \
30
+ uv sync --locked --no-install-project
31
+
32
+ # Copy source and install project
33
+ COPY . .
34
+ RUN . /opt/venv/bin/activate && uv sync --locked
35
+
36
+ # =============================================================================
37
+ # Runtime Stage: Minimal image with Playwright support
38
+ # =============================================================================
39
+ FROM python:3.12-slim AS runtime
40
+
41
+ ENV PYTHONDONTWRITEBYTECODE=1 \
42
+ PYTHONUNBUFFERED=1 \
43
+ PATH="/opt/venv/bin:$PATH" \
44
+ TORCH_DEVICE=cpu \
45
+ NO_CUDA=1 \
46
+ CUDA_VISIBLE_DEVICES=""
47
+
48
+ # Install runtime + Playwright dependencies
49
+ RUN apt-get update && apt-get install -y --no-install-recommends \
50
+ curl \
51
+ git \
52
+ # Playwright Chromium dependencies
53
+ libnss3 \
54
+ libnspr4 \
55
+ libatk1.0-0 \
56
+ libatk-bridge2.0-0 \
57
+ libcups2 \
58
+ libdrm2 \
59
+ libxkbcommon0 \
60
+ libxcomposite1 \
61
+ libxdamage1 \
62
+ libxfixes3 \
63
+ libxrandr2 \
64
+ libgbm1 \
65
+ libasound2 \
66
+ libpango-1.0-0 \
67
+ libcairo2 \
68
+ && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
69
+ && apt-get install -y --no-install-recommends nodejs \
70
+ && rm -rf /var/lib/apt/lists/* \
71
+ && npm cache clean --force
72
+
73
+ # Install uv in runtime
74
+ RUN curl -LsSf https://astral.sh/uv/install.sh | sh
75
+ ENV PATH="/root/.local/bin:$PATH"
76
 
77
+ # Create non-root user early
78
+ RUN useradd -m -u 5678 appuser
79
 
80
+ # Copy venv from builder with ownership
81
+ COPY --from=builder --chown=appuser:appuser /opt/venv /opt/venv
82
 
83
+ # Download GitHub MCP server binary
84
+ RUN mkdir -p /app/bin && \
85
+ curl -L https://github.com/github/github-mcp-server/releases/download/v0.19.0/github-mcp-server_Linux_x86_64.tar.gz \
86
+ | tar -xz -C /app/bin && \
87
+ chmod +x /app/bin/github-mcp-server && \
88
+ chown appuser:appuser /app/bin/github-mcp-server
89
+
90
+ WORKDIR /app
91
+ COPY --chown=appuser:appuser . .
92
+
93
+ # Switch to non-root user for runtime
94
  USER appuser
95
 
96
+ ENTRYPOINT ["uv", "run"]
97
+ CMD ["src/app.py"]
98
+
99
+ # ============================================================================
100
+ # Test Stage - Extends runtime with dev dependencies for testing
101
+ # ============================================================================
102
+ FROM runtime AS test
103
+
104
+ # Switch back to root to install test dependencies
105
+ USER root
106
+
107
+ # Install dev dependencies (playwright, pytest, etc.) into existing venv
108
+ # Use UV_PROJECT_ENVIRONMENT to force uv to use /opt/venv instead of creating .venv
109
+ ENV UV_PROJECT_ENVIRONMENT=/opt/venv
110
+ RUN uv sync --locked --group dev
111
+
112
+ # Install Playwright browsers (now that playwright package is available)
113
+ RUN /opt/venv/bin/python -m playwright install chromium
114
+
115
+ # Switch back to appuser for test execution
116
+ USER appuser
TESTING.md CHANGED
@@ -1,22 +1,14 @@
1
- # Integration Tests
2
 
3
- ## Overview
4
-
5
- The test suite (`tests/integration/spec-001.py`) validates the ai-me agent system including:
6
- - Vectorstore setup and document loading
7
- - Agent configuration and initialization
8
- - RAG (Retrieval Augmented Generation) functionality
9
- - Basic agent response quality and accuracy
10
-
11
- ## Running Tests
12
 
13
  ### Prerequisites
14
 
15
- 1. Ensure environment variables are set in `.env` file at project root:
16
  ```bash
17
- OPENAI_API_KEY=<your-openai-key> # For tracing
18
- GROQ_API_KEY=<your-groq-key> # For LLM inference
19
- GITHUB_PERSONAL_ACCESS_TOKEN=<token> # For GitHub integration (optional for tests)
20
  ```
21
 
22
  2. Install dependencies:
@@ -24,81 +16,141 @@ The test suite (`tests/integration/spec-001.py`) validates the ai-me agent syste
24
  uv sync
25
  ```
26
 
27
- ### Run All Tests
 
 
 
28
 
29
- From project root:
 
 
30
  ```bash
31
- # All tests
32
  uv run pytest tests/ -v
 
33
 
34
- # With detailed output
35
- uv run pytest tests/ -v -o log_cli=true --log-cli-level=INFO --capture=no
36
 
37
- # Specific test
38
- uv run pytest tests/integration/spec-001.py::test_rear_knowledge_contains_it245 -v -o log_cli=true --log-cli-level=INFO --capture=no
 
39
  ```
40
 
41
- ### Run Tests with Code Coverage
42
 
 
43
  ```bash
44
- # Run tests with coverage report
45
- uv run pytest tests/ --cov=src --cov-report=term-missing -v
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
46
 
47
- # Generate HTML coverage report
48
- uv run pytest tests/ --cov=src --cov-report=html -v
 
49
 
50
- # View HTML report (opens in browser)
 
51
  open htmlcov/index.html
 
 
 
52
 
53
- # Integration tests only with coverage
54
- uv run pytest tests/integration/ --cov=src --cov-report=term-missing -v
55
 
56
- # Show only uncovered lines
57
- uv run pytest tests/ --cov=src --cov-report=term:skip-covered -v
 
 
 
 
 
 
 
 
 
 
 
58
  ```
59
 
60
- ## Test Architecture
61
 
62
- ### Fixture: `ai_me_agent`
 
 
 
63
 
64
- **Scope**: Module (shared across all tests in the file)
 
 
 
65
 
66
- **Purpose**: Creates a single agent instance that is reused across all tests to avoid expensive reinitialization.
 
 
 
67
 
68
- **Configuration**:
69
- - **Temperature**: Set to 0.0 for deterministic, reproducible responses
70
- - **Model**: Uses model specified in config (default: `openai/openai/gpt-oss-120b` via Groq)
71
- - **Data Source**: `tests/data/` directory (configured via `doc_root` parameter)
72
- - **GitHub Repos**: Disabled (`GITHUB_REPOS=""`) for faster test execution
73
 
74
- The temperature of 0 ensures that the agent's responses are consistent across test runs, making assertions more reliable.
75
 
76
- ## Session Isolation Testing (Manual)
77
 
78
- **Note on Concurrency Testing**: Rather than implement brittle pytest-based concurrency tests, session isolation (SC-006) is verified through **manual browser-based testing**:
 
 
79
 
80
- ### Steps to Manually Test Session Isolation
 
 
 
81
 
82
- 1. **Start the app**:
83
- ```bash
84
- uv run src/app.py
85
- ```
 
 
 
 
 
 
 
 
 
 
 
 
 
86
 
87
- 2. **Open multiple browser tabs** (or separate browsers):
88
- - Tab A: http://localhost:7860
89
- - Tab B: http://localhost:7860
90
- - Tab C: http://localhost:7860
91
-
92
- 3. **Test scenario**: Interleave conversations across tabs
93
- - Tab A: "Hi, My name is Slartibartfast."
94
- - Tab B: "Hi, how are you?"
95
- - Tab A: "what is my name?"
96
- - Tab B: "what is my name?"
97
-
98
- 4. **Verify**:
99
- - ✅ Each tab maintains independent conversation history
100
- - ✅ No information leaks between tabs -- tab B should say I don't know your name.
101
- - ✅ Memory tool doesn't share state (different users in Memory graphs)
102
- - ✅ Each session gets unique `session_id` in logs (check `uv run src/app.py` output)
103
 
104
 
 
1
+ # Testing Guide
2
 
3
+ ## Quick Start
 
 
 
 
 
 
 
 
4
 
5
  ### Prerequisites
6
 
7
+ 1. Set environment variables in `.env`:
8
  ```bash
9
+ OPENAI_API_KEY=<your-key>
10
+ GROQ_API_KEY=<your-key>
11
+ GITHUB_PERSONAL_ACCESS_TOKEN=<token> # optional
12
  ```
13
 
14
  2. Install dependencies:
 
16
  uv sync
17
  ```
18
 
19
+ For E2E tests, install Playwright browsers (one-time):
20
+ ```bash
21
+ uv run playwright install chromium
22
+ ```
23
 
24
+ ## Running Tests Locally
25
+
26
+ ### Default (Unit + Integration)
27
  ```bash
28
+ # By default, E2E tests are excluded to avoid asyncio conflicts
29
  uv run pytest tests/ -v
30
+ ```
31
 
32
+ Runs unit and integration tests.
 
33
 
34
+ ### Unit Tests Only
35
+ ```bash
36
+ uv run pytest tests/unit/ -v
37
  ```
38
 
39
+ Fast tests with no external dependencies.
40
 
41
+ ### Integration Tests Only
42
  ```bash
43
+ uv run pytest tests/integration/ -v
44
+ ```
45
+
46
+ Async tests that call Groq/OpenAI APIs and test the full agent stack.
47
+
48
+ ### E2E Tests (Run Separately)
49
+ ```bash
50
+ # E2E tests must run separately due to asyncio event loop isolation
51
+ uv run pytest tests/e2e/ -v
52
+ ```
53
+
54
+ Browser automation test that starts the app in a subprocess as Playwright manages its own event loops. Running E2E with async integration tests causes asyncio conflicts.
55
+
56
+ ## Running Tests in Docker
57
+
58
+ The Docker environment has all system dependencies and Playwright browsers pre-installed.
59
+
60
+ ### Unit + Integration Tests (Default)
61
+ ```bash
62
+ # Run the default test service (unit + integration)
63
+ docker compose run --rm test
64
+ ```
65
+
66
+ ### E2E Tests
67
+
68
+ E2E tests use headless Chromium by default via Playwright, so they can run in Docker:
69
+
70
+ ```bash
71
+ # Run E2E tests in Docker (uses headless Chromium)
72
+ docker compose run --rm test uv run pytest tests/e2e/ -v
73
+ ```
74
+
75
+ ## With Code Coverage
76
 
77
+ ```bash
78
+ # All tests with coverage report
79
+ uv run pytest tests/ --cov=src --cov-report=term-missing -v
80
 
81
+ # Generate HTML report
82
+ uv run pytest tests/ --cov=src --cov-report=html
83
  open htmlcov/index.html
84
+ ```
85
+
86
+ ## E2E Test Details
87
 
88
+ **Important**: E2E tests run in a separate pytest session from integration tests to avoid asyncio event loop conflicts. This is industry standard practice.
 
89
 
90
+ The E2E test runs a single happy-path scenario:
91
+
92
+ 1. Starts the Gradio app in a subprocess on port 7870
93
+ 2. Launches a real Chromium browser via Playwright
94
+ 3. Sends realistic prompts:
95
+ - "Hi" → Greeting response
96
+ - "Do you have python experience?" → Python experience answer
97
+ - "What is the hardest thing you've ever done?" → Challenge story
98
+ 4. Verifies responses are substantive and error-free
99
+
100
+ Run locally only:
101
+ ```bash
102
+ uv run pytest tests/e2e/ -v
103
  ```
104
 
105
+ ## Troubleshooting
106
 
107
+ **Port already in use**:
108
+ ```bash
109
+ lsof -ti :7870 | xargs kill -9
110
+ ```
111
 
112
+ **Playwright not installed**:
113
+ ```bash
114
+ uv run playwright install chromium
115
+ ```
116
 
117
+ **App startup timeout**:
118
+ - Check app logs for initialization errors
119
+ - Ensure all environment variables are set
120
+ - Increase `APP_STARTUP_TIMEOUT` in `tests/e2e/conftest.py` if needed
121
 
122
+ ## Advanced Debugging
 
 
 
 
123
 
124
+ ### View E2E Test Logs
125
 
126
+ To see detailed logs from E2E tests (test steps, AI response sizes, etc.):
127
 
128
+ ```bash
129
+ uv run pytest tests/e2e/ -v -s --log-cli-level=INFO --headed --browser=chromium
130
+ ```
131
 
132
+ This shows:
133
+ - When messages are sent to the AI
134
+ - Response character counts
135
+ - Test assertions as they pass/fail
136
 
137
+ ### Slow Motion Debugging
138
+
139
+ Watch interactions in slow motion (2-second delays between actions):
140
+
141
+ ```bash
142
+ PW_SLOW_MO=2000 uv run pytest tests/e2e/ -v -s --log-cli-level=INFO --headed --browser=chromium
143
+ ```
144
+
145
+ ### Interactive Debug Mode
146
+
147
+ Use Playwright Inspector to step through the test:
148
+
149
+ ```bash
150
+ PWDEBUG=1 uv run pytest tests/e2e/ -v --headed --browser=chromium
151
+ ```
152
+
153
+ This opens the Playwright Inspector where you can pause, step through, and inspect the page.
154
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
 
156
 
docker-compose.yaml CHANGED
@@ -1,17 +1,49 @@
1
  services:
2
- browser:
3
- image: mcr.microsoft.com/playwright:v1.55.0-noble
 
 
 
 
 
 
 
 
 
 
 
 
 
4
 
 
5
  notebooks:
6
- build: .
7
- # This ensures changes inside our container are propagated to the host machine
 
8
  volumes:
9
  - ./:/app
 
10
  - /var/run/docker.sock:/var/run/docker.sock
11
  ports:
12
  - 7861:7861
13
  working_dir: /app
14
  user: "0:0"
15
 
16
-
17
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  services:
2
+ # Production runtime stage - minimal image with only production dependencies
3
+ runtime:
4
+ build:
5
+ context: .
6
+ target: runtime
7
+ volumes:
8
+ - ./:/app
9
+ - /app/.venv # Anonymous volume for container's venv
10
+ ports:
11
+ - 7860:7860
12
+ working_dir: /app
13
+ environment:
14
+ - GROQ_API_KEY=${GROQ_API_KEY}
15
+ - GITHUB_PERSONAL_ACCESS_TOKEN=${GITHUB_PERSONAL_ACCESS_TOKEN:-}
16
+ command: ["src/app.py"]
17
 
18
+ # Notebooks stage - uses runtime with development tools
19
  notebooks:
20
+ build:
21
+ context: .
22
+ target: test # Uses test stage which has dev dependencies
23
  volumes:
24
  - ./:/app
25
+ - /app/.venv # Anonymous volume for container's venv
26
  - /var/run/docker.sock:/var/run/docker.sock
27
  ports:
28
  - 7861:7861
29
  working_dir: /app
30
  user: "0:0"
31
 
32
+ # Test stage - extends runtime with dev dependencies and Playwright
33
+ test:
34
+ build:
35
+ context: .
36
+ target: test # Explicitly build the test stage
37
+ volumes:
38
+ - ./:/app
39
+ - /app/.venv # Anonymous volume for container's venv
40
+ - ./htmlcov:/app/htmlcov # Mount htmlcov for coverage reports
41
+ - /var/run/docker.sock:/var/run/docker.sock
42
+ working_dir: /app
43
+ user: "0:0"
44
+ environment:
45
+ - GROQ_API_KEY=${GROQ_API_KEY}
46
+ - GITHUB_PERSONAL_ACCESS_TOKEN=${GITHUB_PERSONAL_ACCESS_TOKEN:-}
47
+ - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=false
48
+ entrypoint: []
49
+ command: ["uv", "run", "pytest", "tests/unit", "tests/integration", "-v", "--cov=src", "--cov-report=html", "--cov-report=term-missing"]
pyproject.toml CHANGED
@@ -36,9 +36,12 @@ dependencies = [
36
  dev = [
37
  "ipykernel~=6.30",
38
  "ipywidgets~=8.1",
 
39
  "pytest~=8.0",
40
  "pytest-asyncio~=0.24",
41
  "pytest-cov~=6.0",
 
 
42
  ]
43
 
44
  [tool.hatch.build.targets.wheel]
@@ -53,14 +56,21 @@ python_files = ["test_*.py", "*_test.py", "spec-*.py"]
53
  python_classes = ["Test*", "*Tests"]
54
  python_functions = ["test_*"]
55
  pythonpath = ["src"]
 
 
 
 
 
56
 
57
  [tool.coverage.run]
58
  source = ["src"]
 
59
  omit = [
60
  "*/tests/*",
61
  "*/test_*",
62
  "*/__pycache__/*",
63
  "*/notebooks/*",
 
64
  ]
65
 
66
  [tool.coverage.report]
 
36
  dev = [
37
  "ipykernel~=6.30",
38
  "ipywidgets~=8.1",
39
+ "playwright~=1.48",
40
  "pytest~=8.0",
41
  "pytest-asyncio~=0.24",
42
  "pytest-cov~=6.0",
43
+ "pytest-playwright~=0.5",
44
+ "pytest-xdist~=3.6",
45
  ]
46
 
47
  [tool.hatch.build.targets.wheel]
 
56
  python_classes = ["Test*", "*Tests"]
57
  python_functions = ["test_*"]
58
  pythonpath = ["src"]
59
+ asyncio_mode = "auto"
60
+ # By default, exclude E2E tests (which use Playwright and manage their own event loops).
61
+ # E2E tests run separately to avoid asyncio conflicts with async integration tests.
62
+ # Run E2E: uv run pytest tests/e2e/ -v
63
+ addopts = "--ignore=tests/e2e"
64
 
65
  [tool.coverage.run]
66
  source = ["src"]
67
+ branch = true
68
  omit = [
69
  "*/tests/*",
70
  "*/test_*",
71
  "*/__pycache__/*",
72
  "*/notebooks/*",
73
+ "src/app.py", # E2E coverage not testable without proper coverage framework
74
  ]
75
 
76
  [tool.coverage.report]
src/app.py CHANGED
@@ -1,3 +1,5 @@
 
 
1
  import gradio
2
  from gradio import Request, themes
3
 
@@ -129,4 +131,7 @@ if __name__ == "__main__":
129
  # Initialize session when page loads
130
  ui.load(get_session_status, inputs=[], outputs=[session_init])
131
 
132
- ui.launch(server_name="0.0.0.0", server_port=7860, show_api=False)
 
 
 
 
1
+ import os
2
+
3
  import gradio
4
  from gradio import Request, themes
5
 
 
131
  # Initialize session when page loads
132
  ui.load(get_session_status, inputs=[], outputs=[session_init])
133
 
134
+ # Read port and server address from environment variables for E2E tests
135
+ server_port = int(os.getenv("GRADIO_SERVER_PORT", "7860"))
136
+ server_name = os.getenv("GRADIO_SERVER_NAME", "0.0.0.0")
137
+ ui.launch(server_name=server_name, server_port=server_port, show_api=False)
src/config.py CHANGED
@@ -124,11 +124,11 @@ class Config(BaseSettings):
124
  """Central configuration class for ai-me application with Pydantic validation."""
125
 
126
  # Environment Variables (from .env) - Required
127
- # Note: These have no defaults, so they MUST be in .env or will raise ValidationError
128
- # We don't provide defaults here because Pydantic will raise an error at runtime
129
- # if they're missing from the environment, which is the intended behavior.
130
- openai_api_key: SecretStr = Field(...,
131
- description="OpenAI API key for tracing")
132
  groq_api_key: SecretStr = Field(...,
133
  description="Groq API key for inference")
134
  github_token: Optional[SecretStr] = Field(
@@ -202,9 +202,12 @@ class Config(BaseSettings):
202
  )
203
  set_default_openai_client(self.openai_client)
204
 
205
- # Set tracing API key AFTER setting default client
206
- logger.info("Setting tracing export API key for agents.")
207
- set_tracing_export_api_key(self.openai_api_key.get_secret_value())
 
 
 
208
 
209
  def _safe_repr(self) -> str: # pragma: no cover
210
  """Helper to generate string representation excluding sensitive fields."""
 
124
  """Central configuration class for ai-me application with Pydantic validation."""
125
 
126
  # Environment Variables (from .env) - Required
127
+ # Note: openai_api_key is only used for tracing (optional)
128
+ # groq_api_key is required for LLM inference
129
+ openai_api_key: Optional[SecretStr] = Field(
130
+ default=None,
131
+ description="OpenAI API key for tracing (optional)")
132
  groq_api_key: SecretStr = Field(...,
133
  description="Groq API key for inference")
134
  github_token: Optional[SecretStr] = Field(
 
202
  )
203
  set_default_openai_client(self.openai_client)
204
 
205
+ # Set tracing API key AFTER setting default client (if provided)
206
+ if self.openai_api_key:
207
+ logger.info("Setting tracing export API key for agents.")
208
+ set_tracing_export_api_key(self.openai_api_key.get_secret_value())
209
+ else:
210
+ logger.info("No OpenAI API key provided, tracing disabled.")
211
 
212
  def _safe_repr(self) -> str: # pragma: no cover
213
  """Helper to generate string representation excluding sensitive fields."""
src/data.py CHANGED
@@ -350,6 +350,17 @@ class DataManager:
350
  except Exception: # pragma: no cover
351
  pass # Collection doesn't exist yet
352
 
 
 
 
 
 
 
 
 
 
 
 
353
  logger.info(f"Creating vectorstore with {len(chunks)} chunks...")
354
  vectorstore = Chroma.from_documents(
355
  documents=chunks,
 
350
  except Exception: # pragma: no cover
351
  pass # Collection doesn't exist yet
352
 
353
+ # Validate chunks - ChromaDB requires at least one document
354
+ if not chunks:
355
+ error_msg = (
356
+ "No documents loaded for vectorstore. Please configure document sources:\n"
357
+ " 1. Set GITHUB_REPOS environment variable (e.g., 'byoung/me,byoung/ai-me')\n"
358
+ " 2. Or place markdown files in the docs/local-testing/ directory\n"
359
+ " 3. Or configure DOC_ROOT and DOC_LOAD_LOCAL environment variables"
360
+ )
361
+ logger.error(error_msg)
362
+ raise ValueError(error_msg)
363
+
364
  logger.info(f"Creating vectorstore with {len(chunks)} chunks...")
365
  vectorstore = Chroma.from_documents(
366
  documents=chunks,
tests/e2e/__init__.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """End-to-end tests for the AI-Me application.
2
+
3
+ Tests the complete application stack including:
4
+ - Gradio server startup and web interface
5
+ - Browser-based user interactions
6
+ - Chat interface and message handling
7
+ - Live code coverage collection
8
+
9
+ These tests run the full app.py server in a background thread while
10
+ driving browser automation via Playwright. Coverage is collected
11
+ across all interactions.
12
+ """
tests/e2e/conftest.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """End-to-end test configuration and fixtures.
2
+
3
+ Manages Gradio app server lifecycle and browser fixtures for E2E tests.
4
+ """
5
+
6
+ import logging
7
+ import os
8
+ import socket
9
+ import subprocess
10
+ import sys
11
+ import threading
12
+ import time
13
+ from pathlib import Path
14
+ from typing import Iterator
15
+
16
+ import pytest
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+ PROJECT_ROOT = Path(__file__).parent.parent.parent.absolute()
21
+ SRC_DIR = PROJECT_ROOT / "src"
22
+ APP_HOST = "127.0.0.1"
23
+ APP_PORT = 7870
24
+ APP_URL = f"http://{APP_HOST}:{APP_PORT}"
25
+ APP_STARTUP_TIMEOUT = 300
26
+ APP_SHUTDOWN_TIMEOUT = 10
27
+
28
+
29
+ def _port_is_open(host: str, port: int, timeout: float = 1.0) -> bool:
30
+ """Check if a port is accepting connections."""
31
+ try:
32
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
33
+ sock.settimeout(timeout)
34
+ result = sock.connect_ex((host, port))
35
+ sock.close()
36
+ return result == 0
37
+ except Exception as e:
38
+ logger.debug(f"Port check failed for {host}:{port}: {e}")
39
+ return False
40
+
41
+
42
+ class AppServer:
43
+ """Manages the Gradio app server lifecycle."""
44
+
45
+ def __init__(self):
46
+ self.process = None
47
+
48
+ def start(self) -> None:
49
+ """Start the app server and wait for port 7870 to be listening."""
50
+ logger.info(f"Starting app server on {APP_URL}...")
51
+
52
+ env = os.environ.copy()
53
+ env["PYTHONUNBUFFERED"] = "1"
54
+ env["GRADIO_SERVER_PORT"] = str(APP_PORT)
55
+ env["GRADIO_SERVER_NAME"] = APP_HOST # Bind to 127.0.0.1 specifically
56
+
57
+ # Verify required environment variables are present
58
+ if "GROQ_API_KEY" not in env or not env["GROQ_API_KEY"]:
59
+ logger.error("GROQ_API_KEY is not set - app will fail to start")
60
+ raise EnvironmentError("GROQ_API_KEY environment variable is required")
61
+
62
+ # Verify GITHUB_PERSONAL_ACCESS_TOKEN for loading GitHub repos
63
+ if "GITHUB_PERSONAL_ACCESS_TOKEN" not in env or not env["GITHUB_PERSONAL_ACCESS_TOKEN"]:
64
+ logger.error("GITHUB_PERSONAL_ACCESS_TOKEN is not set - app will fail to load documents")
65
+ raise EnvironmentError("GITHUB_PERSONAL_ACCESS_TOKEN environment variable is required")
66
+
67
+ # Configure GITHUB_REPOS for E2E tests if not already set
68
+ # These repos are required for the agent to have knowledge to answer questions
69
+ if "GITHUB_REPOS" not in env or not env["GITHUB_REPOS"]:
70
+ env["GITHUB_REPOS"] = "byoung/me,byoung/ai-me"
71
+ logger.info(f"Setting GITHUB_REPOS for E2E tests: {env['GITHUB_REPOS']}")
72
+
73
+ try:
74
+ self.process = subprocess.Popen(
75
+ [sys.executable, str(SRC_DIR / "app.py")],
76
+ stdout=subprocess.PIPE,
77
+ stderr=subprocess.STDOUT,
78
+ text=True,
79
+ bufsize=1,
80
+ cwd=str(PROJECT_ROOT),
81
+ env=env,
82
+ )
83
+
84
+ # Print startup output to console for debugging
85
+ startup_lines = []
86
+ def log_output():
87
+ if self.process and self.process.stdout:
88
+ for line in self.process.stdout:
89
+ line_stripped = line.rstrip()
90
+ startup_lines.append(line_stripped)
91
+ print(f"[APP] {line_stripped}")
92
+ # Signal ready when port appears in output
93
+ if "7870" in line or "Running on" in line:
94
+ logger.info(f"Port detection: {line_stripped}")
95
+
96
+ threading.Thread(target=log_output, daemon=True).start()
97
+
98
+ # Wait for port to actually be listening
99
+ start_time = time.time()
100
+ while time.time() - start_time < APP_STARTUP_TIMEOUT:
101
+ if _port_is_open(APP_HOST, APP_PORT):
102
+ logger.info(f"✓ App server started on {APP_URL}")
103
+ return
104
+ time.sleep(0.5)
105
+
106
+ # Timeout occurred - show what we captured
107
+ logger.error(f"Timeout waiting for port {APP_PORT} after {APP_STARTUP_TIMEOUT}s")
108
+ logger.error(f"Last 10 startup lines: {startup_lines[-10:]}")
109
+ if self.process:
110
+ logger.error(f"Process returncode: {self.process.returncode}")
111
+ raise TimeoutError(
112
+ f"Port {APP_PORT} did not open within {APP_STARTUP_TIMEOUT}s"
113
+ )
114
+
115
+ except Exception as e:
116
+ logger.error(f"Failed to start app server: {e}")
117
+ self._cleanup()
118
+ raise
119
+
120
+ def _cleanup(self) -> None:
121
+ """Terminate the subprocess."""
122
+ if not self.process:
123
+ return
124
+ try:
125
+ self.process.terminate()
126
+ self.process.wait(timeout=APP_SHUTDOWN_TIMEOUT)
127
+ except subprocess.TimeoutExpired:
128
+ self.process.kill()
129
+ self.process.wait()
130
+ finally:
131
+ self.process = None
132
+
133
+ def stop(self) -> None:
134
+ """Stop the app server."""
135
+ if not self.process:
136
+ return
137
+ logger.info("Stopping app server...")
138
+ self._cleanup()
139
+ logger.info("✓ App server stopped")
140
+
141
+
142
+ @pytest.fixture(scope="session")
143
+ def app_server() -> Iterator[str]:
144
+ """Start the app server for the test session, yield URL to tests.
145
+
146
+ Skips E2E tests if GROQ_API_KEY is not available (graceful degradation for CI).
147
+ """
148
+ # Check for required environment variable
149
+ if not os.environ.get("GROQ_API_KEY"):
150
+ pytest.skip(
151
+ "GROQ_API_KEY not set - skipping E2E tests (normal in CI without API keys)",
152
+ allow_module_level=True
153
+ )
154
+
155
+ server = AppServer()
156
+ server.start()
157
+ time.sleep(1) # Let it fully settle
158
+ yield APP_URL
159
+ server.stop()
160
+
161
+
162
+ @pytest.fixture(scope="session")
163
+ def browser_context_args(app_server):
164
+ """Configure browser context for E2E tests."""
165
+ return {
166
+ "base_url": app_server,
167
+ "ignore_https_errors": True,
168
+ }
tests/e2e/test_chat_interaction.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """End-to-end tests for the AI-Me chatbot interface.
2
+
3
+ Tests the complete user interaction flow with the Gradio chat interface.
4
+ Uses sync Playwright to drive a real Chromium browser against the running app.
5
+ """
6
+
7
+ import logging
8
+ import time
9
+
10
+ import pytest
11
+ from playwright.sync_api import Page
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ class ChatBotHelper:
17
+ """Helper class for interacting with the Gradio chat interface."""
18
+
19
+ def __init__(self, page: Page):
20
+ self.page = page
21
+ self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
22
+
23
+ def load_page(self, url: str, timeout: int = 120000) -> None:
24
+ """Navigate to the app and wait for the chat interface to load."""
25
+ self.logger.info(f"Loading app at {url}...")
26
+ self.page.goto(url, wait_until="load", timeout=timeout)
27
+
28
+ # Wait for heading to indicate page has loaded
29
+ try:
30
+ self.page.locator("h1").first.wait_for(state="visible", timeout=10000)
31
+ self.logger.info("✓ App loaded successfully")
32
+ except Exception as e:
33
+ self.logger.error(f"Failed to load app: {e}")
34
+ raise
35
+
36
+ def send_message(self, message: str, timeout: int = 60000) -> str:
37
+ """Send a chat message and wait for a response from the AI agent."""
38
+ self.logger.info(f"Sending: {message}")
39
+
40
+ # Find and fill the input field
41
+ input_field = self.page.locator("textarea, input[type='text']").first
42
+ input_field.wait_for(state="visible", timeout=10000)
43
+ input_field.fill(message)
44
+
45
+ # Press Enter to send
46
+ input_field.press("Enter")
47
+
48
+ # Wait for response from AI agent
49
+ self.logger.debug("Waiting for AI response...")
50
+ start_time = time.time()
51
+ response_timeout_sec = timeout / 1000
52
+ last_response_text = ""
53
+ stable_count = 0
54
+
55
+ # Generic selector for bot response bubbles in Gradio chatbot
56
+ # This matches all message rows with the bot-row class
57
+ bot_response_selector = ".message-row.bubble.bot-row"
58
+
59
+ # Wait for text to stop changing (indicates AI has finished responding)
60
+ while time.time() - start_time < response_timeout_sec:
61
+ try:
62
+ # Get all bot responses
63
+ response_elements = self.page.locator(bot_response_selector)
64
+ if response_elements.count() > 0:
65
+ # Get the last (most recent) bot response
66
+ last_element = response_elements.last
67
+ response_text = last_element.inner_text()
68
+
69
+ # Check if text is stable (no new content for 2 consecutive checks)
70
+ if response_text == last_response_text:
71
+ stable_count += 1
72
+ if stable_count >= 2:
73
+ # Text has stabilized, we have a complete response
74
+ if len(response_text) > len(message) * 2: # Substantial response
75
+ self.logger.info(f"✓ Got AI response ({len(response_text)} chars) {response_text}")
76
+ return response_text
77
+ else:
78
+ stable_count = 0
79
+
80
+ last_response_text = response_text
81
+ except Exception as e:
82
+ self.logger.debug(f"Error checking page: {e}")
83
+ pass
84
+
85
+ time.sleep(1.0) # Check every 1 second for stability
86
+
87
+ raise TimeoutError(f"No response received within {timeout}ms")
88
+
89
+
90
+ class TestChatbotInteraction:
91
+ """Test suite for the AI-Me chatbot interface."""
92
+
93
+ @pytest.fixture
94
+ def chat(self, page: Page) -> ChatBotHelper:
95
+ """Provide a ChatBotHelper instance for each test."""
96
+ return ChatBotHelper(page)
97
+
98
+ def test_happy_path_conversation(self, chat: ChatBotHelper, app_server: str):
99
+ """Happy path: Load app, send realistic prompts, verify responses."""
100
+ # Load the app
101
+ chat.load_page(app_server, timeout=120000)
102
+
103
+ # Start with a greeting
104
+ resp1 = chat.send_message("Hi", timeout=60000)
105
+ assert resp1 and len(resp1) > 10, "Should get greeting response"
106
+ assert "error" not in resp1.lower(), "Greeting should not contain error text"
107
+ logger.info("✓ Greeting response received")
108
+
109
+ # Ask about experience
110
+ resp2 = chat.send_message("Do you have python experience?", timeout=60000)
111
+ assert resp2 and len(resp2) > 20, "Should answer about Python experience"
112
+ assert "error" not in resp2.lower(), "Experience response should not contain error text"
113
+ logger.info("✓ Experience question answered")
114
+
115
+ # Ask about challenges
116
+ resp3 = chat.send_message("What is the hardest thing you've ever done?", timeout=60000)
117
+ assert resp3 and len(resp3) > 20, "Should answer about challenges"
118
+ assert "error" not in resp3.lower(), "Response should not contain error text"
119
+ logger.info("✓ Challenge question answered")
120
+
121
+ logger.info("✓ Happy path test passed")
tests/unit/test_data.py CHANGED
@@ -83,6 +83,32 @@ class TestLoadLocalDocuments:
83
  assert len(docs) >= 3, "Expected at least 3 docs from test data"
84
 
85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
86
  class TestProcessDocuments:
87
  """Tests for DataManager.process_documents() method.
88
 
 
83
  assert len(docs) >= 3, "Expected at least 3 docs from test data"
84
 
85
 
86
+ class TestCreateVectorstore:
87
+ """Tests for DataManager.create_vectorstore() method.
88
+
89
+ Implements FR-002 (Knowledge Retrieval): Vectorstore creation with edge cases.
90
+ """
91
+
92
+ def test_create_vectorstore_rejects_empty_chunks(self):
93
+ """Tests FR-002: Reject empty chunks with clear error message.
94
+
95
+ When no documents are provided, create_vectorstore should raise
96
+ ValueError with a clear message about how to configure document sources.
97
+ This prevents silent failures and provides actionable guidance.
98
+ """
99
+ config = DataManagerConfig()
100
+ dm = DataManager(config=config)
101
+
102
+ # Should raise ValueError with clear message
103
+ with pytest.raises(ValueError) as exc_info:
104
+ dm.create_vectorstore(chunks=[], reset=True)
105
+
106
+ error_message = str(exc_info.value)
107
+ assert "No documents loaded" in error_message, "Should explain the problem"
108
+ assert "GITHUB_REPOS" in error_message, "Should mention GITHUB_REPOS"
109
+ assert "docs/local-testing/" in error_message, "Should mention local docs path"
110
+
111
+
112
  class TestProcessDocuments:
113
  """Tests for DataManager.process_documents() method.
114
 
uv.lock CHANGED
@@ -34,9 +34,12 @@ dependencies = [
34
  dev = [
35
  { name = "ipykernel" },
36
  { name = "ipywidgets" },
 
37
  { name = "pytest" },
38
  { name = "pytest-asyncio" },
39
  { name = "pytest-cov" },
 
 
40
  ]
41
 
42
  [package.metadata]
@@ -68,9 +71,12 @@ requires-dist = [
68
  dev = [
69
  { name = "ipykernel", specifier = "~=6.30" },
70
  { name = "ipywidgets", specifier = "~=8.1" },
 
71
  { name = "pytest", specifier = "~=8.0" },
72
  { name = "pytest-asyncio", specifier = "~=0.24" },
73
  { name = "pytest-cov", specifier = "~=6.0" },
 
 
74
  ]
75
 
76
  [[package]]
@@ -536,6 +542,15 @@ wheels = [
536
  { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" },
537
  ]
538
 
 
 
 
 
 
 
 
 
 
539
  [[package]]
540
  name = "executing"
541
  version = "2.2.1"
@@ -2009,6 +2024,25 @@ wheels = [
2009
  { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" },
2010
  ]
2011
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2012
  [[package]]
2013
  name = "pluggy"
2014
  version = "1.6.0"
@@ -2239,6 +2273,18 @@ wheels = [
2239
  { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" },
2240
  ]
2241
 
 
 
 
 
 
 
 
 
 
 
 
 
2242
  [[package]]
2243
  name = "pygithub"
2244
  version = "2.8.1"
@@ -2355,6 +2401,19 @@ wheels = [
2355
  { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" },
2356
  ]
2357
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2358
  [[package]]
2359
  name = "pytest-cov"
2360
  version = "6.3.0"
@@ -2369,6 +2428,34 @@ wheels = [
2369
  { url = "https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" },
2370
  ]
2371
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2372
  [[package]]
2373
  name = "python-dateutil"
2374
  version = "2.9.0.post0"
@@ -2412,6 +2499,18 @@ wheels = [
2412
  { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" },
2413
  ]
2414
 
 
 
 
 
 
 
 
 
 
 
 
 
2415
  [[package]]
2416
  name = "pytz"
2417
  version = "2025.2"
@@ -2857,6 +2956,15 @@ wheels = [
2857
  { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" },
2858
  ]
2859
 
 
 
 
 
 
 
 
 
 
2860
  [[package]]
2861
  name = "threadpoolctl"
2862
  version = "3.6.0"
 
34
  dev = [
35
  { name = "ipykernel" },
36
  { name = "ipywidgets" },
37
+ { name = "playwright" },
38
  { name = "pytest" },
39
  { name = "pytest-asyncio" },
40
  { name = "pytest-cov" },
41
+ { name = "pytest-playwright" },
42
+ { name = "pytest-xdist" },
43
  ]
44
 
45
  [package.metadata]
 
71
  dev = [
72
  { name = "ipykernel", specifier = "~=6.30" },
73
  { name = "ipywidgets", specifier = "~=8.1" },
74
+ { name = "playwright", specifier = "~=1.48" },
75
  { name = "pytest", specifier = "~=8.0" },
76
  { name = "pytest-asyncio", specifier = "~=0.24" },
77
  { name = "pytest-cov", specifier = "~=6.0" },
78
+ { name = "pytest-playwright", specifier = "~=0.5" },
79
+ { name = "pytest-xdist", specifier = "~=3.6" },
80
  ]
81
 
82
  [[package]]
 
542
  { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" },
543
  ]
544
 
545
+ [[package]]
546
+ name = "execnet"
547
+ version = "2.1.1"
548
+ source = { registry = "https://pypi.org/simple" }
549
+ sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" }
550
+ wheels = [
551
+ { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" },
552
+ ]
553
+
554
  [[package]]
555
  name = "executing"
556
  version = "2.2.1"
 
2024
  { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" },
2025
  ]
2026
 
2027
+ [[package]]
2028
+ name = "playwright"
2029
+ version = "1.55.0"
2030
+ source = { registry = "https://pypi.org/simple" }
2031
+ dependencies = [
2032
+ { name = "greenlet" },
2033
+ { name = "pyee" },
2034
+ ]
2035
+ wheels = [
2036
+ { url = "https://files.pythonhosted.org/packages/80/3a/c81ff76df266c62e24f19718df9c168f49af93cabdbc4608ae29656a9986/playwright-1.55.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:d7da108a95001e412effca4f7610de79da1637ccdf670b1ae3fdc08b9694c034", size = 40428109, upload-time = "2025-08-28T15:46:20.357Z" },
2037
+ { url = "https://files.pythonhosted.org/packages/cf/f5/bdb61553b20e907196a38d864602a9b4a461660c3a111c67a35179b636fa/playwright-1.55.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8290cf27a5d542e2682ac274da423941f879d07b001f6575a5a3a257b1d4ba1c", size = 38687254, upload-time = "2025-08-28T15:46:23.925Z" },
2038
+ { url = "https://files.pythonhosted.org/packages/4a/64/48b2837ef396487807e5ab53c76465747e34c7143fac4a084ef349c293a8/playwright-1.55.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:25b0d6b3fd991c315cca33c802cf617d52980108ab8431e3e1d37b5de755c10e", size = 40428108, upload-time = "2025-08-28T15:46:27.119Z" },
2039
+ { url = "https://files.pythonhosted.org/packages/08/33/858312628aa16a6de97839adc2ca28031ebc5391f96b6fb8fdf1fcb15d6c/playwright-1.55.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c6d4d8f6f8c66c483b0835569c7f0caa03230820af8e500c181c93509c92d831", size = 45905643, upload-time = "2025-08-28T15:46:30.312Z" },
2040
+ { url = "https://files.pythonhosted.org/packages/83/83/b8d06a5b5721931aa6d5916b83168e28bd891f38ff56fe92af7bdee9860f/playwright-1.55.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a0777c4ce1273acf90c87e4ae2fe0130182100d99bcd2ae5bf486093044838", size = 45296647, upload-time = "2025-08-28T15:46:33.221Z" },
2041
+ { url = "https://files.pythonhosted.org/packages/06/2e/9db64518aebcb3d6ef6cd6d4d01da741aff912c3f0314dadb61226c6a96a/playwright-1.55.0-py3-none-win32.whl", hash = "sha256:29e6d1558ad9d5b5c19cbec0a72f6a2e35e6353cd9f262e22148685b86759f90", size = 35476046, upload-time = "2025-08-28T15:46:36.184Z" },
2042
+ { url = "https://files.pythonhosted.org/packages/46/4f/9ba607fa94bb9cee3d4beb1c7b32c16efbfc9d69d5037fa85d10cafc618b/playwright-1.55.0-py3-none-win_amd64.whl", hash = "sha256:7eb5956473ca1951abb51537e6a0da55257bb2e25fc37c2b75af094a5c93736c", size = 35476048, upload-time = "2025-08-28T15:46:38.867Z" },
2043
+ { url = "https://files.pythonhosted.org/packages/21/98/5ca173c8ec906abde26c28e1ecb34887343fd71cc4136261b90036841323/playwright-1.55.0-py3-none-win_arm64.whl", hash = "sha256:012dc89ccdcbd774cdde8aeee14c08e0dd52ddb9135bf10e9db040527386bd76", size = 31225543, upload-time = "2025-08-28T15:46:41.613Z" },
2044
+ ]
2045
+
2046
  [[package]]
2047
  name = "pluggy"
2048
  version = "1.6.0"
 
2273
  { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" },
2274
  ]
2275
 
2276
+ [[package]]
2277
+ name = "pyee"
2278
+ version = "13.0.0"
2279
+ source = { registry = "https://pypi.org/simple" }
2280
+ dependencies = [
2281
+ { name = "typing-extensions" },
2282
+ ]
2283
+ sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" }
2284
+ wheels = [
2285
+ { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" },
2286
+ ]
2287
+
2288
  [[package]]
2289
  name = "pygithub"
2290
  version = "2.8.1"
 
2401
  { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" },
2402
  ]
2403
 
2404
+ [[package]]
2405
+ name = "pytest-base-url"
2406
+ version = "2.1.0"
2407
+ source = { registry = "https://pypi.org/simple" }
2408
+ dependencies = [
2409
+ { name = "pytest" },
2410
+ { name = "requests" },
2411
+ ]
2412
+ sdist = { url = "https://files.pythonhosted.org/packages/ae/1a/b64ac368de6b993135cb70ca4e5d958a5c268094a3a2a4cac6f0021b6c4f/pytest_base_url-2.1.0.tar.gz", hash = "sha256:02748589a54f9e63fcbe62301d6b0496da0d10231b753e950c63e03aee745d45", size = 6702, upload-time = "2024-01-31T22:43:00.81Z" }
2413
+ wheels = [
2414
+ { url = "https://files.pythonhosted.org/packages/98/1c/b00940ab9eb8ede7897443b771987f2f4a76f06be02f1b3f01eb7567e24a/pytest_base_url-2.1.0-py3-none-any.whl", hash = "sha256:3ad15611778764d451927b2a53240c1a7a591b521ea44cebfe45849d2d2812e6", size = 5302, upload-time = "2024-01-31T22:42:58.897Z" },
2415
+ ]
2416
+
2417
  [[package]]
2418
  name = "pytest-cov"
2419
  version = "6.3.0"
 
2428
  { url = "https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" },
2429
  ]
2430
 
2431
+ [[package]]
2432
+ name = "pytest-playwright"
2433
+ version = "0.7.1"
2434
+ source = { registry = "https://pypi.org/simple" }
2435
+ dependencies = [
2436
+ { name = "playwright" },
2437
+ { name = "pytest" },
2438
+ { name = "pytest-base-url" },
2439
+ { name = "python-slugify" },
2440
+ ]
2441
+ sdist = { url = "https://files.pythonhosted.org/packages/a0/1e/9771990bad2b59d37728c4b6f28c234b3badbb2494bd72d54a6e2a988e23/pytest_playwright-0.7.1.tar.gz", hash = "sha256:94b551b2677ecdc16284fcd6a4f0045eafda47a60e74410f3fe4d8260e12cabf", size = 16769, upload-time = "2025-09-08T08:10:53.765Z" }
2442
+ wheels = [
2443
+ { url = "https://files.pythonhosted.org/packages/dd/59/373da90ce6a1a46ca6a449bf16cea11a3c6e269814eb60e7668526350b95/pytest_playwright-0.7.1-py3-none-any.whl", hash = "sha256:fcc46510fb75f8eba6df3bc8e84e4e902483d92be98075f20b9d160651a36d90", size = 16754, upload-time = "2025-09-08T08:10:55.92Z" },
2444
+ ]
2445
+
2446
+ [[package]]
2447
+ name = "pytest-xdist"
2448
+ version = "3.8.0"
2449
+ source = { registry = "https://pypi.org/simple" }
2450
+ dependencies = [
2451
+ { name = "execnet" },
2452
+ { name = "pytest" },
2453
+ ]
2454
+ sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" }
2455
+ wheels = [
2456
+ { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" },
2457
+ ]
2458
+
2459
  [[package]]
2460
  name = "python-dateutil"
2461
  version = "2.9.0.post0"
 
2499
  { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" },
2500
  ]
2501
 
2502
+ [[package]]
2503
+ name = "python-slugify"
2504
+ version = "8.0.4"
2505
+ source = { registry = "https://pypi.org/simple" }
2506
+ dependencies = [
2507
+ { name = "text-unidecode" },
2508
+ ]
2509
+ sdist = { url = "https://files.pythonhosted.org/packages/87/c7/5e1547c44e31da50a460df93af11a535ace568ef89d7a811069ead340c4a/python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856", size = 10921, upload-time = "2024-02-08T18:32:45.488Z" }
2510
+ wheels = [
2511
+ { url = "https://files.pythonhosted.org/packages/a4/62/02da182e544a51a5c3ccf4b03ab79df279f9c60c5e82d5e8bec7ca26ac11/python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8", size = 10051, upload-time = "2024-02-08T18:32:43.911Z" },
2512
+ ]
2513
+
2514
  [[package]]
2515
  name = "pytz"
2516
  version = "2025.2"
 
2956
  { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" },
2957
  ]
2958
 
2959
+ [[package]]
2960
+ name = "text-unidecode"
2961
+ version = "1.3"
2962
+ source = { registry = "https://pypi.org/simple" }
2963
+ sdist = { url = "https://files.pythonhosted.org/packages/ab/e2/e9a00f0ccb71718418230718b3d900e71a5d16e701a3dae079a21e9cd8f8/text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93", size = 76885, upload-time = "2019-08-30T21:36:45.405Z" }
2964
+ wheels = [
2965
+ { url = "https://files.pythonhosted.org/packages/a6/a5/c0b6468d3824fe3fde30dbb5e1f687b291608f9473681bbf7dabbf5a87d7/text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", size = 78154, upload-time = "2019-08-30T21:37:03.543Z" },
2966
+ ]
2967
+
2968
  [[package]]
2969
  name = "threadpoolctl"
2970
  version = "3.6.0"