Spaces:
Running
Running
github-actions[bot]
commited on
Commit
β’
6ebad61
0
Parent(s):
Sync to HuggingFace Spaces
Browse filesThis view is limited to 50 files because it contains too many changes. Β
See raw diff
- .dockerignore +25 -0
- .editorconfig +7 -0
- .env.example +32 -0
- .github/workflows/deploy.yml +54 -0
- .github/workflows/llama-cpp.yml +112 -0
- .github/workflows/on-pull-request-to-main.yml +9 -0
- .github/workflows/on-push-to-main.yml +7 -0
- .github/workflows/reusable-test-lint-ping.yml +25 -0
- .github/workflows/update-searxng-docker-image.yml +44 -0
- .gitignore +7 -0
- .npmrc +1 -0
- Dockerfile +82 -0
- README.md +131 -0
- biome.json +30 -0
- client/components/AiResponse/AiModelDownloadAllowanceContent.tsx +62 -0
- client/components/AiResponse/AiResponseContent.tsx +164 -0
- client/components/AiResponse/AiResponseSection.tsx +92 -0
- client/components/AiResponse/ChatInterface.tsx +195 -0
- client/components/AiResponse/CopyIconButton.tsx +32 -0
- client/components/AiResponse/FormattedMarkdown.tsx +38 -0
- client/components/AiResponse/LoadingModelContent.tsx +40 -0
- client/components/AiResponse/PreparingContent.tsx +29 -0
- client/components/AiResponse/WebLlmModelSelect.tsx +81 -0
- client/components/AiResponse/WllamaModelSelect.tsx +42 -0
- client/components/App/App.tsx +97 -0
- client/components/Logs/LogsModal.tsx +101 -0
- client/components/Logs/ShowLogsButton.tsx +42 -0
- client/components/Pages/AccessPage.tsx +61 -0
- client/components/Pages/Main/MainPage.tsx +65 -0
- client/components/Pages/Main/Menu/AISettingsForm.tsx +366 -0
- client/components/Pages/Main/Menu/ActionsForm.tsx +18 -0
- client/components/Pages/Main/Menu/ClearDataButton.tsx +59 -0
- client/components/Pages/Main/Menu/InterfaceSettingsForm.tsx +45 -0
- client/components/Pages/Main/Menu/MenuButton.tsx +53 -0
- client/components/Pages/Main/Menu/MenuDrawer.tsx +111 -0
- client/components/Pages/Main/Menu/SearchSettingsForm.tsx +43 -0
- client/components/Search/Form/SearchForm.tsx +140 -0
- client/components/Search/Results/Graphical/ImageResultsList.tsx +120 -0
- client/components/Search/Results/SearchResultsSection.tsx +132 -0
- client/components/Search/Results/Textual/SearchResultsList.tsx +87 -0
- client/index.html +36 -0
- client/index.tsx +9 -0
- client/modules/accessKey.ts +95 -0
- client/modules/logEntries.ts +20 -0
- client/modules/openai.ts +16 -0
- client/modules/parentWindow.ts +5 -0
- client/modules/pubSub.ts +106 -0
- client/modules/querySuggestions.ts +31 -0
- client/modules/search.ts +159 -0
- client/modules/searchTokenHash.ts +41 -0
.dockerignore
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Logs
|
2 |
+
logs
|
3 |
+
*.log
|
4 |
+
npm-debug.log*
|
5 |
+
yarn-debug.log*
|
6 |
+
yarn-error.log*
|
7 |
+
pnpm-debug.log*
|
8 |
+
lerna-debug.log*
|
9 |
+
|
10 |
+
node_modules
|
11 |
+
dist
|
12 |
+
dist-ssr
|
13 |
+
*.local
|
14 |
+
|
15 |
+
# Editor directories and files
|
16 |
+
.vscode/*
|
17 |
+
!.vscode/extensions.json
|
18 |
+
.idea
|
19 |
+
.DS_Store
|
20 |
+
*.suo
|
21 |
+
*.ntvs*
|
22 |
+
*.njsproj
|
23 |
+
*.sln
|
24 |
+
*.sw?
|
25 |
+
|
.editorconfig
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[*]
|
2 |
+
charset = utf-8
|
3 |
+
insert_final_newline = true
|
4 |
+
end_of_line = lf
|
5 |
+
indent_style = space
|
6 |
+
indent_size = 2
|
7 |
+
max_line_length = 80
|
.env.example
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# A comma-separated list of access keys. Example: `ACCESS_KEYS="ABC123,JUD71F,HUWE3"`. Leave blank for unrestricted access.
|
2 |
+
ACCESS_KEYS=""
|
3 |
+
|
4 |
+
# The timeout in hours for access key validation. Set to 0 to require validation on every page load.
|
5 |
+
ACCESS_KEY_TIMEOUT_HOURS="24"
|
6 |
+
|
7 |
+
# The default model ID for WebLLM with F16 shaders.
|
8 |
+
WEBLLM_DEFAULT_F16_MODEL_ID="SmolLM2-360M-Instruct-q0f16-MLC"
|
9 |
+
|
10 |
+
# The default model ID for WebLLM with F32 shaders.
|
11 |
+
WEBLLM_DEFAULT_F32_MODEL_ID="SmolLM2-360M-Instruct-q0f32-MLC"
|
12 |
+
|
13 |
+
# The default model ID for Wllama.
|
14 |
+
WLLAMA_DEFAULT_MODEL_ID="smollm2-360m"
|
15 |
+
|
16 |
+
# The base URL for the internal OpenAI compatible API. Example: `INTERNAL_OPENAI_COMPATIBLE_API_BASE_URL="https://api.openai.com/v1"`. Leave blank to disable internal OpenAI compatible API.
|
17 |
+
INTERNAL_OPENAI_COMPATIBLE_API_BASE_URL=""
|
18 |
+
|
19 |
+
# The access key for the internal OpenAI compatible API.
|
20 |
+
INTERNAL_OPENAI_COMPATIBLE_API_KEY=""
|
21 |
+
|
22 |
+
# The model for the internal OpenAI compatible API.
|
23 |
+
INTERNAL_OPENAI_COMPATIBLE_API_MODEL=""
|
24 |
+
|
25 |
+
# The name of the internal OpenAI compatible API, displayed in the UI.
|
26 |
+
INTERNAL_OPENAI_COMPATIBLE_API_NAME="Internal API"
|
27 |
+
|
28 |
+
# The type of inference to use by default. The possible values are:
|
29 |
+
# "browser" -> In the browser (Private)
|
30 |
+
# "openai" -> Remote Server (API)
|
31 |
+
# "internal" -> $INTERNAL_OPENAI_COMPATIBLE_API_NAME
|
32 |
+
DEFAULT_INFERENCE_TYPE="browser"
|
.github/workflows/deploy.yml
ADDED
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Deploy
|
2 |
+
|
3 |
+
on:
|
4 |
+
workflow_dispatch:
|
5 |
+
|
6 |
+
jobs:
|
7 |
+
build-and-push-image:
|
8 |
+
name: Publish Docker image to GitHub Packages
|
9 |
+
runs-on: ubuntu-latest
|
10 |
+
env:
|
11 |
+
REGISTRY: ghcr.io
|
12 |
+
IMAGE_NAME: ${{ github.repository }}
|
13 |
+
permissions:
|
14 |
+
contents: read
|
15 |
+
packages: write
|
16 |
+
steps:
|
17 |
+
- name: Checkout repository
|
18 |
+
uses: actions/checkout@v4
|
19 |
+
- name: Log in to the Container registry
|
20 |
+
uses: docker/login-action@v3
|
21 |
+
with:
|
22 |
+
registry: ${{ env.REGISTRY }}
|
23 |
+
username: ${{ github.actor }}
|
24 |
+
password: ${{ secrets.GITHUB_TOKEN }}
|
25 |
+
- name: Extract metadata (tags, labels) for Docker
|
26 |
+
id: meta
|
27 |
+
uses: docker/metadata-action@v5
|
28 |
+
with:
|
29 |
+
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
30 |
+
- name: Set up Docker Buildx
|
31 |
+
uses: docker/setup-buildx-action@v3
|
32 |
+
- name: Build and push Docker image
|
33 |
+
uses: docker/build-push-action@v6
|
34 |
+
with:
|
35 |
+
context: .
|
36 |
+
push: true
|
37 |
+
tags: ${{ steps.meta.outputs.tags }}
|
38 |
+
labels: ${{ steps.meta.outputs.labels }}
|
39 |
+
platforms: linux/amd64,linux/arm64
|
40 |
+
|
41 |
+
sync-to-hf:
|
42 |
+
name: Sync to HuggingFace Spaces
|
43 |
+
runs-on: ubuntu-latest
|
44 |
+
steps:
|
45 |
+
- uses: actions/checkout@v4
|
46 |
+
with:
|
47 |
+
lfs: true
|
48 |
+
- uses: JacobLinCool/huggingface-sync@v1
|
49 |
+
with:
|
50 |
+
github: ${{ secrets.GITHUB_TOKEN }}
|
51 |
+
user: ${{ vars.HF_SPACE_OWNER }}
|
52 |
+
space: ${{ vars.HF_SPACE_NAME }}
|
53 |
+
token: ${{ secrets.HF_TOKEN }}
|
54 |
+
configuration: "hf-space-config.yml"
|
.github/workflows/llama-cpp.yml
ADDED
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Review Pull Request with LlamaCPP
|
2 |
+
|
3 |
+
on:
|
4 |
+
pull_request:
|
5 |
+
types: [opened, synchronize, reopened]
|
6 |
+
branches: ["main"]
|
7 |
+
|
8 |
+
concurrency:
|
9 |
+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
10 |
+
cancel-in-progress: true
|
11 |
+
|
12 |
+
jobs:
|
13 |
+
llama-cpp:
|
14 |
+
if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-ai-review') }}
|
15 |
+
continue-on-error: true
|
16 |
+
runs-on: ubuntu-latest
|
17 |
+
name: LlamaCPP
|
18 |
+
permissions:
|
19 |
+
pull-requests: write
|
20 |
+
contents: read
|
21 |
+
timeout-minutes: 30
|
22 |
+
env:
|
23 |
+
LLAMA_CPP_COMMIT: 42ae10bbcd7b56f29a302c86796542a6dadf46c9
|
24 |
+
steps:
|
25 |
+
- name: Checkout Repository
|
26 |
+
uses: actions/checkout@v4
|
27 |
+
|
28 |
+
- name: Fetch branches and output the diff in this step
|
29 |
+
run: |
|
30 |
+
git fetch origin main:main
|
31 |
+
git fetch origin pull/${{ github.event.pull_request.number }}/head:pr-branch
|
32 |
+
mkdir -p /tmp/llama_review
|
33 |
+
git diff main..pr-branch > /tmp/llama_review/diff.txt
|
34 |
+
|
35 |
+
- name: Write prompt to file
|
36 |
+
id: build_prompt
|
37 |
+
run: |
|
38 |
+
PR_TITLE=$(echo "${{ github.event.pull_request.title }}" | sed 's/[()]/\\&/g')
|
39 |
+
DIFF_CONTENT=$(cat /tmp/llama_review/diff.txt)
|
40 |
+
echo "<|im_start|>system
|
41 |
+
You are a senior software engineer working ${{ github.event.repository.name }}, which has the following description: \"${{ github.event.repository.description }}\".
|
42 |
+
You are currently reviewing a pull request titled \"$PR_TITLE\", from the branch \"${{ github.event.pull_request.head.ref }}\".<|im_end|>
|
43 |
+
<|im_start|>user
|
44 |
+
Write a high-quality review of the following changes:
|
45 |
+
\`\`\`diff
|
46 |
+
$DIFF_CONTENT
|
47 |
+
\`\`\`
|
48 |
+
<|im_end|>
|
49 |
+
<|im_start|>assistant
|
50 |
+
### Pull Request Review
|
51 |
+
|
52 |
+
#### Summary" > /tmp/llama_review/prompt.txt
|
53 |
+
|
54 |
+
- name: Show Prompt
|
55 |
+
run: cat /tmp/llama_review/prompt.txt
|
56 |
+
|
57 |
+
- name: Cache LlamaCPP
|
58 |
+
id: cache_llama_cpp
|
59 |
+
uses: actions/cache@v4
|
60 |
+
with:
|
61 |
+
path: ~/.cache/llama.cpp/
|
62 |
+
key: llama-cpp-${{ runner.os }}-${{ env.LLAMA_CPP_COMMIT }}
|
63 |
+
|
64 |
+
- name: Clone and build LlamaCPP
|
65 |
+
if: steps.cache_llama_cpp.outputs.cache-hit != 'true'
|
66 |
+
run: |
|
67 |
+
git clone https://github.com/ggerganov/llama.cpp.git
|
68 |
+
cd llama.cpp
|
69 |
+
git checkout ${{ env.LLAMA_CPP_COMMIT }}
|
70 |
+
make -j llama-cli
|
71 |
+
mkdir -p ~/.cache/llama.cpp/
|
72 |
+
cp llama-cli ~/.cache/llama.cpp/
|
73 |
+
curl -L -o ~/.cache/llama.cpp/Qwen2.5-Coder-3B-Instruct-Q8_0.gguf https://huggingface.co/unsloth/Qwen2.5-Coder-3B-Instruct-GGUF/resolve/main/Qwen2.5-Coder-3B-Instruct-Q8_0.gguf
|
74 |
+
|
75 |
+
- name: Copy LlamaCPP to /usr/local/bin/
|
76 |
+
run: cp ~/.cache/llama.cpp/llama-cli /usr/local/bin/
|
77 |
+
|
78 |
+
- name: Run LlamaCPP
|
79 |
+
run: |
|
80 |
+
PROMPT=$(cat /tmp/llama_review/prompt.txt)
|
81 |
+
llama-cli \
|
82 |
+
-m ~/.cache/llama.cpp/Qwen2.5-Coder-3B-Instruct-Q8_0.gguf \
|
83 |
+
-p "$PROMPT" \
|
84 |
+
-e \
|
85 |
+
--ctx-size 32768 \
|
86 |
+
-np 1 \
|
87 |
+
-t -1 \
|
88 |
+
-n -1 \
|
89 |
+
--temp 0.7 \
|
90 |
+
--top-p 0.9 \
|
91 |
+
--min-p 0.1 \
|
92 |
+
--top-k 0 \
|
93 |
+
--no-display-prompt > /tmp/llama_review/response.txt
|
94 |
+
|
95 |
+
- name: Show Response
|
96 |
+
run: cat /tmp/llama_review/response.txt
|
97 |
+
|
98 |
+
- name: Find Comment
|
99 |
+
uses: peter-evans/find-comment@v3
|
100 |
+
id: find_comment
|
101 |
+
with:
|
102 |
+
issue-number: ${{ github.event.pull_request.number }}
|
103 |
+
comment-author: "github-actions[bot]"
|
104 |
+
body-includes: "[end of text]"
|
105 |
+
|
106 |
+
- name: Post or Update PR Review
|
107 |
+
uses: peter-evans/create-or-update-comment@v4
|
108 |
+
with:
|
109 |
+
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
110 |
+
issue-number: ${{ github.event.pull_request.number }}
|
111 |
+
body-path: /tmp/llama_review/response.txt
|
112 |
+
edit-mode: replace
|
.github/workflows/on-pull-request-to-main.yml
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: On Pull Request To Main
|
2 |
+
on:
|
3 |
+
pull_request:
|
4 |
+
types: [opened, synchronize, reopened]
|
5 |
+
branches: ["main"]
|
6 |
+
jobs:
|
7 |
+
test-lint-ping:
|
8 |
+
if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-test-lint-ping') }}
|
9 |
+
uses: ./.github/workflows/reusable-test-lint-ping.yml
|
.github/workflows/on-push-to-main.yml
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: On Push To Main
|
2 |
+
on:
|
3 |
+
push:
|
4 |
+
branches: ["main"]
|
5 |
+
jobs:
|
6 |
+
test-lint-ping:
|
7 |
+
uses: ./.github/workflows/reusable-test-lint-ping.yml
|
.github/workflows/reusable-test-lint-ping.yml
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
on:
|
2 |
+
workflow_call:
|
3 |
+
jobs:
|
4 |
+
check-code-quality:
|
5 |
+
name: Check Code Quality
|
6 |
+
runs-on: ubuntu-latest
|
7 |
+
steps:
|
8 |
+
- uses: actions/checkout@v4
|
9 |
+
- uses: actions/setup-node@v4
|
10 |
+
with:
|
11 |
+
node-version: 20
|
12 |
+
cache: "npm"
|
13 |
+
- run: npm ci --ignore-scripts
|
14 |
+
- run: npm test
|
15 |
+
- run: npm run lint
|
16 |
+
check-docker-container:
|
17 |
+
needs: [check-code-quality]
|
18 |
+
name: Check Docker Container
|
19 |
+
runs-on: ubuntu-latest
|
20 |
+
steps:
|
21 |
+
- uses: actions/checkout@v4
|
22 |
+
- run: docker compose -f docker-compose.production.yml up -d
|
23 |
+
- name: Check if main page is available
|
24 |
+
run: until curl -s -o /dev/null -w "%{http_code}" localhost:7860 | grep 200; do sleep 1; done
|
25 |
+
- run: docker compose -f docker-compose.production.yml down
|
.github/workflows/update-searxng-docker-image.yml
ADDED
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Update SearXNG Docker Image
|
2 |
+
|
3 |
+
on:
|
4 |
+
schedule:
|
5 |
+
- cron: "0 14 * * *"
|
6 |
+
workflow_dispatch:
|
7 |
+
|
8 |
+
permissions:
|
9 |
+
contents: write
|
10 |
+
|
11 |
+
jobs:
|
12 |
+
update-searxng-image:
|
13 |
+
runs-on: ubuntu-latest
|
14 |
+
steps:
|
15 |
+
- name: Checkout code
|
16 |
+
uses: actions/checkout@v4
|
17 |
+
with:
|
18 |
+
token: ${{ secrets.GITHUB_TOKEN }}
|
19 |
+
|
20 |
+
- name: Get latest SearXNG image tag
|
21 |
+
id: get_latest_tag
|
22 |
+
run: |
|
23 |
+
LATEST_TAG=$(curl -s "https://hub.docker.com/v2/repositories/searxng/searxng/tags/?page_size=3&ordering=last_updated" | jq -r '.results[] | select(.name != "latest-build-cache" and .name != "latest") | .name' | head -n 1)
|
24 |
+
echo "LATEST_TAG=${LATEST_TAG}" >> $GITHUB_OUTPUT
|
25 |
+
|
26 |
+
- name: Update Dockerfile
|
27 |
+
run: |
|
28 |
+
sed -i 's|FROM searxng/searxng:.*|FROM searxng/searxng:${{ steps.get_latest_tag.outputs.LATEST_TAG }}|' Dockerfile
|
29 |
+
|
30 |
+
- name: Check for changes
|
31 |
+
id: git_status
|
32 |
+
run: |
|
33 |
+
git diff --exit-code || echo "changes=true" >> $GITHUB_OUTPUT
|
34 |
+
|
35 |
+
- name: Commit and push if changed
|
36 |
+
if: steps.git_status.outputs.changes == 'true'
|
37 |
+
run: |
|
38 |
+
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
39 |
+
git config --local user.name "github-actions[bot]"
|
40 |
+
git add Dockerfile
|
41 |
+
git commit -m "Update SearXNG Docker image to tag ${{ steps.get_latest_tag.outputs.LATEST_TAG }}"
|
42 |
+
git push
|
43 |
+
env:
|
44 |
+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
.gitignore
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
node_modules
|
2 |
+
.DS_Store
|
3 |
+
/client/dist
|
4 |
+
/server/models
|
5 |
+
.vscode
|
6 |
+
/vite-build-stats.html
|
7 |
+
.env
|
.npmrc
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
legacy-peer-deps = true
|
Dockerfile
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Use the SearXNG image as the base
|
2 |
+
FROM searxng/searxng:2024.11.17-10d3af84b
|
3 |
+
|
4 |
+
# Set the default port to 7860 if not provided
|
5 |
+
ENV PORT=7860
|
6 |
+
|
7 |
+
# Expose the port specified by the PORT environment variable
|
8 |
+
EXPOSE $PORT
|
9 |
+
|
10 |
+
# Install necessary packages using Alpine's package manager
|
11 |
+
RUN apk add --update \
|
12 |
+
nodejs \
|
13 |
+
npm \
|
14 |
+
git \
|
15 |
+
build-base \
|
16 |
+
cmake \
|
17 |
+
ccache
|
18 |
+
|
19 |
+
# Set the SearXNG settings folder path
|
20 |
+
ARG SEARXNG_SETTINGS_FOLDER=/etc/searxng
|
21 |
+
|
22 |
+
# Modify SearXNG configuration:
|
23 |
+
# 1. Change output format from HTML to JSON
|
24 |
+
# 2. Remove user switching in the entrypoint script
|
25 |
+
# 3. Create and set permissions for the settings folder
|
26 |
+
RUN sed -i 's/- html/- json/' /usr/local/searxng/searx/settings.yml \
|
27 |
+
&& sed -i 's/su-exec searxng:searxng //' /usr/local/searxng/dockerfiles/docker-entrypoint.sh \
|
28 |
+
&& mkdir -p ${SEARXNG_SETTINGS_FOLDER} \
|
29 |
+
&& chmod 777 ${SEARXNG_SETTINGS_FOLDER}
|
30 |
+
|
31 |
+
# Set up user and directory structure
|
32 |
+
ARG USERNAME=user
|
33 |
+
ARG HOME_DIR=/home/${USERNAME}
|
34 |
+
ARG APP_DIR=${HOME_DIR}/app
|
35 |
+
|
36 |
+
# Create a non-root user and set up the application directory
|
37 |
+
RUN adduser -D -u 1000 ${USERNAME} \
|
38 |
+
&& mkdir -p ${APP_DIR} \
|
39 |
+
&& chown -R ${USERNAME}:${USERNAME} ${HOME_DIR}
|
40 |
+
|
41 |
+
# Switch to the non-root user
|
42 |
+
USER ${USERNAME}
|
43 |
+
|
44 |
+
# Set the working directory to the application directory
|
45 |
+
WORKDIR ${APP_DIR}
|
46 |
+
|
47 |
+
# Define environment variables that can be passed to the container during build.
|
48 |
+
# This approach allows for dynamic configuration without relying on a `.env` file,
|
49 |
+
# which might not be suitable for all deployment scenarios.
|
50 |
+
ARG ACCESS_KEYS
|
51 |
+
ARG ACCESS_KEY_TIMEOUT_HOURS
|
52 |
+
ARG WEBLLM_DEFAULT_F16_MODEL_ID
|
53 |
+
ARG WEBLLM_DEFAULT_F32_MODEL_ID
|
54 |
+
ARG WLLAMA_DEFAULT_MODEL_ID
|
55 |
+
ARG INTERNAL_OPENAI_COMPATIBLE_API_BASE_URL
|
56 |
+
ARG INTERNAL_OPENAI_COMPATIBLE_API_KEY
|
57 |
+
ARG INTERNAL_OPENAI_COMPATIBLE_API_MODEL
|
58 |
+
ARG INTERNAL_OPENAI_COMPATIBLE_API_NAME
|
59 |
+
ARG DEFAULT_INFERENCE_TYPE
|
60 |
+
|
61 |
+
# Copy package.json, package-lock.json, and .npmrc files
|
62 |
+
COPY --chown=${USERNAME}:${USERNAME} ./package.json ./package.json
|
63 |
+
COPY --chown=${USERNAME}:${USERNAME} ./package-lock.json ./package-lock.json
|
64 |
+
COPY --chown=${USERNAME}:${USERNAME} ./.npmrc ./.npmrc
|
65 |
+
|
66 |
+
# Install Node.js dependencies
|
67 |
+
RUN npm ci
|
68 |
+
|
69 |
+
# Copy the rest of the application files
|
70 |
+
COPY --chown=${USERNAME}:${USERNAME} . .
|
71 |
+
|
72 |
+
# Configure Git to treat the app directory as safe
|
73 |
+
RUN git config --global --add safe.directory ${APP_DIR}
|
74 |
+
|
75 |
+
# Build the application
|
76 |
+
RUN npm run build
|
77 |
+
|
78 |
+
# Set the entrypoint to use a shell
|
79 |
+
ENTRYPOINT [ "/bin/sh", "-c" ]
|
80 |
+
|
81 |
+
# Run SearXNG in the background and start the Node.js application using PM2
|
82 |
+
CMD [ "(/usr/local/searxng/dockerfiles/docker-entrypoint.sh -f > /dev/null 2>&1) & (npx pm2 start ecosystem.config.cjs && npx pm2 logs production-server)" ]
|
README.md
ADDED
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: MiniSearch
|
3 |
+
emoji: ππ
|
4 |
+
colorFrom: yellow
|
5 |
+
colorTo: yellow
|
6 |
+
sdk: docker
|
7 |
+
short_description: Minimalist web-searching app with browser-based AI assistant
|
8 |
+
pinned: true
|
9 |
+
custom_headers:
|
10 |
+
cross-origin-embedder-policy: require-corp
|
11 |
+
cross-origin-opener-policy: same-origin
|
12 |
+
cross-origin-resource-policy: cross-origin
|
13 |
+
---
|
14 |
+
|
15 |
+
# MiniSearch
|
16 |
+
|
17 |
+
A minimalist web-searching app with an AI assistant that runs directly from your browser.
|
18 |
+
|
19 |
+
Live demo: https://felladrin-minisearch.hf.space
|
20 |
+
|
21 |
+
## Screenshot
|
22 |
+
|
23 |
+
![MiniSearch Screenshot](https://github.com/user-attachments/assets/f8d72a8e-a725-42e9-9358-e6ebade2acb2)
|
24 |
+
|
25 |
+
## Features
|
26 |
+
|
27 |
+
- **Privacy-focused**: [No tracking, no ads, no data collection](https://docs.searxng.org/own-instance.html#how-does-searxng-protect-privacy)
|
28 |
+
- **Easy to use**: Minimalist yet intuitive interface for all users
|
29 |
+
- **Cross-platform**: Models run inside the browser, both on desktop and mobile
|
30 |
+
- **Integrated**: Search from the browser address bar by setting it as the default search engine
|
31 |
+
- **Efficient**: Models are loaded and cached only when needed
|
32 |
+
- **Customizable**: Tweakable settings for search results and text generation
|
33 |
+
- **Open-source**: [The code is available for inspection and contribution at GitHub](https://github.com/felladrin/MiniSearch)
|
34 |
+
|
35 |
+
## Prerequisites
|
36 |
+
|
37 |
+
- [Docker](https://docs.docker.com/get-docker/)
|
38 |
+
|
39 |
+
## Getting started
|
40 |
+
|
41 |
+
Here are the easiest ways to get started with MiniSearch. Pick the one that suits you best.
|
42 |
+
|
43 |
+
**Option 1** - Use [MiniSearch's Docker Image](https://github.com/felladrin/MiniSearch/pkgs/container/minisearch) by running in your terminal:
|
44 |
+
|
45 |
+
```bash
|
46 |
+
docker run -p 7860:7860 ghcr.io/felladrin/minisearch:main
|
47 |
+
```
|
48 |
+
|
49 |
+
**Option 2** - Add MiniSearch's Docker Image to your existing Docker Compose file:
|
50 |
+
|
51 |
+
```yaml
|
52 |
+
services:
|
53 |
+
minisearch:
|
54 |
+
image: ghcr.io/felladrin/minisearch:main
|
55 |
+
ports:
|
56 |
+
- "7860:7860"
|
57 |
+
```
|
58 |
+
|
59 |
+
**Option 3** - Build from source by [downloading the repository files](https://github.com/felladrin/MiniSearch/archive/refs/heads/main.zip) and running:
|
60 |
+
|
61 |
+
```bash
|
62 |
+
docker compose -f docker-compose.production.yml up --build
|
63 |
+
```
|
64 |
+
|
65 |
+
Once the container is running, open http://localhost:7860 in your browser and start searching!
|
66 |
+
|
67 |
+
## Frequently asked questions
|
68 |
+
|
69 |
+
<details>
|
70 |
+
<summary>How do I search via the browser's address bar?</summary>
|
71 |
+
<p>
|
72 |
+
You can set MiniSearch as your browser's address-bar search engine using the pattern <code>http://localhost:7860/?q=%s</code>, in which your search term replaces <code>%s</code>.
|
73 |
+
</p>
|
74 |
+
</details>
|
75 |
+
|
76 |
+
<details>
|
77 |
+
<summary>Can I use custom models via OpenAI-Compatible API?</summary>
|
78 |
+
<p>
|
79 |
+
Yes! For this, open the Menu and change the "AI Processing Location" to <code>Remote server (API)</code>. Then configure the Base URL, and optionally set an API Key and a Model to use.
|
80 |
+
</p>
|
81 |
+
</details>
|
82 |
+
|
83 |
+
<details>
|
84 |
+
<summary>How do I restrict the access to my MiniSearch instance via password?</summary>
|
85 |
+
<p>
|
86 |
+
Create a <code>.env</code> file and set a value for <code>ACCESS_KEYS</code>. Then reset the MiniSearch docker container.
|
87 |
+
</p>
|
88 |
+
<p>
|
89 |
+
For example, if you to set the password to <code>PepperoniPizza</code>, then this is what you should add to your <code>.env</code>:<br/>
|
90 |
+
<code>ACCESS_KEYS="PepperoniPizza"</code>
|
91 |
+
</p>
|
92 |
+
<p>
|
93 |
+
You can find more examples in the <code>.env.example</code> file.
|
94 |
+
</p>
|
95 |
+
</details>
|
96 |
+
|
97 |
+
<details>
|
98 |
+
<summary>I want to serve MiniSearch to other users, allowing them to use my own OpenAI-Compatible API key, but without revealing it to them. Is it possible?</summary>
|
99 |
+
<p>Yes! In MiniSearch, we call this text-generation feature "Internal OpenAI-Compatible API". To use this it:</p>
|
100 |
+
<ol>
|
101 |
+
<li>Set up your OpenAI-Compatible API endpoint by configuring the following environment variables in your <code>.env</code> file:
|
102 |
+
<ul>
|
103 |
+
<li><code>INTERNAL_OPENAI_COMPATIBLE_API_BASE_URL</code>: The base URL for your API</li>
|
104 |
+
<li><code>INTERNAL_OPENAI_COMPATIBLE_API_KEY</code>: Your API access key</li>
|
105 |
+
<li><code>INTERNAL_OPENAI_COMPATIBLE_API_MODEL</code>: The model to use</li>
|
106 |
+
<li><code>INTERNAL_OPENAI_COMPATIBLE_API_NAME</code>: The name to display in the UI</li>
|
107 |
+
</ul>
|
108 |
+
</li>
|
109 |
+
<li>Restart MiniSearch server.</li>
|
110 |
+
<li>In the MiniSearch menu, select the new option (named as per your <code>INTERNAL_OPENAI_COMPATIBLE_API_NAME</code> setting) from the "AI Processing Location" dropdown.</li>
|
111 |
+
</ol>
|
112 |
+
</details>
|
113 |
+
|
114 |
+
<details>
|
115 |
+
<summary>How can I contribute to the development of this tool?</summary>
|
116 |
+
<p>Fork this repository and clone it. Then, start the development server by running the following command:</p>
|
117 |
+
<p><code>docker compose up</code></p>
|
118 |
+
<p>Make your changes, push them to your fork, and open a pull request! All contributions are welcome!</p>
|
119 |
+
</details>
|
120 |
+
|
121 |
+
<details>
|
122 |
+
<summary>Why is MiniSearch built upon SearXNG's Docker Image and using a single image instead of composing it from multiple services?</summary>
|
123 |
+
<p>There are a few reasons for this:</p>
|
124 |
+
<ul>
|
125 |
+
<li>MiniSearch utilizes SearXNG as its meta-search engine.</li>
|
126 |
+
<li>Manual installation of SearXNG is not trivial, so we use the docker image they provide, which has everything set up.</li>
|
127 |
+
<li>SearXNG only provides a Docker Image based on Alpine Linux.</li>
|
128 |
+
<li>The user of the image needs to be customized in a specific way to run on HuggingFace Spaces, where MiniSearch's demo runs.</li>
|
129 |
+
<li>HuggingFace only accepts a single docker image. It doesn't run docker compose or multiple images, unfortunately.</li>
|
130 |
+
</ul>
|
131 |
+
</details>
|
biome.json
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
|
3 |
+
"vcs": {
|
4 |
+
"enabled": false,
|
5 |
+
"clientKind": "git",
|
6 |
+
"useIgnoreFile": false
|
7 |
+
},
|
8 |
+
"files": {
|
9 |
+
"ignoreUnknown": false,
|
10 |
+
"ignore": []
|
11 |
+
},
|
12 |
+
"formatter": {
|
13 |
+
"enabled": true,
|
14 |
+
"indentStyle": "space"
|
15 |
+
},
|
16 |
+
"organizeImports": {
|
17 |
+
"enabled": true
|
18 |
+
},
|
19 |
+
"linter": {
|
20 |
+
"enabled": true,
|
21 |
+
"rules": {
|
22 |
+
"recommended": true
|
23 |
+
}
|
24 |
+
},
|
25 |
+
"javascript": {
|
26 |
+
"formatter": {
|
27 |
+
"quoteStyle": "double"
|
28 |
+
}
|
29 |
+
}
|
30 |
+
}
|
client/components/AiResponse/AiModelDownloadAllowanceContent.tsx
ADDED
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Alert, Button, Group, Text } from "@mantine/core";
|
2 |
+
import { IconCheck, IconInfoCircle, IconX } from "@tabler/icons-react";
|
3 |
+
import { usePubSub } from "create-pubsub/react";
|
4 |
+
import { useState } from "react";
|
5 |
+
import { addLogEntry } from "../../modules/logEntries";
|
6 |
+
import { settingsPubSub } from "../../modules/pubSub";
|
7 |
+
|
8 |
+
export default function AiModelDownloadAllowanceContent() {
|
9 |
+
const [settings, setSettings] = usePubSub(settingsPubSub);
|
10 |
+
const [hasDeniedDownload, setDeniedDownload] = useState(false);
|
11 |
+
|
12 |
+
const handleAccept = () => {
|
13 |
+
setSettings({
|
14 |
+
...settings,
|
15 |
+
allowAiModelDownload: true,
|
16 |
+
});
|
17 |
+
addLogEntry("User allowed the AI model download");
|
18 |
+
};
|
19 |
+
|
20 |
+
const handleDecline = () => {
|
21 |
+
setDeniedDownload(true);
|
22 |
+
addLogEntry("User denied the AI model download");
|
23 |
+
};
|
24 |
+
|
25 |
+
return hasDeniedDownload ? null : (
|
26 |
+
<Alert
|
27 |
+
variant="light"
|
28 |
+
color="blue"
|
29 |
+
title="Allow AI model download?"
|
30 |
+
icon={<IconInfoCircle />}
|
31 |
+
>
|
32 |
+
<Text size="sm" mb="md">
|
33 |
+
To obtain AI responses, a language model needs to be downloaded to your
|
34 |
+
browser. Enabling this option lets the app store it and load it
|
35 |
+
instantly on subsequent uses.
|
36 |
+
</Text>
|
37 |
+
<Text size="sm" mb="md">
|
38 |
+
Please note that the download size ranges from 100 MB to 4 GB, depending
|
39 |
+
on the model you select in the Menu, so it's best to avoid using mobile
|
40 |
+
data for this.
|
41 |
+
</Text>
|
42 |
+
<Group justify="flex-end" mt="md">
|
43 |
+
<Button
|
44 |
+
variant="subtle"
|
45 |
+
color="gray"
|
46 |
+
leftSection={<IconX size="1rem" />}
|
47 |
+
onClick={handleDecline}
|
48 |
+
size="xs"
|
49 |
+
>
|
50 |
+
Not now
|
51 |
+
</Button>
|
52 |
+
<Button
|
53 |
+
leftSection={<IconCheck size="1rem" />}
|
54 |
+
onClick={handleAccept}
|
55 |
+
size="xs"
|
56 |
+
>
|
57 |
+
Allow download
|
58 |
+
</Button>
|
59 |
+
</Group>
|
60 |
+
</Alert>
|
61 |
+
);
|
62 |
+
}
|
client/components/AiResponse/AiResponseContent.tsx
ADDED
@@ -0,0 +1,164 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
ActionIcon,
|
3 |
+
Alert,
|
4 |
+
Badge,
|
5 |
+
Box,
|
6 |
+
Card,
|
7 |
+
Group,
|
8 |
+
ScrollArea,
|
9 |
+
Text,
|
10 |
+
Tooltip,
|
11 |
+
} from "@mantine/core";
|
12 |
+
import {
|
13 |
+
IconArrowsMaximize,
|
14 |
+
IconArrowsMinimize,
|
15 |
+
IconHandStop,
|
16 |
+
IconInfoCircle,
|
17 |
+
IconRefresh,
|
18 |
+
} from "@tabler/icons-react";
|
19 |
+
import type { PublishFunction } from "create-pubsub";
|
20 |
+
import { usePubSub } from "create-pubsub/react";
|
21 |
+
import { type ReactNode, Suspense, lazy, useMemo } from "react";
|
22 |
+
import { match } from "ts-pattern";
|
23 |
+
import { settingsPubSub } from "../../modules/pubSub";
|
24 |
+
import { searchAndRespond } from "../../modules/textGeneration";
|
25 |
+
|
26 |
+
const FormattedMarkdown = lazy(() => import("./FormattedMarkdown"));
|
27 |
+
const CopyIconButton = lazy(() => import("./CopyIconButton"));
|
28 |
+
|
29 |
+
export default function AiResponseContent({
|
30 |
+
textGenerationState,
|
31 |
+
response,
|
32 |
+
setTextGenerationState,
|
33 |
+
}: {
|
34 |
+
textGenerationState: string;
|
35 |
+
response: string;
|
36 |
+
setTextGenerationState: PublishFunction<
|
37 |
+
| "failed"
|
38 |
+
| "awaitingSearchResults"
|
39 |
+
| "preparingToGenerate"
|
40 |
+
| "idle"
|
41 |
+
| "loadingModel"
|
42 |
+
| "generating"
|
43 |
+
| "interrupted"
|
44 |
+
| "completed"
|
45 |
+
>;
|
46 |
+
}) {
|
47 |
+
const [settings, setSettings] = usePubSub(settingsPubSub);
|
48 |
+
|
49 |
+
const ConditionalScrollArea = useMemo(
|
50 |
+
() =>
|
51 |
+
({ children }: { children: ReactNode }) => {
|
52 |
+
return settings.enableAiResponseScrolling ? (
|
53 |
+
<ScrollArea.Autosize mah={300} type="auto" offsetScrollbars>
|
54 |
+
{children}
|
55 |
+
</ScrollArea.Autosize>
|
56 |
+
) : (
|
57 |
+
<Box>{children}</Box>
|
58 |
+
);
|
59 |
+
},
|
60 |
+
[settings.enableAiResponseScrolling],
|
61 |
+
);
|
62 |
+
|
63 |
+
return (
|
64 |
+
<Card withBorder shadow="sm" radius="md">
|
65 |
+
<Card.Section withBorder inheritPadding py="xs">
|
66 |
+
<Group justify="space-between">
|
67 |
+
<Group gap="xs" align="center">
|
68 |
+
<Text fw={500}>
|
69 |
+
{match(textGenerationState)
|
70 |
+
.with("generating", () => "Generating AI Response...")
|
71 |
+
.otherwise(() => "AI Response")}
|
72 |
+
</Text>
|
73 |
+
{match(textGenerationState)
|
74 |
+
.with("interrupted", () => (
|
75 |
+
<Badge variant="light" color="yellow" size="xs">
|
76 |
+
Interrupted
|
77 |
+
</Badge>
|
78 |
+
))
|
79 |
+
.otherwise(() => null)}
|
80 |
+
</Group>
|
81 |
+
<Group gap="xs" align="center">
|
82 |
+
{match(textGenerationState)
|
83 |
+
.with("generating", () => (
|
84 |
+
<Tooltip label="Interrupt generation">
|
85 |
+
<ActionIcon
|
86 |
+
onClick={() => setTextGenerationState("interrupted")}
|
87 |
+
variant="subtle"
|
88 |
+
color="gray"
|
89 |
+
>
|
90 |
+
<IconHandStop size={16} />
|
91 |
+
</ActionIcon>
|
92 |
+
</Tooltip>
|
93 |
+
))
|
94 |
+
.otherwise(() => (
|
95 |
+
<Tooltip label="Regenerate response">
|
96 |
+
<ActionIcon
|
97 |
+
onClick={() => searchAndRespond()}
|
98 |
+
variant="subtle"
|
99 |
+
color="gray"
|
100 |
+
>
|
101 |
+
<IconRefresh size={16} />
|
102 |
+
</ActionIcon>
|
103 |
+
</Tooltip>
|
104 |
+
))}
|
105 |
+
{settings.enableAiResponseScrolling ? (
|
106 |
+
<Tooltip label="Show full response without scroll bar">
|
107 |
+
<ActionIcon
|
108 |
+
onClick={() => {
|
109 |
+
setSettings({
|
110 |
+
...settings,
|
111 |
+
enableAiResponseScrolling: false,
|
112 |
+
});
|
113 |
+
}}
|
114 |
+
variant="subtle"
|
115 |
+
color="gray"
|
116 |
+
>
|
117 |
+
<IconArrowsMaximize size={16} />
|
118 |
+
</ActionIcon>
|
119 |
+
</Tooltip>
|
120 |
+
) : (
|
121 |
+
<Tooltip label="Enable scroll bar">
|
122 |
+
<ActionIcon
|
123 |
+
onClick={() => {
|
124 |
+
setSettings({
|
125 |
+
...settings,
|
126 |
+
enableAiResponseScrolling: true,
|
127 |
+
});
|
128 |
+
}}
|
129 |
+
variant="subtle"
|
130 |
+
color="gray"
|
131 |
+
>
|
132 |
+
<IconArrowsMinimize size={16} />
|
133 |
+
</ActionIcon>
|
134 |
+
</Tooltip>
|
135 |
+
)}
|
136 |
+
<Suspense>
|
137 |
+
<CopyIconButton value={response} tooltipLabel="Copy response" />
|
138 |
+
</Suspense>
|
139 |
+
</Group>
|
140 |
+
</Group>
|
141 |
+
</Card.Section>
|
142 |
+
<Card.Section withBorder>
|
143 |
+
<ConditionalScrollArea>
|
144 |
+
<Suspense>
|
145 |
+
<FormattedMarkdown>{response}</FormattedMarkdown>
|
146 |
+
</Suspense>
|
147 |
+
</ConditionalScrollArea>
|
148 |
+
{match(textGenerationState)
|
149 |
+
.with("failed", () => (
|
150 |
+
<Alert
|
151 |
+
variant="light"
|
152 |
+
color="yellow"
|
153 |
+
title="Failed to generate response"
|
154 |
+
icon={<IconInfoCircle />}
|
155 |
+
>
|
156 |
+
Could not generate response. It's possible that your browser or
|
157 |
+
your system is out of memory.
|
158 |
+
</Alert>
|
159 |
+
))
|
160 |
+
.otherwise(() => null)}
|
161 |
+
</Card.Section>
|
162 |
+
</Card>
|
163 |
+
);
|
164 |
+
}
|
client/components/AiResponse/AiResponseSection.tsx
ADDED
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import { usePubSub } from "create-pubsub/react";
import { Suspense, lazy, useMemo } from "react";
import { Pattern, match } from "ts-pattern";
import {
  modelLoadingProgressPubSub,
  modelSizeInMegabytesPubSub,
  queryPubSub,
  responsePubSub,
  settingsPubSub,
  textGenerationStatePubSub,
} from "../../modules/pubSub";

const AiResponseContent = lazy(() => import("./AiResponseContent"));
const PreparingContent = lazy(() => import("./PreparingContent"));
const LoadingModelContent = lazy(() => import("./LoadingModelContent"));
const ChatInterface = lazy(() => import("./ChatInterface"));
const AiModelDownloadAllowanceContent = lazy(
  () => import("./AiModelDownloadAllowanceContent"),
);

/**
 * Section of the page dedicated to the AI response. Selects the proper
 * sub-component for the current text-generation state, and renders nothing
 * while AI responses are disabled in the settings or generation is idle.
 */
export default function AiResponseSection() {
  const [query] = usePubSub(queryPubSub);
  const [response] = usePubSub(responsePubSub);
  const [textGenerationState, setTextGenerationState] = usePubSub(
    textGenerationStatePubSub,
  );
  const [modelLoadingProgress] = usePubSub(modelLoadingProgressPubSub);
  const [settings] = usePubSub(settingsPubSub);
  const [modelSizeInMegabytes] = usePubSub(modelSizeInMegabytesPubSub);

  return useMemo(
    () =>
      match([settings.enableAiResponse, textGenerationState])
        .with([true, Pattern.not("idle").select()], (activeState) =>
          // The inner match is exhaustive over every non-idle state.
          match(activeState)
            .with(
              Pattern.union("generating", "interrupted", "completed", "failed"),
              (state) => (
                <>
                  <Suspense>
                    <AiResponseContent
                      textGenerationState={state}
                      response={response}
                      setTextGenerationState={setTextGenerationState}
                    />
                  </Suspense>
                  {/* The follow-up chat only appears once generation is done. */}
                  {state === "completed" && (
                    <Suspense>
                      <ChatInterface
                        initialQuery={query}
                        initialResponse={response}
                      />
                    </Suspense>
                  )}
                </>
              ),
            )
            .with("awaitingModelDownloadAllowance", () => (
              <Suspense>
                <AiModelDownloadAllowanceContent />
              </Suspense>
            ))
            .with("loadingModel", () => (
              <Suspense>
                <LoadingModelContent
                  modelLoadingProgress={modelLoadingProgress}
                  modelSizeInMegabytes={modelSizeInMegabytes}
                />
              </Suspense>
            ))
            .with(
              Pattern.union("awaitingSearchResults", "preparingToGenerate"),
              (state) => (
                <Suspense>
                  <PreparingContent textGenerationState={state} />
                </Suspense>
              ),
            )
            .exhaustive(),
        )
        .otherwise(() => null),
    [
      settings,
      textGenerationState,
      setTextGenerationState,
      modelLoadingProgress,
      response,
      query,
      modelSizeInMegabytes,
    ],
  );
}
|
client/components/AiResponse/ChatInterface.tsx
ADDED
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Button,
|
3 |
+
Card,
|
4 |
+
Group,
|
5 |
+
Paper,
|
6 |
+
Stack,
|
7 |
+
Text,
|
8 |
+
Textarea,
|
9 |
+
} from "@mantine/core";
|
10 |
+
import { IconSend } from "@tabler/icons-react";
|
11 |
+
import { usePubSub } from "create-pubsub/react";
|
12 |
+
import type { ChatMessage } from "gpt-tokenizer/GptEncoding";
|
13 |
+
import {
|
14 |
+
type KeyboardEvent,
|
15 |
+
Suspense,
|
16 |
+
lazy,
|
17 |
+
useEffect,
|
18 |
+
useRef,
|
19 |
+
useState,
|
20 |
+
} from "react";
|
21 |
+
import { match } from "ts-pattern";
|
22 |
+
import { addLogEntry } from "../../modules/logEntries";
|
23 |
+
import { settingsPubSub } from "../../modules/pubSub";
|
24 |
+
import { generateChatResponse } from "../../modules/textGeneration";
|
25 |
+
|
26 |
+
const FormattedMarkdown = lazy(() => import("./FormattedMarkdown"));
|
27 |
+
const CopyIconButton = lazy(() => import("./CopyIconButton"));
|
28 |
+
|
29 |
+
export default function ChatInterface({
|
30 |
+
initialQuery,
|
31 |
+
initialResponse,
|
32 |
+
}: {
|
33 |
+
initialQuery: string;
|
34 |
+
initialResponse: string;
|
35 |
+
}) {
|
36 |
+
const [messages, setMessages] = useState<ChatMessage[]>([]);
|
37 |
+
const [input, setInput] = useState("");
|
38 |
+
const [isGenerating, setIsGenerating] = useState(false);
|
39 |
+
const [streamedResponse, setStreamedResponse] = useState("");
|
40 |
+
const latestResponseRef = useRef("");
|
41 |
+
const [settings] = usePubSub(settingsPubSub);
|
42 |
+
|
43 |
+
useEffect(() => {
|
44 |
+
setMessages([
|
45 |
+
{ role: "user", content: initialQuery },
|
46 |
+
{ role: "assistant", content: initialResponse },
|
47 |
+
]);
|
48 |
+
}, [initialQuery, initialResponse]);
|
49 |
+
|
50 |
+
const handleSend = async () => {
|
51 |
+
if (input.trim() === "" || isGenerating) return;
|
52 |
+
|
53 |
+
const newMessages: ChatMessage[] = [
|
54 |
+
...messages,
|
55 |
+
{ role: "user", content: input },
|
56 |
+
];
|
57 |
+
setMessages(newMessages);
|
58 |
+
setInput("");
|
59 |
+
setIsGenerating(true);
|
60 |
+
setStreamedResponse("");
|
61 |
+
latestResponseRef.current = "";
|
62 |
+
|
63 |
+
try {
|
64 |
+
addLogEntry("User sent a follow-up question");
|
65 |
+
await generateChatResponse(newMessages, (partialResponse) => {
|
66 |
+
setStreamedResponse(partialResponse);
|
67 |
+
latestResponseRef.current = partialResponse;
|
68 |
+
});
|
69 |
+
setMessages((prevMessages) => [
|
70 |
+
...prevMessages,
|
71 |
+
{ role: "assistant", content: latestResponseRef.current },
|
72 |
+
]);
|
73 |
+
addLogEntry("AI responded to follow-up question");
|
74 |
+
} catch (error) {
|
75 |
+
addLogEntry(`Error generating chat response: ${error}`);
|
76 |
+
setMessages((prevMessages) => [
|
77 |
+
...prevMessages,
|
78 |
+
{
|
79 |
+
role: "assistant",
|
80 |
+
content: "Sorry, I encountered an error while generating a response.",
|
81 |
+
},
|
82 |
+
]);
|
83 |
+
} finally {
|
84 |
+
setIsGenerating(false);
|
85 |
+
setStreamedResponse("");
|
86 |
+
}
|
87 |
+
};
|
88 |
+
|
89 |
+
const handleKeyDown = (event: KeyboardEvent<HTMLTextAreaElement>) => {
|
90 |
+
match([event, settings.enterToSubmit])
|
91 |
+
.with([{ code: "Enter", shiftKey: false }, true], () => {
|
92 |
+
event.preventDefault();
|
93 |
+
handleSend();
|
94 |
+
})
|
95 |
+
.with([{ code: "Enter", shiftKey: true }, false], () => {
|
96 |
+
event.preventDefault();
|
97 |
+
handleSend();
|
98 |
+
})
|
99 |
+
.otherwise(() => undefined);
|
100 |
+
};
|
101 |
+
|
102 |
+
const getChatContent = () => {
|
103 |
+
return messages
|
104 |
+
.slice(2)
|
105 |
+
.map(
|
106 |
+
(msg, index) =>
|
107 |
+
`${index + 1}. ${msg.role?.toUpperCase()}\n\n${msg.content}`,
|
108 |
+
)
|
109 |
+
.join("\n\n");
|
110 |
+
};
|
111 |
+
|
112 |
+
return (
|
113 |
+
<Card withBorder shadow="sm" radius="md">
|
114 |
+
<Card.Section withBorder inheritPadding py="xs">
|
115 |
+
<Group justify="space-between">
|
116 |
+
<Text fw={500}>Follow-up questions</Text>
|
117 |
+
{messages.length > 2 && (
|
118 |
+
<Suspense>
|
119 |
+
<CopyIconButton
|
120 |
+
value={getChatContent()}
|
121 |
+
tooltipLabel="Copy conversation"
|
122 |
+
/>
|
123 |
+
</Suspense>
|
124 |
+
)}
|
125 |
+
</Group>
|
126 |
+
</Card.Section>
|
127 |
+
<Stack gap="md" pt="md">
|
128 |
+
{messages.slice(2).length > 0 && (
|
129 |
+
<Stack gap="md">
|
130 |
+
{messages.slice(2).map((message, index) => (
|
131 |
+
<Paper
|
132 |
+
key={`${message.role}-${index}`}
|
133 |
+
shadow="xs"
|
134 |
+
radius="xl"
|
135 |
+
p="sm"
|
136 |
+
maw="90%"
|
137 |
+
style={{
|
138 |
+
alignSelf:
|
139 |
+
message.role === "user" ? "flex-end" : "flex-start",
|
140 |
+
}}
|
141 |
+
>
|
142 |
+
<Suspense>
|
143 |
+
<FormattedMarkdown>{message.content}</FormattedMarkdown>
|
144 |
+
</Suspense>
|
145 |
+
</Paper>
|
146 |
+
))}
|
147 |
+
{isGenerating && streamedResponse.length > 0 && (
|
148 |
+
<Paper
|
149 |
+
shadow="xs"
|
150 |
+
radius="xl"
|
151 |
+
p="sm"
|
152 |
+
maw="90%"
|
153 |
+
style={{ alignSelf: "flex-start" }}
|
154 |
+
>
|
155 |
+
<Suspense>
|
156 |
+
<FormattedMarkdown>{streamedResponse}</FormattedMarkdown>
|
157 |
+
</Suspense>
|
158 |
+
</Paper>
|
159 |
+
)}
|
160 |
+
</Stack>
|
161 |
+
)}
|
162 |
+
<Group align="flex-end" style={{ position: "relative" }}>
|
163 |
+
<Textarea
|
164 |
+
placeholder="Anything else you would like to know?"
|
165 |
+
value={input}
|
166 |
+
onChange={(event) => setInput(event.currentTarget.value)}
|
167 |
+
onKeyDown={handleKeyDown}
|
168 |
+
autosize
|
169 |
+
minRows={1}
|
170 |
+
maxRows={4}
|
171 |
+
style={{ flexGrow: 1, paddingRight: "50px" }}
|
172 |
+
disabled={isGenerating}
|
173 |
+
/>
|
174 |
+
<Button
|
175 |
+
size="sm"
|
176 |
+
variant="default"
|
177 |
+
onClick={handleSend}
|
178 |
+
loading={isGenerating}
|
179 |
+
style={{
|
180 |
+
height: "100%",
|
181 |
+
position: "absolute",
|
182 |
+
right: 0,
|
183 |
+
top: 0,
|
184 |
+
bottom: 0,
|
185 |
+
borderTopLeftRadius: 0,
|
186 |
+
borderBottomLeftRadius: 0,
|
187 |
+
}}
|
188 |
+
>
|
189 |
+
<IconSend size={16} />
|
190 |
+
</Button>
|
191 |
+
</Group>
|
192 |
+
</Stack>
|
193 |
+
</Card>
|
194 |
+
);
|
195 |
+
}
|
client/components/AiResponse/CopyIconButton.tsx
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import { ActionIcon, CopyButton, Tooltip } from "@mantine/core";
import { IconCheck, IconCopy } from "@tabler/icons-react";

interface CopyIconButtonProps {
  /** Text placed on the clipboard when the button is pressed. */
  value: string;
  /** Tooltip shown while hovering the button (before copying). */
  tooltipLabel?: string;
}

/**
 * Small icon button that copies `value` to the clipboard, briefly swapping
 * its icon and tooltip to a "copied" confirmation for two seconds.
 */
export default function CopyIconButton({
  value,
  tooltipLabel = "Copy",
}: CopyIconButtonProps) {
  return (
    <CopyButton value={value} timeout={2000}>
      {({ copied, copy }) => {
        const icon = copied ? <IconCheck size={16} /> : <IconCopy size={16} />;
        return (
          <Tooltip
            label={copied ? "Copied" : tooltipLabel}
            withArrow
            position="right"
          >
            <ActionIcon
              color={copied ? "teal" : "gray"}
              variant="subtle"
              onClick={copy}
            >
              {icon}
            </ActionIcon>
          </Tooltip>
        );
      }}
    </CopyButton>
  );
}
|
client/components/AiResponse/FormattedMarkdown.tsx
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { TypographyStylesProvider } from "@mantine/core";
|
2 |
+
import Markdown from "react-markdown";
|
3 |
+
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
|
4 |
+
import syntaxHighlighterStyle from "react-syntax-highlighter/dist/esm/styles/prism/one-dark";
|
5 |
+
|
6 |
+
const FormattedMarkdown = ({ children }: { children: string }) => {
|
7 |
+
return (
|
8 |
+
<TypographyStylesProvider p="md">
|
9 |
+
<Markdown
|
10 |
+
components={{
|
11 |
+
code(props) {
|
12 |
+
const { children, className, node, ref, ...rest } = props;
|
13 |
+
void node;
|
14 |
+
const languageMatch = /language-(\w+)/.exec(className || "");
|
15 |
+
return languageMatch ? (
|
16 |
+
<SyntaxHighlighter
|
17 |
+
{...rest}
|
18 |
+
ref={ref as never}
|
19 |
+
language={languageMatch[1]}
|
20 |
+
style={syntaxHighlighterStyle}
|
21 |
+
>
|
22 |
+
{children?.toString().replace(/\n$/, "") ?? ""}
|
23 |
+
</SyntaxHighlighter>
|
24 |
+
) : (
|
25 |
+
<code {...rest} className={className}>
|
26 |
+
{children}
|
27 |
+
</code>
|
28 |
+
);
|
29 |
+
},
|
30 |
+
}}
|
31 |
+
>
|
32 |
+
{children}
|
33 |
+
</Markdown>
|
34 |
+
</TypographyStylesProvider>
|
35 |
+
);
|
36 |
+
};
|
37 |
+
|
38 |
+
export default FormattedMarkdown;
|
client/components/AiResponse/LoadingModelContent.tsx
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Card, Group, Progress, Stack, Text } from "@mantine/core";
|
2 |
+
|
3 |
+
export default function LoadingModelContent({
|
4 |
+
modelLoadingProgress,
|
5 |
+
modelSizeInMegabytes,
|
6 |
+
}: {
|
7 |
+
modelLoadingProgress: number;
|
8 |
+
modelSizeInMegabytes: number;
|
9 |
+
}) {
|
10 |
+
const isLoadingStarting = modelLoadingProgress === 0;
|
11 |
+
const isLoadingComplete = modelLoadingProgress === 100;
|
12 |
+
const percent =
|
13 |
+
isLoadingComplete || isLoadingStarting ? 100 : modelLoadingProgress;
|
14 |
+
const strokeColor = percent === 100 ? "#52c41a" : "#3385ff";
|
15 |
+
const downloadedSize = (modelSizeInMegabytes * modelLoadingProgress) / 100;
|
16 |
+
const sizeText = `${downloadedSize.toFixed(0)} MB / ${modelSizeInMegabytes.toFixed(0)} MB`;
|
17 |
+
|
18 |
+
return (
|
19 |
+
<Card withBorder shadow="sm" radius="md">
|
20 |
+
<Card.Section withBorder inheritPadding py="xs">
|
21 |
+
<Text fw={500}>Loading AI...</Text>
|
22 |
+
</Card.Section>
|
23 |
+
<Card.Section withBorder inheritPadding py="md">
|
24 |
+
<Stack gap="xs">
|
25 |
+
<Progress color={strokeColor} value={percent} animated />
|
26 |
+
{!isLoadingStarting && (
|
27 |
+
<Group justify="space-between">
|
28 |
+
<Text size="sm" c="dimmed">
|
29 |
+
{sizeText}
|
30 |
+
</Text>
|
31 |
+
<Text size="sm" c="dimmed">
|
32 |
+
{percent.toFixed(1)}%
|
33 |
+
</Text>
|
34 |
+
</Group>
|
35 |
+
)}
|
36 |
+
</Stack>
|
37 |
+
</Card.Section>
|
38 |
+
</Card>
|
39 |
+
);
|
40 |
+
}
|
client/components/AiResponse/PreparingContent.tsx
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Card, Skeleton, Stack, Text } from "@mantine/core";
|
2 |
+
import { match } from "ts-pattern";
|
3 |
+
|
4 |
+
export default function PreparingContent({
|
5 |
+
textGenerationState,
|
6 |
+
}: {
|
7 |
+
textGenerationState: string;
|
8 |
+
}) {
|
9 |
+
return (
|
10 |
+
<Card withBorder shadow="sm" radius="md">
|
11 |
+
<Card.Section withBorder inheritPadding py="xs">
|
12 |
+
<Text fw={500}>
|
13 |
+
{match(textGenerationState)
|
14 |
+
.with("awaitingSearchResults", () => "Awaiting search results...")
|
15 |
+
.with("preparingToGenerate", () => "Preparing AI response...")
|
16 |
+
.otherwise(() => null)}
|
17 |
+
</Text>
|
18 |
+
</Card.Section>
|
19 |
+
<Card.Section withBorder inheritPadding py="md">
|
20 |
+
<Stack>
|
21 |
+
<Skeleton height={8} radius="xl" />
|
22 |
+
<Skeleton height={8} width="70%" radius="xl" />
|
23 |
+
<Skeleton height={8} radius="xl" />
|
24 |
+
<Skeleton height={8} width="43%" radius="xl" />
|
25 |
+
</Stack>
|
26 |
+
</Card.Section>
|
27 |
+
</Card>
|
28 |
+
);
|
29 |
+
}
|
client/components/AiResponse/WebLlmModelSelect.tsx
ADDED
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { type ComboboxItem, Select } from "@mantine/core";
|
2 |
+
import { prebuiltAppConfig } from "@mlc-ai/web-llm";
|
3 |
+
import { useCallback, useEffect, useState } from "react";
|
4 |
+
import { isF16Supported } from "../../modules/webGpu";
|
5 |
+
|
6 |
+
export default function WebLlmModelSelect({
|
7 |
+
value,
|
8 |
+
onChange,
|
9 |
+
}: {
|
10 |
+
value: string;
|
11 |
+
onChange: (value: string) => void;
|
12 |
+
}) {
|
13 |
+
const [webGpuModels] = useState<ComboboxItem[]>(() => {
|
14 |
+
const models = prebuiltAppConfig.model_list
|
15 |
+
.filter((model) => {
|
16 |
+
const isSmall = isSmallModel(model);
|
17 |
+
const suffix = getModelSuffix(isF16Supported, isSmall);
|
18 |
+
return model.model_id.endsWith(suffix);
|
19 |
+
})
|
20 |
+
.sort((a, b) => (a.vram_required_MB ?? 0) - (b.vram_required_MB ?? 0))
|
21 |
+
.map((model) => {
|
22 |
+
const modelSizeInMegabytes =
|
23 |
+
Math.round(model.vram_required_MB ?? 0) || "N/A";
|
24 |
+
const isSmall = isSmallModel(model);
|
25 |
+
const suffix = getModelSuffix(isF16Supported, isSmall);
|
26 |
+
const modelName = model.model_id.replace(suffix, "");
|
27 |
+
|
28 |
+
return {
|
29 |
+
label: `${modelSizeInMegabytes} MB β’ ${modelName}`,
|
30 |
+
value: model.model_id,
|
31 |
+
};
|
32 |
+
});
|
33 |
+
|
34 |
+
return models;
|
35 |
+
});
|
36 |
+
|
37 |
+
useEffect(() => {
|
38 |
+
const isCurrentModelValid = webGpuModels.some(
|
39 |
+
(model) => model.value === value,
|
40 |
+
);
|
41 |
+
|
42 |
+
if (!isCurrentModelValid && webGpuModels.length > 0) {
|
43 |
+
onChange(webGpuModels[0].value);
|
44 |
+
}
|
45 |
+
}, [onChange, webGpuModels, value]);
|
46 |
+
|
47 |
+
const handleChange = useCallback(
|
48 |
+
(value: string | null) => {
|
49 |
+
if (value) onChange(value);
|
50 |
+
},
|
51 |
+
[onChange],
|
52 |
+
);
|
53 |
+
|
54 |
+
return (
|
55 |
+
<Select
|
56 |
+
value={value}
|
57 |
+
onChange={handleChange}
|
58 |
+
label="AI Model"
|
59 |
+
description="Select the model to use for AI responses."
|
60 |
+
data={webGpuModels}
|
61 |
+
allowDeselect={false}
|
62 |
+
searchable
|
63 |
+
/>
|
64 |
+
);
|
65 |
+
}
|
66 |
+
|
67 |
+
type ModelConfig = (typeof prebuiltAppConfig.model_list)[number];
|
68 |
+
|
69 |
+
const smallModels = ["SmolLM2-135M", "SmolLM2-360M"] as const;
|
70 |
+
|
71 |
+
function isSmallModel(model: ModelConfig) {
|
72 |
+
return smallModels.some((smallModel) =>
|
73 |
+
model.model_id.startsWith(smallModel),
|
74 |
+
);
|
75 |
+
}
|
76 |
+
|
77 |
+
function getModelSuffix(isF16: boolean, isSmall: boolean) {
|
78 |
+
if (isSmall) return isF16 ? "-q0f16-MLC" : "-q0f32-MLC";
|
79 |
+
|
80 |
+
return isF16 ? "-q4f16_1-MLC" : "-q4f32_1-MLC";
|
81 |
+
}
|
client/components/AiResponse/WllamaModelSelect.tsx
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { type ComboboxItem, Select } from "@mantine/core";
|
2 |
+
import { useEffect, useState } from "react";
|
3 |
+
import { wllamaModels } from "../../modules/wllama";
|
4 |
+
|
5 |
+
export default function WllamaModelSelect({
|
6 |
+
value,
|
7 |
+
onChange,
|
8 |
+
}: {
|
9 |
+
value: string;
|
10 |
+
onChange: (value: string) => void;
|
11 |
+
}) {
|
12 |
+
const [wllamaModelOptions] = useState<ComboboxItem[]>(
|
13 |
+
Object.entries(wllamaModels)
|
14 |
+
.sort(([, a], [, b]) => a.fileSizeInMegabytes - b.fileSizeInMegabytes)
|
15 |
+
.map(([value, { label, fileSizeInMegabytes }]) => ({
|
16 |
+
label: `${fileSizeInMegabytes} MB β’ ${label}`,
|
17 |
+
value,
|
18 |
+
})),
|
19 |
+
);
|
20 |
+
|
21 |
+
useEffect(() => {
|
22 |
+
const isCurrentModelValid = wllamaModelOptions.some(
|
23 |
+
(model) => model.value === value,
|
24 |
+
);
|
25 |
+
|
26 |
+
if (!isCurrentModelValid && wllamaModelOptions.length > 0) {
|
27 |
+
onChange(wllamaModelOptions[0].value);
|
28 |
+
}
|
29 |
+
}, [onChange, wllamaModelOptions, value]);
|
30 |
+
|
31 |
+
return (
|
32 |
+
<Select
|
33 |
+
value={value}
|
34 |
+
onChange={(value) => value && onChange(value)}
|
35 |
+
label="AI Model"
|
36 |
+
description="Select the model to use for AI responses."
|
37 |
+
data={wllamaModelOptions}
|
38 |
+
allowDeselect={false}
|
39 |
+
searchable
|
40 |
+
/>
|
41 |
+
);
|
42 |
+
}
|
client/components/App/App.tsx
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { MantineProvider } from "@mantine/core";
|
2 |
+
import { Route, Switch } from "wouter";
|
3 |
+
import "@mantine/core/styles.css";
|
4 |
+
import { Notifications } from "@mantine/notifications";
|
5 |
+
import { usePubSub } from "create-pubsub/react";
|
6 |
+
import { lazy, useEffect, useState } from "react";
|
7 |
+
import { addLogEntry } from "../../modules/logEntries";
|
8 |
+
import { settingsPubSub } from "../../modules/pubSub";
|
9 |
+
import { defaultSettings } from "../../modules/settings";
|
10 |
+
import "@mantine/notifications/styles.css";
|
11 |
+
import { match } from "ts-pattern";
|
12 |
+
import { verifyStoredAccessKey } from "../../modules/accessKey";
|
13 |
+
|
14 |
+
const MainPage = lazy(() => import("../Pages/Main/MainPage"));
|
15 |
+
const AccessPage = lazy(() => import("../Pages/AccessPage"));
|
16 |
+
|
17 |
+
export function App() {
|
18 |
+
useInitializeSettings();
|
19 |
+
const { hasValidatedAccessKey, isCheckingStoredKey, setValidatedAccessKey } =
|
20 |
+
useAccessKeyValidation();
|
21 |
+
|
22 |
+
return match(isCheckingStoredKey)
|
23 |
+
.with(false, () => (
|
24 |
+
<MantineProvider defaultColorScheme="dark">
|
25 |
+
<Notifications />
|
26 |
+
<Switch>
|
27 |
+
<Route path="/">
|
28 |
+
{match([VITE_ACCESS_KEYS_ENABLED, hasValidatedAccessKey])
|
29 |
+
.with([true, false], () => (
|
30 |
+
<AccessPage
|
31 |
+
onAccessKeyValid={() => setValidatedAccessKey(true)}
|
32 |
+
/>
|
33 |
+
))
|
34 |
+
.otherwise(() => (
|
35 |
+
<MainPage />
|
36 |
+
))}
|
37 |
+
</Route>
|
38 |
+
</Switch>
|
39 |
+
</MantineProvider>
|
40 |
+
))
|
41 |
+
.otherwise(() => null);
|
42 |
+
}
|
43 |
+
|
44 |
+
/**
|
45 |
+
* A custom React hook that initializes the application settings.
|
46 |
+
*
|
47 |
+
* @returns The initialized settings object.
|
48 |
+
*
|
49 |
+
* @remarks
|
50 |
+
* This hook uses the `usePubSub` hook to access and update the settings state.
|
51 |
+
* It initializes the settings by merging the default settings with any existing settings.
|
52 |
+
* The initialization is performed once when the component mounts.
|
53 |
+
*/
|
54 |
+
function useInitializeSettings() {
|
55 |
+
const [settings, setSettings] = usePubSub(settingsPubSub);
|
56 |
+
const [settingsInitialized, setSettingsInitialized] = useState(false);
|
57 |
+
|
58 |
+
useEffect(() => {
|
59 |
+
if (settingsInitialized) return;
|
60 |
+
|
61 |
+
setSettings({ ...defaultSettings, ...settings });
|
62 |
+
|
63 |
+
setSettingsInitialized(true);
|
64 |
+
|
65 |
+
addLogEntry("Settings initialized");
|
66 |
+
}, [settings, setSettings, settingsInitialized]);
|
67 |
+
|
68 |
+
return settings;
|
69 |
+
}
|
70 |
+
|
71 |
+
/**
|
72 |
+
* A custom React hook that validates the stored access key on mount.
|
73 |
+
*
|
74 |
+
* @returns An object containing the validation state and loading state
|
75 |
+
*/
|
76 |
+
function useAccessKeyValidation() {
|
77 |
+
const [hasValidatedAccessKey, setValidatedAccessKey] = useState(false);
|
78 |
+
const [isCheckingStoredKey, setCheckingStoredKey] = useState(true);
|
79 |
+
|
80 |
+
useEffect(() => {
|
81 |
+
async function checkStoredAccessKey() {
|
82 |
+
if (VITE_ACCESS_KEYS_ENABLED) {
|
83 |
+
const isValid = await verifyStoredAccessKey();
|
84 |
+
if (isValid) setValidatedAccessKey(true);
|
85 |
+
}
|
86 |
+
setCheckingStoredKey(false);
|
87 |
+
}
|
88 |
+
|
89 |
+
checkStoredAccessKey();
|
90 |
+
}, []);
|
91 |
+
|
92 |
+
return {
|
93 |
+
hasValidatedAccessKey,
|
94 |
+
isCheckingStoredKey,
|
95 |
+
setValidatedAccessKey,
|
96 |
+
};
|
97 |
+
}
|
client/components/Logs/LogsModal.tsx
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Alert,
|
3 |
+
Button,
|
4 |
+
Center,
|
5 |
+
Group,
|
6 |
+
Modal,
|
7 |
+
Pagination,
|
8 |
+
Table,
|
9 |
+
} from "@mantine/core";
|
10 |
+
import { IconInfoCircle } from "@tabler/icons-react";
|
11 |
+
import { usePubSub } from "create-pubsub/react";
|
12 |
+
import { useCallback, useMemo, useState } from "react";
|
13 |
+
import { logEntriesPubSub } from "../../modules/logEntries";
|
14 |
+
|
15 |
+
export default function LogsModal({
|
16 |
+
opened,
|
17 |
+
onClose,
|
18 |
+
}: {
|
19 |
+
opened: boolean;
|
20 |
+
onClose: () => void;
|
21 |
+
}) {
|
22 |
+
const [logEntries] = usePubSub(logEntriesPubSub);
|
23 |
+
|
24 |
+
const [page, setPage] = useState(1);
|
25 |
+
|
26 |
+
const logEntriesPerPage = 5;
|
27 |
+
|
28 |
+
const logEntriesFromCurrentPage = useMemo(
|
29 |
+
() =>
|
30 |
+
logEntries.slice(
|
31 |
+
(page - 1) * logEntriesPerPage,
|
32 |
+
page * logEntriesPerPage,
|
33 |
+
),
|
34 |
+
[logEntries, page],
|
35 |
+
);
|
36 |
+
|
37 |
+
const downloadLogsAsJson = useCallback(() => {
|
38 |
+
const jsonString = JSON.stringify(logEntries, null, 2);
|
39 |
+
const blob = new Blob([jsonString], { type: "application/json" });
|
40 |
+
const url = URL.createObjectURL(blob);
|
41 |
+
const link = document.createElement("a");
|
42 |
+
link.href = url;
|
43 |
+
link.download = "logs.json";
|
44 |
+
document.body.appendChild(link);
|
45 |
+
link.click();
|
46 |
+
document.body.removeChild(link);
|
47 |
+
URL.revokeObjectURL(url);
|
48 |
+
}, [logEntries]);
|
49 |
+
|
50 |
+
return (
|
51 |
+
<Modal opened={opened} onClose={onClose} size="xl" title="Logs">
|
52 |
+
<Alert variant="light" color="blue" icon={<IconInfoCircle />} mb="md">
|
53 |
+
<Group justify="space-between" align="center">
|
54 |
+
<span>
|
55 |
+
This information is stored solely in your browser for personal use.
|
56 |
+
It isn't sent automatically and is retained for debugging purposes
|
57 |
+
should you need to{" "}
|
58 |
+
<a
|
59 |
+
href="https://github.com/felladrin/MiniSearch/issues/new?labels=bug&template=bug_report.yml"
|
60 |
+
target="_blank"
|
61 |
+
rel="noopener noreferrer"
|
62 |
+
>
|
63 |
+
report a bug
|
64 |
+
</a>
|
65 |
+
.
|
66 |
+
</span>
|
67 |
+
<Button onClick={downloadLogsAsJson} size="xs" data-autofocus>
|
68 |
+
Download Logs
|
69 |
+
</Button>
|
70 |
+
</Group>
|
71 |
+
</Alert>
|
72 |
+
<Table striped highlightOnHover withTableBorder>
|
73 |
+
<Table.Thead>
|
74 |
+
<Table.Tr>
|
75 |
+
<Table.Th>Time</Table.Th>
|
76 |
+
<Table.Th>Message</Table.Th>
|
77 |
+
</Table.Tr>
|
78 |
+
</Table.Thead>
|
79 |
+
<Table.Tbody>
|
80 |
+
{logEntriesFromCurrentPage.map((entry, index) => (
|
81 |
+
<Table.Tr key={`${entry.timestamp}-${index}`}>
|
82 |
+
<Table.Td>
|
83 |
+
{new Date(entry.timestamp).toLocaleTimeString()}
|
84 |
+
</Table.Td>
|
85 |
+
<Table.Td>{entry.message}</Table.Td>
|
86 |
+
</Table.Tr>
|
87 |
+
))}
|
88 |
+
</Table.Tbody>
|
89 |
+
</Table>
|
90 |
+
<Center>
|
91 |
+
<Pagination
|
92 |
+
total={Math.ceil(logEntries.length / logEntriesPerPage)}
|
93 |
+
value={page}
|
94 |
+
onChange={setPage}
|
95 |
+
size="sm"
|
96 |
+
mt="md"
|
97 |
+
/>
|
98 |
+
</Center>
|
99 |
+
</Modal>
|
100 |
+
);
|
101 |
+
}
|
client/components/Logs/ShowLogsButton.tsx
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Button, Center, Loader, Stack, Text } from "@mantine/core";
|
2 |
+
import { Suspense, lazy, useState } from "react";
|
3 |
+
import { addLogEntry } from "../../modules/logEntries";
|
4 |
+
|
5 |
+
const LogsModal = lazy(() => import("./LogsModal"));
|
6 |
+
|
7 |
+
export default function ShowLogsButton() {
|
8 |
+
const [isLogsModalOpen, setLogsModalOpen] = useState(false);
|
9 |
+
|
10 |
+
const handleShowLogsButtonClick = () => {
|
11 |
+
addLogEntry("User opened the logs modal");
|
12 |
+
setLogsModalOpen(true);
|
13 |
+
};
|
14 |
+
|
15 |
+
const handleCloseLogsButtonClick = () => {
|
16 |
+
addLogEntry("User closed the logs modal");
|
17 |
+
setLogsModalOpen(false);
|
18 |
+
};
|
19 |
+
|
20 |
+
return (
|
21 |
+
<Stack gap="xs">
|
22 |
+
<Suspense
|
23 |
+
fallback={
|
24 |
+
<Center>
|
25 |
+
<Loader color="gray" type="bars" />
|
26 |
+
</Center>
|
27 |
+
}
|
28 |
+
>
|
29 |
+
<Button size="sm" onClick={handleShowLogsButtonClick} variant="default">
|
30 |
+
Show logs
|
31 |
+
</Button>
|
32 |
+
<Text size="xs" c="dimmed">
|
33 |
+
View session logs for debugging.
|
34 |
+
</Text>
|
35 |
+
<LogsModal
|
36 |
+
opened={isLogsModalOpen}
|
37 |
+
onClose={handleCloseLogsButtonClick}
|
38 |
+
/>
|
39 |
+
</Suspense>
|
40 |
+
</Stack>
|
41 |
+
);
|
42 |
+
}
|
client/components/Pages/AccessPage.tsx
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Button, Container, Stack, TextInput, Title } from "@mantine/core";
|
2 |
+
import { type FormEvent, useState } from "react";
|
3 |
+
import { validateAccessKey } from "../../modules/accessKey";
|
4 |
+
import { addLogEntry } from "../../modules/logEntries";
|
5 |
+
|
6 |
+
export default function AccessPage({
|
7 |
+
onAccessKeyValid,
|
8 |
+
}: {
|
9 |
+
onAccessKeyValid: () => void;
|
10 |
+
}) {
|
11 |
+
const [accessKey, setAccessKey] = useState("");
|
12 |
+
const [error, setError] = useState("");
|
13 |
+
|
14 |
+
const handleSubmit = async (formEvent: FormEvent<HTMLFormElement>) => {
|
15 |
+
formEvent.preventDefault();
|
16 |
+
setError("");
|
17 |
+
try {
|
18 |
+
const isValid = await validateAccessKey(accessKey);
|
19 |
+
if (isValid) {
|
20 |
+
addLogEntry("Valid access key entered");
|
21 |
+
onAccessKeyValid();
|
22 |
+
} else {
|
23 |
+
setError("Invalid access key");
|
24 |
+
addLogEntry("Invalid access key attempt");
|
25 |
+
}
|
26 |
+
} catch (error) {
|
27 |
+
setError("Error validating access key");
|
28 |
+
addLogEntry(`Error validating access key: ${error}`);
|
29 |
+
}
|
30 |
+
};
|
31 |
+
|
32 |
+
return (
|
33 |
+
<Container size="xs">
|
34 |
+
<Stack p="lg" mih="100vh" justify="center">
|
35 |
+
<Title order={2} ta="center">
|
36 |
+
Access Restricted
|
37 |
+
</Title>
|
38 |
+
<form onSubmit={handleSubmit}>
|
39 |
+
<Stack gap="xs">
|
40 |
+
<TextInput
|
41 |
+
value={accessKey}
|
42 |
+
onChange={({ target }) => setAccessKey(target.value)}
|
43 |
+
placeholder="Enter your access key to continue"
|
44 |
+
required
|
45 |
+
autoFocus
|
46 |
+
error={error}
|
47 |
+
styles={{
|
48 |
+
input: {
|
49 |
+
textAlign: "center",
|
50 |
+
},
|
51 |
+
}}
|
52 |
+
/>
|
53 |
+
<Button size="xs" type="submit">
|
54 |
+
Submit
|
55 |
+
</Button>
|
56 |
+
</Stack>
|
57 |
+
</form>
|
58 |
+
</Stack>
|
59 |
+
</Container>
|
60 |
+
);
|
61 |
+
}
|
client/components/Pages/Main/MainPage.tsx
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Center, Container, Loader, Stack } from "@mantine/core";
|
2 |
+
import { usePubSub } from "create-pubsub/react";
|
3 |
+
import { Suspense } from "react";
|
4 |
+
import { lazy } from "react";
|
5 |
+
import { Pattern, match } from "ts-pattern";
|
6 |
+
import {
|
7 |
+
queryPubSub,
|
8 |
+
searchStatePubSub,
|
9 |
+
textGenerationStatePubSub,
|
10 |
+
} from "../../../modules/pubSub";
|
11 |
+
|
12 |
+
const AiResponseSection = lazy(
|
13 |
+
() => import("../../AiResponse/AiResponseSection"),
|
14 |
+
);
|
15 |
+
const SearchResultsSection = lazy(
|
16 |
+
() => import("../../Search/Results/SearchResultsSection"),
|
17 |
+
);
|
18 |
+
const MenuButton = lazy(() => import("./Menu/MenuButton"));
|
19 |
+
const SearchForm = lazy(() => import("../../Search/Form/SearchForm"));
|
20 |
+
|
21 |
+
export default function MainPage() {
|
22 |
+
const [query, updateQuery] = usePubSub(queryPubSub);
|
23 |
+
const [searchState] = usePubSub(searchStatePubSub);
|
24 |
+
const [textGenerationState] = usePubSub(textGenerationStatePubSub);
|
25 |
+
|
26 |
+
return (
|
27 |
+
<Container>
|
28 |
+
<Stack
|
29 |
+
py="md"
|
30 |
+
mih="100vh"
|
31 |
+
justify={match(query)
|
32 |
+
.with(Pattern.string.length(0), () => "center")
|
33 |
+
.otherwise(() => undefined)}
|
34 |
+
>
|
35 |
+
<Suspense
|
36 |
+
fallback={
|
37 |
+
<Center>
|
38 |
+
<Loader type="bars" />
|
39 |
+
</Center>
|
40 |
+
}
|
41 |
+
>
|
42 |
+
<SearchForm
|
43 |
+
query={query}
|
44 |
+
updateQuery={updateQuery}
|
45 |
+
additionalButtons={<MenuButton />}
|
46 |
+
/>
|
47 |
+
</Suspense>
|
48 |
+
{match(textGenerationState)
|
49 |
+
.with(Pattern.not("idle"), () => (
|
50 |
+
<Suspense>
|
51 |
+
<AiResponseSection />
|
52 |
+
</Suspense>
|
53 |
+
))
|
54 |
+
.otherwise(() => null)}
|
55 |
+
{match(searchState)
|
56 |
+
.with(Pattern.not("idle"), () => (
|
57 |
+
<Suspense>
|
58 |
+
<SearchResultsSection />
|
59 |
+
</Suspense>
|
60 |
+
))
|
61 |
+
.otherwise(() => null)}
|
62 |
+
</Stack>
|
63 |
+
</Container>
|
64 |
+
);
|
65 |
+
}
|
client/components/Pages/Main/Menu/AISettingsForm.tsx
ADDED
@@ -0,0 +1,366 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Group,
|
3 |
+
NumberInput,
|
4 |
+
Select,
|
5 |
+
Skeleton,
|
6 |
+
Slider,
|
7 |
+
Stack,
|
8 |
+
Switch,
|
9 |
+
Text,
|
10 |
+
TextInput,
|
11 |
+
Textarea,
|
12 |
+
} from "@mantine/core";
|
13 |
+
import { useForm } from "@mantine/form";
|
14 |
+
import { IconInfoCircle } from "@tabler/icons-react";
|
15 |
+
import { usePubSub } from "create-pubsub/react";
|
16 |
+
import { Suspense, lazy, useEffect, useState } from "react";
|
17 |
+
import { Pattern, match } from "ts-pattern";
|
18 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
19 |
+
import { getOpenAiClient } from "../../../../modules/openai";
|
20 |
+
import { settingsPubSub } from "../../../../modules/pubSub";
|
21 |
+
import { defaultSettings, inferenceTypes } from "../../../../modules/settings";
|
22 |
+
import { isWebGPUAvailable } from "../../../../modules/webGpu";
|
23 |
+
|
24 |
+
const WebLlmModelSelect = lazy(
|
25 |
+
() => import("../../../AiResponse/WebLlmModelSelect"),
|
26 |
+
);
|
27 |
+
const WllamaModelSelect = lazy(
|
28 |
+
() => import("../../../AiResponse/WllamaModelSelect"),
|
29 |
+
);
|
30 |
+
|
31 |
+
/**
 * Settings panel for the AI response feature.
 *
 * Persists all values through `settingsPubSub` (every form change is pushed
 * via `onValuesChange`). When the "openai" inference type is selected, it
 * fetches the model list from the configured API server and keeps the
 * selected model consistent with that list.
 *
 * NOTE(review): the three effects below intentionally depend on specific
 * `form.*` members rather than `form` itself; reordering or merging them may
 * change when field errors/values are written — keep them separate.
 */
export default function AISettingsForm() {
  const [settings, setSettings] = usePubSub(settingsPubSub);
  // Options for the "API Model" select, fetched from the OpenAI-compatible
  // server. Empty until a successful fetch (the select is disabled then).
  const [openAiModels, setOpenAiModels] = useState<
    {
      label: string;
      value: string;
    }[]
  >([]);
  // Last model-list fetch error, mirrored into the form's field error below.
  const [openAiApiModelError, setOpenAiApiModelError] = useState<
    string | undefined
  >(undefined);

  const form = useForm({
    initialValues: settings,
    onValuesChange: setSettings,
  });

  // Fetch the model list whenever the OpenAI connection settings change.
  useEffect(() => {
    async function fetchOpenAiModels() {
      try {
        const openai = getOpenAiClient({
          baseURL: settings.openAiApiBaseUrl,
          apiKey: settings.openAiApiKey,
        });
        const response = await openai.models.list();
        const models = response.data.map((model) => ({
          label: model.id,
          value: model.id,
        }));
        setOpenAiModels(models);
        setOpenAiApiModelError(undefined);
      } catch (error) {
        const errorMessage =
          error instanceof Error ? error.message : String(error);
        addLogEntry(`Error fetching OpenAI models: ${errorMessage}`);
        // On failure, clear the list (disabling the select) and surface the
        // error message on the model field.
        setOpenAiModels([]);
        setOpenAiApiModelError(errorMessage);
      }
    }

    if (settings.inferenceType === "openai" && settings.openAiApiBaseUrl) {
      fetchOpenAiModels();
    }
  }, [
    settings.inferenceType,
    settings.openAiApiBaseUrl,
    settings.openAiApiKey,
  ]);

  // Mirror the fetch error into the form's field error, avoiding a redundant
  // write (and re-render) when it is already set to the same value.
  useEffect(() => {
    if (openAiApiModelError === form.errors.openAiApiModel) return;

    form.setFieldError("openAiApiModel", openAiApiModelError);
  }, [openAiApiModelError, form.setFieldError, form.errors.openAiApiModel]);

  // Keep the selected model valid: if none is selected, or the selection is
  // no longer present in the fetched list, fall back to the first model.
  useEffect(() => {
    if (openAiModels.length > 0) {
      const hasNoModelSelected = !form.values.openAiApiModel;
      const isModelInvalid = !openAiModels.find(
        (model) => model.value === form.values.openAiApiModel,
      );

      if (hasNoModelSelected || isModelInvalid) {
        form.setFieldValue("openAiApiModel", openAiModels[0].value);
      }
    }
  }, [openAiModels, form.setFieldValue, form.values.openAiApiModel]);

  // Whether the user has edited the system prompt away from the default;
  // controls the "restore default instructions" hint below.
  const isUsingCustomInstructions =
    form.values.systemPrompt !== defaultSettings.systemPrompt;

  const handleRestoreDefaultInstructions = () => {
    form.setFieldValue("systemPrompt", defaultSettings.systemPrompt);
  };

  // Suggested CPU thread count: logical processors minus two (but never
  // below the default). Falls back to the default when
  // `navigator.hardwareConcurrency` is unavailable.
  const suggestedCpuThreads =
    (navigator.hardwareConcurrency &&
      Math.max(
        defaultSettings.cpuThreads,
        navigator.hardwareConcurrency - 2,
      )) ??
    defaultSettings.cpuThreads;

  return (
    <Stack gap="md">
      <Switch
        label="AI Response"
        {...form.getInputProps("enableAiResponse", {
          type: "checkbox",
        })}
        labelPosition="left"
        description="Enable or disable AI-generated responses to your queries. When disabled, you'll only see web search results."
      />

      {form.values.enableAiResponse && (
        <>
          <Stack gap="xs" mb="md">
            <Text size="sm">Search results to consider</Text>
            <Text size="xs" c="dimmed">
              Determines the number of search results to consider when
              generating AI responses. A higher value may enhance accuracy, but
              it will also increase response time.
            </Text>
            <Slider
              {...form.getInputProps("searchResultsToConsider")}
              min={0}
              max={6}
              marks={Array.from({ length: 7 }, (_, index) => ({
                value: index,
                label: index.toString(),
              }))}
            />
          </Stack>

          <Select
            {...form.getInputProps("inferenceType")}
            label="AI Processing Location"
            data={inferenceTypes}
            allowDeselect={false}
          />

          {form.values.inferenceType === "openai" && (
            <>
              <TextInput
                {...form.getInputProps("openAiApiBaseUrl")}
                label="API Base URL"
                placeholder="http://localhost:11434/v1"
                required
              />
              <Group gap="xs">
                <IconInfoCircle size={16} />
                <Text size="xs" c="dimmed" flex={1}>
                  You may need to add{" "}
                  <em>
                    {`${self.location.protocol}//${self.location.hostname}`}
                  </em>{" "}
                  to the list of allowed network origins in your API server
                  settings.
                </Text>
              </Group>
              <TextInput
                {...form.getInputProps("openAiApiKey")}
                label="API Key"
                type="password"
                description="Optional, as local API servers usually do not require it."
              />
              <Select
                {...form.getInputProps("openAiApiModel")}
                label="API Model"
                data={openAiModels}
                description="Optional, as some API servers don't provide a model list."
                allowDeselect={false}
                disabled={openAiModels.length === 0}
                searchable
              />
            </>
          )}

          {form.values.inferenceType === "browser" && (
            <>
              {isWebGPUAvailable && (
                <Switch
                  label="WebGPU"
                  {...form.getInputProps("enableWebGpu", {
                    type: "checkbox",
                  })}
                  labelPosition="left"
                  description="Enable or disable WebGPU usage. When disabled, the app will use the CPU instead."
                />
              )}

              {match([isWebGPUAvailable, form.values.enableWebGpu])
                .with([true, true], () => (
                  <Suspense fallback={<Skeleton height={50} />}>
                    <WebLlmModelSelect
                      value={form.values.webLlmModelId}
                      onChange={(value) =>
                        form.setFieldValue("webLlmModelId", value)
                      }
                    />
                  </Suspense>
                ))
                .with([false, Pattern.any], [Pattern.any, false], () => (
                  <>
                    <Suspense fallback={<Skeleton height={50} />}>
                      <WllamaModelSelect
                        value={form.values.wllamaModelId}
                        onChange={(value) =>
                          form.setFieldValue("wllamaModelId", value)
                        }
                      />
                    </Suspense>
                    <NumberInput
                      label="CPU threads to use"
                      description={
                        <>
                          <span>
                            Number of threads to use for the AI model. Lower
                            values will use less CPU but may take longer to
                            respond. A value that is too high may cause the app
                            to hang.
                          </span>
                          {suggestedCpuThreads > defaultSettings.cpuThreads && (
                            <span>
                              {" "}
                              The default value is{" "}
                              <Text
                                component="span"
                                size="xs"
                                c="blue"
                                style={{ cursor: "pointer" }}
                                onClick={() =>
                                  form.setFieldValue(
                                    "cpuThreads",
                                    defaultSettings.cpuThreads,
                                  )
                                }
                              >
                                {defaultSettings.cpuThreads}
                              </Text>
                              , but based on the number of logical processors in
                              your CPU, the suggested value is{" "}
                              <Text
                                component="span"
                                size="xs"
                                c="blue"
                                style={{ cursor: "pointer" }}
                                onClick={() =>
                                  form.setFieldValue(
                                    "cpuThreads",
                                    suggestedCpuThreads,
                                  )
                                }
                              >
                                {suggestedCpuThreads}
                              </Text>
                              .
                            </span>
                          )}
                        </>
                      }
                      min={1}
                      {...form.getInputProps("cpuThreads")}
                    />
                  </>
                ))
                .otherwise(() => null)}
            </>
          )}

          <Textarea
            label="Instructions for AI"
            descriptionProps={{ component: "div" }}
            description={
              <>
                <span>
                  Customize instructions for the AI to tailor its responses.
                </span>
                <br />
                <span>For example:</span>
                <ul>
                  <li>
                    Specify preferences
                    <ul>
                      <li>
                        <em>"use simple language"</em>
                      </li>
                      <li>
                        <em>"provide step-by-step explanations"</em>
                      </li>
                    </ul>
                  </li>
                  <li>
                    Set a response style
                    <ul>
                      <li>
                        <em>"answer in a friendly tone"</em>
                      </li>
                      <li>
                        <em>"write your response in Spanish"</em>
                      </li>
                    </ul>
                  </li>
                  <li>
                    Provide context about the audience
                    <ul>
                      <li>
                        <em>"you're talking to a high school student"</em>
                      </li>
                      <li>
                        <em>
                          "consider that your audience is composed of
                          professionals in the field of graphic design"
                        </em>
                      </li>
                    </ul>
                  </li>
                </ul>
                <span>
                  The special tag <em>{"{{searchResults}}"}</em> will be
                  replaced with the search results, while{" "}
                  <em>{"{{dateTime}}"}</em> will be replaced with the current
                  date and time.
                </span>
                {isUsingCustomInstructions && (
                  <>
                    <br />
                    <br />
                    <span>
                      Currently, you're using custom instructions. If you ever
                      need to restore the default instructions, you can do so by
                      clicking
                    </span>{" "}
                    <Text
                      component="span"
                      size="xs"
                      c="blue"
                      style={{ cursor: "pointer" }}
                      onClick={handleRestoreDefaultInstructions}
                    >
                      here
                    </Text>
                    <span>.</span>
                  </>
                )}
              </>
            }
            autosize
            maxRows={10}
            {...form.getInputProps("systemPrompt")}
          />
        </>
      )}
    </Stack>
  );
}
|
client/components/Pages/Main/Menu/ActionsForm.tsx
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Stack } from "@mantine/core";
|
2 |
+
import { Suspense, lazy } from "react";
|
3 |
+
|
4 |
+
const ClearDataButton = lazy(() => import("./ClearDataButton"));
|
5 |
+
const ShowLogsButton = lazy(() => import("../../../Logs/ShowLogsButton"));
|
6 |
+
|
7 |
+
/**
 * "Actions" panel of the settings menu.
 *
 * Each action button is lazily loaded and rendered inside its own Suspense
 * boundary so one slow/failed chunk does not block the other.
 */
export default function ActionsForm() {
  return (
    <Stack gap="lg">
      <Suspense>
        <ClearDataButton />
      </Suspense>
      <Suspense>
        <ShowLogsButton />
      </Suspense>
    </Stack>
  );
}
|
client/components/Pages/Main/Menu/ClearDataButton.tsx
ADDED
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Button, Stack, Text } from "@mantine/core";
|
2 |
+
import { useState } from "react";
|
3 |
+
import { useLocation } from "wouter";
|
4 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
5 |
+
|
6 |
+
export default function ClearDataButton() {
|
7 |
+
const [isClearingData, setIsClearingData] = useState(false);
|
8 |
+
const [hasClearedData, setHasClearedData] = useState(false);
|
9 |
+
const [, navigate] = useLocation();
|
10 |
+
|
11 |
+
const handleClearDataButtonClick = async () => {
|
12 |
+
const sureToDelete = self.confirm(
|
13 |
+
"Are you sure you want to reset the settings and delete all files in cache?",
|
14 |
+
);
|
15 |
+
|
16 |
+
if (!sureToDelete) return;
|
17 |
+
|
18 |
+
addLogEntry("User initiated data clearing");
|
19 |
+
|
20 |
+
setIsClearingData(true);
|
21 |
+
|
22 |
+
self.localStorage.clear();
|
23 |
+
|
24 |
+
for (const cacheName of await self.caches.keys()) {
|
25 |
+
await self.caches.delete(cacheName);
|
26 |
+
}
|
27 |
+
|
28 |
+
for (const databaseInfo of await self.indexedDB.databases()) {
|
29 |
+
if (databaseInfo.name) self.indexedDB.deleteDatabase(databaseInfo.name);
|
30 |
+
}
|
31 |
+
|
32 |
+
setIsClearingData(false);
|
33 |
+
|
34 |
+
setHasClearedData(true);
|
35 |
+
|
36 |
+
addLogEntry("All data cleared successfully");
|
37 |
+
|
38 |
+
navigate("/", { replace: true });
|
39 |
+
|
40 |
+
self.location.reload();
|
41 |
+
};
|
42 |
+
|
43 |
+
return (
|
44 |
+
<Stack gap="xs">
|
45 |
+
<Button
|
46 |
+
onClick={handleClearDataButtonClick}
|
47 |
+
variant="default"
|
48 |
+
loading={isClearingData}
|
49 |
+
loaderProps={{ type: "bars" }}
|
50 |
+
disabled={hasClearedData}
|
51 |
+
>
|
52 |
+
{hasClearedData ? "Data cleared" : "Clear all data"}
|
53 |
+
</Button>
|
54 |
+
<Text size="xs" c="dimmed">
|
55 |
+
Reset settings and delete all files in cache to free up space.
|
56 |
+
</Text>
|
57 |
+
</Stack>
|
58 |
+
);
|
59 |
+
}
|
client/components/Pages/Main/Menu/InterfaceSettingsForm.tsx
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Stack,
|
3 |
+
Switch,
|
4 |
+
useComputedColorScheme,
|
5 |
+
useMantineColorScheme,
|
6 |
+
} from "@mantine/core";
|
7 |
+
import { useForm } from "@mantine/form";
|
8 |
+
import { usePubSub } from "create-pubsub/react";
|
9 |
+
import { settingsPubSub } from "../../../../modules/pubSub";
|
10 |
+
|
11 |
+
/**
 * Interface settings panel: dark-mode toggle (backed by Mantine's color
 * scheme) and the "Enter to Submit" behavior switch (persisted via
 * `settingsPubSub`).
 */
export default function InterfaceSettingsForm() {
  const [settings, setSettings] = usePubSub(settingsPubSub);
  const form = useForm({
    initialValues: settings,
    onValuesChange: setSettings,
  });
  const { setColorScheme } = useMantineColorScheme();
  const computedColorScheme = useComputedColorScheme("light");

  // True when the effective (computed) scheme is dark.
  const isDarkMode = computedColorScheme === "dark";

  const toggleColorScheme = () => {
    setColorScheme(isDarkMode ? "light" : "dark");
  };

  return (
    <Stack gap="md">
      <Switch
        label="Dark Mode"
        checked={isDarkMode}
        onChange={toggleColorScheme}
        labelPosition="left"
        description="Enable or disable the dark color scheme."
        styles={{ labelWrapper: { width: "100%" } }}
      />

      <Switch
        {...form.getInputProps("enterToSubmit", {
          type: "checkbox",
        })}
        label="Enter to Submit"
        labelPosition="left"
        description="Enable or disable using Enter key to submit the search query. When disabled, you'll need to click the Search button or use Shift+Enter to submit."
      />
    </Stack>
  );
}
}
|
client/components/Pages/Main/Menu/MenuButton.tsx
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Button } from "@mantine/core";
|
2 |
+
import { Suspense, lazy, useCallback, useEffect, useState } from "react";
|
3 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
4 |
+
|
5 |
+
const MenuDrawer = lazy(() => import("./MenuDrawer"));
|
6 |
+
|
7 |
+
export default function MenuButton() {
|
8 |
+
const [isDrawerOpen, setDrawerOpen] = useState(false);
|
9 |
+
const [isDrawerLoaded, setDrawerLoaded] = useState(false);
|
10 |
+
|
11 |
+
const openDrawer = useCallback(() => {
|
12 |
+
setDrawerOpen(true);
|
13 |
+
addLogEntry("User opened the menu");
|
14 |
+
}, []);
|
15 |
+
|
16 |
+
const closeDrawer = useCallback(() => {
|
17 |
+
setDrawerOpen(false);
|
18 |
+
addLogEntry("User closed the menu");
|
19 |
+
}, []);
|
20 |
+
|
21 |
+
const handleDrawerLoad = useCallback(() => {
|
22 |
+
if (!isDrawerLoaded) {
|
23 |
+
addLogEntry("Menu drawer loaded");
|
24 |
+
setDrawerLoaded(true);
|
25 |
+
}
|
26 |
+
}, [isDrawerLoaded]);
|
27 |
+
|
28 |
+
return (
|
29 |
+
<>
|
30 |
+
<Button
|
31 |
+
size="xs"
|
32 |
+
onClick={openDrawer}
|
33 |
+
variant="default"
|
34 |
+
loading={isDrawerOpen && !isDrawerLoaded}
|
35 |
+
>
|
36 |
+
Menu
|
37 |
+
</Button>
|
38 |
+
{(isDrawerOpen || isDrawerLoaded) && (
|
39 |
+
<Suspense fallback={<SuspenseListener onUnload={handleDrawerLoad} />}>
|
40 |
+
<MenuDrawer onClose={closeDrawer} opened={isDrawerOpen} />
|
41 |
+
</Suspense>
|
42 |
+
)}
|
43 |
+
</>
|
44 |
+
);
|
45 |
+
}
|
46 |
+
|
47 |
+
function SuspenseListener({ onUnload }: { onUnload: () => void }) {
|
48 |
+
useEffect(() => {
|
49 |
+
return () => onUnload();
|
50 |
+
}, [onUnload]);
|
51 |
+
|
52 |
+
return null;
|
53 |
+
}
|
client/components/Pages/Main/Menu/MenuDrawer.tsx
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Accordion,
|
3 |
+
ActionIcon,
|
4 |
+
Center,
|
5 |
+
Drawer,
|
6 |
+
type DrawerProps,
|
7 |
+
FocusTrap,
|
8 |
+
Group,
|
9 |
+
HoverCard,
|
10 |
+
Stack,
|
11 |
+
} from "@mantine/core";
|
12 |
+
import { IconBrandGithub } from "@tabler/icons-react";
|
13 |
+
import prettyMilliseconds from "pretty-ms";
|
14 |
+
import { Suspense, lazy } from "react";
|
15 |
+
import { repository } from "../../../../../package.json";
|
16 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
17 |
+
import { getSemanticVersion } from "../../../../modules/stringFormatters";
|
18 |
+
|
19 |
+
const AISettingsForm = lazy(() => import("./AISettingsForm"));
|
20 |
+
const SearchSettingsForm = lazy(() => import("./SearchSettingsForm"));
|
21 |
+
const InterfaceSettingsForm = lazy(() => import("./InterfaceSettingsForm"));
|
22 |
+
const ActionsForm = lazy(() => import("./ActionsForm"));
|
23 |
+
|
24 |
+
/**
 * Right-hand settings drawer with accordion sections (AI, Search, Interface,
 * Actions), each lazily loaded. The drawer title shows the repository name,
 * a link to GitHub, and build/version info on hover.
 *
 * NOTE(review): VITE_BUILD_DATE_TIME and VITE_COMMIT_SHORT_HASH appear to be
 * compile-time globals (presumably injected via Vite's `define`) — confirm
 * against the Vite config.
 */
export default function MenuDrawer(drawerProps: DrawerProps) {
  // Repository name derived from the package.json repository URL's last path
  // segment.
  const repoName = repository.url.split("/").pop();

  return (
    <Drawer
      {...drawerProps}
      position="right"
      size="md"
      title={
        <Group gap="xs">
          <ActionIcon
            variant="subtle"
            component="a"
            color="var(--mantine-color-text)"
            href={repository.url}
            target="_blank"
            onClick={() => addLogEntry("User clicked the GitHub link")}
          >
            <IconBrandGithub size={16} />
          </ActionIcon>
          <HoverCard shadow="md" withArrow>
            <HoverCard.Target>
              <Center>{repoName}</Center>
            </HoverCard.Target>
            <HoverCard.Dropdown>
              <Stack gap="xs">
                <Center>{repoName}</Center>
                {/* Version string: semantic version derived from build date
                    plus the short commit hash. */}
                <Center>
                  {`v${getSemanticVersion(VITE_BUILD_DATE_TIME)}+${VITE_COMMIT_SHORT_HASH}`}
                </Center>
                {/* Human-readable age of the build. */}
                <Center>
                  Released{" "}
                  {prettyMilliseconds(
                    new Date().getTime() -
                      new Date(VITE_BUILD_DATE_TIME).getTime(),
                    {
                      compact: true,
                      verbose: true,
                    },
                  )}{" "}
                  ago
                </Center>
              </Stack>
            </HoverCard.Dropdown>
          </HoverCard>
        </Group>
      }
    >
      <FocusTrap.InitialFocus />
      <Drawer.Body>
        <Accordion variant="separated" multiple>
          <Accordion.Item value="aiSettings">
            <Accordion.Control>AI Settings</Accordion.Control>
            <Accordion.Panel>
              <Suspense>
                <AISettingsForm />
              </Suspense>
            </Accordion.Panel>
          </Accordion.Item>
          <Accordion.Item value="searchSettings">
            <Accordion.Control>Search Settings</Accordion.Control>
            <Accordion.Panel>
              <Suspense>
                <SearchSettingsForm />
              </Suspense>
            </Accordion.Panel>
          </Accordion.Item>
          <Accordion.Item value="interfaceSettings">
            <Accordion.Control>Interface Settings</Accordion.Control>
            <Accordion.Panel>
              <Suspense>
                <InterfaceSettingsForm />
              </Suspense>
            </Accordion.Panel>
          </Accordion.Item>
          <Accordion.Item value="actions">
            <Accordion.Control>Actions</Accordion.Control>
            <Accordion.Panel>
              <Suspense>
                <ActionsForm />
              </Suspense>
            </Accordion.Panel>
          </Accordion.Item>
        </Accordion>
      </Drawer.Body>
    </Drawer>
  );
}
|
client/components/Pages/Main/Menu/SearchSettingsForm.tsx
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Slider, Stack, Switch, Text } from "@mantine/core";
|
2 |
+
import { useForm } from "@mantine/form";
|
3 |
+
import { usePubSub } from "create-pubsub/react";
|
4 |
+
import { settingsPubSub } from "../../../../modules/pubSub";
|
5 |
+
|
6 |
+
export default function SearchSettingsForm() {
|
7 |
+
const [settings, setSettings] = usePubSub(settingsPubSub);
|
8 |
+
const form = useForm({
|
9 |
+
initialValues: settings,
|
10 |
+
onValuesChange: setSettings,
|
11 |
+
});
|
12 |
+
|
13 |
+
return (
|
14 |
+
<Stack gap="md">
|
15 |
+
<Stack gap="xs" mb="md">
|
16 |
+
<Text size="sm">Search Results Limit</Text>
|
17 |
+
<Text size="xs" c="dimmed">
|
18 |
+
Maximum number of search results to fetch. A higher value provides
|
19 |
+
more results but may increase search time.
|
20 |
+
</Text>
|
21 |
+
<Slider
|
22 |
+
{...form.getInputProps("searchResultsLimit")}
|
23 |
+
min={5}
|
24 |
+
max={30}
|
25 |
+
step={5}
|
26 |
+
marks={[5, 10, 15, 20, 25, 30].map((value) => ({
|
27 |
+
value,
|
28 |
+
label: value.toString(),
|
29 |
+
}))}
|
30 |
+
/>
|
31 |
+
</Stack>
|
32 |
+
|
33 |
+
<Switch
|
34 |
+
{...form.getInputProps("enableImageSearch", {
|
35 |
+
type: "checkbox",
|
36 |
+
})}
|
37 |
+
label="Image Search"
|
38 |
+
labelPosition="left"
|
39 |
+
description="Enable or disable image search results. When enabled, relevant images will be displayed alongside web search results."
|
40 |
+
/>
|
41 |
+
</Stack>
|
42 |
+
);
|
43 |
+
}
|
client/components/Search/Form/SearchForm.tsx
ADDED
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Button, Group, Stack, Textarea } from "@mantine/core";
|
2 |
+
import { usePubSub } from "create-pubsub/react";
|
3 |
+
import {
|
4 |
+
type ChangeEvent,
|
5 |
+
type KeyboardEvent,
|
6 |
+
type ReactNode,
|
7 |
+
useCallback,
|
8 |
+
useEffect,
|
9 |
+
useRef,
|
10 |
+
useState,
|
11 |
+
} from "react";
|
12 |
+
import { Pattern, match } from "ts-pattern";
|
13 |
+
import { useLocation } from "wouter";
|
14 |
+
import { addLogEntry } from "../../../modules/logEntries";
|
15 |
+
import { postMessageToParentWindow } from "../../../modules/parentWindow";
|
16 |
+
import { settingsPubSub } from "../../../modules/pubSub";
|
17 |
+
import { getRandomQuerySuggestion } from "../../../modules/querySuggestions";
|
18 |
+
import { sleepUntilIdle } from "../../../modules/sleep";
|
19 |
+
import { searchAndRespond } from "../../../modules/textGeneration";
|
20 |
+
|
21 |
+
export default function SearchForm({
|
22 |
+
query,
|
23 |
+
updateQuery,
|
24 |
+
additionalButtons,
|
25 |
+
}: {
|
26 |
+
query: string;
|
27 |
+
updateQuery: (query: string) => void;
|
28 |
+
additionalButtons?: ReactNode;
|
29 |
+
}) {
|
30 |
+
const textAreaRef = useRef<HTMLTextAreaElement>(null);
|
31 |
+
const [textAreaValue, setTextAreaValue] = useState(query);
|
32 |
+
const defaultSuggestedQuery = "Anything you need!";
|
33 |
+
const [suggestedQuery, setSuggestedQuery] = useState(defaultSuggestedQuery);
|
34 |
+
const [, navigate] = useLocation();
|
35 |
+
const [settings] = usePubSub(settingsPubSub);
|
36 |
+
|
37 |
+
const handleMount = useCallback(async () => {
|
38 |
+
await sleepUntilIdle();
|
39 |
+
searchAndRespond();
|
40 |
+
}, []);
|
41 |
+
|
42 |
+
const handleInitialSuggestion = useCallback(async () => {
|
43 |
+
const suggestion = await getRandomQuerySuggestion();
|
44 |
+
setSuggestedQuery(suggestion);
|
45 |
+
}, []);
|
46 |
+
|
47 |
+
useEffect(() => {
|
48 |
+
handleMount();
|
49 |
+
handleInitialSuggestion();
|
50 |
+
}, [handleMount, handleInitialSuggestion]);
|
51 |
+
|
52 |
+
const handleInputChange = async (event: ChangeEvent<HTMLTextAreaElement>) => {
|
53 |
+
const text = event.target.value;
|
54 |
+
|
55 |
+
setTextAreaValue(text);
|
56 |
+
|
57 |
+
if (text.length === 0) {
|
58 |
+
setSuggestedQuery(await getRandomQuerySuggestion());
|
59 |
+
}
|
60 |
+
};
|
61 |
+
|
62 |
+
const handleClearButtonClick = async () => {
|
63 |
+
setSuggestedQuery(await getRandomQuerySuggestion());
|
64 |
+
setTextAreaValue("");
|
65 |
+
textAreaRef.current?.focus();
|
66 |
+
addLogEntry("User cleaned the search query field");
|
67 |
+
};
|
68 |
+
|
69 |
+
const startSearching = useCallback(() => {
|
70 |
+
const queryToEncode = match(textAreaValue.trim())
|
71 |
+
.with(Pattern.string.minLength(1), () => textAreaValue)
|
72 |
+
.otherwise(() => suggestedQuery);
|
73 |
+
|
74 |
+
setTextAreaValue(queryToEncode);
|
75 |
+
|
76 |
+
const queryString = `q=${encodeURIComponent(queryToEncode)}`;
|
77 |
+
|
78 |
+
postMessageToParentWindow({ queryString, hash: "" });
|
79 |
+
|
80 |
+
navigate(`/?${queryString}`, { replace: true });
|
81 |
+
|
82 |
+
updateQuery(queryToEncode);
|
83 |
+
|
84 |
+
searchAndRespond();
|
85 |
+
|
86 |
+
addLogEntry(
|
87 |
+
`User submitted a search with ${queryToEncode.length} characters length`,
|
88 |
+
);
|
89 |
+
}, [textAreaValue, suggestedQuery, updateQuery, navigate]);
|
90 |
+
|
91 |
+
const handleSubmit = (event: { preventDefault: () => void }) => {
|
92 |
+
event.preventDefault();
|
93 |
+
startSearching();
|
94 |
+
};
|
95 |
+
|
96 |
+
const handleKeyDown = (event: KeyboardEvent<HTMLTextAreaElement>) => {
|
97 |
+
match([event, settings.enterToSubmit])
|
98 |
+
.with(
|
99 |
+
[{ code: "Enter", shiftKey: false }, true],
|
100 |
+
[{ code: "Enter", shiftKey: true }, false],
|
101 |
+
() => handleSubmit(event),
|
102 |
+
)
|
103 |
+
.otherwise(() => undefined);
|
104 |
+
};
|
105 |
+
|
106 |
+
return (
|
107 |
+
<form onSubmit={handleSubmit} style={{ width: "100%" }}>
|
108 |
+
<Stack gap="xs">
|
109 |
+
<Textarea
|
110 |
+
value={textAreaValue}
|
111 |
+
placeholder={suggestedQuery}
|
112 |
+
ref={textAreaRef}
|
113 |
+
onKeyDown={handleKeyDown}
|
114 |
+
onChange={handleInputChange}
|
115 |
+
autosize
|
116 |
+
minRows={1}
|
117 |
+
maxRows={8}
|
118 |
+
autoFocus
|
119 |
+
/>
|
120 |
+
<Group gap="xs">
|
121 |
+
{match(textAreaValue)
|
122 |
+
.with(Pattern.string.minLength(1), () => (
|
123 |
+
<Button
|
124 |
+
size="xs"
|
125 |
+
onClick={handleClearButtonClick}
|
126 |
+
variant="default"
|
127 |
+
>
|
128 |
+
Clear
|
129 |
+
</Button>
|
130 |
+
))
|
131 |
+
.otherwise(() => null)}
|
132 |
+
<Button size="xs" type="submit" variant="default" flex={1}>
|
133 |
+
Search
|
134 |
+
</Button>
|
135 |
+
{additionalButtons}
|
136 |
+
</Group>
|
137 |
+
</Stack>
|
138 |
+
</form>
|
139 |
+
);
|
140 |
+
}
|
client/components/Search/Results/Graphical/ImageResultsList.tsx
ADDED
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Carousel } from "@mantine/carousel";
|
2 |
+
import { Button, Group, Stack, Text, Transition, rem } from "@mantine/core";
|
3 |
+
import { useEffect, useState } from "react";
|
4 |
+
import type { SearchResults } from "../../../../modules/search";
|
5 |
+
import "@mantine/carousel/styles.css";
|
6 |
+
import Lightbox from "yet-another-react-lightbox";
|
7 |
+
import Captions from "yet-another-react-lightbox/plugins/captions";
|
8 |
+
import "yet-another-react-lightbox/styles.css";
|
9 |
+
import "yet-another-react-lightbox/plugins/captions.css";
|
10 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
11 |
+
import { getHostname } from "../../../../modules/stringFormatters";
|
12 |
+
|
13 |
+
export default function ImageResultsList({
|
14 |
+
imageResults,
|
15 |
+
}: {
|
16 |
+
imageResults: SearchResults["imageResults"];
|
17 |
+
}) {
|
18 |
+
const [isLightboxOpen, setLightboxOpen] = useState(false);
|
19 |
+
const [lightboxIndex, setLightboxIndex] = useState(0);
|
20 |
+
const [canStartTransition, setCanStartTransition] = useState(false);
|
21 |
+
|
22 |
+
useEffect(() => {
|
23 |
+
setCanStartTransition(true);
|
24 |
+
}, []);
|
25 |
+
|
26 |
+
const handleImageClick = (index: number) => {
|
27 |
+
setLightboxIndex(index);
|
28 |
+
setLightboxOpen(true);
|
29 |
+
};
|
30 |
+
|
31 |
+
const imageStyle = {
|
32 |
+
objectFit: "cover",
|
33 |
+
height: rem(180),
|
34 |
+
width: rem(240),
|
35 |
+
borderRadius: rem(4),
|
36 |
+
border: `${rem(2)} solid var(--mantine-color-default-border)`,
|
37 |
+
cursor: "zoom-in",
|
38 |
+
} as const;
|
39 |
+
|
40 |
+
return (
|
41 |
+
<>
|
42 |
+
<Carousel slideSize="0" slideGap="xs" align="start" dragFree loop>
|
43 |
+
{imageResults.map(([title, sourceUrl, thumbnailUrl], index) => (
|
44 |
+
<Transition
|
45 |
+
key={`${title}-${sourceUrl}-${thumbnailUrl}`}
|
46 |
+
mounted={canStartTransition}
|
47 |
+
transition="fade"
|
48 |
+
timingFunction="ease"
|
49 |
+
enterDelay={index * 250}
|
50 |
+
duration={1500}
|
51 |
+
>
|
52 |
+
{(styles) => (
|
53 |
+
<Carousel.Slide style={styles}>
|
54 |
+
<img
|
55 |
+
alt={title}
|
56 |
+
src={thumbnailUrl}
|
57 |
+
onClick={() => handleImageClick(index)}
|
58 |
+
onKeyDown={(e) => {
|
59 |
+
if (e.key === "Enter") {
|
60 |
+
handleImageClick(index);
|
61 |
+
}
|
62 |
+
}}
|
63 |
+
style={imageStyle}
|
64 |
+
/>
|
65 |
+
</Carousel.Slide>
|
66 |
+
)}
|
67 |
+
</Transition>
|
68 |
+
))}
|
69 |
+
</Carousel>
|
70 |
+
<Lightbox
|
71 |
+
open={isLightboxOpen}
|
72 |
+
close={() => setLightboxOpen(false)}
|
73 |
+
plugins={[Captions]}
|
74 |
+
index={lightboxIndex}
|
75 |
+
slides={imageResults.map(([title, url, thumbnailUrl, sourceUrl]) => ({
|
76 |
+
src: thumbnailUrl,
|
77 |
+
description: (
|
78 |
+
<Stack align="center" gap="md">
|
79 |
+
{title && (
|
80 |
+
<Text component="cite" ta="center">
|
81 |
+
{title}
|
82 |
+
</Text>
|
83 |
+
)}
|
84 |
+
<Group align="center" justify="center" gap="xs">
|
85 |
+
<Button
|
86 |
+
variant="subtle"
|
87 |
+
component="a"
|
88 |
+
size="xs"
|
89 |
+
href={sourceUrl}
|
90 |
+
target="_blank"
|
91 |
+
title="Click to see the image in full size"
|
92 |
+
rel="noopener noreferrer"
|
93 |
+
onClick={() => {
|
94 |
+
addLogEntry("User visited an image result in full size");
|
95 |
+
}}
|
96 |
+
>
|
97 |
+
View in full resolution
|
98 |
+
</Button>
|
99 |
+
<Button
|
100 |
+
variant="subtle"
|
101 |
+
component="a"
|
102 |
+
href={url}
|
103 |
+
target="_blank"
|
104 |
+
size="xs"
|
105 |
+
title="Click to visit the page where the image was found"
|
106 |
+
rel="noopener noreferrer"
|
107 |
+
onClick={() => {
|
108 |
+
addLogEntry("User visited an image result source");
|
109 |
+
}}
|
110 |
+
>
|
111 |
+
Visit {getHostname(url)}
|
112 |
+
</Button>
|
113 |
+
</Group>
|
114 |
+
</Stack>
|
115 |
+
),
|
116 |
+
}))}
|
117 |
+
/>
|
118 |
+
</>
|
119 |
+
);
|
120 |
+
}
|
client/components/Search/Results/SearchResultsSection.tsx
ADDED
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Alert,
|
3 |
+
AspectRatio,
|
4 |
+
Divider,
|
5 |
+
Group,
|
6 |
+
Skeleton,
|
7 |
+
Space,
|
8 |
+
Stack,
|
9 |
+
em,
|
10 |
+
} from "@mantine/core";
|
11 |
+
import { useMediaQuery } from "@mantine/hooks";
|
12 |
+
import { IconInfoCircle } from "@tabler/icons-react";
|
13 |
+
import { usePubSub } from "create-pubsub/react";
|
14 |
+
import { Suspense, lazy, useMemo } from "react";
|
15 |
+
import { Pattern, match } from "ts-pattern";
|
16 |
+
import {
|
17 |
+
searchResultsPubSub,
|
18 |
+
searchStatePubSub,
|
19 |
+
settingsPubSub,
|
20 |
+
} from "../../../modules/pubSub";
|
21 |
+
import type { SearchResults } from "../../../modules/search";
|
22 |
+
import type { Settings } from "../../../modules/settings";
|
23 |
+
|
24 |
+
const ImageResultsList = lazy(() => import("./Graphical/ImageResultsList"));
|
25 |
+
const SearchResultsList = lazy(() => import("./Textual/SearchResultsList"));
|
26 |
+
|
27 |
+
export default function SearchResultsSection() {
|
28 |
+
const [searchResults] = usePubSub(searchResultsPubSub);
|
29 |
+
const [searchState] = usePubSub(searchStatePubSub);
|
30 |
+
const [settings] = usePubSub(settingsPubSub);
|
31 |
+
|
32 |
+
return useMemo(
|
33 |
+
() =>
|
34 |
+
match(searchState)
|
35 |
+
.with("running", () => <RunningSearchContent />)
|
36 |
+
.with("failed", () => <FailedSearchContent />)
|
37 |
+
.with("completed", () => (
|
38 |
+
<CompletedSearchContent
|
39 |
+
searchResults={searchResults}
|
40 |
+
settings={settings}
|
41 |
+
/>
|
42 |
+
))
|
43 |
+
.otherwise(() => null),
|
44 |
+
[searchState, searchResults, settings],
|
45 |
+
);
|
46 |
+
}
|
47 |
+
|
48 |
+
// Placeholder shown while a web search is in flight: a row of square
// skeletons (standing in for image results) above several skeleton text
// lines. Fewer squares are rendered on narrow screens to avoid overflow.
function RunningSearchContent() {
  const hasSmallScreen = useMediaQuery(`(max-width: ${em(530)})`);
  const numberOfSquareSkeletons = hasSmallScreen ? 4 : 6;
  // Stable string ids for React keys (independent of array identity).
  const skeletonIds = Array.from(
    { length: numberOfSquareSkeletons },
    (_, i) => `skeleton-${i}`,
  );

  return (
    <>
      <Divider
        mb="sm"
        variant="dashed"
        labelPosition="center"
        label="Searching the web..."
      />
      <Stack>
        <Group>
          {skeletonIds.map((id) => (
            <AspectRatio key={id} ratio={1} flex={1}>
              <Skeleton />
            </AspectRatio>
          ))}
        </Group>
        {/* Varying widths mimic lines of text of different lengths. */}
        <Stack>
          <Skeleton height={8} radius="xl" />
          <Skeleton height={8} width="87%" radius="xl" />
          <Skeleton height={8} radius="xl" />
          <Skeleton height={8} width="70%" radius="xl" />
          <Skeleton height={8} radius="xl" />
          <Skeleton height={8} width="52%" radius="xl" />
          <Skeleton height={8} radius="xl" />
          <Skeleton height={8} width="63%" radius="xl" />
        </Stack>
      </Stack>
    </>
  );
}
|
86 |
+
|
87 |
+
// Shown when the search completed with no usable results: a static alert
// suggesting the user rephrase or broaden the query.
function FailedSearchContent() {
  return (
    <>
      <Divider
        mb="sm"
        variant="dashed"
        labelPosition="center"
        label="Search Results"
      />
      <Alert
        variant="light"
        color="yellow"
        title="No results found"
        icon={<IconInfoCircle />}
      >
        It looks like your current search did not return any results. Try
        refining your search by adding more keywords or rephrasing your query.
      </Alert>
    </>
  );
}
|
108 |
+
|
109 |
+
// Final results view: an optional image carousel (only when image search is
// enabled in settings AND at least one image was returned), followed by the
// textual results list. Both sub-lists are lazy-loaded, hence the Suspense
// wrappers.
function CompletedSearchContent({
  searchResults,
  settings,
}: {
  searchResults: SearchResults;
  settings: Settings;
}) {
  return (
    <>
      <Divider variant="dashed" labelPosition="center" label="Search Results" />
      {match([settings.enableImageSearch, searchResults.imageResults.length])
        .with([true, Pattern.number.positive()], () => (
          <Suspense>
            <ImageResultsList imageResults={searchResults.imageResults} />
            <Space h={8} />
          </Suspense>
        ))
        .otherwise(() => null)}
      <Suspense>
        <SearchResultsList searchResults={searchResults.textResults} />
      </Suspense>
    </>
  );
}
|
client/components/Search/Results/Textual/SearchResultsList.tsx
ADDED
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import {
|
2 |
+
Flex,
|
3 |
+
Stack,
|
4 |
+
Text,
|
5 |
+
Tooltip,
|
6 |
+
Transition,
|
7 |
+
UnstyledButton,
|
8 |
+
em,
|
9 |
+
} from "@mantine/core";
|
10 |
+
import { useMediaQuery } from "@mantine/hooks";
|
11 |
+
import { useEffect, useState } from "react";
|
12 |
+
import { addLogEntry } from "../../../../modules/logEntries";
|
13 |
+
import type { SearchResults } from "../../../../modules/search";
|
14 |
+
import { getHostname } from "../../../../modules/stringFormatters";
|
15 |
+
|
16 |
+
/**
 * Animated list of textual search results. Each entry fades in with a small
 * stagger (200 ms per index) and shows a clickable title, the result's
 * hostname, and the snippet.
 */
export default function SearchResultsList({
  searchResults,
}: {
  searchResults: SearchResults["textResults"];
}) {
  // On narrow viewports the hostname wraps under the title instead of
  // sitting at the end of the same row.
  const shouldDisplayDomainBelowTitle = useMediaQuery(
    `(max-width: ${em(720)})`,
  );
  const [canStartTransition, setCanStartTransition] = useState(false);

  // Flip to true after mount so the fade-in transitions can play.
  useEffect(() => {
    setCanStartTransition(true);
  }, []);

  return (
    <Stack gap={40}>
      {searchResults.map(([title, snippet, url], index) => (
        <Transition
          key={url}
          mounted={canStartTransition}
          transition="fade"
          timingFunction="ease"
          enterDelay={index * 200}
          duration={750}
        >
          {(styles) => (
            <Stack gap={16} style={styles}>
              <Flex
                gap={shouldDisplayDomainBelowTitle ? 0 : 16}
                justify="space-between"
                align="flex-start"
                direction={shouldDisplayDomainBelowTitle ? "column" : "row"}
              >
                {/* Title link to the result page. */}
                <UnstyledButton
                  variant="transparent"
                  component="a"
                  href={url}
                  target="_blank"
                  onClick={() => {
                    addLogEntry("User clicked a text result");
                  }}
                >
                  <Text fw="bold" c="var(--mantine-color-blue-light-color)">
                    {title}
                  </Text>
                </UnstyledButton>
                {/* Hostname link; the full URL appears in a tooltip. */}
                <Tooltip label={url}>
                  <UnstyledButton
                    variant="transparent"
                    component="a"
                    href={url}
                    target="_blank"
                    fs="italic"
                    ta="end"
                    onClick={() => {
                      addLogEntry("User clicked a text result");
                    }}
                  >
                    {getHostname(url)}
                  </UnstyledButton>
                </Tooltip>
              </Flex>
              <Text size="sm" c="dimmed" style={{ wordWrap: "break-word" }}>
                {snippet}
              </Text>
            </Stack>
          )}
        </Transition>
      ))}
    </Stack>
  );
}
|
client/index.html
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<!doctype html>
|
2 |
+
<html lang="en">
|
3 |
+
<head>
|
4 |
+
<meta charset="utf-8" />
|
5 |
+
<meta
|
6 |
+
name="viewport"
|
7 |
+
content="width=device-width, initial-scale=1.0, user-scalable=no"
|
8 |
+
/>
|
9 |
+
<meta
|
10 |
+
name="description"
|
11 |
+
content="Minimalist web-searching app with an AI assistant that is always available and runs directly from your browser."
|
12 |
+
/>
|
13 |
+
<meta itemprop="name" content="MiniSearch" />
|
14 |
+
<meta
|
15 |
+
itemprop="description"
|
16 |
+
content="Minimalist web-searching app with an AI assistant that is always available and runs directly from your browser."
|
17 |
+
/>
|
18 |
+
<meta property="og:type" content="website" />
|
19 |
+
<meta property="og:title" content="MiniSearch" />
|
20 |
+
<meta
|
21 |
+
property="og:description"
|
22 |
+
content="Minimalist web-searching app with an AI assistant that is always available and runs directly from your browser."
|
23 |
+
/>
|
24 |
+
<meta name="twitter:card" content="summary" />
|
25 |
+
<meta name="twitter:title" content="MiniSearch" />
|
26 |
+
<meta
|
27 |
+
name="twitter:description"
|
28 |
+
content="Minimalist web-searching app with an AI assistant that is always available and runs directly from your browser."
|
29 |
+
/>
|
30 |
+
<title>MiniSearch</title>
|
31 |
+
<link rel="icon" href="/favicon.png" />
|
32 |
+
</head>
|
33 |
+
<body>
|
34 |
+
<script type="module" src="./index.tsx"></script>
|
35 |
+
</body>
|
36 |
+
</html>
|
client/index.tsx
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { createRoot } from "react-dom/client";
import { App } from "./components/App/App";
import { addLogEntry } from "./modules/logEntries";

// Mount the app into a freshly created <div> appended to <body>;
// index.html ships with an empty body, so no static mount point exists.
createRoot(document.body.appendChild(document.createElement("div"))).render(
  <App />,
);

addLogEntry("App initialized");
|
client/modules/accessKey.ts
ADDED
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { notifications } from "@mantine/notifications";
|
2 |
+
import { argon2id } from "hash-wasm";
|
3 |
+
import { addLogEntry } from "./logEntries";
|
4 |
+
|
5 |
+
const ACCESS_KEY_STORAGE_KEY = "accessKeyHash";
|
6 |
+
|
7 |
+
interface StoredAccessKey {
|
8 |
+
hash: string;
|
9 |
+
timestamp: number;
|
10 |
+
}
|
11 |
+
|
12 |
+
/**
 * Hashes an access key with Argon2id using a freshly generated random
 * 16-byte salt.
 *
 * Because the salt is random per call and the "encoded" output embeds the
 * salt and parameters in the result string, every call returns a different
 * hash for the same key — presumably the server treats the encoded hash as
 * an opaque credential rather than re-deriving it (TODO confirm against the
 * /api/validate-access-key handler). The cost parameters are deliberately
 * light since hashing runs in the browser.
 */
async function hashAccessKey(accessKey: string): Promise<string> {
  const salt = new Uint8Array(16);
  crypto.getRandomValues(salt);

  return argon2id({
    password: accessKey,
    salt,
    parallelism: 1,
    iterations: 16,
    memorySize: 512,
    hashLength: 8,
    outputType: "encoded",
  });
}
|
26 |
+
|
27 |
+
/**
 * Hashes the given access key and asks the server whether it is valid.
 *
 * On success, the hash plus a timestamp is cached in localStorage so the
 * user doesn't have to re-enter the key on subsequent visits (see
 * verifyStoredAccessKey). On any request/parse failure, an error
 * notification is shown and false is returned.
 *
 * @param accessKey - The raw access key typed by the user.
 * @returns Whether the server accepted the key.
 */
export async function validateAccessKey(accessKey: string): Promise<boolean> {
  try {
    const hash = await hashAccessKey(accessKey);
    const response = await fetch("/api/validate-access-key", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ accessKeyHash: hash }),
    });
    const data = await response.json();

    if (data.valid) {
      // Cache the accepted hash with a timestamp for later re-validation.
      const storedData: StoredAccessKey = {
        hash,
        timestamp: Date.now(),
      };
      localStorage.setItem(ACCESS_KEY_STORAGE_KEY, JSON.stringify(storedData));
      addLogEntry("Access key hash stored");
    }

    return data.valid;
  } catch (error) {
    addLogEntry(`Error validating access key: ${error}`);
    notifications.show({
      title: "Error validating access key",
      message: "Please contact the administrator",
      color: "red",
      position: "top-right",
    });
    return false;
  }
}
|
58 |
+
|
59 |
+
/**
 * Checks whether a previously stored access-key hash is still usable.
 *
 * Returns false (removing the stored entry where appropriate) when:
 * persistence is disabled (timeout of 0), nothing is stored, the entry is
 * older than the configured timeout, the server rejects the hash, or any
 * error occurs while verifying.
 *
 * NOTE(review): VITE_ACCESS_KEY_TIMEOUT_HOURS is referenced as a bare
 * global — presumably a compile-time constant injected by the bundler;
 * confirm it is declared in the build config.
 */
export async function verifyStoredAccessKey(): Promise<boolean> {
  // A timeout of 0 disables access-key persistence entirely.
  if (VITE_ACCESS_KEY_TIMEOUT_HOURS === 0) return false;

  const storedData = localStorage.getItem(ACCESS_KEY_STORAGE_KEY);
  if (!storedData) return false;

  try {
    const { hash, timestamp }: StoredAccessKey = JSON.parse(storedData);

    // Hours -> milliseconds.
    const expirationTime = VITE_ACCESS_KEY_TIMEOUT_HOURS * 60 * 60 * 1000;
    if (Date.now() - timestamp > expirationTime) {
      localStorage.removeItem(ACCESS_KEY_STORAGE_KEY);
      addLogEntry("Stored access key expired");
      return false;
    }

    // Re-validate the cached hash with the server before trusting it.
    const response = await fetch("/api/validate-access-key", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ accessKeyHash: hash }),
    });

    const data = await response.json();
    if (!data.valid) {
      localStorage.removeItem(ACCESS_KEY_STORAGE_KEY);
      addLogEntry("Stored access key is no longer valid");
      return false;
    }

    addLogEntry("Using stored access key");
    return true;
  } catch (error) {
    addLogEntry(`Error verifying stored access key: ${error}`);
    localStorage.removeItem(ACCESS_KEY_STORAGE_KEY);
    return false;
  }
}
|
client/modules/logEntries.ts
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { createPubSub } from "create-pubsub";
|
2 |
+
|
3 |
+
type LogEntry = {
|
4 |
+
timestamp: string;
|
5 |
+
message: string;
|
6 |
+
};
|
7 |
+
|
8 |
+
export const logEntriesPubSub = createPubSub<LogEntry[]>([]);
|
9 |
+
|
10 |
+
const [updateLogEntries, , getLogEntries] = logEntriesPubSub;
|
11 |
+
|
12 |
+
export function addLogEntry(message: string) {
|
13 |
+
updateLogEntries([
|
14 |
+
...getLogEntries(),
|
15 |
+
{
|
16 |
+
timestamp: new Date().toISOString(),
|
17 |
+
message,
|
18 |
+
},
|
19 |
+
]);
|
20 |
+
}
|
client/modules/openai.ts
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import OpenAI, { type ClientOptions } from "openai";
|
2 |
+
|
3 |
+
/**
 * Builds an OpenAI API client pointed at the given base URL.
 *
 * `dangerouslyAllowBrowser` is required because this app runs entirely
 * client-side. The "X-Stainless-Retry-Count" default header is suppressed
 * (set to null) — presumably to avoid CORS issues with OpenAI-compatible
 * servers that don't allow that header; confirm against the target server.
 */
export function getOpenAiClient({
  baseURL,
  apiKey,
}: {
  baseURL: ClientOptions["baseURL"];
  apiKey: ClientOptions["apiKey"];
}) {
  return new OpenAI({
    baseURL,
    apiKey,
    dangerouslyAllowBrowser: true,
    defaultHeaders: { "X-Stainless-Retry-Count": null },
  });
}
|
client/modules/parentWindow.ts
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/**
 * Relays a message to the embedding (parent) window, if one exists.
 *
 * NOTE(review): the target origin is looked up via `parentWindow?.[0]`
 * (the parent's first child frame — presumably this app's own iframe) and
 * that frame's `location.ancestorOrigins`. This requires same-origin
 * access and browser support for `ancestorOrigins`; confirm against the
 * intended embedding setup. When no origin can be determined, the message
 * is silently dropped (intentional best-effort behavior).
 */
export function postMessageToParentWindow(message: unknown) {
  const parentWindow = self.parent;
  const targetOrigin = parentWindow?.[0]?.location?.ancestorOrigins?.[0];
  if (targetOrigin) parentWindow.postMessage(message, targetOrigin);
}
|
client/modules/pubSub.ts
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { createPubSub } from "create-pubsub";
|
2 |
+
import throttle from "throttleit";
|
3 |
+
import { addLogEntry } from "./logEntries";
|
4 |
+
import { defaultSettings } from "./settings";
|
5 |
+
|
6 |
+
function createLocalStoragePubSub<T>(localStorageKey: string, defaultValue: T) {
|
7 |
+
const localStorageValue = localStorage.getItem(localStorageKey);
|
8 |
+
const localStoragePubSub = createPubSub(
|
9 |
+
localStorageValue ? (JSON.parse(localStorageValue) as T) : defaultValue,
|
10 |
+
);
|
11 |
+
|
12 |
+
const [, onValueChange] = localStoragePubSub;
|
13 |
+
|
14 |
+
onValueChange((value) =>
|
15 |
+
localStorage.setItem(localStorageKey, JSON.stringify(value)),
|
16 |
+
);
|
17 |
+
|
18 |
+
return localStoragePubSub;
|
19 |
+
}
|
20 |
+
|
21 |
+
// Cached query suggestions (persisted across sessions).
const querySuggestionsPubSub = createLocalStoragePubSub<string[]>(
  "querySuggestions",
  [],
);

// Hash of the last search token, persisted to avoid redundant work.
const lastSearchTokenHashPubSub = createLocalStoragePubSub(
  "lastSearchTokenHash",
  "",
);

export const [updateLastSearchTokenHash, , getLastSearchTokenHash] =
  lastSearchTokenHashPubSub;

export const [updateQuerySuggestions, , getQuerySuggestions] =
  querySuggestionsPubSub;

// Current query, seeded from the "q" URL parameter on load.
export const queryPubSub = createPubSub(
  new URLSearchParams(self.location.search).get("q") ?? "",
);

export const [, , getQuery] = queryPubSub;

// Streaming AI response text.
export const responsePubSub = createPubSub("");

// Throttled to ~12 updates/second so streaming tokens don't flood React.
export const updateResponse = throttle(responsePubSub[0], 1000 / 12);

export const searchResultsPubSub = createPubSub<
  import("./search").SearchResults
>({
  textResults: [],
  imageResults: [],
});

export const [updateSearchResults, , getSearchResults] = searchResultsPubSub;

// In-flight search promise, so consumers can await the current search.
export const [updateSearchPromise, , getSearchPromise] = createPubSub<
  Promise<import("./search").SearchResults>
>(Promise.resolve({ textResults: [], imageResults: [] }));

// State machine for AI text generation.
export const textGenerationStatePubSub = createPubSub<
  | "idle"
  | "awaitingModelDownloadAllowance"
  | "loadingModel"
  | "awaitingSearchResults"
  | "preparingToGenerate"
  | "generating"
  | "interrupted"
  | "failed"
  | "completed"
>("idle");

export const [updateTextGenerationState, , getTextGenerationState] =
  textGenerationStatePubSub;

const [, listenToTextGenerationStateChanges] = textGenerationStatePubSub;

// Log every text-generation state transition for diagnostics.
listenToTextGenerationStateChanges((textGenerationState) => {
  addLogEntry(`Text generation state changed to '${textGenerationState}'`);
});

// State machine for the web search itself.
export const searchStatePubSub = createPubSub<
  "idle" | "running" | "failed" | "completed"
>("idle");

export const [updateSearchState] = searchStatePubSub;

const [, listenToSearchStateChanges] = searchStatePubSub;

// Log every search state transition for diagnostics.
listenToSearchStateChanges((searchState) => {
  addLogEntry(`Search state changed to '${searchState}'`);
});

// Model download/load progress, as a number (presumably 0-100; confirm
// against the loader that publishes it).
export const modelLoadingProgressPubSub = createPubSub(0);

export const [updateModelLoadingProgress] = modelLoadingProgressPubSub;

// User settings, persisted to localStorage.
export const settingsPubSub = createLocalStoragePubSub(
  "settings",
  defaultSettings,
);

export const [, listenToSettingsChanges, getSettings] = settingsPubSub;

export const modelSizeInMegabytesPubSub = createPubSub(0);

export const [updateModelSizeInMegabytes] = modelSizeInMegabytesPubSub;
|
client/modules/querySuggestions.ts
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { addLogEntry } from "./logEntries";
|
2 |
+
import { getQuerySuggestions, updateQuerySuggestions } from "./pubSub";
|
3 |
+
|
4 |
+
export async function getRandomQuerySuggestion() {
|
5 |
+
if (getQuerySuggestions().length === 0) await refillQuerySuggestions(25);
|
6 |
+
|
7 |
+
const querySuggestions = getQuerySuggestions();
|
8 |
+
|
9 |
+
const randomQuerySuggestion = querySuggestions.pop() as string;
|
10 |
+
|
11 |
+
updateQuerySuggestions(querySuggestions);
|
12 |
+
|
13 |
+
return randomQuerySuggestion;
|
14 |
+
}
|
15 |
+
|
16 |
+
async function refillQuerySuggestions(limit?: number) {
|
17 |
+
const querySuggestionsFileUrl = new URL(
|
18 |
+
"/query-suggestions.json",
|
19 |
+
self.location.origin,
|
20 |
+
);
|
21 |
+
|
22 |
+
const fetchResponse = await fetch(querySuggestionsFileUrl.toString());
|
23 |
+
|
24 |
+
const querySuggestionsList: string[] = await fetchResponse.json();
|
25 |
+
|
26 |
+
updateQuerySuggestions(
|
27 |
+
querySuggestionsList.sort(() => Math.random() - 0.5).slice(0, limit),
|
28 |
+
);
|
29 |
+
|
30 |
+
addLogEntry(`Query suggestions refilled with ${limit} suggestions`);
|
31 |
+
}
|
client/modules/search.ts
ADDED
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { name } from "../../package.json";
|
2 |
+
import { addLogEntry } from "./logEntries";
|
3 |
+
import { getSearchTokenHash } from "./searchTokenHash";
|
4 |
+
|
5 |
+
export type SearchResults = {
|
6 |
+
textResults: [title: string, snippet: string, url: string][];
|
7 |
+
imageResults: [
|
8 |
+
title: string,
|
9 |
+
url: string,
|
10 |
+
thumbnailUrl: string,
|
11 |
+
sourceUrl: string,
|
12 |
+
][];
|
13 |
+
};
|
14 |
+
|
15 |
+
/**
|
16 |
+
* Creates a cached version of a search function using IndexedDB for storage.
|
17 |
+
*
|
18 |
+
* @param fn - The original search function to be cached.
|
19 |
+
* @returns A new function that wraps the original, adding caching functionality.
|
20 |
+
*
|
21 |
+
* This function implements a caching mechanism for search results using IndexedDB.
|
22 |
+
* It stores search results with a 15-minute time-to-live (TTL) to improve performance
|
23 |
+
* for repeated searches. The cache is automatically cleaned of expired entries.
|
24 |
+
*
|
25 |
+
* The returned function behaves as follows:
|
26 |
+
* 1. Checks IndexedDB for a cached result matching the query.
|
27 |
+
* 2. If a valid (non-expired) cached result exists, it is returned immediately.
|
28 |
+
* 3. Otherwise, the original search function is called, and its result is both
|
29 |
+
* returned and stored in the cache for future use.
|
30 |
+
*
|
31 |
+
* If IndexedDB is not available, the function falls back to using the original
|
32 |
+
* search function without caching.
|
33 |
+
*/
|
34 |
+
function cacheSearchWithIndexedDB(
|
35 |
+
fn: (query: string, limit?: number) => Promise<SearchResults>,
|
36 |
+
): (query: string, limit?: number) => Promise<SearchResults> {
|
37 |
+
const storeName = "searches";
|
38 |
+
const timeToLive = 15 * 60 * 1000;
|
39 |
+
|
40 |
+
async function openDB(): Promise<IDBDatabase> {
|
41 |
+
return new Promise((resolve, reject) => {
|
42 |
+
const request = indexedDB.open(name, 1);
|
43 |
+
request.onerror = () => reject(request.error);
|
44 |
+
request.onsuccess = () => {
|
45 |
+
const db = request.result;
|
46 |
+
cleanExpiredCache(db);
|
47 |
+
resolve(db);
|
48 |
+
};
|
49 |
+
request.onupgradeneeded = (event) => {
|
50 |
+
const db = (event.target as IDBOpenDBRequest).result;
|
51 |
+
db.createObjectStore(storeName);
|
52 |
+
};
|
53 |
+
});
|
54 |
+
}
|
55 |
+
|
56 |
+
async function cleanExpiredCache(db: IDBDatabase): Promise<void> {
|
57 |
+
const transaction = db.transaction(storeName, "readwrite");
|
58 |
+
const store = transaction.objectStore(storeName);
|
59 |
+
const currentTime = Date.now();
|
60 |
+
|
61 |
+
return new Promise((resolve) => {
|
62 |
+
const request = store.openCursor();
|
63 |
+
request.onsuccess = (event) => {
|
64 |
+
const cursor = (event.target as IDBRequest).result;
|
65 |
+
if (cursor) {
|
66 |
+
if (currentTime - cursor.value.timestamp >= timeToLive) {
|
67 |
+
cursor.delete();
|
68 |
+
}
|
69 |
+
cursor.continue();
|
70 |
+
} else {
|
71 |
+
resolve();
|
72 |
+
}
|
73 |
+
};
|
74 |
+
});
|
75 |
+
}
|
76 |
+
|
77 |
+
/**
|
78 |
+
* Generates a hash for a given query string.
|
79 |
+
*
|
80 |
+
* This function implements a simple hash algorithm:
|
81 |
+
* 1. It iterates through each character in the query string.
|
82 |
+
* 2. For each character, it updates the hash value using bitwise operations.
|
83 |
+
* 3. The final hash is converted to a 32-bit integer.
|
84 |
+
* 4. The result is returned as a base-36 string representation.
|
85 |
+
*
|
86 |
+
* @param query - The input string to be hashed.
|
87 |
+
* @returns A string representation of the hash in base-36.
|
88 |
+
*/
|
89 |
+
function hashQuery(query: string): string {
|
90 |
+
return query
|
91 |
+
.split("")
|
92 |
+
.reduce((acc, char) => ((acc << 5) - acc + char.charCodeAt(0)) | 0, 0)
|
93 |
+
.toString(36);
|
94 |
+
}
|
95 |
+
|
96 |
+
const dbPromise = openDB();
|
97 |
+
|
98 |
+
return async (query: string, limit?: number): Promise<SearchResults> => {
|
99 |
+
addLogEntry("Starting new search");
|
100 |
+
if (!indexedDB) return fn(query, limit);
|
101 |
+
|
102 |
+
const db = await dbPromise;
|
103 |
+
const transaction = db.transaction(storeName, "readwrite");
|
104 |
+
const store = transaction.objectStore(storeName);
|
105 |
+
const key = hashQuery(query);
|
106 |
+
const cachedResult = await new Promise<
|
107 |
+
| {
|
108 |
+
results: SearchResults;
|
109 |
+
timestamp: number;
|
110 |
+
}
|
111 |
+
| undefined
|
112 |
+
>((resolve) => {
|
113 |
+
const request = store.get(key);
|
114 |
+
request.onerror = () => resolve(undefined);
|
115 |
+
request.onsuccess = () => resolve(request.result);
|
116 |
+
});
|
117 |
+
|
118 |
+
if (cachedResult && Date.now() - cachedResult.timestamp < timeToLive) {
|
119 |
+
addLogEntry(
|
120 |
+
`Search cache hit, returning cached results containing ${cachedResult.results.textResults.length} texts and ${cachedResult.results.imageResults.length} images`,
|
121 |
+
);
|
122 |
+
return cachedResult.results;
|
123 |
+
}
|
124 |
+
|
125 |
+
addLogEntry("Search cache miss, fetching new results");
|
126 |
+
|
127 |
+
const results = await fn(query, limit);
|
128 |
+
|
129 |
+
const writeTransaction = db.transaction(storeName, "readwrite");
|
130 |
+
const writeStore = writeTransaction.objectStore(storeName);
|
131 |
+
writeStore.put({ results, timestamp: Date.now() }, key);
|
132 |
+
|
133 |
+
addLogEntry(
|
134 |
+
`Search completed with ${results.textResults.length} text results and ${results.imageResults.length} image results`,
|
135 |
+
);
|
136 |
+
|
137 |
+
return results;
|
138 |
+
};
|
139 |
+
}
|
140 |
+
|
141 |
+
export const search = cacheSearchWithIndexedDB(
|
142 |
+
async (query: string, limit?: number): Promise<SearchResults> => {
|
143 |
+
const searchUrl = new URL("/search", self.location.origin);
|
144 |
+
|
145 |
+
searchUrl.searchParams.set("q", query);
|
146 |
+
|
147 |
+
searchUrl.searchParams.set("token", await getSearchTokenHash());
|
148 |
+
|
149 |
+
if (limit && limit > 0) {
|
150 |
+
searchUrl.searchParams.set("limit", limit.toString());
|
151 |
+
}
|
152 |
+
|
153 |
+
const response = await fetch(searchUrl.toString());
|
154 |
+
|
155 |
+
return response.ok
|
156 |
+
? response.json()
|
157 |
+
: { textResults: [], imageResults: [] };
|
158 |
+
},
|
159 |
+
);
|
client/modules/searchTokenHash.ts
ADDED
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { argon2Verify, argon2id } from "hash-wasm";
|
2 |
+
import { addLogEntry } from "./logEntries";
|
3 |
+
import { getLastSearchTokenHash, updateLastSearchTokenHash } from "./pubSub";
|
4 |
+
|
5 |
+
export async function getSearchTokenHash() {
|
6 |
+
const password = VITE_SEARCH_TOKEN;
|
7 |
+
const lastSearchTokenHash = getLastSearchTokenHash();
|
8 |
+
|
9 |
+
try {
|
10 |
+
const lastSearchTokenHashIsValid = await argon2Verify({
|
11 |
+
password,
|
12 |
+
hash: lastSearchTokenHash,
|
13 |
+
});
|
14 |
+
|
15 |
+
if (lastSearchTokenHashIsValid) {
|
16 |
+
addLogEntry("Using cached search token hash");
|
17 |
+
return lastSearchTokenHash;
|
18 |
+
}
|
19 |
+
} catch (error) {
|
20 |
+
void error;
|
21 |
+
}
|
22 |
+
|
23 |
+
const salt = new Uint8Array(16);
|
24 |
+
crypto.getRandomValues(salt);
|
25 |
+
|
26 |
+
const newSearchTokenHash = await argon2id({
|
27 |
+
password,
|
28 |
+
salt,
|
29 |
+
parallelism: 1,
|
30 |
+
iterations: 16,
|
31 |
+
memorySize: 512,
|
32 |
+
hashLength: 8,
|
33 |
+
outputType: "encoded",
|
34 |
+
});
|
35 |
+
|
36 |
+
updateLastSearchTokenHash(newSearchTokenHash);
|
37 |
+
|
38 |
+
addLogEntry("New search token hash generated");
|
39 |
+
|
40 |
+
return newSearchTokenHash;
|
41 |
+
}
|