neon_arch committed on
Commit 74e4fc6 • 2 parents: 1c2ea24 e4625c3

Merge branch 'rolling' into improve-async-multithreading

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .cspell.json +16 -0
  2. .github/workflows/contributors.yml +2 -1
  3. .github/workflows/hello.yml +1 -0
  4. .github/workflows/labels.yml +1 -0
  5. .github/workflows/mega-linter.yml +89 -0
  6. .github/workflows/rust.yml +1 -0
  7. .github/workflows/rust_format.yml +1 -0
  8. .github/workflows/stale.yml +1 -0
  9. .gitignore +1 -0
  10. .gitpod.yml +44 -0
  11. .mega-linter.yml +22 -0
  12. CONTRIBUTING.md +1 -1
  13. Dockerfile +3 -3
  14. PULL_REQUEST_TEMPLATE.md +1 -1
  15. README.md +2 -2
  16. docker-compose.yml +1 -0
  17. public/static/colorschemes/catppuccin-mocha.css +9 -9
  18. public/static/colorschemes/dark-chocolate.css +10 -10
  19. public/static/colorschemes/dracula.css +9 -9
  20. public/static/colorschemes/gruvbox-dark.css +9 -9
  21. public/static/colorschemes/monokai.css +9 -9
  22. public/static/colorschemes/nord.css +9 -9
  23. public/static/colorschemes/oceanic-next.css +9 -9
  24. public/static/colorschemes/one-dark.css +9 -9
  25. public/static/colorschemes/solarized-dark.css +9 -9
  26. public/static/colorschemes/solarized-light.css +9 -9
  27. public/static/colorschemes/tokyo-night.css +9 -9
  28. public/static/colorschemes/tomorrow-night.css +9 -9
  29. public/static/cookies.js +15 -4
  30. public/static/pagination.js +4 -4
  31. public/static/settings.js +36 -10
  32. public/static/themes/simple.css +42 -42
  33. public/templates/search.html +3 -3
  34. src/bin/websurfx.rs +2 -2
  35. src/cache/cacher.rs +4 -4
  36. src/{config_parser → config}/mod.rs +0 -0
  37. src/{config_parser → config}/parser.rs +39 -46
  38. src/{config_parser → config}/parser_models.rs +1 -1
  39. src/engines/duckduckgo.rs +1 -1
  40. src/engines/engine_models.rs +2 -2
  41. src/engines/searx.rs +1 -1
  42. src/handler/mod.rs +1 -1
  43. src/handler/{public_path_handler.rs → public_paths.rs} +11 -9
  44. src/lib.rs +6 -6
  45. src/{search_results_handler → results}/aggregation_models.rs +1 -1
  46. src/{search_results_handler → results}/aggregator.rs +0 -0
  47. src/{search_results_handler → results}/mod.rs +0 -0
  48. src/{search_results_handler → results}/user_agent.rs +0 -0
  49. src/server/routes.rs +79 -81
  50. tests/index.rs +2 -2
.cspell.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "ignorePaths": [
+     "**/node_modules/**",
+     "**/vscode-extension/**",
+     "**/.git/**",
+     "**/.pnpm-lock.json",
+     ".vscode",
+     "megalinter",
+     "package-lock.json",
+     "report"
+   ],
+   "language": "en",
+   "noConfigSearch": true,
+   "words": ["megalinter", "oxsecurity"],
+   "version": "0.2"
+ }
.github/workflows/contributors.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  name: Contributors List

  on:
@@ -44,4 +45,4 @@ jobs:
    delete-branch: true
    title: 'chore: update contributors-list'
    body: |
-     Automated update to `images/contributors_list.svg`
+     Automated update to `images/contributors_list.svg`
.github/workflows/hello.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  name: Welcome first time contributors

  on:
.github/workflows/labels.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  name: Import open source standard labels

  on:
.github/workflows/mega-linter.yml ADDED
@@ -0,0 +1,89 @@
+ ---
+ # MegaLinter GitHub Action configuration file
+ # More info at https://megalinter.io
+ name: MegaLinter
+
+ on:
+   # Trigger mega-linter at every push. Action will also be visible from Pull Requests to rolling
+   push: # Comment this line to trigger action only on pull-requests (not recommended if you don't pay for GH Actions)
+   pull_request:
+     branches: [rolling]
+
+ env: # Comment env block if you do not want to apply fixes
+   # Apply linter fixes configuration
+   APPLY_FIXES: all # When active, APPLY_FIXES must also be defined as environment variable (in github/workflows/mega-linter.yml or other CI tool)
+   APPLY_FIXES_EVENT: pull_request # Decide which event triggers application of fixes in a commit or a PR (pull_request, push, all)
+   APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request)
+
+ concurrency:
+   group: ${{ github.ref }}-${{ github.workflow }}
+   cancel-in-progress: true
+
+ jobs:
+   build:
+     name: MegaLinter
+     runs-on: ubuntu-latest
+     permissions:
+       # Give the default GITHUB_TOKEN write permission to commit and push, comment issues & post new PR
+       # Remove the ones you do not need
+       contents: write
+       issues: write
+       pull-requests: write
+     steps:
+       # Git Checkout
+       - name: Checkout Code
+         uses: actions/checkout@v3
+         with:
+           token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
+
+       # MegaLinter
+       - name: MegaLinter
+         id: ml
+         # You can override MegaLinter flavor used to have faster performances
+         # More info at https://megalinter.io/flavors/
+         uses: oxsecurity/megalinter/flavors/cupcake@v7.1.0
+         env:
+           # All available variables are described in documentation
+           # https://megalinter.io/configuration/
+           VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} to validate only diff with main branch
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+           # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY
+
+       # Upload MegaLinter artifacts
+       - name: Archive production artifacts
+         if: ${{ success() }} || ${{ failure() }}
+         uses: actions/upload-artifact@v3
+         with:
+           name: MegaLinter reports
+           path: |
+             megalinter-reports
+             mega-linter.log
+
+       # Create pull request if applicable (for now works only on PR from same repository, not from forks)
+       - name: Create Pull Request with applied fixes
+         id: cpr
+         if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
+         uses: peter-evans/create-pull-request@v5
+         with:
+           token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
+           commit-message: "[MegaLinter] Apply linters automatic fixes"
+           title: "[MegaLinter] Apply linters automatic fixes"
+           labels: bot
+       - name: Create PR output
+         if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
+         run: |
+           echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
+           echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
+
+       # Push new commit if applicable (for now works only on PR from same repository, not from forks)
+       - name: Prepare commit
+         if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
+         run: sudo chown -Rc $UID .git/
+       - name: Commit and push applied linter fixes
+         if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
+         uses: stefanzweifel/git-auto-commit-action@v4
+         with:
+           branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
+           commit_message: "[MegaLinter] Apply linters fixes"
+           commit_user_name: megalinter-bot
+           commit_user_email: nicolas.vuillamy@ox.security
.github/workflows/rust.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  name: Rust

  on:
.github/workflows/rust_format.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  name: Rust format and clippy checks
  on:
    push:
.github/workflows/stale.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
  #
  # You can adjust the behavior by modifying this file.
.gitignore CHANGED
@@ -3,3 +3,4 @@ package.json
  package-lock.json
  dump.rdb
  .vscode
+ megalinter-reports/
.gitpod.yml ADDED
@@ -0,0 +1,44 @@
+ ---
+ image: gitpod/workspace-base
+ # Commands that will run on workspace start
+ tasks:
+   - name: Setup, Install & Build
+     before: apt install cargo redis-server nodejs npm -y && cargo test
+     init: cargo install cargo-watch
+     command: redis-server --port 8080 & cargo watch -q -w "." -x "run"
+ # Ports to expose on workspace startup
+ ports:
+   - name: Website
+     description: Website Preview
+     port: 8080
+     onOpen: open-preview
+ # vscode IDE setup
+ vscode:
+   extensions:
+     - vadimcn.vscode-lldb
+     - cschleiden.vscode-github-actions
+     - rust-lang.rust
+     - bungcip.better-toml
+     - serayuzgur.crates
+     - usernamehw.errorlens
+     - DavidAnson.vscode-markdownlint
+     - esbenp.prettier-vscode
+     - stylelint.vscode-stylelint
+     - dbaeumer.vscode-eslint
+     - evgeniypeshkov.syntax-highlighter
+     - redhat.vscode-yaml
+     - ms-azuretools.vscode-docker
+     - Catppuccin.catppuccin-vsc
+     - PKief.material-icon-theme
+     - oderwat.indent-rainbow
+     - formulahendry.auto-rename-tag
+     - eamodio.gitlens
+ github:
+   prebuilds:
+     master: true
+     branches: true
+     pullRequests: true
+     pullRequestsFromForks: true
+     addCheck: true
+     addComment: false
+     addBadge: true
.mega-linter.yml ADDED
@@ -0,0 +1,22 @@
+ ---
+ # Configuration file for MegaLinter
+ # See all available variables at https://megalinter.io/configuration/ and in linters documentation
+
+ APPLY_FIXES: all # all, none, or list of linter keys
+ # ENABLE: # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default
+ ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default
+   - RUST_CLIPPY
+   - JAVASCRIPT_ES
+   - CSS_STYLELINT
+   - MARKDOWN_MARKDOWNLINT
+   - YAML_YAMLLINT
+   - HTML_DJLINT
+   - ACTION_ACTIONLINT
+   - DOCKERFILE_HADOLINT
+   - SPELL_CSPELL
+ # DISABLE:
+ # - COPYPASTE # Uncomment to disable checks of excessive copy-pastes
+ # - SPELL # Uncomment to disable checks of spelling mistakes
+ SHOW_ELAPSED_TIME: true
+ FILEIO_REPORTER: false
+ # DISABLE_ERRORS: true # Uncomment if you want MegaLinter to detect errors but not block CI to pass
CONTRIBUTING.md CHANGED
@@ -14,7 +14,7 @@ Know how to fix or improve a github action?. Consider Submitting a Pull request

  ## Source Code

- You should know atleast one of the things below to start contributing:
+ You should know at least one of the things below to start contributing:

  - Rust basics
  - Actix-web crate basics
Dockerfile CHANGED
@@ -3,7 +3,7 @@ FROM rust:latest AS chef
  # it will be cached from the second build onwards
  RUN cargo install cargo-chef

- WORKDIR app
+ WORKDIR /app

  FROM chef AS planner
  COPY . .
@@ -20,7 +20,7 @@ RUN cargo install --path .

  # We do not need the Rust toolchain to run the binary!
  FROM gcr.io/distroless/cc-debian11
- COPY --from=builder ./public/ ./public/
- COPY --from=builder ./websurfx/ ./websurfx/
+ COPY --from=builder /app/public/ /opt/websurfx/public/
+ COPY --from=builder /app/websurfx/config.lua /etc/xdg/websurfx/config.lua
  COPY --from=builder /usr/local/cargo/bin/* /usr/local/bin/
  CMD ["websurfx"]
PULL_REQUEST_TEMPLATE.md CHANGED
@@ -16,7 +16,7 @@

  ## Author's checklist

- <!-- additional notes for reviewiers -->
+ <!-- additional notes for reviewers -->

  ## Related issues

README.md CHANGED
@@ -59,7 +59,7 @@
  - **Community**
  - [📊 System Requirements](#system-requirements-)
  - [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-)
- - [📣 More Contributers Wanted](#more-contributers-wanted-)
+ - [📣 More Contributors Wanted](#more-contributors-wanted-)
  - [💖 Supporting Websurfx](#supporting-websurfx-)
  - [📘 Documentation](#documentation-)
  - [🛣️ Roadmap](#roadmap-)
@@ -165,7 +165,7 @@ Websurfx is based on Rust due to its memory safety features, which prevents vuln

  **[⬆️ Back to Top](#--)**

- # More Contributers Wanted 📣
+ # More Contributors Wanted 📣

  We are looking for more willing contributors to help grow this project. For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines and rules for making contributions.

docker-compose.yml CHANGED
@@ -1,3 +1,4 @@
+ ---
  version: "3.9"
  services:
    app:
public/static/colorschemes/catppuccin-mocha.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #1e1e2e;
-   --fg: #cdd6f4;
-   --1: #45475a;
-   --2: #f38ba8;
-   --3: #a6e3a1;
-   --4: #f9e2af;
-   --5: #89b4fa;
-   --6: #f5c2e7;
-   --7: #ffffff;
+   --background-color: #1e1e2e;
+   --foreground-color: #cdd6f4;
+   --color-one: #45475a;
+   --color-two: #f38ba8;
+   --color-three: #a6e3a1;
+   --color-four: #f9e2af;
+   --color-five: #89b4fa;
+   --color-six: #f5c2e7;
+   --color-seven: #ffffff;
  }
public/static/colorschemes/dark-chocolate.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #000000;
-   --fg: #ffffff;
-   --1: #121212;
-   --2: #808080;
-   --3: #999999;
-   --4: #666666;
-   --5: #bfbfbf;
-   --6: #e0e0e0;
-   --7: #555555;
- }
+   --background-color: #000000;
+   --foreground-color: #ffffff;
+   --color-one: #121212;
+   --color-two: #808080;
+   --color-three: #999999;
+   --color-four: #666666;
+   --color-five: #bfbfbf;
+   --color-six: #e0e0e0;
+   --color-seven: #555555;
+ }
public/static/colorschemes/dracula.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #44475a;
-   --fg: #8be9fd;
-   --1: #ff5555;
-   --2: #50fa7b;
-   --3: #ffb86c;
-   --4: #bd93f9;
-   --5: #ff79c6;
-   --6: #94a3a5;
-   --7: #ffffff;
+   --background-color: #44475a;
+   --foreground-color: #8be9fd;
+   --color-one: #ff5555;
+   --color-two: #50fa7b;
+   --color-three: #ffb86c;
+   --color-four: #bd93f9;
+   --color-five: #ff79c6;
+   --color-six: #94a3a5;
+   --color-seven: #ffffff;
  }
public/static/colorschemes/gruvbox-dark.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #282828;
-   --fg: #ebdbb2;
-   --1: #cc241d;
-   --2: #98971a;
-   --3: #d79921;
-   --4: #458588;
-   --5: #b16286;
-   --6: #689d6a;
-   --7: #ffffff;
+   --background-color: #1d2021;
+   --foreground-color: #ebdbb2;
+   --color-one: #282828;
+   --color-two: #98971a;
+   --color-three: #d79921;
+   --color-four: #458588;
+   --color-five: #b16286;
+   --color-six: #689d6a;
+   --color-seven: #ffffff;
  }
public/static/colorschemes/monokai.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #403e41;
-   --fg: #fcfcfa;
-   --1: #ff6188;
-   --2: #a9dc76;
-   --3: #ffd866;
-   --4: #fc9867;
-   --5: #ab9df2;
-   --6: #78dce8;
-   --7: #ffffff;
+   --background-color: #49483Eff;
+   --foreground-color: #FFB269;
+   --color-one: #272822ff;
+   --color-two: #61AFEF;
+   --color-three: #ffd866;
+   --color-four: #fc9867;
+   --color-five: #ab9df2;
+   --color-six: #78dce8;
+   --color-seven: #ffffff;
  }
public/static/colorschemes/nord.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #2e3440;
-   --fg: #d8dee9;
-   --1: #3b4252;
-   --2: #bf616a;
-   --3: #a3be8c;
-   --4: #ebcb8b;
-   --5: #81a1c1;
-   --6: #b48ead;
-   --7: #ffffff;
+   --background-color: #122736ff;
+   --foreground-color: #a2e2a9;
+   --color-one: #121B2Cff;
+   --color-two: #f08282;
+   --color-three: #ABC5AAff;
+   --color-four: #e6d2d2;
+   --color-five: #81a1c1;
+   --color-six: #e2ecd6;
+   --color-seven: #fff;
  }
public/static/colorschemes/oceanic-next.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #1b2b34;
-   --fg: #d8dee9;
-   --1: #343d46;
-   --2: #ec5f67;
-   --3: #99c794;
-   --4: #fac863;
-   --5: #6699cc;
-   --6: #c594c5;
-   --7: #ffffff;
+   --background-color: #1b2b34;
+   --foreground-color: #d8dee9;
+   --color-one: #343d46;
+   --color-two: #5FB3B3ff;
+   --color-three: #69Cf;
+   --color-four: #99c794;
+   --color-five: #69c;
+   --color-six: #c594c5;
+   --color-seven: #D8DEE9ff;
  }
public/static/colorschemes/one-dark.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #282c34;
-   --fg: #abb2bf;
-   --1: #3b4048;
-   --2: #a3be8c;
-   --3: #b48ead;
-   --4: #c8ccd4;
-   --5: #e06c75;
-   --6: #61afef;
-   --7: #be5046;
+   --background-color: #282c34;
+   --foreground-color: #abb2bf;
+   --color-one: #3b4048;
+   --color-two: #a3be8c;
+   --color-three: #b48ead;
+   --color-four: #c8ccd4;
+   --color-five: #e06c75;
+   --color-six: #61afef;
+   --color-seven: #be5046;
  }
public/static/colorschemes/solarized-dark.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #002b36;
-   --fg: #839496;
-   --1: #073642;
-   --2: #dc322f;
-   --3: #859900;
-   --4: #b58900;
-   --5: #268bd2;
-   --6: #d33682;
-   --7: #ffffff;
+   --background-color: #002b36;
+   --foreground-color: #c9e0e6;
+   --color-one: #073642;
+   --color-two: #2AA198ff;
+   --color-three: #2AA198ff;
+   --color-four: #EEE8D5ff;
+   --color-five: #268bd2;
+   --color-six: #d33682;
+   --color-seven: #fff;
  }
public/static/colorschemes/solarized-light.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #fdf6e3;
-   --fg: #657b83;
-   --1: #073642;
-   --2: #dc322f;
-   --3: #859900;
-   --4: #b58900;
-   --5: #268bd2;
-   --6: #d33682;
-   --7: #ffffff;
+   --background-color: #EEE8D5ff;
+   --foreground-color: #b1ab97;
+   --color-one: #fdf6e3;
+   --color-two: #DC322Fff;
+   --color-three: #586E75ff;
+   --color-four: #b58900;
+   --color-five: #268bd2;
+   --color-six: #d33682;
+   --color-seven: #fff;
  }
public/static/colorschemes/tokyo-night.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #1a1b26;
-   --fg: #c0caf5;
-   --1: #32364a;
-   --2: #a9b1d6;
-   --3: #5a5bb8;
-   --4: #6b7089;
-   --5: #e2afff;
-   --6: #a9a1e1;
-   --7: #988bc7;
+   --background-color: #1a1b26;
+   --foreground-color: #c0caf5;
+   --color-one: #32364a;
+   --color-two: #a9b1d6;
+   --color-three: #5a5bb8;
+   --color-four: #6b7089;
+   --color-five: #e2afff;
+   --color-six: #a9a1e1;
+   --color-seven: #988bc7;
  }
public/static/colorschemes/tomorrow-night.css CHANGED
@@ -1,11 +1,11 @@
  :root {
-   --bg: #1d1f21;
-   --fg: #c5c8c6;
-   --1: #cc6666;
-   --2: #b5bd68;
-   --3: #f0c674;
-   --4: #81a2be;
-   --5: #b294bb;
-   --6: #8abeb7;
-   --7: #ffffff;
+   --background-color: #35383Cff;
+   --foreground-color: #D7DAD8ff;
+   --color-one: #1d1f21;
+   --color-two: #D77C79ff;
+   --color-three: #f0c674;
+   --color-four: #92B2CAff;
+   --color-five: #C0A7C7ff;
+   --color-six: #9AC9C4ff;
+   --color-seven: #fff;
  }
public/static/cookies.js CHANGED
@@ -1,15 +1,26 @@
- // This function is executed when any page on the website finishes loading and
- // this function retrieves the cookies if it is present on the user's machine.
- // If it is available then the saved cookies is display in the cookies tab
- // otherwise an appropriate message is displayed if it is not available.
+ /**
+  * This function is executed when any page on the website finishes loading and
+  * this function retrieves the cookies if it is present on the user's machine.
+  * If it is available then the saved cookies is display in the cookies tab
+  * otherwise an appropriate message is displayed if it is not available.
+  *
+  * @function
+  * @listens DOMContentLoaded
+  * @returns {void}
+  */
  document.addEventListener(
    'DOMContentLoaded',
    () => {
      try {
+       // Decode the cookie value
        let cookie = decodeURIComponent(document.cookie)
+       // Set the value of the input field to the decoded cookie value if it is not empty
+       // Otherwise, display a message indicating that no cookies have been saved on the user's system
        document.querySelector('.cookies input').value =
          cookie !== '' ? cookie : 'No cookies have been saved on your system'
      } catch (error) {
+       // If there is an error decoding the cookie, log the error to the console
+       // and display an error message in the input field
        console.error('Error decoding cookie:', error)
        document.querySelector('.cookies input').value = 'Error decoding cookie'
      }
public/static/pagination.js CHANGED
@@ -1,5 +1,5 @@
  /**
-  * Navigates to the next page by incrementing the current page number in the URL query parameters.
+  * Navigates to the next page by incrementing the current page number in the URL query string.
   * @returns {void}
   */
  function navigate_forward() {
@@ -19,7 +19,7 @@ function navigate_forward() {
  }

  /**
-  * Navigates to the previous page by decrementing the current page number in the URL query parameters.
+  * Navigates to the previous page by decrementing the current page number in the URL query string.
   * @returns {void}
   */
  function navigate_backward() {
@@ -30,8 +30,8 @@ function navigate_backward() {
    let page = parseInt(searchParams.get('page'));

    if (isNaN(page)) {
-     page = 1;
-   } else if (page > 1) {
+     page = 0;
+   } else if (page > 0) {
      page--;
    }

public/static/settings.js CHANGED
@@ -1,5 +1,7 @@
- // This function handles the toggling of selections of all upstream search engines
- // options in the settings page under the tab engines.
+ /**
+  * This function handles the toggling of selections of all upstream search engines
+  * options in the settings page under the tab engines.
+  */
  function toggleAllSelection() {
    document
      .querySelectorAll('.engine')
@@ -10,25 +12,36 @@ function toggleAllSelection() {
    )
  }

- // This function adds the functionality to sidebar buttons to only show settings
- // related to that tab.
+ /**
+  * This function adds the functionality to sidebar buttons to only show settings
+  * related to that tab.
+  * @param {HTMLElement} current_tab - The current tab that was clicked.
+  */
  function setActiveTab(current_tab) {
+   // Remove the active class from all tabs and buttons
    document
      .querySelectorAll('.tab')
      .forEach((tab) => tab.classList.remove('active'))
    document
      .querySelectorAll('.btn')
      .forEach((tab) => tab.classList.remove('active'))
+
+   // Add the active class to the current tab and its corresponding settings
    current_tab.classList.add('active')
    document
      .querySelector(`.${current_tab.innerText.toLowerCase().replace(' ', '_')}`)
      .classList.add('active')
  }

- // This function adds the functionality to save all the user selected preferences
- // to be saved in a cookie on the users machine.
+ /**
+  * This function adds the functionality to save all the user selected preferences
+  * to be saved in a cookie on the users machine.
+  */
  function setClientSettings() {
+   // Create an object to store the user's preferences
    let cookie_dictionary = new Object()
+
+   // Loop through all select tags and add their values to the cookie dictionary
    document.querySelectorAll('select').forEach((select_tag) => {
      if (select_tag.name === 'themes') {
        cookie_dictionary['theme'] = select_tag.value
@@ -36,6 +49,8 @@ function setClientSettings() {
        cookie_dictionary['colorscheme'] = select_tag.value
      }
    })
+
+   // Loop through all engine checkboxes and add their values to the cookie dictionary
    let engines = []
    document.querySelectorAll('.engine').forEach((engine_checkbox) => {
      if (engine_checkbox.checked === true) {
@@ -43,33 +58,44 @@ function setClientSettings() {
      }
    })
    cookie_dictionary['engines'] = engines
+
+   // Set the expiration date for the cookie to 1 year from the current date
    let expiration_date = new Date()
    expiration_date.setFullYear(expiration_date.getFullYear() + 1)
+
+   // Save the cookie to the user's machine
    document.cookie = `appCookie=${JSON.stringify(
      cookie_dictionary
    )}; expires=${expiration_date.toUTCString()}`

+   // Display a success message to the user
    document.querySelector('.message').innerText =
      '✅ The settings have been saved sucessfully!!'

+   // Clear the success message after 10 seconds
    setTimeout(() => {
      document.querySelector('.message').innerText = ''
    }, 10000)
  }

- // This functions gets the saved cookies if it is present on the user's machine If it
- // is available then it is parsed and converted to an object which is then used to
- // retrieve the preferences that the user had selected previously and is then loaded in the
- // website otherwise the function does nothing and the default server side settings are loaded.
+ /**
+  * This functions gets the saved cookies if it is present on the user's machine If it
+  * is available then it is parsed and converted to an object which is then used to
+  * retrieve the preferences that the user had selected previously and is then loaded in the
+  * website otherwise the function does nothing and the default server side settings are loaded.
+  */
  function getClientSettings() {
+   // Get the appCookie from the user's machine
    let cookie = decodeURIComponent(document.cookie)

+   // If the cookie is not empty, parse it and use it to set the user's preferences
    if (cookie !== '') {
      let cookie_value = decodeURIComponent(document.cookie)
        .split(';')
        .map((item) => item.split('='))
        .reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})

+     // Loop through all link tags and update their href values to match the user's preferences
      let links = Array.from(document.querySelectorAll('link')).forEach(
        (item) => {
          if (item.href.includes('static/themes')) {
public/static/themes/simple.css CHANGED
@@ -16,7 +16,7 @@ body {
    justify-content: space-between;
    align-items: center;
    height: 100vh;
-   background: var(--1);
+   background: var(--color-one);
  }

  /* styles for the index page */
@@ -46,7 +46,7 @@ body {
    outline: none;
    border: none;
    box-shadow: rgba(0, 0, 0, 1);
-   background: var(--fg);
+   background: var(--foreground-color);
  }

  .search_bar button {
@@ -59,8 +59,8 @@ body {
    outline: none;
    border: none;
    gap: 0;
-   background: var(--bg);
-   color: var(--3);
+   background: var(--background-color);
+   color: var(--color-three);
    font-weight: 600;
    letter-spacing: 0.1rem;
  }
@@ -73,7 +73,7 @@ body {
  /* styles for the footer and header */

  header {
-   background: var(--bg);
+   background: var(--background-color);
    width: 100%;
    display: flex;
    justify-content: right;
@@ -96,7 +96,7 @@ footer ul li a,
  header ul li a:visited,
  footer ul li a:visited {
    text-decoration: none;
-   color: var(--2);
+   color: var(--color-two);
    text-transform: capitalize;
    letter-spacing: 0.1rem;
  }
@@ -107,12 +107,12 @@ header ul li a {

  header ul li a:hover,
  footer ul li a:hover {
-   color: var(--5);
+   color: var(--color-five);
  }

  footer div span {
    font-size: 1.5rem;
-   color: var(--4);
+   color: var(--color-four);
  }

  footer div {
@@ -121,7 +121,7 @@ footer div {
  }

  footer {
-   background: var(--bg);
+   background: var(--background-color);
    width: 100%;
    padding: 1rem;
    display: flex;
@@ -158,28 +158,28 @@ footer {

  .results_aggregated .result h1 a {
    font-size: 1.5rem;
-   color: var(--2);
+   color: var(--color-two);
    text-decoration: none;
    letter-spacing: 0.1rem;
  }

  .results_aggregated .result h1 a:hover {
-   color: var(--5);
+   color: var(--color-five);
  }

  .results_aggregated .result h1 a:visited {
-   color: var(--bg);
+   color: var(--background-color);
  }

  .results_aggregated .result small {
-   color: var(--3);
+   color: var(--color-three);
    font-size: 1.1rem;
    word-wrap: break-word;
    line-break: anywhere;
  }

  .results_aggregated .result p {
-   color: var(--fg);
+   color: var(--foreground-color);
    font-size: 1.2rem;
    margin-top: 0.3rem;
    word-wrap: break-word;
@@ -190,7 +190,7 @@ footer {
    text-align: right;
    font-size: 1.2rem;
    padding: 1rem;
-   color: var(--5);
+   color: var(--color-five);
  }

  /* Styles for the 404 page */
@@ -233,12 +233,12 @@ footer {

  .error_content p a,
  .error_content p a:visited {
-   color: var(--2);
+   color: var(--color-two);
    text-decoration: none;
  }

  .error_content p a:hover {
-   color: var(--5);
+   color: var(--color-five);
  }

  .page_navigation {
@@ -249,8 +249,8 @@ footer {
  }

  .page_navigation button {
-   background: var(--bg);
-   color: var(--fg);
+   background: var(--background-color);
+   color: var(--foreground-color);
    padding: 1rem;
    border-radius: 0.5rem;
    outline: none;
@@ -265,12 +265,12 @@ footer {

  .about-container article {
    font-size: 1.5rem;
-   color: var(--fg);
+   color: var(--foreground-color);
    padding-bottom: 10px;
  }

  .about-container article h1 {
-   color: var(--2);
+   color: var(--color-two);
    font-size: 2.8rem;
  }

@@ -279,17 +279,17 @@ footer {
  }

  .about-container a {
-   color: var(--3);
+   color: var(--color-three);
  }

  .about-container article h2 {
-   color: var(--3);
+   color: var(--color-three);
    font-size: 1.8rem;
    padding-bottom: 10px;
  }

  .about-container p {
-   color: var(--fg);
+   color: var(--foreground-color);
    font-size: 1.6rem;
    padding-bottom: 10px;
  }
@@ -310,12 +310,12 @@ footer {
  }

  .settings h1 {
-   color: var(--2);
+   color: var(--color-two);
    font-size: 2.5rem;
  }

  .settings hr {
-   border-color: var(--3);
+   border-color: var(--color-three);
    margin: 0.3rem 0 1rem 0;
  }

@@ -331,7 +331,7 @@ footer {
    border-radius: 5px;
    font-weight: bold;
    margin-bottom: 0.5rem;
-   color: var(--fg);
+   color: var(--foreground-color);
    text-transform: capitalize;
    gap: 1.5rem;
  }
@@ -342,12 +342,12 @@ footer {
  }

  .settings_container .sidebar .btn.active {
-   background-color: var(--2);
+   background-color: var(--color-two);
  }

  .settings_container .main_container {
    width: 70%;
-   border-left: 1.5px solid var(--3);
+   border-left: 1.5px solid var(--color-three);
    padding-left: 3rem;
  }

@@ -365,8 +365,8 @@ footer {
    margin-top: 1rem;
    padding: 1rem 2rem;
    font-size: 1.5rem;
-   background: var(--3);
-   color: var(--bg);
+   background: var(--color-three);
+   color: var(--background-color);
    border-radius: 0.5rem;
    border: 2px solid transparent;
    font-weight: bold;
@@ -383,13 +383,13 @@ footer {

  .settings_container .main_container .message {
    font-size: 1.5rem;
-   color: var(--fg);
+   color: var(--foreground-color);
  }

  .settings_container .tab h3 {
    font-size: 2rem;
    font-weight: bold;
-   color: var(--4);
+   color: var(--color-four);
    margin-top: 1.5rem;
    text-transform: capitalize;
  }
@@ -397,14 +397,14 @@ footer {
  .settings_container .tab .description {
    font-size: 1.5rem;
    margin-bottom: 0.5rem;
-   color: var(--fg);
+   color: var(--foreground-color);
  }

  .settings_container .user_interface select {
    margin: 0.7rem 0;
    width: 20rem;
-   background-color: var(--bg);
-   color: var(--fg);
+   background-color: var(--background-color);
+   color: var(--foreground-color);
    padding: 1rem 2rem;
    border-radius: 0.5rem;
    outline: none;
@@ -413,7 +413,7 @@ footer {
  }

  .settings_container .user_interface option:hover {
-   background-color: var(--1);
+   background-color: var(--color-one);
  }

  .settings_container .engines .engine_selection {
@@ -425,7 +425,7 @@ footer {
  }

  .settings_container .engines .toggle_btn {
-   color: var(--fg);
+   color: var(--foreground-color);
    font-size: 1.5rem;
    display: flex;
    gap: 0.5rem;
@@ -464,7 +464,7 @@ footer {
    left: 0;
    right: 0;
    bottom: 0;
-   background-color: var(--bg);
+   background-color: var(--background-color);
    -webkit-transition: 0.4s;
    transition: 0.4s;
  }
@@ -476,17 +476,17 @@ footer {
    width: 2.6rem;
    left: 0.4rem;
    bottom: 0.4rem;
-   background-color: var(--fg);
+   background-color: var(--foreground-color);
    -webkit-transition: 0.4s;
    transition: 0.4s;
  }

  input:checked + .slider {
-   background-color: var(--3);
+   background-color: var(--color-three);
  }

  input:focus + .slider {
-   box-shadow: 0 0 1px var(--3);
+   box-shadow: 0 0 1px var(--color-three);
  }

  input:checked + .slider:before {
public/templates/search.html CHANGED
@@ -4,12 +4,12 @@
    <div class="results_aggregated">
      {{#each results}}
      <div class="result">
-       <h1><a href="{{this.visitingUrl}}">{{{this.title}}}</a></h1>
-       <small>{{this.url}}</small>
+       <h1><a href="/{{{this.visitingUrl}}}">{{{this.title}}}</a></h1>
+       <small>{{{this.url}}}</small>
        <p>{{{this.description}}}</p>
        <div class="upstream_engines">
          {{#each engine}}
-         <span>{{this}}</span>
+         <span>{{{this}}}</span>
          {{/each}}
        </div>
      </div>
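
For context on the template change above: double braces in Handlebars HTML-escape the interpolated value, while triple braces insert it verbatim. A minimal sketch (not part of this commit) using the `handlebars` crate shows the difference; the example data and template strings are illustrative only.

```rust
use handlebars::Handlebars;
use serde_json::json;

fn main() {
    let hb = Handlebars::new();
    // Hypothetical result URL containing a `&`, as a real search result might.
    let data = json!({ "url": "https://example.com/search?q=rust&page=2" });

    // `{{url}}` HTML-escapes the value (e.g. `&` becomes `&amp;`).
    let escaped = hb
        .render_template("<small>{{url}}</small>", &data)
        .unwrap();
    // `{{{url}}}` renders the value verbatim, which is what the template now uses.
    let raw = hb
        .render_template("<small>{{{url}}}</small>", &data)
        .unwrap();

    println!("escaped: {escaped}");
    println!("raw:     {raw}");
}
```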
src/bin/websurfx.rs CHANGED
@@ -5,7 +5,7 @@

  use std::net::TcpListener;

- use websurfx::{config_parser::parser::Config, run};
+ use websurfx::{config::parser::Config, run};

  /// The function that launches the main server and registers all the routes of the website.
  ///
@@ -26,7 +26,7 @@ async fn main() -> std::io::Result<()> {

      log::info!("started server on port {}", config.port);

-     let listener = TcpListener::bind((config.binding_ip_addr.clone(), config.port))?;
+     let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?;

      run(listener, config)?.await
  }
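
The renamed `binding_ip` field is still passed to `bind` as an `(ip, port)` tuple. A small standalone sketch (not part of this commit) of the same pattern, using placeholder values instead of the parsed config:

```rust
use std::net::TcpListener;

fn main() -> std::io::Result<()> {
    // Stand-ins for the parsed `config.binding_ip` and `config.port` values.
    let binding_ip = String::from("127.0.0.1");
    let port: u16 = 0; // port 0 lets the OS pick a free port for this demo

    // `(String, u16)` implements `ToSocketAddrs`, so the tuple can be passed
    // straight to `bind`, exactly like the updated call in the binary.
    let listener = TcpListener::bind((binding_ip.clone(), port))?;
    println!("bound {} on {}", binding_ip, listener.local_addr()?);
    Ok(())
}
```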
src/cache/cacher.rs CHANGED
@@ -32,7 +32,7 @@ impl RedisCache {
      /// # Arguments
      ///
      /// * `url` - It takes an url as string.
-     fn compute_url_hash(url: &str) -> String {
+     fn hash_url(url: &str) -> String {
          format!("{:?}", compute(url))
      }

@@ -41,8 +41,8 @@ impl RedisCache {
      /// # Arguments
      ///
      /// * `url` - It takes an url as a string.
-     pub fn cached_results_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
-         let hashed_url_string = Self::compute_url_hash(url);
+     pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+         let hashed_url_string = Self::hash_url(url);
          Ok(self.connection.get(hashed_url_string)?)
      }

@@ -59,7 +59,7 @@ impl RedisCache {
          json_results: String,
          url: &str,
      ) -> Result<(), Box<dyn std::error::Error>> {
-         let hashed_url_string = Self::compute_url_hash(url);
+         let hashed_url_string = Self::hash_url(url);

          // put results_json into cache
          self.connection.set(&hashed_url_string, json_results)?;
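
A hypothetical caller of the renamed methods, sketched next to the `RedisCache` type (not part of this commit). Only `hash_url` and `cached_json` are visible in this hunk; how the cache is constructed and what the companion setter is named are assumptions, so the setter is only referenced in a comment.

```rust
// Illustrative helper: try the cache first, fall back to a placeholder payload.
fn cached_or_fetch(
    cache: &mut RedisCache,
    url: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    // `cached_json` (formerly `cached_results_json`) hashes the URL internally
    // via `hash_url` and returns the stored JSON, or an error on a cache miss.
    match cache.cached_json(url) {
        Ok(json) => Ok(json),
        Err(_) => {
            let fresh = String::from("{\"results\": []}"); // placeholder payload
            // The companion setter shown above (taking `json_results` and `url`)
            // would be called here to store `fresh` before returning it.
            Ok(fresh)
        }
    }
}
```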
src/{config_parser → config}/mod.rs RENAMED
File without changes
src/{config_parser → config}/parser.rs RENAMED
@@ -14,9 +14,9 @@ static CONFIG_FILE_NAME: &str = "config.lua";
  /// # Fields
  //
  /// * `port` - It stores the parsed port number option on which the server should launch.
- /// * `binding_ip_addr` - It stores the parsed ip address option on which the server should launch
+ /// * `binding_ip` - It stores the parsed ip address option on which the server should launch
  /// * `style` - It stores the theming options for the website.
- /// * `redis_connection_url` - It stores the redis connection url address on which the redis
+ /// * `redis_url` - It stores the redis connection url address on which the redis
  /// client should connect.
  /// * `aggregator` - It stores the option to whether enable or disable production use.
  /// * `logging` - It stores the option to whether enable or disable logs.
@@ -25,10 +25,10 @@ static CONFIG_FILE_NAME: &str = "config.lua";
  #[derive(Clone)]
  pub struct Config {
      pub port: u16,
-     pub binding_ip_addr: String,
+     pub binding_ip: String,
      pub style: Style,
-     pub redis_connection_url: String,
-     pub aggregator: AggreatorConfig,
+     pub redis_url: String,
+     pub aggregator: AggregatorConfig,
      pub logging: bool,
      pub debug: bool,
      pub upstream_search_engines: Vec<String>,
@@ -41,47 +41,38 @@ pub struct Config {
  /// * `random_delay` - It stores the option to whether enable or disable random delays between
  /// requests.
  #[derive(Clone)]
- pub struct AggreatorConfig {
+ pub struct AggregatorConfig {
      pub random_delay: bool,
  }

  impl Config {
      /// A function which parses the config.lua file and puts all the parsed options in the newly
-     /// contructed Config struct and returns it.
+     /// constructed Config struct and returns it.
      ///
      /// # Error
      ///
      /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error
-     /// or io error if the config.lua file doesn't exists otherwise it returns a newly contructed
+     /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
      /// Config struct with all the parsed config options from the parsed config file.
      pub fn parse() -> Result<Self, Box<dyn std::error::Error>> {
          Lua::new().context(|context| -> Result<Self, Box<dyn std::error::Error>> {
              let globals = context.globals();

              context
-                 .load(&fs::read_to_string(
-                     Config::handle_different_config_file_path()?,
-                 )?)
+                 .load(&fs::read_to_string(Config::config_path()?)?)
                  .exec()?;

-             let production_use = globals.get::<_, bool>("production_use")?;
-             let aggregator_config = if production_use {
-                 AggreatorConfig { random_delay: true }
-             } else {
-                 AggreatorConfig {
-                     random_delay: false,
-                 }
-             };
-
              Ok(Config {
                  port: globals.get::<_, u16>("port")?,
-                 binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
+                 binding_ip: globals.get::<_, String>("binding_ip")?,
                  style: Style::new(
                      globals.get::<_, String>("theme")?,
                      globals.get::<_, String>("colorscheme")?,
                  ),
-                 redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
-                 aggregator: aggregator_config,
+                 redis_url: globals.get::<_, String>("redis_url")?,
+                 aggregator: AggregatorConfig {
+                     random_delay: globals.get::<_, bool>("production_use")?,
+                 },
                  logging: globals.get::<_, bool>("logging")?,
                  debug: globals.get::<_, bool>("debug")?,
                  upstream_search_engines: globals
@@ -104,35 +95,37 @@ impl Config {
      /// one (3).
      /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
      /// here then it returns an error as mentioned above.
-     fn handle_different_config_file_path() -> Result<String, Box<dyn std::error::Error>> {
-         if Path::new(
-             format!(
-                 "{}/.config/{}/config.lua",
-                 std::env::var("HOME").unwrap(),
-                 COMMON_DIRECTORY_NAME
-             )
-             .as_str(),
-         )
-         .exists()
-         {
-             Ok(format!(
+     fn config_path() -> Result<String, Box<dyn std::error::Error>> {
+         // check user config
+
+         let path = format!(
+             "{}/.config/{}/config.lua",
+             std::env::var("HOME").unwrap(),
+             COMMON_DIRECTORY_NAME
+         );
+         if Path::new(path.as_str()).exists() {
+             return Ok(format!(
                  "{}/.config/{}/{}",
                  std::env::var("HOME").unwrap(),
                  COMMON_DIRECTORY_NAME,
                  CONFIG_FILE_NAME
-             ))
-         } else if Path::new(
-             format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str(),
-         )
-         .exists()
-         {
-             Ok("/etc/xdg/websurfx/config.lua".to_string())
-         } else if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
-             .exists()
-         {
-             Ok("./websurfx/config.lua".to_string())
-         } else {
-             Err("Config file not found!!".to_string().into())
-         }
+             ));
+         }
+
+         // look for config in /etc/xdg
+         if Path::new(format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
+             .exists()
+         {
+             return Ok("/etc/xdg/websurfx/config.lua".to_string());
+         }
+
+         // use dev config
+         if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str()).exists()
+         {
+             return Ok("./websurfx/config.lua".to_string());
+         }
+
+         // if no of the configs above exist, return error
+         Err("Config file not found!!".to_string().into())
      }
  }
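
A minimal sketch (not part of this commit) of consuming the renamed fields from a caller, assuming the crate is used as a library exactly as `src/bin/websurfx.rs` does; all fields shown are public in the struct above.

```rust
use websurfx::config::parser::Config;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Parse websurfx/config.lua using the lookup order implemented by `config_path`:
    // ~/.config/websurfx/, then /etc/xdg/websurfx/, then ./websurfx/.
    let config = Config::parse()?;

    println!("binding ip      : {}", config.binding_ip);
    println!("port            : {}", config.port);
    println!("redis url       : {}", config.redis_url);
    println!("random delay    : {}", config.aggregator.random_delay);
    println!("upstream engines: {:?}", config.upstream_search_engines);
    Ok(())
}
```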
src/{config_parser → config}/parser_models.rs RENAMED
@@ -1,5 +1,5 @@
  //! This module provides public models for handling, storing and serializing parsed config file
- //! options from config.lua by grouping them togather.
+ //! options from config.lua by grouping them together.

  use serde::{Deserialize, Serialize};

src/engines/duckduckgo.rs CHANGED
@@ -7,7 +7,7 @@ use std::collections::HashMap;
  use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
  use scraper::{Html, Selector};

- use crate::search_results_handler::aggregation_models::RawSearchResult;
+ use crate::results::aggregation_models::RawSearchResult;

  use super::engine_models::{EngineError, SearchEngine};

src/engines/engine_models.rs CHANGED
@@ -1,7 +1,7 @@
  //! This module provides the error enum to handle different errors associated while requesting data from
  //! the upstream search engines with the search query provided by the user.

- use crate::search_results_handler::aggregation_models::RawSearchResult;
+ use crate::results::aggregation_models::RawSearchResult;
  use error_stack::{IntoReport, Result, ResultExt};
  use std::{collections::HashMap, fmt, time::Duration};

@@ -14,7 +14,7 @@ use std::{collections::HashMap, fmt, time::Duration};
  /// search engines.
  /// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
  /// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
- /// all other errors occuring within the code handling the `upstream search engines`.
+ /// all other errors occurring within the code handling the `upstream search engines`.
  #[derive(Debug)]
  pub enum EngineError {
      EmptyResultSet,
src/engines/searx.rs CHANGED
@@ -6,7 +6,7 @@ use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
6
  use scraper::{Html, Selector};
7
  use std::collections::HashMap;
8
 
9
- use crate::search_results_handler::aggregation_models::RawSearchResult;
10
 
11
  use super::engine_models::{EngineError, SearchEngine};
12
  use error_stack::{IntoReport, Report, Result, ResultExt};
 
6
  use scraper::{Html, Selector};
7
  use std::collections::HashMap;
8
 
9
+ use crate::results::aggregation_models::RawSearchResult;
10
 
11
  use super::engine_models::{EngineError, SearchEngine};
12
  use error_stack::{IntoReport, Report, Result, ResultExt};
src/handler/mod.rs CHANGED
@@ -1 +1 @@
1
- pub mod public_path_handler;
 
1
+ pub mod public_paths;
src/handler/{public_path_handler.rs β†’ public_paths.rs} RENAMED
@@ -17,15 +17,17 @@ static PUBLIC_DIRECTORY_NAME: &str = "public";
17
  /// 1. `/opt/websurfx` if it is not present here then it falls back to the next one (2)
18
  /// 2. Under the project folder (or codebase, in other words) if it is not present
19
  /// here then it returns an error as mentioned above.
20
- pub fn handle_different_public_path() -> Result<String, Error> {
21
  if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
22
- Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME))
23
- } else if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
24
- Ok(format!("./{}", PUBLIC_DIRECTORY_NAME))
25
- } else {
26
- Err(Error::new(
27
- std::io::ErrorKind::NotFound,
28
- "Themes (public) folder not found!!",
29
- ))
30
  }
31
  }
 
17
  /// 1. `/opt/websurfx` if it is not present here then it falls back to the next one (2)
18
  /// 2. Under the project folder (or codebase, in other words) if it is not present
19
  /// here then it returns an error as mentioned above.
20
+ pub fn public_path() -> Result<String, Error> {
21
  if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
22
+ return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
23
  }
24
+
25
+ if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
26
+ return Ok(format!("./{}", PUBLIC_DIRECTORY_NAME));
27
+ }
28
+
29
+ Err(Error::new(
30
+ std::io::ErrorKind::NotFound,
31
+ "Themes (public) folder not found!!",
32
+ ))
33
  }
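
The rename to `public_path()` also flattens the old `if/else if/else` chain into early returns: `/opt/websurfx/public` wins if it exists, otherwise the local `./public` folder is used, otherwise a `NotFound` error is returned. A hypothetical test (not part of this commit) could exercise the fallback when run from the repository root:

```rust
// Hypothetical test sketch, assuming it lives in the same module as
// `public_path` and that `cargo test` is run from the repository root,
// where the `./public` directory exists.
#[cfg(test)]
mod tests {
    use super::public_path;

    #[test]
    fn resolves_public_folder() {
        let path = public_path().expect("public (themes) folder should be found");
        // Either `/opt/websurfx/public` or `./public`; both end in "public".
        assert!(path.ends_with("public"));
    }
}
```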
src/lib.rs CHANGED
@@ -2,10 +2,10 @@
2
  //! and register all the routes for the `websurfx` meta search engine website.
3
 
4
  pub mod cache;
5
- pub mod config_parser;
6
  pub mod engines;
7
  pub mod handler;
8
- pub mod search_results_handler;
9
  pub mod server;
10
 
11
  use std::net::TcpListener;
@@ -14,9 +14,9 @@ use crate::server::routes;
14
 
15
  use actix_files as fs;
16
  use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
17
- use config_parser::parser::Config;
18
  use handlebars::Handlebars;
19
- use handler::public_path_handler::handle_different_public_path;
20
 
21
  /// Runs the web server on the provided TCP listener and returns a `Server` instance.
22
  ///
@@ -32,7 +32,7 @@ use handler::public_path_handler::handle_different_public_path;
32
  ///
33
  /// ```rust
34
  /// use std::net::TcpListener;
35
- /// use websurfx::{config_parser::parser::Config, run};
36
  ///
37
  /// let config = Config::parse().unwrap();
38
  /// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
@@ -41,7 +41,7 @@ use handler::public_path_handler::handle_different_public_path;
41
  pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
42
  let mut handlebars: Handlebars = Handlebars::new();
43
 
44
- let public_folder_path: String = handle_different_public_path()?;
45
 
46
  handlebars
47
  .register_templates_directory(".html", format!("{}/templates", public_folder_path))
 
2
  //! and register all the routes for the `websurfx` meta search engine website.
3
 
4
  pub mod cache;
5
+ pub mod config;
6
  pub mod engines;
7
  pub mod handler;
8
+ pub mod results;
9
  pub mod server;
10
 
11
  use std::net::TcpListener;
 
14
 
15
  use actix_files as fs;
16
  use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
17
+ use config::parser::Config;
18
  use handlebars::Handlebars;
19
+ use handler::public_paths::public_path;
20
 
21
  /// Runs the web server on the provided TCP listener and returns a `Server` instance.
22
  ///
 
32
  ///
33
  /// ```rust
34
  /// use std::net::TcpListener;
35
+ /// use websurfx::{config::parser::Config, run};
36
  ///
37
  /// let config = Config::parse().unwrap();
38
  /// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
 
41
  pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
42
  let mut handlebars: Handlebars = Handlebars::new();
43
 
44
+ let public_folder_path: String = public_path()?;
45
 
46
  handlebars
47
  .register_templates_directory(".html", format!("{}/templates", public_folder_path))
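
With the modules renamed, callers only need to update their import paths; the wiring itself is unchanged. A minimal entry point using the new paths might look like the sketch below; the actual `src/bin/websurfx.rs` is not shown here and may differ, and the async attribute and hard-coded bind address are assumptions.

```rust
use std::net::TcpListener;

use websurfx::{config::parser::Config, run};

// Hypothetical entry-point sketch: parse config.lua, bind a listener, and
// await the actix server returned by `run`. Address and port are hard-coded
// for illustration only.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let config = Config::parse().expect("failed to parse config.lua");
    let listener = TcpListener::bind("127.0.0.1:8080")?;
    run(listener, config)?.await
}
```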
src/{search_results_handler β†’ results}/aggregation_models.rs RENAMED
@@ -3,7 +3,7 @@
3
 
4
  use serde::{Deserialize, Serialize};
5
 
6
- use crate::{config_parser::parser_models::Style, engines::engine_models::EngineError};
7
 
8
  /// A named struct to store, serialize and deserializes the individual search result from all the
9
  /// scraped and aggregated search results from the upstream search engines.
 
3
 
4
  use serde::{Deserialize, Serialize};
5
 
6
+ use crate::{config::parser_models::Style, engines::engine_models::EngineError};
7
 
8
  /// A named struct to store, serialize and deserializes the individual search result from all the
9
  /// scraped and aggregated search results from the upstream search engines.
src/{search_results_handler β†’ results}/aggregator.rs RENAMED
File without changes
src/{search_results_handler β†’ results}/mod.rs RENAMED
File without changes
src/{search_results_handler β†’ results}/user_agent.rs RENAMED
File without changes
src/server/routes.rs CHANGED
@@ -1,14 +1,14 @@
1
  //! This module provides the functionality to handle different routes of the `websurfx`
2
- //! meta search engine website and provide approriate response to each route/page
3
  //! when requested.
4
 
5
  use std::fs::read_to_string;
6
 
7
  use crate::{
8
  cache::cacher::RedisCache,
9
- config_parser::parser::Config,
10
- handler::public_path_handler::handle_different_public_path,
11
- search_results_handler::{aggregation_models::SearchResults, aggregator::aggregate},
12
  };
13
  use actix_web::{get, web, HttpRequest, HttpResponse};
14
  use handlebars::Handlebars;
@@ -87,86 +87,25 @@ pub async fn search(
87
  config: web::Data<Config>,
88
  ) -> Result<HttpResponse, Box<dyn std::error::Error>> {
89
  let params = web::Query::<SearchParams>::from_query(req.query_string())?;
90
-
91
- //Initialize redis cache connection struct
92
- let mut redis_cache = RedisCache::new(config.redis_connection_url.clone())?;
93
  match &params.q {
94
  Some(query) => {
95
  if query.trim().is_empty() {
96
- Ok(HttpResponse::Found()
97
  .insert_header(("location", "/"))
98
- .finish())
99
- } else {
100
- let page_url: String; // Declare the page_url variable without initializing it
101
-
102
- // ...
103
-
104
- let page = match params.page {
105
- Some(page_number) => {
106
- if page_number <= 1 {
107
- page_url = format!(
108
- "http://{}:{}/search?q={}&page={}",
109
- config.binding_ip_addr, config.port, query, 1
110
- );
111
- 1
112
- } else {
113
- page_url = format!(
114
- "http://{}:{}/search?q={}&page={}",
115
- config.binding_ip_addr, config.port, query, page_number
116
- );
117
-
118
- page_number
119
- }
120
- }
121
- None => {
122
- page_url = format!(
123
- "http://{}:{}{}&page={}",
124
- config.binding_ip_addr,
125
- config.port,
126
- req.uri(),
127
- 1
128
- );
129
-
130
- 1
131
- }
132
- };
133
-
134
- // fetch the cached results json.
135
- let cached_results_json = redis_cache.cached_results_json(&page_url);
136
- // check if fetched catch results was indeed fetched or it was an error and if so
137
- // handle the data accordingly.
138
- match cached_results_json {
139
- Ok(results_json) => {
140
- let new_results_json: SearchResults = serde_json::from_str(&results_json)?;
141
- let page_content: String = hbs.render("search", &new_results_json)?;
142
- Ok(HttpResponse::Ok().body(page_content))
143
- }
144
- Err(_) => {
145
- // check if the cookie value is empty or not if it is empty then use the
146
- // default selected upstream search engines from the config file otherwise
147
- // parse the non-empty cookie and grab the user selected engines from the
148
- // UI and use that.
149
- let mut results_json: crate::search_results_handler::aggregation_models::SearchResults = match req.cookie("appCookie") {
150
- Some(cookie_value) => {
151
- let cookie_value:Cookie = serde_json::from_str(cookie_value.name_value().1)?;
152
- aggregate(query.clone(), page, config.aggregator.random_delay, config.debug, cookie_value.engines).await?
153
- },
154
- None => aggregate(query.clone(), page, config.aggregator.random_delay, config.debug, config.upstream_search_engines.clone()).await?,
155
- };
156
- results_json.add_style(config.style.clone());
157
- // check whether the results grabbed from the upstream engines are empty or
158
- // not if they are empty then set the empty_result_set option to true in
159
- // the result json.
160
- if results_json.is_empty_result_set() {
161
- results_json.set_empty_result_set();
162
- }
163
- redis_cache
164
- .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
165
- let page_content: String = hbs.render("search", &results_json)?;
166
- Ok(HttpResponse::Ok().body(page_content))
167
- }
168
- }
169
  }
170
  }
171
  None => Ok(HttpResponse::Found()
172
  .insert_header(("location", "/"))
@@ -174,11 +113,70 @@ pub async fn search(
174
  }
175
  }
176
177
  /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
178
  #[get("/robots.txt")]
179
  pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
180
- let page_content: String =
181
- read_to_string(format!("{}/robots.txt", handle_different_public_path()?))?;
182
  Ok(HttpResponse::Ok()
183
  .content_type("text/plain; charset=ascii")
184
  .body(page_content))
 
1
  //! This module provides the functionality to handle different routes of the `websurfx`
2
+ //! meta search engine website and provide appropriate response to each route/page
3
  //! when requested.
4
 
5
  use std::fs::read_to_string;
6
 
7
  use crate::{
8
  cache::cacher::RedisCache,
9
+ config::parser::Config,
10
+ handler::public_paths::public_path,
11
+ results::{aggregation_models::SearchResults, aggregator::aggregate},
12
  };
13
  use actix_web::{get, web, HttpRequest, HttpResponse};
14
  use handlebars::Handlebars;
 
87
  config: web::Data<Config>,
88
  ) -> Result<HttpResponse, Box<dyn std::error::Error>> {
89
  let params = web::Query::<SearchParams>::from_query(req.query_string())?;
 
 
 
90
  match &params.q {
91
  Some(query) => {
92
  if query.trim().is_empty() {
93
+ return Ok(HttpResponse::Found()
94
  .insert_header(("location", "/"))
95
+ .finish());
96
  }
97
+ let page = match &params.page {
98
+ Some(page) => *page,
99
+ None => 0,
100
+ };
101
+
102
+ let url = format!(
103
+ "http://{}:{}/search?q={}&page={}",
104
+ config.binding_ip, config.port, query, page
105
+ );
106
+ let results_json = results(url, &config, query.to_string(), page, req).await?;
107
+ let page_content: String = hbs.render("search", &results_json)?;
108
+ Ok(HttpResponse::Ok().body(page_content))
109
  }
110
  None => Ok(HttpResponse::Found()
111
  .insert_header(("location", "/"))
 
113
  }
114
  }
115
 
116
+ /// Fetches the results for a query and page.
117
+ /// First checks the redis cache; if that fails, it fetches fresh results from the upstream engines
118
+ async fn results(
119
+ url: String,
120
+ config: &Config,
121
+ query: String,
122
+ page: u32,
123
+ req: HttpRequest,
124
+ ) -> Result<SearchResults, Box<dyn std::error::Error>> {
125
+ // Initialize the redis cache connection struct
126
+ let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
127
+ // fetch the cached results json.
128
+ let cached_results_json = redis_cache.cached_json(&url);
129
+ // check whether the cached results were actually fetched or an error occurred, and
130
+ // handle the data accordingly.
131
+ match cached_results_json {
132
+ Ok(results_json) => Ok(serde_json::from_str::<SearchResults>(&results_json).unwrap()),
133
+ Err(_) => {
134
+ // check if the cookie value is empty or not; if it is empty then use the
135
+ // default selected upstream search engines from the config file, otherwise
136
+ // parse the non-empty cookie and grab the user selected engines from the
137
+ // UI and use that.
138
+ let mut results_json: crate::results::aggregation_models::SearchResults = match req
139
+ .cookie("appCookie")
140
+ {
141
+ Some(cookie_value) => {
142
+ let cookie_value: Cookie = serde_json::from_str(cookie_value.name_value().1)?;
143
+ aggregate(
144
+ query,
145
+ page,
146
+ config.aggregator.random_delay,
147
+ config.debug,
148
+ cookie_value.engines,
149
+ )
150
+ .await?
151
+ }
152
+ None => {
153
+ aggregate(
154
+ query,
155
+ page,
156
+ config.aggregator.random_delay,
157
+ config.debug,
158
+ config.upstream_search_engines.clone(),
159
+ )
160
+ .await?
161
+ }
162
+ };
163
+ results_json.add_style(config.style.clone());
164
+ // check whether the results grabbed from the upstream engines are empty or
165
+ // not; if they are empty then set the empty_result_set option to true in
166
+ // the result json.
167
+ if results_json.is_empty_result_set() {
168
+ results_json.set_empty_result_set();
169
+ }
170
+ redis_cache.cache_results(serde_json::to_string(&results_json)?, &url)?;
171
+ Ok(results_json)
172
+ }
173
+ }
174
+ }
175
+
176
  /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
177
  #[get("/robots.txt")]
178
  pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
179
+ let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
 
180
  Ok(HttpResponse::Ok()
181
  .content_type("text/plain; charset=ascii")
182
  .body(page_content))
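
The bulk of the `search` handler now lives in the extracted `results()` helper, which follows a cache-aside flow: look up the rendered results JSON in Redis keyed by the full page URL, and only on a miss run the aggregator (with either the cookie-selected or the configured engines) and write the fresh JSON back. The standalone sketch below illustrates that pattern with a plain `HashMap` standing in for Redis; the names are illustrative and not taken from the codebase.

```rust
use std::collections::HashMap;

// Cache-aside sketch: `cache` stands in for Redis and `compute` for the
// aggregator call. Illustrative only; not the project's actual API.
fn cached_or_compute<F>(cache: &mut HashMap<String, String>, key: &str, compute: F) -> String
where
    F: FnOnce() -> String,
{
    if let Some(hit) = cache.get(key) {
        return hit.clone(); // cache hit: skip the expensive computation
    }
    let fresh = compute(); // cache miss: compute and remember the result
    cache.insert(key.to_string(), fresh.clone());
    fresh
}

fn main() {
    let mut cache = HashMap::new();
    let key = "http://127.0.0.1:8080/search?q=rust&page=0";

    // First call misses the cache and stores the computed value.
    let first = cached_or_compute(&mut cache, key, || "aggregated results".to_string());
    // Second call hits the cache, so the closure is never invoked.
    let second = cached_or_compute(&mut cache, key, || "recomputed".to_string());
    assert_eq!(first, second);
}
```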
tests/index.rs CHANGED
@@ -1,7 +1,7 @@
1
  use std::net::TcpListener;
2
 
3
  use handlebars::Handlebars;
4
- use websurfx::{config_parser::parser::Config, run};
5
 
6
  // Starts a new instance of the HTTP server, bound to a random available port
7
  fn spawn_app() -> String {
@@ -41,5 +41,5 @@ async fn test_index() {
41
  assert_eq!(res.text().await.unwrap(), template);
42
  }
43
 
44
- // TODO: Write tests for tesing parameters for search function that if provided with something
45
  // other than u32 like alphabets and special characters than it should panic
 
1
  use std::net::TcpListener;
2
 
3
  use handlebars::Handlebars;
4
+ use websurfx::{config::parser::Config, run};
5
 
6
  // Starts a new instance of the HTTP server, bound to a random available port
7
  fn spawn_app() -> String {
 
41
  assert_eq!(res.text().await.unwrap(), template);
42
  }
43
 
44
+ // TODO: Write tests for the search function's parameters to check that if provided with something
45
  // other than u32 like alphabets and special characters then it should panic
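
As a starting point for the TODO above, a test could request the `/search` route with a non-numeric `page` value and assert that the response is not a success. This is only a sketch, not part of this commit: it assumes the `spawn_app()` helper defined earlier in this file, a reqwest client (the HTTP client actually used by `test_index` is not shown in this diff), a tokio-based test attribute, and that an invalid `page` produces a non-2xx response rather than a panic; the exact behaviour should be confirmed against the running server.

```rust
// Hypothetical test sketch for the TODO above; not part of this commit.
#[tokio::test]
async fn test_search_with_non_numeric_page() {
    let address = spawn_app();
    let client = reqwest::Client::new();
    let res = client
        .get(format!("{}/search?q=rust&page=abc", address))
        .send()
        .await
        .expect("request to the /search route failed");
    // `page` appears to be parsed as a u32, so "abc" should fail query extraction.
    assert!(!res.status().is_success());
}
```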