khronoz committed
Commit f730525 · unverified · 1 Parent(s): 73a5bd8

Enhancements & Refactored Code (#10)


* Fix: Button loading state not reset on 'back'

* Feat: Search post-processing to filter by average score

* Feat: Updated API status error handling

* Feat: Updated error handling & timeout duration

* Refactored imports & moved global vars to constants.py

* Feat: Updated Dockerfile to install llama-cpp-python with OpenBLAS support by default

* Add .env file and update gitignore, pipeline name, robots.txt, middleware, layout, page, sitemap, and navlink components

* Fixed Pipeline Name

* Updated Check File Size workflow

.github/workflows/check-file-size.yml CHANGED
@@ -12,6 +12,8 @@ jobs:
   check-file-size:
     runs-on: ubuntu-latest
     steps:
+      - name: Checkout
+        uses: actions/checkout@v4
       - name: Check large files
         uses: ppremk/lfs-warning@v3.2
         with:
.github/workflows/pipeline.yml CHANGED
@@ -1,4 +1,4 @@
-name: Test Build and Deploy
+name: Pipeline
 
 on:
   push:
@@ -6,6 +6,7 @@ on:
   # to run this workflow manually from the Actions tab
   workflow_dispatch:
 
+# Test, Build and Deploy the app
 jobs:
   check-file-size:
     uses: ./.github/workflows/check-file-size.yml
Dockerfile CHANGED
@@ -5,7 +5,7 @@ FROM nvidia/cuda:${CUDA_IMAGE}
 # Set up a new user named "user" with user ID 1000
 RUN useradd -m -u 1000 user
 
-# Install the dependencies
+# Install the dependencies & clean up
 RUN apt-get update && apt-get upgrade -y \
     && apt-get install -y git build-essential \
     python3.11 gcc wget \
@@ -13,7 +13,10 @@ RUN apt-get update && apt-get upgrade -y \
     cmake protobuf-compiler pkg-config \
     libclblast-dev libopenblas-dev \
     liblapack-dev liblapacke-dev libeigen3-dev libboost-all-dev \
-    && mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
+    && mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd \
+    # Cleaning cache:
+    && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+    && apt-get clean -y && rm -rf /var/lib/apt/lists/*
 
 # Install pip for python 3.11
 RUN wget https://bootstrap.pypa.io/get-pip.py && \
@@ -23,21 +26,25 @@ RUN wget https://bootstrap.pypa.io/get-pip.py && \
 # Switch to the user 'user'
 USER user
 
-# Setting build related env vars
-ENV CUDA_DOCKER_ARCH=all
-ENV LLAMA_CUBLAS=1
-
-# Set home to the user's home directory and Poetry's environment variables
-ENV HOME=/home/user \
-    PATH=/home/user/.local/bin:$PATH \
+# Setting build / container related env vars
+ENV CUDA_DOCKER_ARCH=all \
+    LLAMA_CUBLAS=1 \
+    # Set home to the user's home directory and Poetry's environment variables
+    HOME=/home/user \
+    PATH=/home/user/.local/bin:$PATH \
     PYTHONUNBUFFERED=1 \
     POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_IN_PROJECT=1 \
     POETRY_VIRTUALENVS_CREATE=1 \
     POETRY_CACHE_DIR=/tmp/poetry_cache \
-    # Build llama-cpp-python with default cuda support
-    CMAKE_ARGS="-DLLAMA_CUBLAS=on"
-    # CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
+    # Set the uvicorn env
+    ENVIRONMENT=prod \
+    ##########################################################
+    # Build llama-cpp-python with cuda support
+    # CMAKE_ARGS="-DLLAMA_CUBLAS=on"
+    # Build llama-cpp-python with openblas support on CPU
+    CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
+    ##########################################################
 
 # Set the working directory to /app
 WORKDIR $HOME/app
@@ -61,4 +68,8 @@ RUN poetry install --without dev,torch-cpu && \
 # Change to the package directory
 WORKDIR $HOME/app/backend
 
+# Make port 8000 available to the world outside this container
+EXPOSE 8000
+
+# Run the app when the container launches
 CMD ["poetry", "run", "uvicorn", "main:app", "--host", "0.0.0.0"]
backend/.gitignore CHANGED
@@ -1,2 +1,3 @@
 __pycache__
 storage
+.env
backend/backend/app/api/routers/chat.py CHANGED
@@ -1,8 +1,6 @@
 import logging
 from typing import List
 
-from app.utils.index import get_index
-from app.utils.json import json_to_model
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from fastapi.responses import StreamingResponse
 from fastapi.websockets import WebSocketDisconnect
@@ -13,6 +11,9 @@ from llama_index.memory import ChatMemoryBuffer
 from llama_index.prompts import PromptTemplate
 from pydantic import BaseModel
 
+from backend.app.utils.index import get_index
+from backend.app.utils.json import json_to_model
+
 chat_router = r = APIRouter()
 
 """
backend/backend/app/api/routers/query.py CHANGED
@@ -1,8 +1,6 @@
 import logging
 from typing import List
 
-from app.utils.index import get_index
-from app.utils.json import json_to_model
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from fastapi.responses import StreamingResponse
 from fastapi.websockets import WebSocketDisconnect
@@ -10,6 +8,9 @@ from llama_index import VectorStoreIndex
 from llama_index.llms.types import MessageRole
 from pydantic import BaseModel
 
+from backend.app.utils.index import get_index
+from backend.app.utils.json import json_to_model
+
 query_router = r = APIRouter()
 
 """
backend/backend/app/api/routers/search.py CHANGED
@@ -1,12 +1,13 @@
 import logging
 import re
 
-from app.utils.index import get_index
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from llama_index import VectorStoreIndex
 from llama_index.postprocessor import SimilarityPostprocessor
 from llama_index.retrievers import VectorIndexRetriever
 
+from backend.app.utils.index import get_index
+
 search_router = r = APIRouter()
 
 """
@@ -36,17 +37,22 @@ async def search(
         index=index,
         similarity_top_k=10,
     )
-    # similarity postprocessor: filter nodes below 0.45 similarity score
-    node_postprocessor = SimilarityPostprocessor(similarity_cutoff=0.45)
 
     # retrieve results
     query_results = retriever.retrieve(query)
 
     query_results_scores = [result.get_score() for result in query_results]
 
+    # get average score
+    average_score = sum(query_results_scores) / len(query_results_scores)
+
     logger.info(f"Search results similarity score: {query_results_scores}")
+    logger.info(f"Average similarity score: {average_score}")
+
+    # similarity postprocessor: filter nodes below 0.45 similarity score
+    node_postprocessor = SimilarityPostprocessor(similarity_cutoff=average_score)
 
-    # postprocess results
+    # postprocess results based on average score
    filtered_results = node_postprocessor.postprocess_nodes(query_results)
 
    filtered_results_scores = [result.get_score() for result in filtered_results]
@@ -68,9 +74,7 @@ async def search(
             "^_+ | _+$", "", node_dict["text"]
         )  # remove leading and trailing underscores
         data["text"] = cleaned_text
-        data["similarity_score"] = round(
-            node.get_score(), 2
-        )  # round to 2 decimal places
+        data["similarity_score"] = node.get_score()
         response.append(data)
         id += 1
 
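Note on the search change above: the similarity cutoff is now derived from the mean of the retrieved scores rather than the fixed 0.45. A minimal, standalone sketch of that filtering idea in plain Python (using a hypothetical ScoredNode type rather than llama_index's SimilarityPostprocessor):

from dataclasses import dataclass
from typing import List


@dataclass
class ScoredNode:
    text: str
    score: float


def filter_by_average_score(nodes: List[ScoredNode]) -> List[ScoredNode]:
    # Keep only nodes scoring at or above the batch's mean similarity score.
    if not nodes:
        return []
    average_score = sum(node.score for node in nodes) / len(nodes)
    return [node for node in nodes if node.score >= average_score]


if __name__ == "__main__":
    results = [ScoredNode("a", 0.82), ScoredNode("b", 0.41), ScoredNode("c", 0.63)]
    # mean is ~0.62, so "a" and "c" pass the cutoff while "b" is dropped
    print([node.text for node in filter_by_average_score(results)])

Whether a node sitting exactly at the mean survives depends on the postprocessor's comparison; the sketch keeps it.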
 
backend/backend/app/utils/contants.py ADDED
@@ -0,0 +1,39 @@
+########################################################################
+# Model Constants for the backend app                                  #
+########################################################################
+from pathlib import Path
+
+from torch.cuda import is_available as is_cuda_available
+
+# Model Constants
+MAX_NEW_TOKENS = 4096
+CONTEXT_SIZE = MAX_NEW_TOKENS
+DEVICE_TYPE = "cuda" if is_cuda_available() else "cpu"
+
+# Get the current directory
+CUR_DIR = Path.cwd()
+
+STORAGE_DIR = str(CUR_DIR / "storage")  # directory to cache the generated index
+DATA_DIR = str(CUR_DIR / "data")  # directory containing the documents to index
+
+# LLM Model Constants
+LLM_MODEL_URL = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf"
+# Model Kwargs
+# set to at least 1 to use GPU, adjust according to your GPU memory, but must be able to fit the model
+MODEL_KWARGS = {"n_gpu_layers": 100} if DEVICE_TYPE == "cuda" else {}
+
+# Service Context Constants
+CHUNK_SIZE = 1000
+CHUNK_OVERLAP = 100
+
+# Embedding Model Constants
+EMBED_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
+EMBED_POOLING = "mean"
+
+# Prompt Helper Constants
+# set maximum input size
+CHUNK_SIZE_LIMIT = MAX_NEW_TOKENS
+# set number of output tokens
+NUM_OUTPUT = 256
+# set maximum chunk overlap
+CHUNK_OVERLAP_RATIO = 0.2
backend/backend/app/utils/index.py CHANGED
@@ -1,6 +1,5 @@
 import logging
 import os
-from pathlib import Path
 
 from llama_index import (
     PromptHelper,
@@ -17,29 +16,26 @@ from llama_index.llms.llama_utils import (
     completion_to_prompt,
     messages_to_prompt,
 )
-from torch.cuda import is_available as is_cuda_available
 
-MAX_NEW_TOKENS = 4096
-CONTEXT_SIZE = MAX_NEW_TOKENS
-MODEL_ID = "TheBloke/Llama-2-7B-Chat-GGUF"
-DEVICE_TYPE = "cuda" if is_cuda_available() else "cpu"
-
-# Get the current directory
-current_directory = Path.cwd()
-
-STORAGE_DIR = str(
-    current_directory / "storage"
-)  # directory to cache the generated index
-DATA_DIR = str(
-    current_directory / "data"
-)  # directory containing the documents to index
-
-
-# set to at least 1 to use GPU, adjust according to your GPU memory, but must be able to fit the model
-model_kwargs = {"n_gpu_layers": 100} if DEVICE_TYPE == "cuda" else {}
+from backend.app.utils.contants import (
+    CHUNK_OVERLAP,
+    CHUNK_OVERLAP_RATIO,
+    CHUNK_SIZE,
+    CHUNK_SIZE_LIMIT,
+    CONTEXT_SIZE,
+    DATA_DIR,
+    DEVICE_TYPE,
+    EMBED_MODEL_NAME,
+    EMBED_POOLING,
+    LLM_MODEL_URL,
+    MAX_NEW_TOKENS,
+    MODEL_KWARGS,
+    NUM_OUTPUT,
+    STORAGE_DIR,
+)
 
 llm = LlamaCPP(
-    model_url="https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf",
+    model_url=LLM_MODEL_URL,
     temperature=0.1,
     max_new_tokens=MAX_NEW_TOKENS,
     # llama2 has a context window of 4096 tokens, but we set it lower to allow for some wiggle room
@@ -47,7 +43,7 @@ llm = LlamaCPP(
     # kwargs to pass to __call__()
     # generate_kwargs={},
     # kwargs to pass to __init__()
-    model_kwargs=model_kwargs,
+    model_kwargs=MODEL_KWARGS,
     # transform inputs into Llama2 format
     messages_to_prompt=messages_to_prompt,
     completion_to_prompt=completion_to_prompt,
@@ -63,22 +59,22 @@ num_output = 256
 max_chunk_overlap = 0.2
 
 embed_model = HuggingFaceEmbedding(
-    model_name="sentence-transformers/all-MiniLM-L6-v2",
-    pooling="mean",
+    model_name=EMBED_MODEL_NAME,
+    pooling=EMBED_POOLING,
     device=DEVICE_TYPE,
 )
 
 prompt_helper = PromptHelper(
-    chunk_size_limit=4096,
-    chunk_overlap_ratio=0.2,
-    num_output=256,
+    chunk_size_limit=CHUNK_SIZE_LIMIT,
+    chunk_overlap_ratio=CHUNK_OVERLAP_RATIO,
+    num_output=NUM_OUTPUT,
 )
 
 service_context = ServiceContext.from_defaults(
     llm=llm,
     embed_model=embed_model,
-    chunk_size=1000,
-    chunk_overlap=100,
+    chunk_size=CHUNK_SIZE,
+    chunk_overlap=CHUNK_OVERLAP,
     prompt_helper=prompt_helper,
 )
 
backend/backend/main.py CHANGED
@@ -1,29 +1,31 @@
 import logging
 import os
 
-from app.api.routers.chat import chat_router
-from app.api.routers.healthcheck import healthcheck_router
-from app.api.routers.query import query_router
-from app.api.routers.search import search_router
-from app.utils.index import create_index
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from torch.cuda import is_available as is_cuda_available
 
+from backend.app.api.routers.chat import chat_router
+from backend.app.api.routers.healthcheck import healthcheck_router
+from backend.app.api.routers.query import query_router
+from backend.app.api.routers.search import search_router
+from backend.app.utils.index import create_index
+
 load_dotenv()
 
 app = FastAPI()
 
 environment = os.getenv("ENVIRONMENT", "dev")  # Default to 'development' if not set
 
-# TODO: Add reading allowed origins from environment variables
+# Add allowed origins from environment variables
+allowed_origins = os.getenv("ALLOWED_ORIGINS", "*")
 
 if environment == "dev":
     logger = logging.getLogger("uvicorn")
     logger.warning("Running in development mode - allowing CORS for all origins")
     app.add_middleware(
-        CORSMiddleware,
+        middleware_class=CORSMiddleware,
         allow_origins=["*"],
         allow_credentials=True,
         allow_methods=["*"],
@@ -32,19 +34,15 @@ if environment == "dev":
 
 if environment == "prod":
     # In production, specify the allowed origins
-    allowed_origins = [
-        "https://your-production-domain.com",
-        "https://another-production-domain.com",
-        # Add more allowed origins as needed
-    ]
+    allowed_origins = allowed_origins.split(",") if allowed_origins != "*" else ["*"]
 
     logger = logging.getLogger("uvicorn")
     logger.info(f"Running in production mode - allowing CORS for {allowed_origins}")
     app.add_middleware(
-        CORSMiddleware,
+        middleware_class=CORSMiddleware,
         allow_origins=allowed_origins,
         allow_credentials=True,
-        allow_methods=["GET", "POST", "PUT", "DELETE"],
+        allow_methods=["GET", "POST"],
         allow_headers=["*"],
     )
backend/example.env ADDED
@@ -0,0 +1 @@
+ALLOWED_ORIGINS=http://localhost:3000
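The new example.env ties into the ALLOWED_ORIGINS handling added to main.py above: in production the comma-separated value is split into a list before being handed to CORSMiddleware, while "*" (or an unset variable) keeps the allow-all behaviour. A rough standalone sketch of just that parsing step (the helper name parse_allowed_origins and the second origin are illustrative, not part of the codebase):

import os


def parse_allowed_origins(raw: str) -> list[str]:
    # Mirrors the main.py behaviour: "*" allows every origin,
    # otherwise the comma-separated value becomes a list of origins.
    return raw.split(",") if raw != "*" else ["*"]


if __name__ == "__main__":
    os.environ.setdefault("ALLOWED_ORIGINS", "http://localhost:3000,https://example.com")
    allowed_origins = parse_allowed_origins(os.getenv("ALLOWED_ORIGINS", "*"))
    print(allowed_origins)  # ['http://localhost:3000', 'https://example.com']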
frontend/app/about/page.tsx CHANGED
@@ -3,7 +3,7 @@
 export default function About() {
 
   return (
-    <div className="rounded-xl shadow-xl p-4 mb-8 max-w-5xl w-full">
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full">
       <div className="max-w-2xl mx-auto p-4">
         <div className="bg-gradient-to-r from-blue-500 to-indigo-500 text-white p-8 rounded-lg shadow-lg">
           <h1 className="text-2xl md:text-4xl font-bold mb-4">About Smart Retrieval</h1>
frontend/app/components/footer.tsx ADDED
@@ -0,0 +1,40 @@
+"use client";
+
+import { FooterNavLink } from "./ui/navlink";
+import { IconGitHub } from "./ui/icons";
+import { Text, Cookie } from "lucide-react";
+
+export default function Footer() {
+  return (
+    <footer>
+      <div className="flex flex-col items-center justify-center bg-gray-800 text-white p-4 mb-4 rounded-lg shadow-xl">
+        <div className="flex flex-col items-center">
+          <p className="text-sm text-center">
+            © 2024 JTC DBE. All rights reserved.
+          </p>
+        </div>
+        <div className="flex items-center mt-2 gap-4">
+          <FooterNavLink href="https://github.com/digitalbuiltenvironment/Smart-Retrieval/" target="_blank">
+            <div className="text-sm text-center underline">
+              <IconGitHub className="h-5 w-5 inline mr-2 mb-1" />
+              Github
+            </div>
+          </FooterNavLink>
+          <FooterNavLink href="/terms-of-service">
+            <div className="text-sm text-center underline">
+              <Text className="h-5 w-5 inline mr-2 mb-1" />
+              Terms of Service
+            </div>
+          </FooterNavLink>
+          <FooterNavLink href="/privacy-policy">
+            <div className="text-sm text-center underline">
+              <Cookie className="h-5 w-5 inline mr-2 mb-1" />
+              Privacy Policy
+            </div>
+          </FooterNavLink>
+        </div>
+      </div>
+    </footer>
+  );
+}
+
frontend/app/components/header.tsx CHANGED
@@ -1,140 +1,47 @@
 "use client";
 
-import Link from 'next/link';
 import Image from 'next/image';
 import { Home, InfoIcon, MessageCircle, Search, FileQuestion, Menu, X } from 'lucide-react';
-import { usePathname } from 'next/navigation';
 import { useTheme } from "next-themes";
-import { useEffect, useState, useRef } from "react";
+import { useEffect, useState } from "react";
 import { useMedia } from 'react-use';
-import useSWR from 'swr'
-import logo from '../../public/smart-retrieval-logo.webp'
-
-interface NavLinkProps {
-  href: string;
-  children: React.ReactNode;
-  onClick?: () => void; // Include onClick as an optional prop
-}
-
-interface MobileMenuProps {
-  isOpen: boolean;
-  onClose: () => void;
-}
-
-const MobileMenu: React.FC<MobileMenuProps> = ({ isOpen, onClose }) => {
-  const isLargeScreen = useMedia('(min-width: 1024px)', false);
-  const menuRef = useRef<HTMLDivElement | null>(null);
-
-  useEffect(() => {
-    const handleOutsideClick = (event: MouseEvent | TouchEvent) => {
-      if (
-        !isLargeScreen &&
-        isOpen &&
-        !menuRef.current?.contains(event.target as Node) &&
-        !((event.target as HTMLElement).closest('.toggle-button')) // Exclude the toggle button
-      ) {
-        onClose(); // Close the menu
-      }
-    };
-
-    if (!isLargeScreen && isOpen) {
-      // Add event listeners for both mouse and touch events
-      document.addEventListener('mousedown', handleOutsideClick);
-    }
-
-    return () => {
-      // Remove the event listener when the component unmounts
-      document.removeEventListener('mousedown', handleOutsideClick);
-    };
-  }, [isLargeScreen, isOpen, onClose]);
-
-  useEffect(() => {
-    if (isLargeScreen && isOpen) {
-      onClose();
-    }
-  }, [isLargeScreen, isOpen, onClose]);
-  return (
-    <div ref={menuRef} className={`w-full h-full p-2 bg-opacity-80 ${isOpen ? 'flex' : 'hidden'}`}>
-      <div className="flex items-center justify-center mt-2" style={{ width: '9%', height: '9%' }}>
-        <Image
-          className='rounded-full max-w-full'
-          src={logo}
-          alt="Logo"
-          style={{
-            width: 'auto',
-            height: 'auto',
-          }}
-          priority
-          sizes="100vw, 50vw, 33vw"
-        />
-      </div>
-      <div className="flex items-center justify-center h-full">
-        {/* Mobile menu content */}
-        <div className="w-64 p-4 rounded-r-md">
-          <NavLink href="/" onClick={onClose}>
-            <div className="flex items-center mb-4">
-              <Home className="mr-2 h-5 w-5" />
-              Home
-            </div>
-          </NavLink>
-          <NavLink href="/about" onClick={onClose}>
-            <div className="flex items-center mb-4">
-              <InfoIcon className="mr-2 h-5 w-5" />
-              About
-            </div>
-          </NavLink>
-          <NavLink href="/chat" onClick={onClose}>
-            <div className="flex items-center mb-4">
-              <MessageCircle className="mr-2 h-5 w-5" />
-              Chat
-            </div>
-          </NavLink>
-          <NavLink href="/query" onClick={onClose}>
-            <div className="flex items-center mb-4">
-              <FileQuestion className="mr-2 h-5 w-5" />
-              Q&A
-            </div>
-          </NavLink>
-          <NavLink href="/search" onClick={onClose}>
-            <div className="flex items-center">
-              <Search className="mr-2 h-5 w-5" />
-              Search
-            </div>
-          </NavLink>
-        </div>
-      </div>
-    </div>
-  );
-};
-
-const NavLink: React.FC<NavLinkProps> = ({ href, children, onClick }) => {
-  // Use the useRouter hook to get information about the current route
-  const pathname = usePathname();
-
-  // Determine if the current tab is active
-  const isActive = pathname === href;
-
-  const handleClick = () => {
-    if (onClick) {
-      onClick(); // Call the onClick handler if provided
-    }
-  };
-
-  return (
-    <Link href={href} passHref>
-      {/* Add a class to highlight the active tab */}
-      <div className={`flex items-center font-bold ${isActive ? 'text-blue-500' : ''}`} onClick={handleClick}>
-        {children}
-      </div>
-    </Link>
-  );
-};
+import useSWR from 'swr';
+import logo from '../../public/smart-retrieval-logo.webp';
+import { HeaderNavLink } from './ui/navlink';
+import { MobileMenu } from './ui/mobilemenu';
+
+const MobileMenuItems = [
+  {
+    href: '/',
+    icon: <Home className="mr-2 h-5 w-5" />,
+    label: 'Home',
+  },
+  {
+    href: '/about',
+    icon: <InfoIcon className="mr-2 h-5 w-5" />,
+    label: 'About',
+  },
+  {
+    href: '/chat',
+    icon: <MessageCircle className="mr-2 h-5 w-5" />,
+    label: 'Chat',
+  },
+  {
+    href: '/query',
+    icon: <FileQuestion className="mr-2 h-5 w-5" />,
+    label: 'Q&A',
+  },
+  {
+    href: '/search',
+    icon: <Search className="mr-2 h-5 w-5" />,
+    label: 'Search',
+  },
+];
 
 export default function Header() {
   const isLargeScreen = useMedia('(min-width: 1024px)', false);
   const [mounted, setMounted] = useState(false);
   const { theme, setTheme } = useTheme();
-  // const [apiStatus, setApiStatus] = useState(false);
   // Use SWR for API status fetching
   const healthcheck_api = process.env.NEXT_PUBLIC_HEALTHCHECK_API;
   const { data: apiStatus, error: apiError } = useSWR(healthcheck_api, async (url) => {
@@ -149,7 +56,7 @@ export default function Header() {
       const data = await response.json();
       return data;
     } catch (error: any) {
-      console.error('Error fetching Backend API Status:', error.message);
+      console.error('Error fetching Backend API Status');
       throw error;
     }
   }, {
@@ -158,7 +65,14 @@ export default function Header() {
     refreshInterval: 60000, // Revalidate every 60 seconds
   });
   if (apiError) {
-    console.error('[Header] Error fetching Backend API Status:', apiError.message);
+    if (apiError.name === 'AbortError') {
+      console.error('[Header] Error fetching Backend API Status: Request timed out');
+    }
+    else {
+      console.error('[Header] Error fetching Backend API Status:', apiError.message);
+    }
+  } else {
+    console.log('[Header] API Status:', apiStatus);
   }
 
   useEffect(() => {
@@ -222,43 +136,43 @@ export default function Header() {
         </button>
       </div>
       {/* Mobile menu component */}
-      <MobileMenu isOpen={isMobileMenuOpen} onClose={() => setMobileMenuOpen(false)} />
+      <MobileMenu isOpen={isMobileMenuOpen} onClose={() => setMobileMenuOpen(false)} logoSrc={logo} items={MobileMenuItems} />
       <div className={`hidden items-center gap-4 lg:flex`}>
-        <NavLink href="/">
+        <HeaderNavLink href="/">
           <div className="flex items-center transition duration-300 ease-in-out transform hover:scale-125">
             <Home className="mr-1 h-4 w-4" />
             Home
           </div>
-        </NavLink>
-        <NavLink href="/about">
+        </HeaderNavLink>
+        <HeaderNavLink href="/about">
           <div className="flex items-center transition duration-300 ease-in-out transform hover:scale-125">
             <InfoIcon className="mr-1 h-4 w-4" />
             About
           </div>
-        </NavLink>
-        <NavLink href="/chat">
+        </HeaderNavLink>
+        <HeaderNavLink href="/chat">
           <div className="flex items-center transition duration-300 ease-in-out transform hover:scale-125">
             <MessageCircle className="mr-1 h-4 w-4" />
             Chat
           </div>
-        </NavLink>
-        <NavLink href="/query">
+        </HeaderNavLink>
+        <HeaderNavLink href="/query">
           <div className="flex items-center transition duration-300 ease-in-out transform hover:scale-125">
             <FileQuestion className="mr-1 h-4 w-4" />
             Q&A
           </div>
-        </NavLink>
-        <NavLink href="/search">
+        </HeaderNavLink>
+        <HeaderNavLink href="/search">
           <div className="flex items-center transition duration-300 ease-in-out transform hover:scale-125">
             <Search className="mr-1 h-4 w-4" />
             Search
           </div>
-        </NavLink>
+        </HeaderNavLink>
       </div>
       <div className="flex items-center ml-auto">
         {/* Status Page Button/Indicator */}
         <span className='flex items-center mr-1'>API:</span>
-        <NavLink href='/status'>
+        <HeaderNavLink href='/status'>
           <div className="flex items-center mr-2 text-xl transition duration-300 ease-in-out transform hover:scale-125">
             {apiError ? (
               <span role="img" aria-label="red circle">
@@ -270,7 +184,7 @@ export default function Header() {
               </span>
             )}
           </div>
-        </NavLink>
+        </HeaderNavLink>
         <span className="lg:text-lg font-nunito">|</span>
         {/* Toggle button with icon based on the theme */}
         <button
frontend/app/components/login-buttons.tsx CHANGED
@@ -1,12 +1,11 @@
 'use client'
 
-import { useState } from 'react'
+import { useState, useEffect } from 'react'
 import { signIn } from 'next-auth/react'
 
 import { cn } from '@/app/components/ui/lib/utils'
 import { Button, type ButtonProps } from '@/app/components/ui/button'
 import { IconGoogle, IconSGid, IconSpinner } from '@/app/components/ui/icons'
-import { useTheme } from 'next-themes';
 
 interface LoginButtonProps extends ButtonProps {
   showIcon?: boolean;
@@ -20,7 +19,10 @@ function GoogleLoginButton({
   ...props
 }: LoginButtonProps) {
   const [isLoading, setIsLoading] = useState(false);
-  const { theme } = useTheme();
+
+  useEffect(() => {
+    setIsLoading(false);
+  }, []);
 
   return (
     <Button
frontend/app/components/ui/mobilemenu.tsx ADDED
@@ -0,0 +1,86 @@
+"use client";
+
+import { useEffect, useRef } from 'react';
+import Image, { StaticImageData } from 'next/image';
+import { HeaderNavLink } from '@/app/components/ui/navlink';
+import { useMedia } from 'react-use';
+
+interface MenuItem {
+  href: string;
+  icon: React.ReactNode;
+  label: string;
+}
+
+interface MobileMenuProps {
+  isOpen: boolean;
+  onClose: () => void;
+  logoSrc: StaticImageData;
+  items: MenuItem[];
+}
+
+const MobileMenu: React.FC<MobileMenuProps> = ({ isOpen, onClose, logoSrc, items }) => {
+  const isLargeScreen = useMedia('(min-width: 1024px)', false);
+  const menuRef = useRef<HTMLDivElement | null>(null);
+
+  useEffect(() => {
+    const handleOutsideClick = (event: MouseEvent | TouchEvent) => {
+      if (
+        !isLargeScreen &&
+        isOpen &&
+        !menuRef.current?.contains(event.target as Node) &&
+        !((event.target as HTMLElement).closest('.toggle-button')) // Exclude the toggle button
+      ) {
+        onClose(); // Close the menu
+      }
+    };
+
+    if (!isLargeScreen && isOpen) {
+      // Add event listeners for both mouse and touch events
+      document.addEventListener('mousedown', handleOutsideClick);
+    }
+
+    return () => {
+      // Remove the event listener when the component unmounts
+      document.removeEventListener('mousedown', handleOutsideClick);
+    };
+  }, [isLargeScreen, isOpen, onClose]);
+
+  useEffect(() => {
+    if (isLargeScreen && isOpen) {
+      onClose();
+    }
+  }, [isLargeScreen, isOpen, onClose]);
+
+  return (
+    <div ref={menuRef} className={`w-full h-full p-2 bg-opacity-80 ${isOpen ? 'flex' : 'hidden'}`}>
+      <div className="flex items-center justify-center mt-2" style={{ width: '9%', height: '9%' }}>
+        <Image
+          className='rounded-full max-w-full'
+          src={logoSrc}
+          alt="Logo"
+          style={{
+            width: 'auto',
+            height: 'auto',
+          }}
+          priority
+          sizes="100vw, 50vw, 33vw"
+        />
+      </div>
+      <div className="flex items-center justify-center h-full">
+        {/* Mobile menu content */}
+        <div className="w-64 p-4 rounded-r-md">
+          {items.map((item, index) => (
+            <HeaderNavLink key={index} href={item.href} onClick={onClose}>
+              <div className="flex items-center mb-4">
+                {item.icon}
+                {item.label}
+              </div>
+            </HeaderNavLink>
+          ))}
+        </div>
+      </div>
+    </div>
+  );
+};
+
+export { MobileMenu };
frontend/app/components/ui/navlink.tsx ADDED
@@ -0,0 +1,53 @@
+"use client";
+
+import { usePathname } from 'next/navigation';
+import Link from 'next/link';
+
+export interface NavLinkProps {
+  href: string;
+  children: React.ReactNode;
+  onClick?: () => void; // Include onClick as an optional prop
+  target?: string;
+}
+
+const HeaderNavLink: React.FC<NavLinkProps> = ({ href, children, onClick }) => {
+  // Use the useRouter hook to get information about the current route
+  const pathname = usePathname();
+
+  // Determine if the current tab is active
+  const isActive = pathname === href;
+
+  const handleClick = () => {
+    if (onClick) {
+      onClick(); // Call the onClick handler if provided
+    }
+  };
+
+  return (
+    <Link href={href} passHref>
+      {/* Add a class to highlight the active tab */}
+      <div className={`flex items-center font-bold ${isActive ? 'text-blue-500' : ''}`} onClick={handleClick}>
+        {children}
+      </div>
+    </Link>
+  );
+};
+
+const FooterNavLink: React.FC<NavLinkProps> = ({ href, children, onClick, target }) => {
+  const handleClick = () => {
+    if (onClick) {
+      onClick(); // Call the onClick handler if provided
+    }
+  };
+
+  return (
+    <Link href={href} passHref target={target}>
+      {/* Add a class to highlight the active tab */}
+      <div className="flex items-center font-bold" onClick={handleClick}>
+        {children}
+      </div>
+    </Link>
+  );
+}
+
+export { HeaderNavLink, FooterNavLink }
frontend/app/components/ui/search/useSearch.tsx CHANGED
@@ -33,12 +33,16 @@ const useSearch = (): UseSearchResult => {
         return;
       }
       const response = await fetch(`${search_api}?query=${query}`, {
-        signal: AbortSignal.timeout(60000), // Abort the request if it takes longer than 60 seconds
+        signal: AbortSignal.timeout(120000), // Abort the request if it takes longer than 120 seconds
       });
       const data = await response.json();
       setSearchResults(data);
-    } catch (error) {
-      console.error("Error during search:", error);
+    } catch (error: any) {
+      if (error.name === "AbortError") {
+        console.error("Error fetching search results: Request timed out");
+      } else {
+        console.error("Error fetching search results:", error.message);
+      }
       setSearchResults([]);
     }
 
frontend/app/layout.tsx CHANGED
@@ -3,6 +3,7 @@ import { Inter } from "next/font/google";
 import "./globals.css";
 import { Providers } from './providers'
 import Header from "@/app/components/header";
+import Footer from "@/app/components/footer";
 import Main from "@/app/components/ui/main-container";
 
 const inter = Inter({ subsets: ["latin"] });
@@ -33,6 +34,7 @@ export default function RootLayout({
         <Main>
           <Header />
           {children}
+          <Footer />
         </Main>
       </Providers>
     </body>
frontend/app/page.tsx CHANGED
@@ -10,7 +10,7 @@ export default function Home() {
   const [isLoading, setIsLoading] = useState(false);
 
   return (
-    <div className="rounded-xl shadow-xl p-4 mb-8 max-w-5xl w-full">
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full">
       <div className="max-w-2xl mx-auto p-4 text-center">
         <div className="flex flex-col items-center mb-4 bg-gradient-to-r from-blue-500 to-indigo-500 text-white p-8 rounded-lg shadow-lg">
           <div className="flex flex-col md:flex-row items-center md:items-start">
frontend/app/privacy-policy/page.tsx ADDED
@@ -0,0 +1,92 @@
+"use client";
+
+const PrivacyPolicyPage: React.FC = () => {
+  return (
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full">
+      <div className="max-w-3xl mx-auto p-4">
+        <div className="bg-gradient-to-r from-blue-500 to-indigo-500 text-white p-8 rounded-lg shadow-lg">
+          <h1 className="text-2xl md:text-4xl font-bold mb-4">Privacy Policy</h1>
+          <span>
+            <p className="mb-2 gap-2">
+              Smart-Retrieval the website (hereinafter referred to as the "Service").
+            </p>
+            <p className="mb-2">
+              This page informs you of our policies regarding the collection, use, and disclosure of personal data when you use our Service and the choices you have associated with that data.
+            </p>
+            <p className="mb-2">
+              Smart-Retrieval currently does not collect & store any personal data. However, we may collect your data to provide and improve the Service in the future.
+            </p>
+            <p>
+              By using the Service, you agree to the collection and use of information in accordance with this policy. Unless otherwise defined in this Privacy Policy, terms used in this Privacy Policy have the same meanings as in our Terms and Conditions, accessible from Smart-Retrieval.
+            </p>
+          </span>
+          <h2 className="text-xl md:text-2xl font-bold mb-4 mt-4">Information Collection And Use</h2>
+          <span>
+            <p className="mb-4">
+              We collect several different types of information for various purposes to provide and improve our Service to you.
+            </p>
+          </span>
+          <h2 className="text-lg md:text-xl font-bold mb-4">Types of Data Collected</h2>
+          <span>
+            <h2 className="text-l md:text-xl font-bold mb-4">Personal Data</h2>
+            <span>
+              <p className="mb-2">
+                While using our Service, we may ask you to provide us with certain personally identifiable information that can be used to contact or identify you ("Personal Data").
+              </p>
+              <p className="mb-2">
+                Personally identifiable information may include, but is not limited to:
+              </p>
+              <ul className="list-disc list-inside mt-2 ml-4">
+                <li>Email Address</li>
+                <li>First name and last name</li>
+                <li>Cookies and Usage Data</li>
+              </ul>
+            </span>
+            <h2 className="text-l md:text-xl font-bold mb-4 mt-4">Usage Data</h2>
+            <span>
+              <p className="mb-2">
+                We may also collect information how the Service is accessed and used ("Usage Data"). This Usage Data may include information such as your computer's Internet Protocol address (e.g. IP address), browser type, browser version, the pages of our Service that you visit, the time and date of your visit, the time spent on those pages, unique device identifiers and other diagnostic data.
+              </p>
+              <p className="mb-2">
+                This Usage Data may be collected automatically when using the Service.
+              </p>
+            </span>
+            <h2 className="text-l md:text-xl font-bold mb-4 mt-4">Tracking & Cookies Data</h2>
+            <span>
+              <p className="mb-2">
+                We use cookies and similar tracking technologies to track the activity on our Service and hold certain information.
+              </p>
+              <p className="mb-2">
+                Cookies are files with small amount of data which may include an anonymous unique identifier. Cookies are sent to your browser from a website and stored on your device. Tracking technologies also used are beacons, tags, and scripts to collect and track information and to improve and analyze our Service.
+              </p>
+              <p className="mb-2">
+                You can instruct your browser to refuse all cookies or to indicate when a cookie is being sent. However, if you do not accept cookies, you may not be able to use some portions of our Service.
+              </p>
+              <p>
+                Examples of Cookies we use:
+              </p>
+              <ul className="list-disc list-inside mt-2 ml-4">
+                <li>Session Cookies. We use Session Cookies to operate our Service.</li>
+                <li>Preference Cookies. We use Preference Cookies to remember your preferences and various settings.</li>
+                <li>Security Cookies. We use Security Cookies for security purposes.</li>
+              </ul>
+            </span>
+            <h2 className="text-l md:text-xl font-bold mb-4 mt-4">Use of Data</h2>
+            <span>
+              <p className="mb-2">
+                Smart-Retrieval uses the collected data for various purposes:
+              </p>
+              <ul className="list-disc list-inside mt-2 ml-4">
+                <li>To provide and maintain the Service.</li>
+                <li>To notify you about changes to our Service.</li>
+                <li>To allow you to participate in interactive features of our Service when you choose to do so.</li>
+              </ul>
+            </span>
+          </span>
+        </div>
+      </div>
+    </div>
+  );
+};
+
+export default PrivacyPolicyPage;
frontend/app/sign-in/page.tsx CHANGED
@@ -4,7 +4,7 @@ import { GoogleLoginButton, SGIDLoginButton } from '@/app/components/login-butto
 
 const SignInPage = () => {
   return (
-    <div className="rounded-xl shadow-xl p-4 mb-8 max-w-5xl w-full">
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full">
       <div className="max-w-2xl mx-auto text-center">
         <div className="flex flex-col items-center justify-center py-10 space-y-4">
           <div className="bg-blue-500 text-white px-8 py-6 rounded-lg shadow-lg">
frontend/app/status/page.tsx CHANGED
@@ -21,7 +21,7 @@ const StatusPage = () => {
       const data = await response.json();
       return data;
     } catch (error: any) {
-      console.error('Error fetching Backend API Status:', error.message);
+      console.error('Error fetching Backend API Status');
       throw error;
     }
   }, {
@@ -30,7 +30,12 @@ const StatusPage = () => {
     refreshInterval: 60000, // Revalidate every 60 seconds
   });
   if (error) {
-    console.error('[status] Error fetching Backend API Status:', error.message);
+    if (error.name === 'AbortError') {
+      console.error('[status] Error fetching Backend API Status: Request timed out');
+    }
+    else {
+      console.error('[status] Error fetching Backend API Status:', error.message);
+    }
   }
 
   const apiStatus = error ? '❌' : '✅';
@@ -46,7 +51,7 @@ const StatusPage = () => {
   };
 
   return (
-    <div className="rounded-xl shadow-xl p-4 mb-8 max-w-5xl w-full bg-white dark:bg-zinc-700/30">
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full bg-white dark:bg-zinc-700/30">
      <div className="max-w-2xl space-y-2 p-4">
        <h1 className="text-xl font-bold">Backend API Status</h1>
        <p>
frontend/app/terms-of-service/page.tsx ADDED
@@ -0,0 +1,31 @@
+"use client";
+
+import React from 'react';
+
+const TermsOfServicePage: React.FC = () => {
+  return (
+    <div className="rounded-xl shadow-xl p-4 max-w-5xl w-full">
+      <div className="max-w-3xl mx-auto p-4">
+        <div className="bg-gradient-to-r from-blue-500 to-indigo-500 text-white p-8 rounded-lg shadow-lg">
+          <h1 className="text-2xl md:text-4xl font-bold mb-4">Terms of Service</h1>
+          <span>
+            <p className="mb-2 gap-2">
+              Smart-Retrieval the website (hereinafter referred to as the "Service").
+            </p>
+            <p className="mb-2">
+              This page informs you of our policies regarding the collection, use, and disclosure of personal data when you use our Service and the choices you have associated with that data.
+            </p>
+            <p className="mb-2">
+              Smart-Retrieval currently does not collect & store any personal data. However, we may collect your data to provide and improve the Service in the future.
+            </p>
+            <p>
+              By using the Service, you agree to the collection and use of information in accordance with this policy. Unless otherwise defined in this Privacy Policy, terms used in this Privacy Policy have the same meanings as in our Terms and Conditions, accessible from Smart-Retrieval.
+            </p>
+          </span>
+        </div>
+      </div>
+    </div>
+  );
+};
+
+export default TermsOfServicePage;
frontend/middleware.ts CHANGED
@@ -6,5 +6,5 @@ export { default } from "next-auth/middleware"
 
 // Ensure auth is required for all except the following paths
 export const config = {
-  matcher: ['/((?!api/auth|_next/static|_next/image|favicon.ico|about|sign-in).+)']
+  matcher: ['/((?!api/auth|_next/static|_next/image|favicon.ico|about|sign-in|privacy-policy|terms-of-service|sitemap.xml|robots.txt).+)']
 };
frontend/public/robots.txt ADDED
@@ -0,0 +1,14 @@
+//robots.txt
+
+# Block all crawlers for /accounts
+User-agent: *
+Disallow: /accounts
+
+# Allow all crawlers
+User-agent: *
+Allow: /
+
+# Delay between requests
+Crawl-delay: 5
+
+Sitemap: https://smart-retrieval-demo.vercel.app/sitemap.xml
frontend/public/sitemap.xml ADDED
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset
+  xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9
+    http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
+
+  <url>
+    <loc>https://smart-retrieval-demo.vercel.app/</loc>
+    <lastmod>2024-01-30T06:02:15+00:00</lastmod>
+    <priority>1.00</priority>
+  </url>
+  <url>
+    <loc>https://smart-retrieval-demo.vercel.app/about</loc>
+    <lastmod>2024-01-30T06:02:15+00:00</lastmod>
+    <priority>0.80</priority>
+  </url>
+
+
+</urlset>