init: foodviz project files
Browse files- .github/workflows/ploomber-cloud.yaml +40 -0
- .gitignore +162 -0
- .pre-commit-config.yaml +83 -0
- .streamlit/config.toml +62 -0
- Dockerfile +36 -0
- app.py +63 -0
- foodviz/__init__.py +0 -0
- foodviz/agents/__init__.py +0 -0
- foodviz/agents/llm.py +117 -0
- foodviz/agents/parse_data.py +88 -0
- foodviz/data/.~lock.food_local_langs_processed.csv# +0 -1
- foodviz/pages/__init__.py +6 -0
- foodviz/pages/comparer.py +82 -0
- foodviz/pages/frontend/bubble.html +180 -0
- foodviz/pages/frontend/bubble.js +112 -0
- foodviz/pages/home.py +72 -0
- foodviz/pages/search.py +104 -0
- foodviz/pages/viewer.py +334 -0
- foodviz/utils/__init__.py +19 -0
- foodviz/utils/color_slider.py +91 -0
- foodviz/utils/config.py +78 -0
- foodviz/utils/helpers.py +70 -0
- foodviz/utils/state.py +100 -0
- pyproject.toml +29 -0
- requirements.dev.txt +27 -0
- requirements.txt +80 -0
.github/workflows/ploomber-cloud.yaml
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Ploomber Cloud
|
2 |
+
|
3 |
+
on:
|
4 |
+
schedule:
|
5 |
+
- cron: '0 1 * * *'
|
6 |
+
|
7 |
+
jobs:
|
8 |
+
deploy-to-ploomber-cloud:
|
9 |
+
runs-on: ubuntu-latest
|
10 |
+
environment: production
|
11 |
+
|
12 |
+
steps:
|
13 |
+
- uses: actions/checkout@v2
|
14 |
+
- name: Set up Python ${{ matrix.python-version }}
|
15 |
+
uses: actions/setup-python@v2
|
16 |
+
with:
|
17 |
+
python-version: '3.10'
|
18 |
+
|
19 |
+
- name: Install dependencies
|
20 |
+
run: |
|
21 |
+
python -m pip install --upgrade pip
|
22 |
+
pip install ploomber-cloud
|
23 |
+
|
24 |
+
- name: Create environment file
|
25 |
+
shell: bash
|
26 |
+
env:
|
27 |
+
SECRET_URL: ${{ secrets.SUPABASE_URL }}
|
28 |
+
SECRET_KEY: ${{ secrets.SUPABASE_KEY }}
|
29 |
+
run: |
|
30 |
+
printf "SUPABASE_URL=\"$SECRET_URL\"\nSUPABASE_KEY=\"$SECRET_KEY\"" > ./app/.env
|
31 |
+
|
32 |
+
- name: Deploy
|
33 |
+
env:
|
34 |
+
PLOOMBER_CLOUD_KEY: ${{ secrets.PLOOMBER_CLOUD_KEY }}
|
35 |
+
run: |
|
36 |
+
cd app/
|
37 |
+
cat .env
|
38 |
+
ploomber-cloud deploy --watch
|
39 |
+
# The --watch flag will print deployment status updates to the logs.
|
40 |
+
# To learn more, visit: https://docs.cloud.ploomber.io/en/latest/user-guide/github.html
|
.gitignore
ADDED
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Byte-compiled / optimized / DLL files
|
2 |
+
__pycache__/
|
3 |
+
*.py[cod]
|
4 |
+
*$py.class
|
5 |
+
|
6 |
+
app/.streamlit/secrets.toml
|
7 |
+
|
8 |
+
# C extensions
|
9 |
+
*.so
|
10 |
+
|
11 |
+
# Distribution / packaging
|
12 |
+
.Python
|
13 |
+
build/
|
14 |
+
develop-eggs/
|
15 |
+
dist/
|
16 |
+
downloads/
|
17 |
+
eggs/
|
18 |
+
.eggs/
|
19 |
+
lib/
|
20 |
+
lib64/
|
21 |
+
parts/
|
22 |
+
sdist/
|
23 |
+
var/
|
24 |
+
wheels/
|
25 |
+
share/python-wheels/
|
26 |
+
*.egg-info/
|
27 |
+
.installed.cfg
|
28 |
+
*.egg
|
29 |
+
MANIFEST
|
30 |
+
|
31 |
+
# PyInstaller
|
32 |
+
# Usually these files are written by a python script from a template
|
33 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
34 |
+
*.manifest
|
35 |
+
*.spec
|
36 |
+
|
37 |
+
# Installer logs
|
38 |
+
pip-log.txt
|
39 |
+
pip-delete-this-directory.txt
|
40 |
+
|
41 |
+
# Unit test / coverage reports
|
42 |
+
htmlcov/
|
43 |
+
.tox/
|
44 |
+
.nox/
|
45 |
+
.coverage
|
46 |
+
.coverage.*
|
47 |
+
.cache
|
48 |
+
nosetests.xml
|
49 |
+
coverage.xml
|
50 |
+
*.cover
|
51 |
+
*.py,cover
|
52 |
+
.hypothesis/
|
53 |
+
.pytest_cache/
|
54 |
+
cover/
|
55 |
+
|
56 |
+
# Translations
|
57 |
+
*.mo
|
58 |
+
*.pot
|
59 |
+
|
60 |
+
# Django stuff:
|
61 |
+
*.log
|
62 |
+
local_settings.py
|
63 |
+
db.sqlite3
|
64 |
+
db.sqlite3-journal
|
65 |
+
|
66 |
+
# Flask stuff:
|
67 |
+
instance/
|
68 |
+
.webassets-cache
|
69 |
+
|
70 |
+
# Scrapy stuff:
|
71 |
+
.scrapy
|
72 |
+
|
73 |
+
# Sphinx documentation
|
74 |
+
docs/_build/
|
75 |
+
|
76 |
+
# PyBuilder
|
77 |
+
.pybuilder/
|
78 |
+
target/
|
79 |
+
|
80 |
+
# Jupyter Notebook
|
81 |
+
.ipynb_checkpoints
|
82 |
+
|
83 |
+
# IPython
|
84 |
+
profile_default/
|
85 |
+
ipython_config.py
|
86 |
+
|
87 |
+
# pyenv
|
88 |
+
# For a library or package, you might want to ignore these files since the code is
|
89 |
+
# intended to run in multiple environments; otherwise, check them in:
|
90 |
+
# .python-version
|
91 |
+
|
92 |
+
# pipenv
|
93 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
94 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
95 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
96 |
+
# install all needed dependencies.
|
97 |
+
#Pipfile.lock
|
98 |
+
|
99 |
+
# poetry
|
100 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
101 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
102 |
+
# commonly ignored for libraries.
|
103 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
104 |
+
poetry.lock
|
105 |
+
|
106 |
+
# pdm
|
107 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
108 |
+
#pdm.lock
|
109 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
110 |
+
# in version control.
|
111 |
+
# https://pdm.fming.dev/#use-with-ide
|
112 |
+
.pdm.toml
|
113 |
+
|
114 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
115 |
+
__pypackages__/
|
116 |
+
|
117 |
+
# Celery stuff
|
118 |
+
celerybeat-schedule
|
119 |
+
celerybeat.pid
|
120 |
+
|
121 |
+
# SageMath parsed files
|
122 |
+
*.sage.py
|
123 |
+
|
124 |
+
# Environments
|
125 |
+
.env
|
126 |
+
.venv
|
127 |
+
env/
|
128 |
+
venv/
|
129 |
+
ENV/
|
130 |
+
env.bak/
|
131 |
+
venv.bak/
|
132 |
+
|
133 |
+
# Spyder project settings
|
134 |
+
.spyderproject
|
135 |
+
.spyproject
|
136 |
+
|
137 |
+
# Rope project settings
|
138 |
+
.ropeproject
|
139 |
+
|
140 |
+
# mkdocs documentation
|
141 |
+
/site
|
142 |
+
|
143 |
+
# mypy
|
144 |
+
.mypy_cache/
|
145 |
+
.dmypy.json
|
146 |
+
dmypy.json
|
147 |
+
|
148 |
+
# Pyre type checker
|
149 |
+
.pyre/
|
150 |
+
|
151 |
+
# pytype static type analyzer
|
152 |
+
.pytype/
|
153 |
+
|
154 |
+
# Cython debug symbols
|
155 |
+
cython_debug/
|
156 |
+
|
157 |
+
# PyCharm
|
158 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
159 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
160 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
161 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
162 |
+
.idea/
|
.pre-commit-config.yaml
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
ci:
|
2 |
+
skip: [python-safety-dependencies-check]
|
3 |
+
|
4 |
+
repos:
|
5 |
+
- repo: https://github.com/pre-commit/pre-commit-hooks
|
6 |
+
rev: v4.5.0
|
7 |
+
hooks:
|
8 |
+
- id: trailing-whitespace
|
9 |
+
- id: end-of-file-fixer
|
10 |
+
- id: check-yaml
|
11 |
+
- id: requirements-txt-fixer
|
12 |
+
|
13 |
+
- repo: https://github.com/PyCQA/autoflake
|
14 |
+
rev: v2.3.1
|
15 |
+
hooks:
|
16 |
+
- id: autoflake
|
17 |
+
verbose: true
|
18 |
+
|
19 |
+
- repo: https://github.com/miki725/importanize/
|
20 |
+
rev: '0.7'
|
21 |
+
hooks:
|
22 |
+
- id: importanize
|
23 |
+
language_version: python3
|
24 |
+
language: python
|
25 |
+
args: [ --verbose ]
|
26 |
+
|
27 |
+
- repo: https://github.com/pycqa/flake8
|
28 |
+
rev: 7.0.0
|
29 |
+
hooks:
|
30 |
+
- id: flake8
|
31 |
+
exclude: ^.*__init__.py$
|
32 |
+
stages: [commit, push, manual]
|
33 |
+
args: [--max-line-length=120]
|
34 |
+
|
35 |
+
- repo: https://github.com/hhatto/autopep8
|
36 |
+
rev: v2.0.4
|
37 |
+
hooks:
|
38 |
+
- id: autopep8
|
39 |
+
args: [--in-place, --aggressive, --aggressive, --max-line-length=120, --verbose]
|
40 |
+
|
41 |
+
- repo: https://github.com/PyCQA/docformatter
|
42 |
+
rev: v1.7.5
|
43 |
+
hooks:
|
44 |
+
- id: docformatter
|
45 |
+
|
46 |
+
- repo: https://github.com/thibaudcolas/curlylint
|
47 |
+
rev: v0.13.1
|
48 |
+
hooks:
|
49 |
+
- id: curlylint
|
50 |
+
|
51 |
+
# - repo: https://github.com/Lucas-C/pre-commit-hooks-safety
|
52 |
+
# rev: v1.3.1
|
53 |
+
# hooks:
|
54 |
+
# - id: python-safety-dependencies-check
|
55 |
+
# files: requirements
|
56 |
+
|
57 |
+
- repo: https://github.com/PyCQA/bandit
|
58 |
+
rev: '1.7.8'
|
59 |
+
hooks:
|
60 |
+
- id: bandit
|
61 |
+
|
62 |
+
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
63 |
+
rev: v0.3.4
|
64 |
+
hooks:
|
65 |
+
- id: ruff
|
66 |
+
args:
|
67 |
+
- --fix
|
68 |
+
- --line-length=120
|
69 |
+
|
70 |
+
- repo: https://github.com/psf/black
|
71 |
+
rev: 23.11.0
|
72 |
+
hooks:
|
73 |
+
- id: black
|
74 |
+
language_version: python3
|
75 |
+
args: [--line-length=120]
|
76 |
+
|
77 |
+
- repo: https://github.com/python-poetry/poetry
|
78 |
+
rev: '1.8.2'
|
79 |
+
hooks:
|
80 |
+
- id: poetry-check
|
81 |
+
- id: poetry-lock
|
82 |
+
- id: poetry-export
|
83 |
+
args: ["-C", "./app", "--without-hashes", "--without=dev", "-f", "requirements.txt", "-o", "./app/requirements.txt"]
|
.streamlit/config.toml
ADDED
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[theme]
|
2 |
+
#primaryColor="#f21111"
|
3 |
+
#backgroundColor="#0e1117"
|
4 |
+
#secondaryBackgroundColor="#31333F"
|
5 |
+
#textColor="#fafafa"
|
6 |
+
# -- dark chocolate --
|
7 |
+
#primaryColor="#2c3e50"
|
8 |
+
#backgroundColor="#1a1a1a"
|
9 |
+
#secondaryBackgroundColor="#333333"
|
10 |
+
#textColor="#dcdcdc"
|
11 |
+
# -- midnight feast --
|
12 |
+
#primaryColor="#2c3e50"
|
13 |
+
#backgroundColor="#1a1a1a"
|
14 |
+
#secondaryBackgroundColor="#333333"
|
15 |
+
#textColor="#dcdcdc"
|
16 |
+
# -- umami delight --
|
17 |
+
#primaryColor="#556b2f"
|
18 |
+
#backgroundColor="#1a1a1a"
|
19 |
+
#secondaryBackgroundColor="#4c721d"
|
20 |
+
#textColor="#f0e68c"
|
21 |
+
# -- spicy wasabi --
|
22 |
+
#primaryColor="#33CC99"
|
23 |
+
#backgroundColor="#222222"
|
24 |
+
#secondaryBackgroundColor="#333333"
|
25 |
+
#textColor="#f0f0f0"
|
26 |
+
# -- night sky --
|
27 |
+
#primaryColor="#a62c81"
|
28 |
+
#backgroundColor="#191919"
|
29 |
+
#secondaryBackgroundColor="#222222"
|
30 |
+
#textColor="#dddddd"
|
31 |
+
# -- modern minimalist --
|
32 |
+
#primaryColor="#00bcd4"
|
33 |
+
#backgroundColor="#212121"
|
34 |
+
#secondaryBackgroundColor="#303030"
|
35 |
+
#textColor="#e0e0e0"
|
36 |
+
# -- cocoa indulgence --
|
37 |
+
#primaryColor="#8b4513"
|
38 |
+
#backgroundColor="#1c1c1c"
|
39 |
+
#secondaryBackgroundColor="#3d2c1f"
|
40 |
+
#textColor="#ffe4b5"
|
41 |
+
# -- spicy heat --
|
42 |
+
#primaryColor="#d9534f"
|
43 |
+
#backgroundColor="#000000"
|
44 |
+
#secondaryBackgroundColor="#8b0000"
|
45 |
+
#textColor="#b0c4de"
|
46 |
+
#font="monospace"
|
47 |
+
# -- fit blue --
|
48 |
+
primaryColor="#0066ee"
|
49 |
+
backgroundColor="#11131C"
|
50 |
+
secondaryBackgroundColor="#373f51"
|
51 |
+
textColor="#fdfeff"
|
52 |
+
# -- grey and teal
|
53 |
+
#primaryColor="#049a77"
|
54 |
+
#backgroundColor="#000000"
|
55 |
+
#secondaryBackgroundColor="#2d3446"
|
56 |
+
#textColor="#e5e5e5"
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
[client]
|
61 |
+
toolbarMode="minimal"
|
62 |
+
showSidebarNavigation=false
|
Dockerfile
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
FROM ubuntu:22.04
|
2 |
+
LABEL authors="p1utoze"
|
3 |
+
|
4 |
+
# Install dependencies
|
5 |
+
RUN apt-get update && apt-get install -y \
|
6 |
+
python3 \
|
7 |
+
python3-pip \
|
8 |
+
python3-dev \
|
9 |
+
build-essential \
|
10 |
+
libssl-dev \
|
11 |
+
libffi-dev \
|
12 |
+
python3-setuptools \
|
13 |
+
python3-venv \
|
14 |
+
git \
|
15 |
+
&& apt-get clean
|
16 |
+
|
17 |
+
WORKDIR /app
|
18 |
+
|
19 |
+
# Install python packages
|
20 |
+
COPY requirements.txt /app/requirements.txt
|
21 |
+
RUN #python3 -m pip install torch --index-url https://download.pytorch.org/whl/cpu
|
22 |
+
RUN python3 -m pip install --upgrade pip
|
23 |
+
RUN python3 -m pip install -r requirements.txt
|
24 |
+
|
25 |
+
# Copy the Entry Point script
|
26 |
+
COPY app.py /app/app.py
|
27 |
+
|
28 |
+
COPY foodviz/pages/ /app/pages
|
29 |
+
COPY foodviz/ /app/foodviz/
|
30 |
+
COPY .streamlit /app/.streamlit
|
31 |
+
|
32 |
+
# Expose the port
|
33 |
+
EXPOSE 8501
|
34 |
+
|
35 |
+
# Command to run on container start
|
36 |
+
CMD ["streamlit", "run", "app.py", "--server.port=8501"]
|
app.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import streamlit_antd_components as sac
|
3 |
+
from dotenv import load_dotenv
|
4 |
+
# from foodviz.pages.comparer import run as comparer_run
|
5 |
+
from foodviz.pages import home_run, comparer_run, search_run, viewer_run
|
6 |
+
# from foodviz.pages.viewer import run as viewer_run
|
7 |
+
# from foodviz.pages.search import run as search_run
|
8 |
+
from foodviz.utils import SupabaseConnection
|
9 |
+
|
10 |
+
# Loads the environment variables
|
11 |
+
load_dotenv()
|
12 |
+
|
13 |
+
pages = {
|
14 |
+
"Home": [home_run, 0, "house"],
|
15 |
+
"View": [viewer_run, 1, "eye"],
|
16 |
+
"Compare": [comparer_run, 2, "bar-chart-steps"],
|
17 |
+
"Search": [search_run, 3, "search"],
|
18 |
+
}
|
19 |
+
|
20 |
+
|
21 |
+
def menu_callback():
|
22 |
+
st.session_state.page_index = pages[st.session_state.tab_item][1]
|
23 |
+
|
24 |
+
|
25 |
+
if __name__ == "__main__":
|
26 |
+
st.set_page_config(
|
27 |
+
page_title="IFCT Food Database",
|
28 |
+
page_icon="🍔",
|
29 |
+
layout="wide",
|
30 |
+
)
|
31 |
+
|
32 |
+
# Set the page index to Home.
|
33 |
+
if "page_index" not in st.session_state:
|
34 |
+
st.session_state.page_index = 0
|
35 |
+
|
36 |
+
# Initialize connection.
|
37 |
+
if "_conn" not in st.session_state:
|
38 |
+
st.session_state["_conn"] = st.connection("supabase", type=SupabaseConnection)
|
39 |
+
|
40 |
+
st.markdown(
|
41 |
+
"""
|
42 |
+
<style>
|
43 |
+
.appview-container .main .block-container{{
|
44 |
+
padding-top: {padding_top}rem; }}
|
45 |
+
</style>
|
46 |
+
""".format(
|
47 |
+
padding_top=1.5
|
48 |
+
),
|
49 |
+
unsafe_allow_html=True,
|
50 |
+
)
|
51 |
+
sac.tabs(
|
52 |
+
[sac.TabsItem(label=i, icon=pages[i][2]) for i in pages.keys()],
|
53 |
+
variant="outline",
|
54 |
+
index=st.session_state.page_index,
|
55 |
+
use_container_width=True,
|
56 |
+
on_change=menu_callback,
|
57 |
+
key="tab_item",
|
58 |
+
)
|
59 |
+
try:
|
60 |
+
pages[st.session_state.tab_item][0]()
|
61 |
+
except Exception as e:
|
62 |
+
st.error(f"Page '{st.session_state.tab_item}' not found.")
|
63 |
+
st.error(f"Error: {e}")
|
foodviz/__init__.py
ADDED
File without changes
|
foodviz/agents/__init__.py
ADDED
File without changes
|
foodviz/agents/llm.py
ADDED
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from typing import List, Dict
|
3 |
+
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
|
4 |
+
from llama_index.core.llms import HuggingFaceLLM
|
5 |
+
from llama_index.core.tools import QueryEngineTool, ToolMetadata
|
6 |
+
from llama_index.core.agent import ReActAgent
|
7 |
+
from llama_index.core.callbacks import CallbackManager, LlamaDebugHandler
|
8 |
+
from duckduckgo_search import DDGS
|
9 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
|
10 |
+
import torch
|
11 |
+
|
12 |
+
# Initialize LlamaIndex
|
13 |
+
llama_debug = LlamaDebugHandler(print_trace_on_end=True)
|
14 |
+
callback_manager = CallbackManager([llama_debug])
|
15 |
+
|
16 |
+
# Load Hugging Face model with 4-bit quantization
|
17 |
+
model_name = "meta-llama/Llama-2-7b-chat-hf" # You can change this to any compatible model
|
18 |
+
bnb_config = BitsAndBytesConfig(
|
19 |
+
load_in_4bit=True,
|
20 |
+
bnb_4bit_use_double_quant=True,
|
21 |
+
bnb_4bit_quant_type="nf4",
|
22 |
+
bnb_4bit_compute_dtype=torch.bfloat16
|
23 |
+
)
|
24 |
+
|
25 |
+
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
26 |
+
model = AutoModelForCausalLM.from_pretrained(
|
27 |
+
model_name,
|
28 |
+
quantization_config=bnb_config,
|
29 |
+
device_map="auto",
|
30 |
+
)
|
31 |
+
|
32 |
+
# Create HuggingFaceLLM
|
33 |
+
llm = HuggingFaceLLM(
|
34 |
+
context_window=4096,
|
35 |
+
max_new_tokens=256,
|
36 |
+
generate_kwargs={"temperature": 0.7, "do_sample": False},
|
37 |
+
tokenizer=tokenizer,
|
38 |
+
model=model,
|
39 |
+
)
|
40 |
+
|
41 |
+
# Set up service context
|
42 |
+
service_context = ServiceContext.from_defaults(llm=llm, callback_manager=callback_manager)
|
43 |
+
|
44 |
+
# Load documents and create index
|
45 |
+
documents = SimpleDirectoryReader("path/to/your/documents").load_data()
|
46 |
+
index = VectorStoreIndex.from_documents(documents, service_context=service_context)
|
47 |
+
|
48 |
+
# Create query engine
|
49 |
+
query_engine = index.as_query_engine()
|
50 |
+
|
51 |
+
|
52 |
+
# DuckDuckGo search tool
|
53 |
+
def web_search(query: str) -> List[Dict[str, str]]:
|
54 |
+
with DDGS() as ddgs:
|
55 |
+
results = list(ddgs.text(query, max_results=3))
|
56 |
+
return results
|
57 |
+
|
58 |
+
|
59 |
+
# Custom tool for web search
|
60 |
+
class WebSearchTool:
|
61 |
+
def __init__(self):
|
62 |
+
self.metadata = ToolMetadata(
|
63 |
+
name="web_search",
|
64 |
+
description="Useful for searching the web for current information"
|
65 |
+
)
|
66 |
+
|
67 |
+
def __call__(self, input_text: str) -> str:
|
68 |
+
results = web_search(input_text)
|
69 |
+
return str(results)
|
70 |
+
|
71 |
+
|
72 |
+
# Create tools
|
73 |
+
query_engine_tool = QueryEngineTool(
|
74 |
+
query_engine=query_engine,
|
75 |
+
metadata=ToolMetadata(
|
76 |
+
name="vector_index",
|
77 |
+
description="Useful for answering questions about the documents in the vector index"
|
78 |
+
)
|
79 |
+
)
|
80 |
+
|
81 |
+
web_search_tool = WebSearchTool()
|
82 |
+
|
83 |
+
# Create ReAct agent
|
84 |
+
agent = ReActAgent.from_tools([query_engine_tool, web_search_tool], llm=llm, verbose=True)
|
85 |
+
|
86 |
+
|
87 |
+
def generate_response(query: str) -> str:
|
88 |
+
# Get response from the agent
|
89 |
+
response = agent.chat(query)
|
90 |
+
|
91 |
+
# Extract sources from the vector index query
|
92 |
+
vector_sources = [node.node.metadata for node in response.source_nodes]
|
93 |
+
|
94 |
+
# Extract web search results
|
95 |
+
web_sources = []
|
96 |
+
for tool_use in response.tool_usage:
|
97 |
+
if tool_use.tool_name == "web_search":
|
98 |
+
web_sources = eval(tool_use.output)
|
99 |
+
|
100 |
+
# Combine sources
|
101 |
+
all_sources = vector_sources + web_sources
|
102 |
+
|
103 |
+
# Format the response with sources
|
104 |
+
formatted_response = f"Answer: {response.response}\n\nSources:\n"
|
105 |
+
for idx, source in enumerate(all_sources, 1):
|
106 |
+
if isinstance(source, dict):
|
107 |
+
formatted_response += f"{idx}. {source.get('title', 'N/A')} - {source.get('href', 'N/A')}\n"
|
108 |
+
else:
|
109 |
+
formatted_response += f"{idx}. {source}\n"
|
110 |
+
|
111 |
+
return formatted_response
|
112 |
+
|
113 |
+
|
114 |
+
# Example usage
|
115 |
+
query = "What are the latest advancements in renewable energy?"
|
116 |
+
result = generate_response(query)
|
117 |
+
print(result)
|
foodviz/agents/parse_data.py
ADDED
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import re
|
3 |
+
|
4 |
+
import faiss
|
5 |
+
import pandas as pd
|
6 |
+
from dotenv import load_dotenv
|
7 |
+
from llama_index.core import VectorStoreIndex, StorageContext
|
8 |
+
from llama_index.core.node_parser import SimpleNodeParser
|
9 |
+
from llama_index.core.schema import Document, MetadataMode
|
10 |
+
from llama_index.embeddings.voyageai import VoyageEmbedding
|
11 |
+
from llama_index.vector_stores.faiss import FaissVectorStore
|
12 |
+
|
13 |
+
from foodviz.utils.config import PROJECT_ROOT
|
14 |
+
|
15 |
+
load_dotenv()
|
16 |
+
|
17 |
+
d = 1536
|
18 |
+
faiss_index = faiss.IndexFlatL2(d)
|
19 |
+
|
20 |
+
# Load the VoyageEmbedding model
|
21 |
+
embed_model = VoyageEmbedding(model_name="voyage-large-2", voyage_api_key=os.environ["VOYAGE_API_KEY"])
|
22 |
+
|
23 |
+
|
24 |
+
# Load and preprocess the data
|
25 |
+
def preprocess_and_vectorize_data(file_path, data_store_path=None):
|
26 |
+
df = process_data(file_path)
|
27 |
+
|
28 |
+
# Combine all information into a single string for each item
|
29 |
+
# print(df.head())
|
30 |
+
|
31 |
+
# Create documents for LlamaIndex
|
32 |
+
documents = []
|
33 |
+
for _, row in df.iterrows():
|
34 |
+
document = Document(
|
35 |
+
text=row['local_name'],
|
36 |
+
metadata={"name": row['name'], "scientific_name": row['scie'], "tags": row['tags'], "lang": row['full_lang_name']},
|
37 |
+
metadata_seperator="::",
|
38 |
+
metadata_template="{key}=>{value}",
|
39 |
+
text_template="Metadata: {metadata_str}\n-----\nContent: {content}",
|
40 |
+
)
|
41 |
+
documents.append(document)
|
42 |
+
|
43 |
+
print(documents[5], documents[5].metadata, documents[5].get_content(metadata_mode=MetadataMode.EMBED), sep='\n')
|
44 |
+
|
45 |
+
vector_store = FaissVectorStore(faiss_index=faiss_index)
|
46 |
+
# create storage context using default stores
|
47 |
+
storage_context = StorageContext.from_defaults(
|
48 |
+
vector_store=vector_store,
|
49 |
+
)
|
50 |
+
storage_context.persist(data_store_path)
|
51 |
+
|
52 |
+
# Create nodes from documents
|
53 |
+
parser = SimpleNodeParser.from_defaults(separator=";") # Default separator is "\n"
|
54 |
+
nodes = parser.get_nodes_from_documents(documents)
|
55 |
+
index = VectorStoreIndex(embed_model=embed_model, nodes=nodes, storage_context=storage_context, show_progress=True)
|
56 |
+
index.storage_context.persist(data_store_path)
|
57 |
+
return index
|
58 |
+
|
59 |
+
|
60 |
+
|
61 |
+
|
62 |
+
def process_data(file_path):
|
63 |
+
df = pd.read_csv(file_path)
|
64 |
+
langs = pd.read_csv(PROJECT_ROOT / "data" / "languages.csv")
|
65 |
+
df["lang_names"] = df["lang"].apply(lambda x: str(x).split("; "))
|
66 |
+
df = df.explode("lang_names").reset_index(drop=True)
|
67 |
+
df["lang_names"] = df["lang_names"].apply(lambda x: x.strip())
|
68 |
+
df["lang_names"] = df["lang_names"].astype(str)
|
69 |
+
df["lang"] = df["lang"].astype(str)
|
70 |
+
df["abbr"] = df["lang_names"].apply(lambda x: re.split(r"(?<=\.)\s", x)[0])
|
71 |
+
df["local_name"] = df["lang_names"].str.extract(r"\. (.*)")
|
72 |
+
df2 = pd.merge(df, langs, left_on="abbr", right_on="abbr", how="left")
|
73 |
+
df2 = df2.rename(columns={"lang_y": "full_lang_name"})
|
74 |
+
df2.drop(columns=["abbr", "id"], inplace=True)
|
75 |
+
# print(df2[["name", "local_name", "full_lang_name"]].head(15))
|
76 |
+
return df2
|
77 |
+
|
78 |
+
if __name__ == "__main__":
|
79 |
+
# process_data(PROJECT_ROOT / "data" / "food_local_langs.csv")
|
80 |
+
data_path = "../data/context_storage"
|
81 |
+
# index = preprocess_and_vectorize_data(file_path=PROJECT_ROOT / "data" / "food_local_langs.csv", data_store_path=data_path)
|
82 |
+
index = load_retriever(data_path)
|
83 |
+
retriever = index.as_retriever(similarity_top_k=5)
|
84 |
+
nodes = retriever.retrieve("dunglina dakadi")
|
85 |
+
for node in nodes:
|
86 |
+
print(node.text, node.metadata, node.score)
|
87 |
+
|
88 |
+
|
foodviz/data/.~lock.food_local_langs_processed.csv#
DELETED
@@ -1 +0,0 @@
|
|
1 |
-
,p1utoze,uwuntu,20.07.2024 01:53,file:///home/p1utoze/.config/libreoffice/4;
|
|
|
|
foodviz/pages/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .home import run as home_run
|
2 |
+
from .comparer import run as comparer_run
|
3 |
+
from .viewer import run as viewer_run
|
4 |
+
from .search import run as search_run
|
5 |
+
|
6 |
+
__all__ = ["home_run", "comparer_run", "viewer_run", "search_run"]
|
foodviz/pages/comparer.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import streamlit as st
|
3 |
+
from foodviz.utils import prepare_unit_data
|
4 |
+
from foodviz.utils.config import COLOR_MAP, PROJECT_ROOT
|
5 |
+
|
6 |
+
|
7 |
+
def run():
|
8 |
+
_sess_state = st.session_state
|
9 |
+
|
10 |
+
if "units_data" not in _sess_state:
|
11 |
+
path = PROJECT_ROOT / "data" / "units_with_e.csv"
|
12 |
+
_sess_state["units_df"] = prepare_unit_data(path, "")
|
13 |
+
if "tooltips" not in _sess_state:
|
14 |
+
_sess_state["tooltips"] = {
|
15 |
+
"mg": "Milligrams",
|
16 |
+
"g": "Grams",
|
17 |
+
"kg": "Kilograms",
|
18 |
+
"ug": "Micrograms",
|
19 |
+
"kJ": "Kilojoules",
|
20 |
+
}
|
21 |
+
|
22 |
+
def highlight_category(val, df):
|
23 |
+
val = df.loc[df["name"] == val, "grup"].values[0].upper()
|
24 |
+
return f"background-color: {COLOR_MAP[val]}"
|
25 |
+
|
26 |
+
def get_food_code(extra_cols=None) -> list:
|
27 |
+
if extra_cols is None:
|
28 |
+
extra_cols = []
|
29 |
+
ss = _sess_state["units_df"].loc[
|
30 |
+
(_sess_state["units_df"]["name"].isin(_sess_state["nutrient"])), ["code"] + extra_cols]
|
31 |
+
return ss[~ss.code.str.endswith("_e")].values.T.tolist()
|
32 |
+
|
33 |
+
@st.cache_data(ttl=600)
|
34 |
+
def get_nutrient_column(table: str, return_columns: str = "*", as_df: bool = True):
|
35 |
+
return_columns = f"name, {return_columns}"
|
36 |
+
res = _sess_state["_conn"].table(table).select(return_columns).execute()
|
37 |
+
if as_df:
|
38 |
+
return pd.DataFrame(res.data)
|
39 |
+
return res.data
|
40 |
+
|
41 |
+
st.header("📊 Nutrient Comparison Table Chart")
|
42 |
+
|
43 |
+
st.multiselect(
|
44 |
+
"Select Nutrient",
|
45 |
+
_sess_state["units_df"].loc[_sess_state["units_df"]["type"].isin(["mass", "energy"]), "name"].unique(),
|
46 |
+
key="nutrient",
|
47 |
+
max_selections=10,
|
48 |
+
)
|
49 |
+
|
50 |
+
st.empty()
|
51 |
+
if "nutrient" in _sess_state and _sess_state["nutrient"]:
|
52 |
+
cols = get_food_code(["name"])
|
53 |
+
cols, names = cols[0], cols[1]
|
54 |
+
cols.append("grup")
|
55 |
+
response = get_nutrient_column("food_ifct", return_columns=",".join(cols))
|
56 |
+
response.set_index("name", inplace=True)
|
57 |
+
config = {}
|
58 |
+
cols.remove("grup")
|
59 |
+
units = _sess_state["units_df"].loc[_sess_state["units_df"]["code"].isin(cols), "unit"].tolist()
|
60 |
+
factors = _sess_state["units_df"].loc[_sess_state["units_df"]["code"].isin(cols), "factor"].tolist()
|
61 |
+
for i in range(len(cols)):
|
62 |
+
response[cols[i]] = response[cols[i]] * factors[i]
|
63 |
+
config[cols[i]] = st.column_config.ProgressColumn(
|
64 |
+
f"{names[i].upper()}",
|
65 |
+
format=f"%.2f {units[i]}",
|
66 |
+
help=f"Values in {_sess_state['tooltips'][units[i]]}",
|
67 |
+
min_value=float(response[cols[i]].min()),
|
68 |
+
max_value=float(response[cols[i]].max()),
|
69 |
+
)
|
70 |
+
|
71 |
+
response.reset_index(inplace=True)
|
72 |
+
color_df = response[["name", "grup"]]
|
73 |
+
response.drop("grup", axis=1, inplace=True)
|
74 |
+
styled_df = response.style.map(highlight_category, df=color_df, subset=["name"])
|
75 |
+
st.dataframe(styled_df, column_config=config, use_container_width=True, height=500, hide_index=True)
|
76 |
+
|
77 |
+
with st.expander("Color Map Reference"):
|
78 |
+
text = []
|
79 |
+
for key, value in COLOR_MAP.items():
|
80 |
+
text.append(f"<span style='color:{value}'>{key}</span>")
|
81 |
+
|
82 |
+
st.markdown(" | ".join(text), unsafe_allow_html=True)
|
foodviz/pages/frontend/bubble.html
ADDED
@@ -0,0 +1,180 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>D3.js Bubble Chart with Drag and Collision</title>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/7.8.5/d3.min.js"></script>
    <style>
        body {
            font-family: Arial, sans-serif;
            display: flex;
            justify-content: center;
            align-items: center;
            height: 100vh;
            margin: 0;
            background-color: #373f51;
        }

        .tooltip {
            position: absolute;
            background-color: rgba(0, 0, 0, 0.7);
            color: white;
            padding: 5px 10px;
            border-radius: 5px;
            font-size: 12px;
            pointer-events: none;
            opacity: 0;
            transition: opacity 0.3s;
        }
        .bubble-label {
            /* font-size: 10px; */
            text-anchor: middle;
            dominant-baseline: middle;
            pointer-events: none;
        }
        svg {
            background-color: #11131C;
            box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
        }

        image {
            /* Round-crop the square food images so they sit inside the circles. */
            clip-path: inset(5% round 50%);
            position: relative;
            /* top: -50%;
            left: -50%; */
        }
    </style>
</head>
<body>
    <svg width="800" height="600"></svg>
    <div class="tooltip"></div>
    <script>
        // Bubble chart of food groups: bubble radius encodes the number of
        // items in each group; bubbles are draggable and show a tooltip.
        const svg = d3.select("svg");
        const width = +svg.attr("width");
        const height = +svg.attr("height");
        const tooltip = d3.select(".tooltip");

        // Fetch the food-groups CSV (columns: grup, items, url).
        async function getData() {
            try {
                return await d3.csv('https://raw.githubusercontent.com/p1utoze/FoodViz/refs/heads/main/src/data/food-types.csv');
            } catch (error) {
                console.error("Error loading CSV:", error);
                throw error;
            }
        }

        async function initializeVisualization() {
            const csvData = await getData();

            // Find min and max of item counts to normalize bubble sizes.
            const items = csvData.map(d => +d.items);
            const minItems = d3.min(items);
            const maxItems = d3.max(items);

            // Map the item count linearly onto a pixel radius.
            const radiusScale = d3.scaleLinear()
                .domain([minItems, maxItems])
                .range([25, 100]); // You can adjust these values to change the min and max bubble sizes

            const data = csvData.map((k) => ({
                id: k.grup,
                r: radiusScale(+k.items), // Use the scaled radius
                name: k.grup,
                url: k.url,
                items: +k.items, // Store the original items value
                x: Math.random() * width,
                y: Math.random() * height,
                vx: 1,
                vy: 1,
            }));

            console.log("Processed data:", data);

            // Color scale
            const color = d3.scaleOrdinal(d3.schemeCategory10);

            // Create the simulation.
            // BUG FIX: forceManyBody().distanceMin expects a number, not an
            // accessor function; passing a function made the internal
            // distanceMin² NaN and silently disabled the clamp.
            const simulation = d3.forceSimulation(data)
                .force("x", d3.forceX(width / 2).strength(0.05))
                .force("y", d3.forceY(height / 2).strength(0.05))
                .force("charge", d3.forceManyBody().strength(-10).distanceMin(2))
                .force("collide", d3.forceCollide().radius(d => d.r + 1).iterations(2));

            // Create one <g> per bubble, wired for drag and tooltip events.
            const bubbles = svg.selectAll("g")
                .data(data)
                .enter().append("g")
                .call(d3.drag()
                    .on("start", dragstarted)
                    .on("drag", dragged)
                    .on("end", dragended))
                .on("mouseover", showTooltip)
                .on("mousemove", moveTooltip)
                .on("mouseout", hideTooltip);

            bubbles.append("circle")
                .attr("r", d => d.r)
                .attr("fill", (d, i) => color(i));

            bubbles.append("image")
                .attr("href", d => d.url) // Use the URL from the CSV data
                .attr("width", d => 2 * d.r)
                .attr("height", d => 2 * d.r);

            let draggedNode = null;

            // Update bubble positions on each tick of the simulation.
            simulation.on("tick", () => {
                bubbles.select("circle")
                    .attr("cx", d => d.x)
                    .attr("cy", d => d.y);
                // Images are positioned by their top-left corner, so offset by r.
                bubbles.select("image")
                    .attr("x", d => d.x - d.r)
                    .attr("y", d => d.y - d.r);
            });

            // Drag functions: fix the node under the cursor while dragging,
            // then release it back to the simulation.
            function dragstarted(event, d) {
                if (!event.active) simulation.alphaTarget(0.4).restart();
                d.fx = d.x;
                d.fy = d.y;
            }

            function dragged(event, d) {
                d.fx = event.x;
                d.fy = event.y;
            }

            function dragended(event, d) {
                if (!event.active) simulation.alphaTarget(0);
                d.fx = null;
                d.fy = null;
                draggedNode = null;
            }

            function showTooltip(event, d) {
                tooltip.style("opacity", 1)
                    .html(`Name: ${d.name}<br>Items: ${d.items}`);
                moveTooltip(event);
            }

            function moveTooltip(event) {
                tooltip.style("left", (event.pageX + 10) + "px")
                    .style("top", (event.pageY - 10) + "px");
            }

            function hideTooltip() {
                tooltip.style("opacity", 0);
            }
        }

        // Call the function to initialize the visualization
        initializeVisualization().catch(error => console.error('Failed to initialize visualization:', error));
    </script>
</body>
</html>
|
foodviz/pages/frontend/bubble.js
ADDED
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// Standalone bubble chart of food groups (local-CSV variant of bubble.html):
// bubble radius is the raw item count, with drag, collision and a tooltip.
const svg = d3.select("svg");
const width = +svg.attr("width");
const height = +svg.attr("height");
const tooltip = d3.select(".tooltip");

// Fetch the food-groups CSV (columns: grup, items, url).
async function getData() {
    try {
        return await d3.csv('../../data/food-types.csv');
    } catch (error) {
        console.error("Error loading CSV:", error);
        throw error;
    }
}

async function initializeVisualization() {
    const csvData = await getData();
    // NOTE(review): the raw item count is used directly as the pixel radius —
    // bubble.html uses a scaleLinear instead; confirm which is intended.
    const data = csvData.map((k) => ({
        id: k.grup, // Using 'grup' as id
        r: +k.items, // Convert to number
        name: k.grup,
        url: k.url,
        x: Math.random() * width,
        y: Math.random() * height,
        vx: 1,
        vy: 1,
    }));

    console.log("Processed data:", data);

    // Color scale
    const color = d3.scaleOrdinal(d3.schemeCategory10);

    // Create the simulation.
    // BUG FIX: forceManyBody().distanceMin expects a number, not an accessor
    // function; the previous `d => d.r + 2` made the internal distanceMin²
    // NaN and silently disabled the clamp.
    const simulation = d3.forceSimulation(data)
        .force("x", d3.forceX(width / 2).strength(0.05))
        .force("y", d3.forceY(height / 2).strength(0.05))
        .force("charge", d3.forceManyBody().strength(-10).distanceMin(2))
        .force("collide", d3.forceCollide().radius(d => d.r + 1).iterations(2));

    // Create one <g> per bubble, wired for drag and tooltip events.
    const bubbles = svg.selectAll("g")
        .data(data)
        .enter().append("g")
        .call(d3.drag()
            .on("start", dragstarted)
            .on("drag", dragged)
            .on("end", dragended))
        .on("mouseover", showTooltip)
        .on("mousemove", moveTooltip)
        .on("mouseout", hideTooltip);

    bubbles.append("circle")
        .attr("r", d => d.r)
        .attr("fill", (d, i) => color(i));

    // Images are drawn at 1.5×r so a ring of circle colour stays visible.
    bubbles.append("image")
        .attr("href", d => d.url) // Use the URL from the CSV data
        .attr("width", d => 1.5 * d.r)
        .attr("height", d => 1.5 * d.r);

    let draggedNode = null;

    // Update bubble positions on each tick of the simulation.
    simulation.on("tick", () => {
        bubbles.select("circle")
            .attr("cx", d => d.x)
            .attr("cy", d => d.y);
        // Centre the 1.5r-wide image on the node (offset by half its size).
        bubbles.select("image")
            .attr("x", d => d.x - d.r * 0.75)
            .attr("y", d => d.y - d.r * 0.75);
    });

    // Drag functions: fix the node under the cursor while dragging, then
    // release it back to the simulation.
    function dragstarted(event, d) {
        if (!event.active) simulation.alphaTarget(0.4).restart();
        d.fx = d.x;
        d.fy = d.y;
    }

    function dragged(event, d) {
        d.fx = event.x;
        d.fy = event.y;
    }

    function dragended(event, d) {
        if (!event.active) simulation.alphaTarget(0);
        d.fx = null;
        d.fy = null;
        draggedNode = null;
    }

    function showTooltip(event, d) {
        // Here d.r *is* the item count (no radius scale in this variant).
        tooltip.style("opacity", 1)
            .html(`Name: ${d.name}<br>Items: ${d.r}`);
        moveTooltip(event);
    }

    function moveTooltip(event) {
        tooltip.style("left", (event.pageX + 10) + "px")
            .style("top", (event.pageY - 10) + "px");
    }

    function hideTooltip() {
        tooltip.style("opacity", 0);
    }
}

// Call the function to initialize the visualization
initializeVisualization().catch(error => console.error('Failed to initialize visualization:', error));
|
foodviz/pages/home.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import streamlit.components.v1 as components
|
3 |
+
from pathlib import Path
|
4 |
+
from foodviz.utils.helpers import load_bubble_data
|
5 |
+
from foodviz.utils.config import PROJECT_ROOT
|
6 |
+
|
7 |
+
# Static D3 bubble-chart page embedded on the home page.
custom_chart = Path(__file__).parent / "frontend" / "bubble.html"
# CSV backing the bubble chart (food group -> item count).
# NOTE(review): not referenced below — the HTML page fetches its own copy; confirm.
chart_data = PROJECT_ROOT / "data" / "food-types.csv"
|
9 |
+
|
10 |
+
def run() -> None:
    """Render the FoodViz home/landing page.

    Layout: a hero section (title + embedded D3 bubble chart), followed by
    a tagline, an introduction to the IFCT database, and usage instructions.
    """
    # Placeholder so the whole hero section can be replaced/cleared as a unit.
    empty = st.empty()
    with empty.container():
        col1, col2 = st.columns([1, 1])
        col1.markdown(
            """
            <h1 style="font-size: 8em; font-weight: bold; color: #e63946;">FoodViz</h1>
            <h3 style="font-size: 4em; color: #a8dadc;">Interactive Indian Food Composition Explorer</h3>
            """,
            unsafe_allow_html=True,
        )
        # Load the static D3 bubble-chart page and embed it as raw HTML.
        with open(custom_chart.absolute(), "r") as f:
            canvas = f.read()

        with col2:
            # Dimensions match the <svg width="800" height="600"> in bubble.html.
            components.html(canvas, height=600, width=800)
        # col2.image(
        #     "https://img.freepik.com/premium-photo/different-types-meats-vegetables-fruits-lay-"
        #     "supermarkets-generative-ai_572887-4418.jpg",
        #     use_column_width=True,
        # )

    st.divider()

    # Tagline banner.
    with st.container():
        cphrase = "Are you a Gym freak or a Health enthusiast?"
        cphrase_sub = (
            "Either way, you are at the right place! Explore the food composition of your favourite indian meals."
        )
        st.markdown(
            f"""
            <h2 style="font-size: 3em; font-weight: bold; text-align: center; color: #457b9d">{cphrase}</h2>
            <h5 style="font-size: 2em; text-align: center;">{cphrase_sub}</h5>
            """,
            unsafe_allow_html=True,
        )
        st.divider()
        # Introduction to the IFCT data source.
        st.markdown(
            """
            I have created this web app to help you find all the food related sources.
            The explorer is based on the Indian Food Composition Table (IFCT) database which is a comprehensive database
            consisting of **:blue[542 food items]**
            My goal is to provide you with a simple and interactive way to discover and compare the nutritional contents
            and values of different food items.

            **:orange[The Indian Food Composition Table]** (IFCT) is a comprehensive database consisting of 542 food
            items. The database provides information on the proximate principles and dietary fibre content of Indian
            foods. The database is useful for nutritionists, dieticians, and health professionals for planning and
            assessing diets.
            """
        )
        st.divider()
        # Quick guide to the app's pages.
        st.subheader("How to use the explorer?")
        st.markdown(
            """
            The explorer is divided into two main sections:
            #### 👁‍🗨️ Viewer Page:
            - :blue[**Overview**]: This tab provides a brief overview of the food item categorized by food groups.
            - :blue[**Details**]: This tab provides detailed nutritional information about the selected food item's
            various food compositions.
            #### 📊 Comparison Page
            """
        )
|
foodviz/pages/search.py
ADDED
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from streamlit_card import card
|
3 |
+
|
4 |
+
from foodviz.utils.config import PROJECT_ROOT
|
5 |
+
from foodviz.utils.helpers import load_retriever, generate_color_range
|
6 |
+
|
7 |
+
# Endpoints (RGB) of the result-card colour gradient; intermediate shades are
# produced by generate_color_range() in run().
start_green = (0, 255, 0)
end_red = (255, 0, 0)
|
9 |
+
|
10 |
+
|
11 |
+
def run() -> None:
    """Render the local-language food search page.

    Retrieves semantic matches for the typed query and shows each match as a
    clickable card, colour-graded from green (best) to red (worst).

    Requires ``st.session_state["_conn"]`` (the Supabase connection) to have
    been created by the app entry point; bails out with an error otherwise.
    """
    # Hide st.metric trend arrows, prefix deltas with "±", and define the
    # gradient row style used by the result cards.
    css = """
    <style>
    [data-testid="stMetricDelta"] svg {
        display: none;
    }
    [data-testid="stMetricDelta"] > div::before {
        content:"±";
        font-weight: bold;
        font-size: 1.3rem;
    }
    .row-gradient {
        background: linear-gradient(to right, #ff7f50, #ffa500);
        padding: 10px;
        margin-bottom: 5px;
        color: white;
    }
    </style>
    """
    st.markdown(css, unsafe_allow_html=True)
    _sess_state = st.session_state

    if "_conn" not in _sess_state:
        st.error("Please connect to the database first.")
        return

    # Lazily build the vector retriever and the card colour palette once per session.
    if "retriever" not in _sess_state:
        data_store_path = PROJECT_ROOT / "data" / "context_storage"
        _sess_state.retriever = load_retriever(data_store_path)

    if "color_range" not in _sess_state:
        _sess_state.color_range = generate_color_range(start_green, end_red, 5)

    # Define the search function (cached so retyping the same query is free).
    @st.cache_data(ttl=600)
    def retrieve_top_matches_callback(query: str):
        return _sess_state.retriever.retrieve(query)

    # Streamlit app
    st.title("Optimized Local Language Food Search")

    # Use a container for dynamic updates
    search_container = st.empty()

    # Create a text input for search
    search_query = search_container.text_input(
        "Enter a food name in your local language",
    )
    if "idx" not in _sess_state:
        _sess_state.idx = -1  # -1 means "no card selected yet"

    # Perform search as user types
    st.write("Top matches for the search query:")
    if search_query:
        nodes = retrieve_top_matches_callback(search_query)
        node_len = len(nodes)
        container = st.empty()
        # Cards are rendered in reverse node order while colours use `i`,
        # so the best match gets the greenest shade.
        for i in range(node_len):
            idx = node_len - i - 1
            card(
                title=f"{nodes[idx].text}",
                text=f"Name: {nodes[idx].metadata['name']} | Scientific Name: {nodes[idx].metadata['scientific_name']} | Local Language: {nodes[idx].metadata['lang']} | Score: {nodes[idx].score}",
                key=f"card_{idx}",
                # BUG FIX: bind idx at definition time (idx=idx). A plain
                # `lambda:` captures the loop variable late, so every card
                # would have selected the final idx of the loop.
                on_click=lambda idx=idx: _sess_state.update({"idx": idx}),
                styles={
                    "card": {
                        "width": "100%",
                        "background-color": f"{_sess_state.color_range[i]}",
                        "height": "100px",
                        "border-radius": "15px",
                        "box-shadow": "0 0 10px rgba(0,0,0,0.5)",
                        "padding": "0px",
                        "margin": "0px"
                    }
                },
            )

    # TODO: Create a sidebar that displays some information about the selected node
    # st.write(f"Selected index: {_sess_state.idx}, Selected node: {nodes[_sess_state.idx].text}")
    # Add some information about the search engine
    st.sidebar.title("About")
    st.sidebar.info(
        "This optimized search engine allows you to find food items by their local language names. "
        "It searches through various Indian languages and returns the top matches "
        "for the common name and scientific name of the food item. "
        "The search updates as you type for a more interactive experience."
    )

    # # Display some statistics
    # st.sidebar.title("Statistics")
    # st.sidebar.write(f"Total number of unique local names: {len(name_dict)}")
|
foodviz/pages/viewer.py
ADDED
@@ -0,0 +1,334 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import streamlit as st
|
3 |
+
from plotly import express as px, graph_objects as go
|
4 |
+
from foodviz.utils import (
|
5 |
+
prepare_indian_geojson,
|
6 |
+
prepare_indian_languages,
|
7 |
+
prepare_unit_data,
|
8 |
+
)
|
9 |
+
from foodviz.utils.config import (
|
10 |
+
DB_FOOD_CODE_HEADER,
|
11 |
+
DB_FOOD_NAME_HEADER,
|
12 |
+
DB_FOOD_TAGS_HEADER,
|
13 |
+
DB_SCI_NAME_HEADER,
|
14 |
+
DB_TABLE_NAME,
|
15 |
+
PROJECT_ROOT,
|
16 |
+
)
|
17 |
+
from foodviz.utils.config import load_json
|
18 |
+
|
19 |
+
|
20 |
+
def run() -> None:
    """Render the IFCT food-database viewer page.

    Two tabs:
      * Overview — pick a food group and item; shows either a table of the
        whole group ("ALL") or a single item's image plus a choropleth of
        its regional (Indian-state) names.
      * Nutritional Details — metric cards for the composition category of
        the item selected in the Overview tab.

    Requires ``st.session_state["_conn"]`` (Supabase connection) to be set
    up by the app entry point before this page runs.
    """
    # Hide the default trend arrow on st.metric and prefix deltas with "±":
    # the delta here is a measurement tolerance, not a change over time.
    css = """
    <style>
    [data-testid="stMetricDelta"] svg {
        display: none;
    }
    [data-testid="stMetricDelta"] > div::before {
        content:"±";
        font-weight: bold;
        font-size: 1.3rem;
    }
    </style>
    """
    st.markdown(css, unsafe_allow_html=True)

    _sess_state = st.session_state

    # Reset the "showing a filtered selection" flag on every rerun so stale
    # views from the previous interaction are not redrawn.
    if "is_fil_row" in _sess_state:
        del st.session_state["is_fil_row"]

    # Lazily cache static lookup data in session state.
    if "gjson" not in _sess_state:
        _sess_state["gjson"] = prepare_indian_geojson()

    if "indian_states" not in _sess_state:
        _sess_state["indian_states"] = prepare_indian_languages()

    if "units_df" not in _sess_state:
        path = PROJECT_ROOT / "data" / "units_with_e.csv"
        _sess_state["units_df"] = prepare_unit_data(path)

    if "fg" not in _sess_state:
        _sess_state["fg"] = load_json()

    if "selected_code" not in _sess_state:
        _sess_state["selected_code"] = None

    # BUG FIX: the guard previously checked for the key "disable" while the
    # value stored is "disabled", so the flag was re-initialised to True on
    # every rerun instead of only once per session.
    if "disabled" not in _sess_state:
        _sess_state["disabled"] = True

    if "comp_name" not in _sess_state:
        _sess_state["comp_name"] = None

    @st.cache_data()
    def fetch_image(file_name: str) -> str:
        """Return the public URL of the item's image in the Supabase bucket."""
        url = _sess_state["_conn"].get_public_url("indian_food", f"{file_name}.jpg")
        return url

    @st.cache_data(ttl=600)
    def query_with_filter_like(table: str, column: str, value: str, return_columns: str = "*", as_df: bool = True):
        """SELECT rows where `column` LIKE '<value>…'.

        NOTE(review): the pattern uses "%25" (URL-encoded "%") — presumably a
        workaround for client-side escaping; confirm it matches as a plain
        "%" wildcard against the live API.
        """
        response = _sess_state["_conn"].table(table).select(return_columns).like(column, f"{value}%25").execute()
        if as_df:
            return pd.DataFrame(response.data)
        return response.data

    @st.cache_data(ttl=600)
    def query_with_filter_eq(table: str, column: str, value: str, return_columns: str = "*", as_df: bool = True):
        """SELECT rows where `column` equals `value`, optionally as a DataFrame."""
        response = _sess_state["_conn"].table(table).select(return_columns).eq(column, value).execute()
        if as_df:
            return pd.DataFrame(response.data)
        return response.data

    def tab1_box1_callback():
        # Changing the food group invalidates any previously chosen category.
        _sess_state["comp_name"] = None

    st.header("🚚 IFCT Food Database Overview")
    st.markdown("Choose the food group and food item from the dropdown to view the basic info of the food item. 👇")

    tab1, tab2 = st.tabs(["🍽 Overview", "🔢 Nutritional Details"])
    with tab1:
        with st.container(height=100):
            return_cols = f"{DB_FOOD_CODE_HEADER}, {DB_FOOD_NAME_HEADER}, {DB_SCI_NAME_HEADER}, {DB_FOOD_TAGS_HEADER}"
            col1, col2 = st.columns(2, gap="large")
            grp_name = col1.selectbox(
                "Choose a food group: ",
                _sess_state["fg"].keys(),
                index=None,
                placeholder="Select a food group",
                on_change=tab1_box1_callback,
            )
            if grp_name:
                # Food codes are prefixed by their group code, hence LIKE.
                fil_row = query_with_filter_like(
                    return_columns=return_cols,
                    table=DB_TABLE_NAME,
                    column=DB_FOOD_CODE_HEADER,
                    value=_sess_state["fg"][grp_name],
                )
                sel = col2.selectbox(
                    "Choose a food item: ",
                    ["ALL"] + fil_row[DB_FOOD_NAME_HEADER].tolist(),
                    index=None,
                    placeholder="Select a food item",
                )
                if sel == "ALL":
                    # Table mode: show the whole group with image previews.
                    _sess_state["is_fil_row"] = False
                    fil_row[DB_FOOD_TAGS_HEADER] = fil_row[DB_FOOD_TAGS_HEADER].str.split(" ")
                    img_urls = [
                        _sess_state["_conn"].get_public_url("indian_food", f"{code}.jpg")
                        for code in fil_row[DB_FOOD_CODE_HEADER]
                    ]
                    fil_row["images"] = img_urls
                elif sel:
                    sel_name = sel
                    _sess_state["disabled"] = False  # unlock the Details tab selectbox

                    _sess_state["is_fil_row"] = True
                    # Map the displayed item name back to its food code.
                    sel = fil_row.loc[fil_row[DB_FOOD_NAME_HEADER] == sel, DB_FOOD_CODE_HEADER].values[0]
                    _sess_state["selected_code"] = sel
                else:
                    _sess_state["selected_code"] = None

        view = st.empty()
        if "is_fil_row" in _sess_state and not _sess_state["is_fil_row"]:
            # "ALL" mode: editable table of the whole food group.
            st.data_editor(
                fil_row,
                column_config={
                    "images": st.column_config.ImageColumn("Preview Image", help="Streamlit app preview screenshots"),
                    "tags": st.column_config.ListColumn(
                        "Diet Preferences", help="Dietary preferences for the food item"
                    ),
                },
                hide_index=True,
                use_container_width=True,
            )

        if "is_fil_row" in _sess_state and _sess_state["is_fil_row"]:
            # Single-item mode: image + regional-language choropleth.
            with view.container(height=600):
                img = fetch_image(f"{sel}")
                col1, col2 = view.columns(2, gap="large")
                col2.image(img, use_column_width=True, caption=f"Image of {sel_name.upper()}")
                sel_lang = query_with_filter_eq(
                    return_columns="lang", table=DB_TABLE_NAME, column=DB_FOOD_CODE_HEADER, value=sel, as_df=False
                )
                # "lang" is a ";"-separated list of "<state-abbr> <local name>".
                sel_lang: list[str] = sel_lang[0]["lang"].split(";")
                try:
                    # NOTE(review): an empty sel_lang would raise IndexError,
                    # not AssertionError, so the except branch below may be
                    # dead — confirm the intended trigger for the fallback map.
                    _ = sel_lang[0]
                    sel_lang_code = pd.DataFrame(
                        [lang.strip().split(" ", maxsplit=1) for lang in sel_lang], columns=["abbr", "name"]
                    )
                    df = pd.merge(sel_lang_code, _sess_state["indian_states"], left_on="abbr", right_on="abbr")
                    # Base layer: every Indian state in light grey.
                    all_states = pd.json_normalize(_sess_state["gjson"]["features"])["properties.ST_NM"]
                    fig = px.choropleth(
                        all_states,
                        geojson=_sess_state["gjson"],
                        locations="properties.ST_NM",
                        featureidkey="properties.ST_NM",
                        color_discrete_sequence=["lightgrey"],
                        labels={"properties.ST_NM": "State"},
                    )
                    # Overlay: states where the item has a local name.
                    fig.add_trace(
                        go.Choropleth(
                            z=df["id"],
                            name=sel,
                            autocolorscale=False,
                            geojson=_sess_state["gjson"],
                            featureidkey="properties.ST_NM",
                            locations=df["state"],
                            locationmode="geojson-id",
                            customdata=df[["state", "lang", "name"]],
                            colorscale=px.colors.sequential.Inferno_r,
                            uid=f"{sel}",
                            hovertemplate="<br>".join(
                                [
                                    "<b>%{customdata[0]}</b><br>",
                                    "Language: %{customdata[1]}",
                                    "Known as: %{customdata[2]}",
                                ]
                            ),
                        )
                    )
                    # Nepali names are tagged "N." — Nepal is not in the Indian
                    # states geojson, so overlay it via an ISO-3 location.
                    is_nepal = sel_lang_code["abbr"].str.contains("N.")
                    if is_nepal.any():
                        fig.add_trace(
                            go.Choropleth(
                                z=[1],
                                customdata=pd.DataFrame(
                                    {
                                        "state": ["Nepal"],
                                        "lang": "Nepali",
                                        "name": sel_lang_code.loc[is_nepal, "name"].values[0],
                                    }
                                )[["state", "lang", "name"]],
                                autocolorscale=False,
                                reversescale=True,
                                showscale=False,
                                locationmode="ISO-3",
                                locations=["NPL"],
                                colorscale="greens",
                                hovertemplate="<br>".join(
                                    [
                                        "<b>%{customdata[0]}</b><br>",
                                        "Language: %{customdata[1]}",
                                        "Known as: %{customdata[2]}",
                                    ]
                                ),
                            )
                        )
                    # Cosmetic tweaks on the two base traces.
                    fig.data[0]["name"] = "Toggle States"
                    fig.data[1]["colorbar"]["len"] = 0.6
                    fig.data[1]["colorbar"]["y"] = 0.5
                    fig.data[1]["hoverlabel"]["bgcolor"] = "rgba(0, 0, 0, 0.9)"
                    fig.data[1]["hoverlabel"]["bordercolor"] = "white"

                except AssertionError:
                    # Fallback: no regional data — just highlight all of India.
                    fig = px.choropleth(
                        locations=["IND"],
                        locationmode="ISO-3",
                        color=[1],
                        scope="asia",
                        fitbounds="locations",
                    )
                    fig.data[0]["name"] = sel
                    fig.data[0]["hovertemplate"] = f"<b>India</b><br>Language: English/Hindi<br>Known as: {sel_name}"
                fig.update_geos(fitbounds="locations", visible=False)

                fig.update_layout(
                    geo=dict(bgcolor="rgba(14, 17, 20, 0.1)"),
                    margin=dict(l=0, r=0, t=0, b=0),
                    height=350,
                    title=dict(
                        text=f"Regional language distribution of {sel_name}",
                        xanchor="center",
                        x=0.5,
                        xref="paper",
                        yanchor="bottom",
                        y=0.1,
                        font_size=12,
                        font_color="grey",
                    ),
                    legend=dict(yanchor="top", y=0.99, xanchor="left", x=0.01, font_size=15),
                    modebar=dict(
                        orientation="h",
                        bgcolor=st._config.get_option("theme.backgroundColor"),
                        color=st._config.get_option("theme.primaryColor"),
                    ),
                )

                fig.update_mapboxes(domain=dict(x=[0, 0.5], y=[0, 1]))
                col1.plotly_chart(fig, use_container_width=True)

    with tab2:
        with tab2.container(height=100):
            # Bound to session key "comp_name"; disabled until an item is
            # picked on the Overview tab.
            st.selectbox(
                "Choose a food composition category: ",
                _sess_state["units_df"]["table_name"].dropna().unique(),
                index=None,
                key="comp_name",
                placeholder="Select a food composition category",
                disabled=_sess_state.disabled,
            )

        if _sess_state["disabled"]:
            st.warning("Please select a food item from the Overview tab to view its composition details.")

        if _sess_state["selected_code"]:
            food_item_df = query_with_filter_eq(
                return_columns="*", table=DB_TABLE_NAME, column=DB_FOOD_CODE_HEADER, value=_sess_state["selected_code"]
            )
            # Transpose the single row into (code, value) pairs so it can be
            # merged against the units metadata table.
            food_item_df = food_item_df.T.reset_index()
            food_item_df.columns = ["code", "value"]
            if _sess_state["comp_name"]:
                cards = pd.merge(
                    _sess_state["units_df"].loc[_sess_state["units_df"]["table_name"] == _sess_state["comp_name"]],
                    food_item_df,
                    left_on="code",
                    right_on="code",
                    how="inner",
                )
                cards["value"] = cards["value"].astype(float)
                cards = cards.loc[cards["value"] != 0, :]  # drop empty measurements
                card_elements = cards.shape[0]
                if not card_elements:
                    st.info("No data available for the selected food composition. :red[Please Select different one.]")
                    st.stop()

                # Codes ending in "_e" hold the tolerance for the preceding
                # measurement; only non-"_e" rows become metric cards, laid
                # out 4 per row.
                condition = cards["code"].str.endswith("_e")
                item_indexes = cards.loc[~condition, :].index.tolist()
                item_index_c, row_size = 0, 4
                metric_containers = [st.container(height=175) for _ in range((len(item_indexes) - 1) // row_size + 1)]
                for container in metric_containers:
                    with container:
                        columns = st.columns(row_size)
                        for col in columns:
                            try:
                                current_index = item_indexes[item_index_c]
                                measurement_unit = cards.at[current_index, "unit"]
                                factor = cards.at[current_index, "factor"]
                            except IndexError:
                                # Last row may be partially filled.
                                break

                            # "T…" food codes are percentage-based tables.
                            if _sess_state["selected_code"].startswith("T"):
                                measurement_unit = "%"
                                factor = 1

                            with col:
                                try:
                                    # The "_e" tolerance row sits right after
                                    # its measurement in the merged frame.
                                    delta_val = cards.at[current_index + 1, "value"] * factor
                                    delta = f"{delta_val:.5f} {measurement_unit}"
                                    delta_color = "normal"
                                except KeyError:
                                    delta = f"Nil {measurement_unit}"
                                    delta_color = "off"

                                st.metric(
                                    label=cards.at[current_index, "name"],
                                    value=f"{cards.at[current_index, 'value'] * factor:.5f} {measurement_unit}",
                                    delta=delta,
                                    delta_color=delta_color,
                                )
                            item_index_c += 1
|
foodviz/utils/__init__.py
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from dotenv import load_dotenv
|
2 |
+
from st_supabase_connection import SupabaseConnection
|
3 |
+
|
4 |
+
from .helpers import (
|
5 |
+
prepare_indian_geojson,
|
6 |
+
prepare_indian_languages,
|
7 |
+
prepare_unit_data,
|
8 |
+
load_bubble_data
|
9 |
+
)
|
10 |
+
|
11 |
+
load_dotenv()
|
12 |
+
|
13 |
+
__all__ = [
|
14 |
+
"SupabaseConnection",
|
15 |
+
"prepare_indian_geojson",
|
16 |
+
"prepare_indian_languages",
|
17 |
+
"prepare_unit_data",
|
18 |
+
"load_bubble_data"
|
19 |
+
]
|
foodviz/utils/color_slider.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import colorsys
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
import streamlit.components.v1 as components
|
5 |
+
|
6 |
+
|
7 |
+
def get_color(value, min_value, max_value):
    """Map *value* within [min_value, max_value] to a "#rrggbb" hex colour.

    The value is normalised to an HSV hue (0.0 → red, cycling through the
    spectrum and back to red at 1.0) at full saturation and brightness.

    Args:
        value: The value to colour-code.
        min_value: Lower bound of the value range.
        max_value: Upper bound of the value range.

    Returns:
        A lowercase hex colour string such as "#00ffff".
    """
    if max_value == min_value:
        # Degenerate range: avoid ZeroDivisionError; treat as hue 0 (red).
        hue = 0.0
    else:
        # Convert the value to a hue between 0 and 1.
        hue = (value - min_value) / (max_value - min_value)
    # Convert HSV to RGB (full saturation/brightness), then RGB to hex.
    rgb = colorsys.hsv_to_rgb(hue, 1, 1)
    return '#{:02x}{:02x}{:02x}'.format(int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))
|
14 |
+
|
15 |
+
def color_changing_select_slider():
    """Render a demo select-slider whose custom HTML handle is coloured
    according to the currently selected value.

    Uses a regular Streamlit select_slider for the value, then draws a
    custom gradient bar + draggable handle via components.html.
    """
    st.title("Color-Changing Select Slider")

    options = list(range(0, 101, 10))
    value = st.select_slider("Select a value", options=options)

    color = get_color(value, min(options), max(options))

    # Custom HTML and JavaScript for the color-changing slider.
    # Literal JS braces are doubled ({{ }}) so str.format leaves them alone.
    # BUG FIX: the live-update line used `${value}` with SINGLE braces, so
    # .format() substituted the Python value at render time and the JS
    # template literal was destroyed; it must be `${{value}}`.
    custom_slider = """
    <div id="custom-slider" style="
        width: 100%;
        height: 20px;
        background: linear-gradient(to right, #ff0000, #00ff00, #0000ff);
        position: relative;
        margin-top: 20px;
    ">
        <div id="slider-handle" style="
            width: 20px;
            height: 30px;
            background-color: {color};
            position: absolute;
            top: -5px;
            left: {value}%;
            transform: translateX(-50%);
            border-radius: 5px;
            cursor: pointer;
        "></div>
    </div>
    <p id="slider-value" style="text-align: center; margin-top: 10px;">Value: {value}</p>

    <script>
        const slider = document.getElementById('custom-slider');
        const handle = document.getElementById('slider-handle');
        const valueDisplay = document.getElementById('slider-value');

        let isDragging = false;

        handle.addEventListener('mousedown', (e) => {{
            isDragging = true;
            updateSlider(e);
        }});

        document.addEventListener('mousemove', (e) => {{
            if (isDragging) {{
                updateSlider(e);
            }}
        }});

        document.addEventListener('mouseup', () => {{
            isDragging = false;
        }});

        function updateSlider(e) {{
            const rect = slider.getBoundingClientRect();
            let x = e.clientX - rect.left;
            x = Math.max(0, Math.min(x, rect.width));

            const percentage = (x / rect.width) * 100;
            handle.style.left = `${{percentage}}%`;

            const value = Math.round(percentage / 10) * 10;
            valueDisplay.textContent = `Value: ${{value}}`;

            // Update Streamlit
            Streamlit.setComponentValue(value);
        }}
    </script>
    """.format(color=color, value=value)

    # NOTE(review): `Streamlit.setComponentValue` is only defined inside a
    # proper bidirectional component, not plain components.html — confirm
    # whether the value round-trip actually works here.
    components.html(custom_slider, height=100)

    st.write(f"Selected value: {value}")
    st.write(f"Color: {color}")


if __name__ == "__main__":
    color_changing_select_slider()
|
foodviz/utils/config.py
ADDED
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from abc import ABC, abstractmethod
|
2 |
+
from collections import OrderedDict
|
3 |
+
from enum import Enum
|
4 |
+
from json import load
|
5 |
+
from pathlib import Path
|
6 |
+
|
7 |
+
import streamlit as st
|
8 |
+
|
9 |
+
GROUPS = Path(__file__).parent.parent / "data" / "food_groups.json"
|
10 |
+
|
11 |
+
def load_json(file_path: str = GROUPS) -> dict:
|
12 |
+
with open(file_path, "r") as f:
|
13 |
+
return OrderedDict(load(f))
|
14 |
+
|
15 |
+
class Page(ABC):
    """Abstract base class for the app's Streamlit pages.

    Each concrete page implements :meth:`write` to render its content.
    """

    @abstractmethod
    def write(self):
        """Render this page's content; must be implemented by subclasses."""
        pass
|
19 |
+
|
20 |
+
|
21 |
+
def add_custom_css():
    """Inject app-wide CSS overrides into the current Streamlit page.

    The <style> block is currently empty — rules added here apply to the
    whole page via an unsafe-HTML markdown injection.
    """
    st.markdown(
        """
        <style>
        </style>
        """,
        unsafe_allow_html=True
    )
|
29 |
+
|
30 |
+
|
31 |
+
# Supabase table and column names used by the data-access layer.
DB_TABLE_NAME = "food_ifct"
DB_FOOD_NAME_HEADER = "name"
DB_FOOD_CODE_HEADER = "code"
DB_SCI_NAME_HEADER = "scie"
DB_FOOD_TAGS_HEADER = "tags"

# Path: config.py
# Root of the foodviz package (one level above utils/).
PROJECT_ROOT = Path(__file__).parent.parent

# Fixed colour per food group used by the plots (viridis-like palette).
COLOR_MAP = {
    "CEREALS AND MILLETS": "#440154",
    "GRAIN LEGUMES": "#471466",
    "GREEN LEAFY VEGETABLES": "#472575",
    "OTHER VEGETABLES": "#453681",
    "FRUITS": "#3f4587",
    "ROOTS AND TUBERS": "#39558b",
    "CONDIMENTS AND SPICES": "#32628d",
    "NUTS AND OIL SEEDS": "#2c708e",
    "SUGARS": "#277c8e",
    "MUSHROOMS": "#22898d",
    "MISCELLANEOUS FOODS": "#1f968b",
    "MILK AND MILK PRODUCTS": "#1fa386",
    "EGG AND EGG PRODUCTS": "#29af7f",
    "POULTRY": "#3dbb74",
    "ANIMAL MEAT": "#55c666",
    "MARINE FISH": "#74d054",
    "MARINE SHELLFISH": "#95d73f",
    "MARINE MOLLUSKS": "#bade27",
    "FRESH WATER FISH AND SHELLFISH": "#dce218",
    "EDIBLE OILS AND FATS": "#fde724",
}
|
62 |
+
|
63 |
+
|
64 |
+
class PPADF(Enum):
    """Proximate Principles and Dietary Fibre.

    Labels for the proximate-composition nutrient columns.
    """

    WATER = "Water"
    ENERGY = "Energy"
    PROTEIN = "Protein"
    FAT = "Fat"
    CARBOHYDRATES = "Carbohydrates"
    FIBRE = "Fibre"
    ASH = "Ash"
    TOTAL = "Total"

    @classmethod
    def get_all(cls):
        """Return every member's value, in declaration order."""
        return [member.value for member in cls]
|
foodviz/utils/helpers.py
ADDED
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
import geopandas as gpd
|
4 |
+
import pandas as pd
|
5 |
+
import streamlit as st
|
6 |
+
from llama_index.core import load_index_from_storage, StorageContext
|
7 |
+
from llama_index.core.retrievers import BaseRetriever
|
8 |
+
from llama_index.embeddings.voyageai import VoyageEmbedding
|
9 |
+
from llama_index.vector_stores.faiss import FaissVectorStore
|
10 |
+
|
11 |
+
from .config import PROJECT_ROOT
|
12 |
+
|
13 |
+
|
14 |
+
@st.cache_data(show_spinner=":blue[Loading the database..] Please wait...", persist=True)
def prepare_indian_geojson():
    """Download, simplify, and return Indian state boundaries.

    Fetches state polygons from a public GitHub dataset, simplifies the
    geometry with a 1000-unit tolerance (metres, since simplification is
    done in the estimated UTM CRS) to shrink the payload, and returns the
    ``__geo_interface__`` mapping with the state-name column renamed from
    ``NAME_1`` to ``ST_NM``.

    NOTE(review): depends on an external GitHub raw URL being reachable;
    the commented-out local path is the offline fallback.
    """
    url = "https://raw.githubusercontent.com/Subhash9325/GeoJson-Data-of-Indian-States/master/Indian_States"
    # url = PROJECT_ROOT / "data" / "INDIA_STATES.geojson"
    gdf = gpd.read_file(url)
    # Simplify in a metric CRS, then project back to the original CRS.
    gdf["geometry"] = gdf.to_crs(gdf.estimate_utm_crs()).simplify(1000).to_crs(gdf.crs)
    india_states = gdf.rename(columns={"NAME_1": "ST_NM"}).__geo_interface__
    return india_states
|
22 |
+
|
23 |
+
|
24 |
+
@st.cache_data()
def prepare_indian_languages():
    """Load the bundled Indian-languages lookup table as a DataFrame."""
    csv_path = PROJECT_ROOT / "data" / "languages.csv"
    return pd.read_csv(csv_path)
|
29 |
+
|
30 |
+
@st.cache_data()
def prepare_unit_data(path: str, drop_col: str = "type") -> pd.DataFrame:
    """Load the nutrient-unit CSV, optionally dropping one column.

    :param path: path to the units CSV file.
    :param drop_col: name of a column to drop; pass a falsy value
        (e.g. ``""``) to keep all columns.
    :return: the loaded DataFrame.
    """
    df = pd.read_csv(path)
    if drop_col:
        # BUG FIX: previously this always dropped the literal "type"
        # column, silently ignoring the drop_col argument. Also avoid
        # inplace mutation of the cached frame.
        df = df.drop(columns=drop_col)
    return df
|
36 |
+
|
37 |
+
def generate_color_range(start_rgb, end_rgb, steps):
    """
    Generate a color range between two RGB colors.

    Linearly interpolates each channel from *start_rgb* to *end_rgb*,
    producing ``steps + 1`` colours so both endpoints are included.

    :param start_rgb: Tuple representing the starting RGB color.
    :param end_rgb: Tuple representing the ending RGB color.
    :param steps: Number of steps in the color range (must be >= 1).
    :return: List of interpolated "rgb(r g b)" strings.
    """
    colors = []
    for step in range(steps + 1):  # +1 to ensure the end color is included
        frac = step / steps
        channels = [
            int(start * (1 - frac) + end * frac)
            for start, end in zip(start_rgb, end_rgb)
        ]
        colors.append("rgb({} {} {})".format(*channels))
    return colors
|
53 |
+
|
54 |
+
@st.cache_resource
def load_retriever(persist_dir: str, top_k: int = 5) -> BaseRetriever:
    """Load a persisted FAISS vector index and return a hybrid retriever.

    :param persist_dir: directory holding both the FAISS store and the
        llama-index storage context.
    :param top_k: number of nearest neighbours the retriever returns.
    :return: a retriever in "semantic_hybrid" query mode.
    :raises KeyError: if ``VOYAGE_API_KEY`` is not set in the environment.
    """
    # The embedding model must match the one used when the index was built.
    embed_model = VoyageEmbedding(model_name="voyage-large-2", voyage_api_key=os.environ["VOYAGE_API_KEY"])
    storage_context = StorageContext.from_defaults(
        vector_store=FaissVectorStore.from_persist_dir(persist_dir),
        persist_dir=persist_dir,
    )
    loaded_index = load_index_from_storage(storage_context, embed_model=embed_model, show_progress=True)
    # NOTE(review): alpha=0.1 presumably biases the hybrid query toward the
    # sparse/keyword side — confirm against the llama-index docs.
    return loaded_index.as_retriever(similarity_top_k=top_k, vector_store_query_mode="semantic_hybrid", vector_store_kwargs={"alpha": 0.1})
|
63 |
+
|
64 |
+
@st.cache_data
def load_bubble_data(df_path: str):
    """Read the bubble-chart dataset and attach a public image URL per group.

    Resolves each row's "grup" value to a Supabase-hosted image URL via
    the connection stored in st.session_state["_conn"].
    """
    df = pd.read_csv(df_path)

    def group_image_url(group):
        # Public URL for this food group's image in the Supabase bucket.
        storage = st.session_state["_conn"].storage
        return storage.from_('indian_food').get_public_url(f'food-classes/{group.capitalize()}.jpg')

    df["url"] = df["grup"].apply(group_image_url)
    return df
|
70 |
+
|
foodviz/utils/state.py
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from copy import deepcopy
|
2 |
+
from typing import Dict, Any
|
3 |
+
|
4 |
+
from streamlit.runtime import get_instance
|
5 |
+
from streamlit.runtime.legacy_caching.hashing import _CodeHasher
|
6 |
+
from streamlit.runtime.scriptrunner import get_script_run_ctx
|
7 |
+
|
8 |
+
|
9 |
+
class _SessionState:
    """Dict-backed per-session state with rerun synchronisation.

    Values live in ``self._state["data"]`` and are exposed through both
    item access (``state["x"]``) and attribute access (``state.x``).
    ``sync`` hashes the data dict between script runs and requests a
    rerun when it changed, so widgets reflect the latest values.

    NOTE(review): relies on Streamlit private APIs (``_CodeHasher``,
    ``request_rerun``) — verify they still exist in the pinned
    streamlit version.
    """

    def __init__(self, session, hash_funcs):
        """Initialize SessionState instance."""
        # Write through __dict__ directly so our own __setattr__ (which
        # stores into _state["data"]) is not triggered for "_state" itself.
        self.__dict__["_state"] = {
            "data": {},
            "hash": None,
            "hasher": _CodeHasher(hash_funcs),
            "is_rerun": False,
            "session": session,
        }

    def __call__(self, **kwargs):
        """Initialize state data once."""
        # Only sets keys that are not already present, so defaults don't
        # clobber values set on earlier runs.
        for item, value in kwargs.items():
            if item not in self._state["data"]:
                self._state["data"][item] = value

    def __getitem__(self, item):
        """Return a saved state value, None if item is undefined."""
        return self._state["data"].get(item, None)

    def __getattr__(self, item):
        """Return a saved state value, None if item is undefined."""
        # __getattr__ only fires for attributes not found normally, so
        # "_state" (stored in __dict__) never reaches this path.
        return self._state["data"].get(item, None)

    def __setitem__(self, item, value):
        """Set state value."""
        self._state["data"][item] = value

    def __setattr__(self, item, value):
        """Set state value."""
        self._state["data"][item] = value

    def clear(self):
        """Clear session state and request a rerun."""
        self._state["data"].clear()
        self._state["session"].request_rerun()

    def sync(self):
        """Rerun the app with all state values up to date from the beginning to fix rollbacks."""

        # Ensure to rerun only once to avoid infinite loops
        # caused by a constantly changing state value at each run.
        #
        # Example: state.value += 1
        if self._state["is_rerun"]:
            self._state["is_rerun"] = False

        elif self._state["hash"] is not None:
            # A hash mismatch means the data changed during this run;
            # request a rerun so widgets pick up the new values.
            if self._state["hash"] != self._state["hasher"].to_bytes(
                self._state["data"], None
            ):
                self._state["is_rerun"] = True
                self._state["session"].request_rerun(None)

        # Remember the hash of the data as it stood at the end of this run.
        self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None)
|
65 |
+
|
66 |
+
|
67 |
+
def _get_session():
    """Return the AppSession object for the current script run.

    Uses Streamlit's private runtime APIs (``get_instance``,
    ``_session_mgr``), which may break across Streamlit versions.

    :raises RuntimeError: if the current session cannot be resolved.
    """
    runtime = get_instance()
    session_id = get_script_run_ctx().session_id
    session_info = runtime._session_mgr.get_session_info(session_id)

    if session_info is None:
        raise RuntimeError("Couldn't get your Streamlit Session object.")

    return session_info.session
|
76 |
+
|
77 |
+
def get_state(hash_funcs=None):
    """Return the state object attached to the current Streamlit session.

    A fresh _SessionState is lazily created and cached on the session
    object the first time this is called for a given session.
    """
    session = _get_session()

    state = getattr(session, "_custom_session_state", None)
    if state is None:
        state = _SessionState(session, hash_funcs)
        session._custom_session_state = state

    return state
|
84 |
+
|
85 |
+
# Only used for separating namespace, everything can be saved at state variable as well.
CONFIG_DEFAULTS: Dict[str, Any] = {"slider_value": 0}

def provide_state(hash_funcs=None):
    """Decorator factory injecting a synced session state into a page function.

    The wrapped function receives a ``state`` keyword argument whose
    ``client_config`` mapping is seeded from CONFIG_DEFAULTS on first use;
    ``state.sync()`` runs after the function returns.
    """
    def inner(func):
        def wrapper(*args, **kwargs):
            session_state = get_state(hash_funcs=hash_funcs)
            # Seed per-client config exactly once per session.
            if session_state.client_config is None:
                session_state.client_config = deepcopy(CONFIG_DEFAULTS)

            result = func(state=session_state, *args, **kwargs)
            session_state.sync()
            return result

        return wrapper
    return inner
|
pyproject.toml
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[tool.poetry]
|
2 |
+
name = "ifct-viz"
|
3 |
+
version = "0.1.0"
|
4 |
+
description = ""
|
5 |
+
authors = ["p1utoze <1ds21ai001@dsce.edu.in>"]
|
6 |
+
readme = "README.md"
|
7 |
+
|
8 |
+
[tool.poetry.dependencies]
|
9 |
+
python = "^3.10"
|
10 |
+
asyncpg = "^0.29.0"
|
11 |
+
python-dotenv = "^1.0.1"
|
12 |
+
pydantic-settings = "^2.2.1"
|
13 |
+
streamlit = "^1.32.2"
|
14 |
+
st-supabase-connection = "^1.2.2"
|
15 |
+
plotly = "^5.20.0"
|
16 |
+
plotly-geo = "^1.0.0"
|
17 |
+
geopandas = "^0.14.3"
|
18 |
+
streamlit-antd-components = "^0.3.2"
|
19 |
+
matplotlib = "^3.8.3"
|
20 |
+
|
21 |
+
|
22 |
+
[tool.poetry.group.dev.dependencies]
|
23 |
+
rich = "^13.7.1"
|
24 |
+
pdfservices-sdk = "^2.3.1"
|
25 |
+
ploomber-cloud = "^0.1.19"
|
26 |
+
|
27 |
+
[build-system]
|
28 |
+
requires = ["poetry-core"]
|
29 |
+
build-backend = "poetry.core.masonry.api"
|
requirements.dev.txt
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
build==0.9.0 ; python_version >= "3.10" and python_version < "4.0"
|
2 |
+
certifi==2022.12.7 ; python_version >= "3.10" and python_version < "4.0"
|
3 |
+
cffi==1.15.1 ; python_version >= "3.10" and python_version < "4.0"
|
4 |
+
charset-normalizer==2.0.12 ; python_version >= "3.10" and python_version < "4.0"
|
5 |
+
colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and os_name == "nt"
|
6 |
+
cryptography==3.4.6 ; python_version >= "3.10" and python_version < "4.0"
|
7 |
+
idna==3.6 ; python_version >= "3.10" and python_version < "4.0"
|
8 |
+
markdown-it-py==3.0.0 ; python_version >= "3.10" and python_version < "4.0"
|
9 |
+
mdurl==0.1.2 ; python_version >= "3.10" and python_version < "4.0"
|
10 |
+
multipart==0.2.4 ; python_version >= "3.10" and python_version < "4.0"
|
11 |
+
packaging==21.3 ; python_version >= "3.10" and python_version < "4.0"
|
12 |
+
pdfservices-sdk==2.3.1 ; python_version >= "3.10" and python_version < "4.0"
|
13 |
+
pep517==0.13.0 ; python_version >= "3.10" and python_version < "4.0"
|
14 |
+
polling2==0.5.0 ; python_version >= "3.10" and python_version < "4.0"
|
15 |
+
polling==0.3.2 ; python_version >= "3.10" and python_version < "4.0"
|
16 |
+
pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0"
|
17 |
+
pygments==2.14.0 ; python_version >= "3.10" and python_version < "4.0"
|
18 |
+
pyjwt==2.4.0 ; python_version >= "3.10" and python_version < "4.0"
|
19 |
+
pyparsing==3.0.9 ; python_version >= "3.10" and python_version < "4.0"
|
20 |
+
pyyaml==6.0 ; python_version >= "3.10" and python_version < "4.0"
|
21 |
+
requests-toolbelt==0.10.1 ; python_version >= "3.10" and python_version < "4.0"
|
22 |
+
requests==2.27.1 ; python_version >= "3.10" and python_version < "4.0"
|
23 |
+
rich==13.7.1 ; python_version >= "3.10" and python_version < "4.0"
|
24 |
+
six==1.16.0 ; python_version >= "3.10" and python_version < "4.0"
|
25 |
+
toml==0.10.2 ; python_version >= "3.10" and python_version < "4.0"
|
26 |
+
tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11"
|
27 |
+
urllib3==1.26.13 ; python_version >= "3.10" and python_version < "4.0"
|
requirements.txt
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
altair==5.3.0 ; python_version >= "3.10" and python_version < "4.0"
|
2 |
+
annotated-types==0.6.0 ; python_version >= "3.10" and python_version < "4.0"
|
3 |
+
anyio==4.3.0 ; python_version >= "3.10" and python_version < "4.0"
|
4 |
+
async-timeout==4.0.3 ; python_version >= "3.10" and python_version < "3.12.0"
|
5 |
+
asyncpg==0.29.0 ; python_version >= "3.10" and python_version < "4.0"
|
6 |
+
attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0"
|
7 |
+
blinker==1.7.0 ; python_version >= "3.10" and python_version < "4.0"
|
8 |
+
cachetools==5.3.3 ; python_version >= "3.10" and python_version < "4.0"
|
9 |
+
certifi==2022.12.7 ; python_version >= "3.10" and python_version < "4.0"
|
10 |
+
charset-normalizer==2.0.12 ; python_version >= "3.10" and python_version < "4.0"
|
11 |
+
click==8.1.7 ; python_version >= "3.10" and python_version < "4.0"
|
12 |
+
click-plugins==1.1.1 ; python_version >= "3.10" and python_version < "4.0"
|
13 |
+
cligj==0.7.2 ; python_version >= "3.10" and python_version < "4"
|
14 |
+
colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
|
15 |
+
deprecation==2.1.0 ; python_version >= "3.10" and python_version < "4.0"
|
16 |
+
exceptiongroup==1.2.0 ; python_version >= "3.10" and python_version < "3.11"
|
17 |
+
fiona==1.9.6 ; python_version >= "3.10" and python_version < "4.0"
|
18 |
+
geopandas==0.14.3 ; python_version >= "3.10" and python_version < "4.0"
|
19 |
+
gitdb==4.0.11 ; python_version >= "3.10" and python_version < "4.0"
|
20 |
+
gitpython==3.1.43 ; python_version >= "3.10" and python_version < "4.0"
|
21 |
+
gotrue==2.4.2 ; python_version >= "3.10" and python_version < "4.0"
|
22 |
+
h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0"
|
23 |
+
httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0"
|
24 |
+
httpx==0.27.0 ; python_version >= "3.10" and python_version < "4.0"
|
25 |
+
idna==3.6 ; python_version >= "3.10" and python_version < "4.0"
|
26 |
+
jinja2==3.1.3 ; python_version >= "3.10" and python_version < "4.0"
|
27 |
+
jsonschema==4.21.1 ; python_version >= "3.10" and python_version < "4.0"
|
28 |
+
jsonschema-specifications==2023.12.1 ; python_version >= "3.10" and python_version < "4.0"
|
29 |
+
markdown-it-py==3.0.0 ; python_version >= "3.10" and python_version < "4.0"
|
30 |
+
markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0"
|
31 |
+
mdurl==0.1.2 ; python_version >= "3.10" and python_version < "4.0"
|
32 |
+
numpy==1.26.4 ; python_version >= "3.10" and python_version < "4.0"
|
33 |
+
packaging==21.3 ; python_version >= "3.10" and python_version < "4.0"
|
34 |
+
pandas==2.2.1 ; python_version >= "3.10" and python_version < "4.0"
|
35 |
+
pillow==10.3.0 ; python_version >= "3.10" and python_version < "4.0"
|
36 |
+
plotly==5.20.0 ; python_version >= "3.10" and python_version < "4.0"
|
37 |
+
plotly-geo==1.0.0 ; python_version >= "3.10" and python_version < "4.0"
|
38 |
+
postgrest==0.16.2 ; python_version >= "3.10" and python_version < "4.0"
|
39 |
+
protobuf==4.25.3 ; python_version >= "3.10" and python_version < "4.0"
|
40 |
+
pyarrow==15.0.2 ; python_version >= "3.10" and python_version < "4.0"
|
41 |
+
pydantic==2.6.4 ; python_version >= "3.10" and python_version < "4.0"
|
42 |
+
pydantic-core==2.16.3 ; python_version >= "3.10" and python_version < "4.0"
|
43 |
+
pydantic-settings==2.2.1 ; python_version >= "3.10" and python_version < "4.0"
|
44 |
+
pydeck==0.8.0 ; python_version >= "3.10" and python_version < "4.0"
|
45 |
+
pygments==2.14.0 ; python_version >= "3.10" and python_version < "4.0"
|
46 |
+
pyparsing==3.0.9 ; python_version >= "3.10" and python_version < "4.0"
|
47 |
+
pyproj==3.6.1 ; python_version >= "3.10" and python_version < "4.0"
|
48 |
+
python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0"
|
49 |
+
python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0"
|
50 |
+
pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0"
|
51 |
+
realtime==1.0.3 ; python_version >= "3.10" and python_version < "4.0"
|
52 |
+
referencing==0.34.0 ; python_version >= "3.10" and python_version < "4.0"
|
53 |
+
requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0"
|
54 |
+
rich==13.7.1 ; python_version >= "3.10" and python_version < "4.0"
|
55 |
+
rpds-py==0.18.0 ; python_version >= "3.10" and python_version < "4.0"
|
56 |
+
shapely==2.0.3 ; python_version >= "3.10" and python_version < "4.0"
|
57 |
+
six==1.16.0 ; python_version >= "3.10" and python_version < "4.0"
|
58 |
+
smmap==5.0.1 ; python_version >= "3.10" and python_version < "4.0"
|
59 |
+
sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0"
|
60 |
+
st-supabase-connection==1.2.2 ; python_version >= "3.10" and python_version < "4.0"
|
61 |
+
storage3==0.7.4 ; python_version >= "3.10" and python_version < "4.0"
|
62 |
+
streamlit==1.32.2 ; python_version >= "3.10" and python_version < "4.0"
|
63 |
+
streamlit-antd-components==0.3.2 ; python_version >= "3.10" and python_version < "4.0"
|
64 |
+
strenum==0.4.15 ; python_version >= "3.10" and python_version < "4.0"
|
65 |
+
supabase==2.4.1 ; python_version >= "3.10" and python_version < "4.0"
|
66 |
+
supafunc==0.4.5 ; python_version >= "3.10" and python_version < "4.0"
|
67 |
+
tenacity==8.2.3 ; python_version >= "3.10" and python_version < "4.0"
|
68 |
+
toml==0.10.2 ; python_version >= "3.10" and python_version < "4.0"
|
69 |
+
toolz==0.12.1 ; python_version >= "3.10" and python_version < "4.0"
|
70 |
+
tornado==6.4 ; python_version >= "3.10" and python_version < "4.0"
|
71 |
+
typing-extensions==4.10.0 ; python_version >= "3.10" and python_version < "4.0"
|
72 |
+
tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0"
|
73 |
+
urllib3==1.26.13 ; python_version >= "3.10" and python_version < "4.0"
|
74 |
+
watchdog==4.0.0 ; python_version >= "3.10" and python_version < "4.0" and platform_system != "Darwin"
|
75 |
+
websockets==12.0 ; python_version >= "3.10" and python_version < "4.0"
|
76 |
+
streamlit-card==1.0.2 ; python_version >= "3.10" and python_version < "4.0"
|
77 |
+
llama-index==0.10.55 ; python_version >= "3.10" and python_version < "4.0"
|
78 |
+
llama-index-embeddings-voyageai==0.1.4 ; python_version >= "3.10" and python_version < "4.0"
|
79 |
+
faiss-cpu==1.8.0.post1 ; python_version >= "3.10" and python_version < "4.0"
|
80 |
+
llama-index-vector-stores-faiss==0.1.2 ; python_version >= "3.10" and python_version < "4.0"
|