Datasets:
Bappadala Rohith Kumar Naidu committed on
Commit ·
93d3cf7
1
Parent(s): 34518f8
chore: rename SafeVisionAI to SafeVixAI across dataset hub
Browse files
- README.md +11 -11
- data/chatbot_service/data/legal/motor_vehicles_act_1988_summary.txt +1 -1
- notebooks/ChromaDB_RAG_Vectorstore_Build_chatbot_service_data_chroma_db_2.ipynb +1 -1
- notebooks/README.md +6 -6
- notebooks/Risk_Model_ONNX_Training_frontend_public_models_5.ipynb +1 -1
- notebooks/Roads_Data_Processing_backend_data_4.ipynb +1 -1
- requirements.txt +1 -1
- scripts/README.md +6 -6
- scripts/backend/data/prepare_road_sources.py +1 -1
- scripts/backend/data/road_sources.json +2 -2
- scripts/chatbot_service/data/_overpass_utils.py +1 -1
- scripts/scripts/data/_overpass_utils.py +1 -1
- scripts/scripts/data/download_legal_pdfs.py +1 -1
README.md
CHANGED
|
@@ -13,16 +13,16 @@ tags:
|
|
| 13 |
- traffic-law
|
| 14 |
- geospatial
|
| 15 |
- rag
|
| 16 |
-
pretty_name:
|
| 17 |
size_categories:
|
| 18 |
- 1B<n<10B
|
| 19 |
---
|
| 20 |
|
| 21 |
-
#
|
| 22 |
|
| 23 |
-
> The **Intelligence Layer** for the
|
| 24 |
|
| 25 |
-
This repository hosts all datasets, pre-trained models, notebooks, and **reproducible data acquisition scripts** that power the
|
| 26 |
|
| 27 |
**Main Application Repo:** [SafeVision-AI/SafeVision-AI](https://github.com/SafeVision-AI/SafeVision-AI)
|
| 28 |
|
|
@@ -32,12 +32,12 @@ This repository hosts all datasets, pre-trained models, notebooks, and **reprodu
|
|
| 32 |
|
| 33 |
```python
|
| 34 |
# Clone the entire intelligence layer
|
| 35 |
-
!git clone https://huggingface.co/datasets/rohith083/
|
| 36 |
|
| 37 |
# Symlink into the app structure
|
| 38 |
import os
|
| 39 |
-
os.makedirs("/content/
|
| 40 |
-
!ln -sfn /content/data/data/chatbot_service/data /content/
|
| 41 |
```
|
| 42 |
|
| 43 |
---
|
|
@@ -45,7 +45,7 @@ os.makedirs("/content/SafeVisionAI/chatbot_service", exist_ok=True)
|
|
| 45 |
## 📦 Repository Structure
|
| 46 |
|
| 47 |
```
|
| 48 |
-
|
| 49 |
├── data/                      ← 3.6 GB of raw intelligence data
|
| 50 |
│   ├── chatbot_service/data/  ← Legal PDFs, GIS CSVs, accident data, models
|
| 51 |
│   └── backend/datasets/      ← Challan rules, road infrastructure
|
|
@@ -82,7 +82,7 @@ All scripts here are **pure Python** β they run without any database or backen
|
|
| 82 |
|
| 83 |
```
|
| 84 |
scripts/
|
| 85 |
-
├── scripts/data/              ← from
|
| 86 |
│   ├── _overpass_utils.py     ← Core GIS utility (basic version)
|
| 87 |
│   ├── fetch_hospitals.py     ← Hospital data from OpenStreetMap
|
| 88 |
│   ├── fetch_police.py        ← Police station data
|
|
@@ -99,14 +99,14 @@ scripts/
|
|
| 99 |
│   ├── check_all_scripts.py   ← Script syntax validator
|
| 100 |
│   └── setup_kaggle.ps1       ← Kaggle API auth setup
|
| 101 |
│
|
| 102 |
-
├── backend/data/              ← from
|
| 103 |
│   ├── seed_violations.py     ← Traffic fine normalizer (MVA 2019)
|
| 104 |
│   ├── prepare_road_sources.py ← CSV/GeoJSON → LineString converter
|
| 105 |
│   ├── sample_pmgsy.py        ← PMGSY 867K roads sampler
|
| 106 |
│   ├── road_sources.example.json ← Manifest template
|
| 107 |
│   └── road_sources.json      ← Active road source manifest
|
| 108 |
│
|
| 109 |
-
└── chatbot_service/data/      ← from
|
| 110 |
    ├── _overpass_utils.py     ← Pro GIS utility (with retries + backoff)
|
| 111 |
    ├── fetch_hospitals.py     ← Pro hospital fetcher
|
| 112 |
    ├── fetch_police.py        ← Pro police fetcher
|
|
|
|
| 13 |
- traffic-law
|
| 14 |
- geospatial
|
| 15 |
- rag
|
| 16 |
+
pretty_name: SafeVixAI Dataset Hub
|
| 17 |
size_categories:
|
| 18 |
- 1B<n<10B
|
| 19 |
---
|
| 20 |
|
| 21 |
+
# SafeVixAI Dataset Hub 🛡️
|
| 22 |
|
| 23 |
+
> The **Intelligence Layer** for the SafeVixAI platform — IIT Madras Road Safety Hackathon 2026
|
| 24 |
|
| 25 |
+
This repository hosts all datasets, pre-trained models, notebooks, and **reproducible data acquisition scripts** that power the SafeVixAI application. It is designed to be cloned directly into Google Colab or any research environment.
|
| 26 |
|
| 27 |
**Main Application Repo:** [SafeVision-AI/SafeVision-AI](https://github.com/SafeVision-AI/SafeVision-AI)
|
| 28 |
|
|
|
|
| 32 |
|
| 33 |
```python
|
| 34 |
# Clone the entire intelligence layer
|
| 35 |
+
!git clone https://huggingface.co/datasets/rohith083/SafeVixAI-Dataset-Hub /content/data
|
| 36 |
|
| 37 |
# Symlink into the app structure
|
| 38 |
import os
|
| 39 |
+
os.makedirs("/content/SafeVixAI/chatbot_service", exist_ok=True)
|
| 40 |
+
!ln -sfn /content/data/data/chatbot_service/data /content/SafeVixAI/chatbot_service/data
|
| 41 |
```
|
| 42 |
|
| 43 |
---
|
|
|
|
| 45 |
## 📦 Repository Structure
|
| 46 |
|
| 47 |
```
|
| 48 |
+
SafeVixAI-Dataset-Hub/
|
| 49 |
├── data/                      ← 3.6 GB of raw intelligence data
|
| 50 |
│   ├── chatbot_service/data/  ← Legal PDFs, GIS CSVs, accident data, models
|
| 51 |
│   └── backend/datasets/      ← Challan rules, road infrastructure
|
|
|
|
| 82 |
|
| 83 |
```
|
| 84 |
scripts/
|
| 85 |
+
├── scripts/data/              ← from SafeVixAI/scripts/data/
|
| 86 |
│   ├── _overpass_utils.py     ← Core GIS utility (basic version)
|
| 87 |
│   ├── fetch_hospitals.py     ← Hospital data from OpenStreetMap
|
| 88 |
│   ├── fetch_police.py        ← Police station data
|
|
|
|
| 99 |
│   ├── check_all_scripts.py   ← Script syntax validator
|
| 100 |
│   └── setup_kaggle.ps1       ← Kaggle API auth setup
|
| 101 |
│
|
| 102 |
+
├── backend/data/              ← from SafeVixAI/backend/scripts/data/
|
| 103 |
│   ├── seed_violations.py     ← Traffic fine normalizer (MVA 2019)
|
| 104 |
│   ├── prepare_road_sources.py ← CSV/GeoJSON → LineString converter
|
| 105 |
│   ├── sample_pmgsy.py        ← PMGSY 867K roads sampler
|
| 106 |
│   ├── road_sources.example.json ← Manifest template
|
| 107 |
│   └── road_sources.json      ← Active road source manifest
|
| 108 |
│
|
| 109 |
+
└── chatbot_service/data/      ← from SafeVixAI/chatbot_service/scripts/data/
|
| 110 |
    ├── _overpass_utils.py     ← Pro GIS utility (with retries + backoff)
|
| 111 |
    ├── fetch_hospitals.py     ← Pro hospital fetcher
|
| 112 |
    ├── fetch_police.py        ← Pro police fetcher
|
data/chatbot_service/data/legal/motor_vehicles_act_1988_summary.txt
CHANGED
|
@@ -386,5 +386,5 @@ For the complete text: https://indiacode.nic.in
|
|
| 386 |
For official notifications: https://morth.nic.in
|
| 387 |
For traffic violation information: https://parivahan.gov.in/echallan
|
| 388 |
|
| 389 |
-
This summary is intended for the
|
| 390 |
"""
|
|
|
|
| 386 |
For official notifications: https://morth.nic.in
|
| 387 |
For traffic violation information: https://parivahan.gov.in/echallan
|
| 388 |
|
| 389 |
+
This summary is intended for the SafeVixAI RAG knowledge base to answer user queries about traffic laws and road safety regulations in India.
|
| 390 |
"""
|
notebooks/ChromaDB_RAG_Vectorstore_Build_chatbot_service_data_chroma_db_2.ipynb
CHANGED
|
@@ -11,7 +11,7 @@
|
|
| 11 |
"\n",
|
| 12 |
"**Output:** `chroma_db/` directory -> deployed to `chatbot_service/data/chroma_db/`\n",
|
| 13 |
"\n",
|
| 14 |
-
"This notebook builds the **Retrieval-Augmented Generation (RAG)** knowledge base for the
|
| 15 |
"It ingests Indian legal documents and first-aid medical PDFs, chunks them, embeds them, and stores them in a **ChromaDB** vector store.\n",
|
| 16 |
"\n",
|
| 17 |
"---\n",
|
|
|
|
| 11 |
"\n",
|
| 12 |
"**Output:** `chroma_db/` directory -> deployed to `chatbot_service/data/chroma_db/`\n",
|
| 13 |
"\n",
|
| 14 |
+
"This notebook builds the **Retrieval-Augmented Generation (RAG)** knowledge base for the SafeVixAI chatbot.\n",
|
| 15 |
"It ingests Indian legal documents and first-aid medical PDFs, chunks them, embeds them, and stores them in a **ChromaDB** vector store.\n",
|
| 16 |
"\n",
|
| 17 |
"---\n",
|
notebooks/README.md
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
-
#
|
| 2 |
|
| 3 |
-
These notebooks are the **research and training layer** of
|
| 4 |
|
| 5 |
---
|
| 6 |
|
|
@@ -10,7 +10,7 @@ Run this at the top of **any** notebook to mount the full dataset:
|
|
| 10 |
|
| 11 |
```python
|
| 12 |
# Step 1 — Clone the Hub (only run once per session)
|
| 13 |
-
!git clone https://huggingface.co/datasets/rohith083/
|
| 14 |
|
| 15 |
# Step 2 — Install dependencies
|
| 16 |
!pip install -q pdfplumber chromadb sentence-transformers ultralytics onnx
|
|
@@ -76,11 +76,11 @@ import os
|
|
| 76 |
os.environ["HUGGING_FACE_TOKEN"] = "your_token_here"
|
| 77 |
|
| 78 |
!cd /content/hub && git config user.email "you@example.com"
|
| 79 |
-
!cd /content/hub && git config user.name "
|
| 80 |
!cd /content/hub && git add . && git commit -m "chore: update trained model outputs"
|
| 81 |
-
!cd /content/hub && git push https://your_username:$HUGGING_FACE_TOKEN@huggingface.co/datasets/rohith083/
|
| 82 |
```
|
| 83 |
|
| 84 |
---
|
| 85 |
|
| 86 |
-
*Part of the [
|
|
|
|
| 1 |
+
# SafeVixAI — Research Notebooks 📊
|
| 2 |
|
| 3 |
+
These notebooks are the **research and training layer** of SafeVixAI. Each one processes raw data from the Hub and produces a model, index, or processed dataset used by the live application.
|
| 4 |
|
| 5 |
---
|
| 6 |
|
|
|
|
| 10 |
|
| 11 |
```python
|
| 12 |
# Step 1 — Clone the Hub (only run once per session)
|
| 13 |
+
!git clone https://huggingface.co/datasets/rohith083/SafeVixAI-Dataset-Hub /content/hub
|
| 14 |
|
| 15 |
# Step 2 — Install dependencies
|
| 16 |
!pip install -q pdfplumber chromadb sentence-transformers ultralytics onnx
|
|
|
|
| 76 |
os.environ["HUGGING_FACE_TOKEN"] = "your_token_here"
|
| 77 |
|
| 78 |
!cd /content/hub && git config user.email "you@example.com"
|
| 79 |
+
!cd /content/hub && git config user.name "SafeVixAI"
|
| 80 |
!cd /content/hub && git add . && git commit -m "chore: update trained model outputs"
|
| 81 |
+
!cd /content/hub && git push https://your_username:$HUGGING_FACE_TOKEN@huggingface.co/datasets/rohith083/SafeVixAI-Dataset-Hub main
|
| 82 |
```
|
| 83 |
|
| 84 |
---
|
| 85 |
|
| 86 |
+
*Part of the [SafeVixAI](https://github.com/SafeVision-AI/SafeVision-AI) — IIT Madras Road Safety Hackathon 2026*
|
notebooks/Risk_Model_ONNX_Training_frontend_public_models_5.ipynb
CHANGED
|
@@ -26,7 +26,7 @@
|
|
| 26 |
"### Pipeline\n",
|
| 27 |
"Synthetic data generation -> GBM training -> ONNX conversion -> Download\n",
|
| 28 |
"\n",
|
| 29 |
-
"> The model runs entirely client-side in the
|
| 30 |
]
|
| 31 |
},
|
| 32 |
{
|
|
|
|
| 26 |
"### Pipeline\n",
|
| 27 |
"Synthetic data generation -> GBM training -> ONNX conversion -> Download\n",
|
| 28 |
"\n",
|
| 29 |
+
"> The model runs entirely client-side in the SafeVixAI PWA using onnxruntime-web."
|
| 30 |
]
|
| 31 |
},
|
| 32 |
{
|
notebooks/Roads_Data_Processing_backend_data_4.ipynb
CHANGED
|
@@ -9,7 +9,7 @@
|
|
| 9 |
"**Output:** `toll_plazas_lite.json` -> deployed to `backend/data/roads/`\n",
|
| 10 |
"\n",
|
| 11 |
"This notebook processes the **NHAI Toll Plaza dataset** to produce a lightweight JSON\n",
|
| 12 |
-
"suitable for the
|
| 13 |
"\n",
|
| 14 |
"---\n",
|
| 15 |
"### Dataset\n",
|
|
|
|
| 9 |
"**Output:** `toll_plazas_lite.json` -> deployed to `backend/data/roads/`\n",
|
| 10 |
"\n",
|
| 11 |
"This notebook processes the **NHAI Toll Plaza dataset** to produce a lightweight JSON\n",
|
| 12 |
+
"suitable for the SafeVixAI backend API and offline PWA map layer.\n",
|
| 13 |
"\n",
|
| 14 |
"---\n",
|
| 15 |
"### Dataset\n",
|
requirements.txt
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
#
|
| 2 |
# Install with: pip install -r requirements.txt
|
| 3 |
|
| 4 |
# ── Core (all scripts) ────────────────────────────────────────
|
|
|
|
| 1 |
+
# SafeVixAI Dataset Hub β Script Dependencies
|
| 2 |
# Install with: pip install -r requirements.txt
|
| 3 |
|
| 4 |
# ── Core (all scripts) ────────────────────────────────────────
|
scripts/README.md
CHANGED
|
@@ -1,9 +1,9 @@
|
|
| 1 |
-
#
|
| 2 |
|
| 3 |
-
These scripts are the **raw data pipeline** that built the 3.6GB
|
| 4 |
All scripts here are **pure Python** — they require no database, no Redis, no PostGIS.
|
| 5 |
|
| 6 |
-
> Scripts are mirrored from the [
|
| 7 |
|
| 8 |
---
|
| 9 |
|
|
@@ -11,7 +11,7 @@ All scripts here are **pure Python** β they require no database, no Redis, no
|
|
| 11 |
|
| 12 |
```
|
| 13 |
scripts/
|
| 14 |
-
├── scripts/data/              ← from
|
| 15 |
│   ├── fetch_*.py             ← Overpass GIS fetchers (basic version)
|
| 16 |
│   ├── _overpass_utils.py     ← Core GIS utility
|
| 17 |
│   ├── download_legal_pdfs.py
|
|
@@ -24,13 +24,13 @@ scripts/
|
|
| 24 |
│   ├── check_all_scripts.py
|
| 25 |
│   └── setup_kaggle.ps1
|
| 26 |
│
|
| 27 |
-
├── backend/data/              ← from
|
| 28 |
│   ├── seed_violations.py     ← MVA 2019 traffic fine normalizer
|
| 29 |
│   ├── prepare_road_sources.py
|
| 30 |
│   ├── sample_pmgsy.py
|
| 31 |
│   └── road_sources.example.json
|
| 32 |
│
|
| 33 |
-
└── chatbot_service/data/      ← from
|
| 34 |
    ├── _overpass_utils.py     ← ⭐ Pro version (retries + backoff)
|
| 35 |
    └── fetch_*.py             ← ⭐ Pro fetchers (use these over scripts/data/)
|
| 36 |
```
|
|
|
|
| 1 |
+
# SafeVixAI — Data Acquisition Scripts 🔬
|
| 2 |
|
| 3 |
+
These scripts are the **raw data pipeline** that built the 3.6GB SafeVixAI dataset.
|
| 4 |
All scripts here are **pure Python** — they require no database, no Redis, no PostGIS.
|
| 5 |
|
| 6 |
+
> Scripts are mirrored from the [SafeVixAI main repo](https://github.com/SafeVision-AI/SafeVision-AI) and organized by their origin folder.
|
| 7 |
|
| 8 |
---
|
| 9 |
|
|
|
|
| 11 |
|
| 12 |
```
|
| 13 |
scripts/
|
| 14 |
+
├── scripts/data/              ← from SafeVixAI/scripts/data/
|
| 15 |
│   ├── fetch_*.py             ← Overpass GIS fetchers (basic version)
|
| 16 |
│   ├── _overpass_utils.py     ← Core GIS utility
|
| 17 |
│   ├── download_legal_pdfs.py
|
|
|
|
| 24 |
│   ├── check_all_scripts.py
|
| 25 |
│   └── setup_kaggle.ps1
|
| 26 |
│
|
| 27 |
+
├── backend/data/              ← from SafeVixAI/backend/scripts/data/
|
| 28 |
│   ├── seed_violations.py     ← MVA 2019 traffic fine normalizer
|
| 29 |
│   ├── prepare_road_sources.py
|
| 30 |
│   ├── sample_pmgsy.py
|
| 31 |
│   └── road_sources.example.json
|
| 32 |
│
|
| 33 |
+
└── chatbot_service/data/      ← from SafeVixAI/chatbot_service/scripts/data/
|
| 34 |
    ├── _overpass_utils.py     ← ⭐ Pro version (retries + backoff)
|
| 35 |
    └── fetch_*.py             ← ⭐ Pro fetchers (use these over scripts/data/)
|
| 36 |
```
|
scripts/backend/data/prepare_road_sources.py
CHANGED
|
@@ -25,7 +25,7 @@ import json
|
|
| 25 |
import sys
|
| 26 |
from pathlib import Path
|
| 27 |
|
| 28 |
-
ROOT = Path(__file__).resolve().parents[1] #
|
| 29 |
CHATBOT_DATA = ROOT.parent / "chatbot_service" / "data"
|
| 30 |
OUT_DIR = ROOT / "datasets" / "roads"
|
| 31 |
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 25 |
import sys
|
| 26 |
from pathlib import Path
|
| 27 |
|
| 28 |
+
ROOT = Path(__file__).resolve().parents[1] # SafeVixAI/backend/
|
| 29 |
CHATBOT_DATA = ROOT.parent / "chatbot_service" / "data"
|
| 30 |
OUT_DIR = ROOT / "datasets" / "roads"
|
| 31 |
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
scripts/backend/data/road_sources.json
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0fab7e082337cf00095f344c3636dabf47e25f2993db379567f53310cd2b6e43
|
| 3 |
+
size 725
|
scripts/chatbot_service/data/_overpass_utils.py
CHANGED
|
@@ -20,7 +20,7 @@ DEFAULT_ENDPOINTS = (
|
|
| 20 |
)
|
| 21 |
DEFAULT_HEADERS = {
|
| 22 |
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
|
| 23 |
-
'User-Agent': '
|
| 24 |
}
|
| 25 |
CSV_COLUMNS = [
|
| 26 |
'name',
|
|
|
|
| 20 |
)
|
| 21 |
DEFAULT_HEADERS = {
|
| 22 |
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
|
| 23 |
+
'User-Agent': 'SafeVixAI chatbot data fetcher/1.0',
|
| 24 |
}
|
| 25 |
CSV_COLUMNS = [
|
| 26 |
'name',
|
scripts/scripts/data/_overpass_utils.py
CHANGED
|
@@ -16,7 +16,7 @@ DEFAULT_ENDPOINTS = (
|
|
| 16 |
)
|
| 17 |
DEFAULT_HEADERS = {
|
| 18 |
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
|
| 19 |
-
'User-Agent': '
|
| 20 |
}
|
| 21 |
CSV_COLUMNS = [
|
| 22 |
'osm_id',
|
|
|
|
| 16 |
)
|
| 17 |
DEFAULT_HEADERS = {
|
| 18 |
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
|
| 19 |
+
'User-Agent': 'SafeVixAI bootstrap scripts/1.0',
|
| 20 |
}
|
| 21 |
CSV_COLUMNS = [
|
| 22 |
'osm_id',
|
scripts/scripts/data/download_legal_pdfs.py
CHANGED
|
@@ -80,7 +80,7 @@ def download_first_working(sources: list[str], destination: Path) -> bool:
|
|
| 80 |
req = urllib.request.Request(
|
| 81 |
url,
|
| 82 |
headers={
|
| 83 |
-
"User-Agent": "Mozilla/5.0 (
|
| 84 |
},
|
| 85 |
)
|
| 86 |
with urllib.request.urlopen(req, timeout=60) as response:
|
|
|
|
| 80 |
req = urllib.request.Request(
|
| 81 |
url,
|
| 82 |
headers={
|
| 83 |
+
"User-Agent": "Mozilla/5.0 (SafeVixAI-DataPipeline/1.0; +https://github.com)"
|
| 84 |
},
|
| 85 |
)
|
| 86 |
with urllib.request.urlopen(req, timeout=60) as response:
|