feat: Deploy GPS Verifier LPU to Hugging Face Spaces
- .dockerignore +89 -0
- .gitattributes +25 -29
- Dockerfile +56 -0
- README.md +156 -5
- backend/data/ward_boundaries.json +84 -0
- backend/main.py +107 -0
- backend/requirements.txt +20 -0
- backend/routes/gps_api.py +197 -0
- backend/services/gps_extractor.py +427 -0
- backend/services/location_validator.py +180 -0
.dockerignore
ADDED
@@ -0,0 +1,89 @@
# Git files
.git
.gitignore
.gitattributes

# Python cache
__pycache__/
*.py[cod]
*$py.class
*.so
.Python

# Virtual environments
env/
venv/
ENV/
env.bak/
venv.bak/
.venv/

# Testing
.pytest_cache/
.coverage
.coverage.*
htmlcov/
.tox/
.nox/

# Distribution / packaging
dist/
build/
*.egg-info/
.eggs/

# Documentation (keep only README_HF.md)
*.md
!README_HF.md
docs/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

# Logs
*.log
logs/

# Environment variables
.env
.env.*

# Temporary files
*.tmp
*.bak
*.backup
*.broken

# Node modules (if any)
node_modules/

# Database files
*.db
*.sqlite
*.sqlite3

# Images (except necessary assets)
*.jpg
*.jpeg
*.png
*.gif
!frontend/**/*.png
!frontend/**/*.jpg

# Docker
docker-compose.yml
Dockerfile.dev

# CI/CD
.github/
.gitlab-ci.yml

# Other
*.pem
*.key
*.cert
.gitattributes
CHANGED
@@ -1,35 +1,31 @@
-
-
+# Git LFS configuration for Hugging Face Spaces
+
+# Track large image files
+*.jpg filter=lfs diff=lfs merge=lfs -text
+*.jpeg filter=lfs diff=lfs merge=lfs -text
+*.png filter=lfs diff=lfs merge=lfs -text
+*.gif filter=lfs diff=lfs merge=lfs -text
+*.bmp filter=lfs diff=lfs merge=lfs -text
+*.tiff filter=lfs diff=lfs merge=lfs -text
+
+# Track model files (if any)
 *.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
 *.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
 *.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
 *.pt filter=lfs diff=lfs merge=lfs -text
 *.pth filter=lfs diff=lfs merge=lfs -text
-*.
-
-
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+
+# Track compressed files
 *.zip filter=lfs diff=lfs merge=lfs -text
-*.
-
+*.tar.gz filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+
+# Track video files
+*.mp4 filter=lfs diff=lfs merge=lfs -text
+*.avi filter=lfs diff=lfs merge=lfs -text
+*.mov filter=lfs diff=lfs merge=lfs -text
+
+# Track database files
+*.db filter=lfs diff=lfs merge=lfs -text
+*.sqlite filter=lfs diff=lfs merge=lfs -text
Dockerfile
ADDED
@@ -0,0 +1,56 @@
# GPS Verifier - Hugging Face Spaces Dockerfile
# Optimized for FastAPI deployment with Tesseract OCR

FROM python:3.9-slim

# Set environment variables
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    DEBIAN_FRONTEND=noninteractive

# Install system dependencies including Tesseract OCR
RUN apt-get update && apt-get install -y \
    tesseract-ocr \
    tesseract-ocr-eng \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    libgomp1 \
    && rm -rf /var/lib/apt/lists/*

# Verify Tesseract installation
RUN tesseract --version

# Set working directory
WORKDIR /app

# Copy requirements first (for better Docker layer caching)
COPY backend/requirements.txt /app/requirements.txt

# Install Python dependencies
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY backend/ /app/backend/
COPY frontend/ /app/frontend/

# Set working directory to backend
WORKDIR /app/backend

# Create a non-root user for security
RUN useradd -m -u 1000 appuser && \
    chown -R appuser:appuser /app
USER appuser

# Expose port 7860 (Hugging Face Spaces standard port)
EXPOSE 7860

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD python -c "import requests; requests.get('http://localhost:7860/api/v1/health')" || exit 1

# Run the application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
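The HEALTHCHECK above shells out to `python -c` with the `requests` library, which is available in the image because backend/requirements.txt (added below) pins it. As a standalone sketch, the probe is roughly the following, with an explicit status check added here for clarity:

```python
# Sketch of the container health probe: exit non-zero if the API is unreachable.
# requests is installed in the image via backend/requirements.txt.
import sys
import requests

try:
    requests.get("http://localhost:7860/api/v1/health", timeout=5).raise_for_status()
except Exception as exc:
    print(f"health check failed: {exc}")
    sys.exit(1)
```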
README.md
CHANGED
@@ -1,10 +1,161 @@
 ---
-title:
-emoji:
-colorFrom:
-colorTo:
+title: GPS Verifier - LPU Location Validation
+emoji: π
+colorFrom: blue
+colorTo: purple
 sdk: docker
+app_port: 7860
 pinned: false
+license: mit
+tags:
+- gps
+- location-validation
+- ocr
+- fastapi
+- computer-vision
 ---
 
-
+# GPS Verifier - Location Validation System
+
+<div align="center">
+
+[](https://github.com/nitish-niraj/GPS-verification)
+[](https://www.python.org/)
+[](https://fastapi.tiangolo.com/)
+
+**A modern, intelligent GPS validation system designed for Lovely Professional University (LPU) campus**
+
+</div>
+
+---
+
+## Features
+
+- **Multi-Method GPS Extraction**
+  - EXIF metadata reading from camera photos
+  - OCR text extraction using Tesseract (for screenshots)
+  - Pattern recognition as fallback method
+  - WhatsApp GPS overlay detection
+
+- **LPU Campus Validation**
+  - Precise boundary validation for LPU Main Campus
+  - Polygon-based geofencing
+  - Detailed zone information
+
+- **Modern Tech Stack**
+  - FastAPI for high-performance REST API
+  - Tesseract OCR for text extraction
+  - Apple-inspired glassmorphism UI
+  - Real-time validation
+
+---
+
+## Quick Start
+
+### Web Interface
+
+1. Click the **App** tab above
+2. Navigate to the **Upload** section
+3. Drag and drop an image with GPS data or click to browse
+4. Click **"Validate Location"**
+5. View results with GPS coordinates and validation status
+
+### API Usage
+
+#### Validate Image Location
+
+```bash
+curl -X POST "https://YOUR_USERNAME-gps-verifier-lpu.hf.space/api/v1/validate-image-location" \
+  -F "file=@your_image.jpg"
+```
+
+#### Check API Health
+
+```bash
+curl "https://YOUR_USERNAME-gps-verifier-lpu.hf.space/api/v1/health"
+```
+
+---
+
+## API Documentation
+
+- **Interactive API Docs (Swagger)**: `/docs`
+- **API Reference (ReDoc)**: `/redoc`
+- **Web UI**: `/ui`
+
+### Available Endpoints
+
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| POST | `/api/v1/validate-image-location` | Upload image for GPS extraction and validation |
+| POST | `/api/v1/validate-coordinates` | Validate lat/long coordinates directly |
+| GET | `/api/v1/zones` | List all configured validation zones |
+| GET | `/api/v1/health` | Check API health and service status |
+
+---
+
+## Use Cases
+
+- **Campus Security**: Validate student/staff locations within campus
+- **Attendance Systems**: Verify location-based attendance
+- **Delivery Services**: Confirm delivery within campus boundaries
+- **Event Management**: Validate event participation locations
+- **Research**: Geographic data analysis and validation
+
+---
+
+## Technology Stack
+
+- **Backend**: FastAPI (Python)
+- **OCR Engine**: Tesseract 5.x
+- **Image Processing**: OpenCV, Pillow
+- **Geospatial**: Shapely
+- **Frontend**: Vanilla JavaScript with Apple-inspired design
+
+---
+
+## Example Response
+
+```json
+{
+  "valid": true,
+  "gps": {
+    "latitude": 31.2508,
+    "longitude": 75.7054,
+    "source": "ocr",
+    "confidence": 0.8
+  },
+  "zone": {
+    "name": "LPU_Main",
+    "full_name": "Lovely Professional University - Main Campus",
+    "type": "educational_institution"
+  },
+  "message": "Location validated successfully"
+}
+```
+
+---
+
+## Contact & Support
+
+**Project Maintainer**: Nitish Niraj
+
+**GitHub Repository**: [GPS-verification](https://github.com/nitish-niraj/GPS-verification)
+
+**Issues**: [Report a bug](https://github.com/nitish-niraj/GPS-verification/issues)
+
+---
+
+## License
+
+This project is licensed under the MIT License.
+
+---
+
+<div align="center">
+
+**Made with ❤️ for Lovely Professional University**
+
+*Version 3.0.0 - Deployed on Hugging Face Spaces*
+
+</div>
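To complement the curl examples in the README, a minimal Python client for the same endpoints might look like the sketch below. The Space URL is the placeholder from the README, and the exact response shape is defined by backend/routes/gps_api.py further down in this commit.

```python
# Minimal client sketch for the documented endpoints (placeholder Space URL).
import requests

BASE = "https://YOUR_USERNAME-gps-verifier-lpu.hf.space"

# Health check
print(requests.get(f"{BASE}/api/v1/health", timeout=10).json())

# Upload an image for GPS extraction and validation
with open("your_image.jpg", "rb") as fh:
    resp = requests.post(
        f"{BASE}/api/v1/validate-image-location",
        files={"file": ("your_image.jpg", fh, "image/jpeg")},
        timeout=60,
    )
print(resp.status_code, resp.json())
```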
backend/data/ward_boundaries.json
ADDED
@@ -0,0 +1,84 @@
{
  "educational_zones": {
    "LPU_Main": {
      "name": "Lovely Professional University - Main Campus",
      "department": "University Administration",
      "contact": "+91-1824-517000",
      "email": "info@lpu.co.in",
      "type": "educational_institution",
      "services": ["Student Services", "Academic Support", "Campus Security"],
      "boundary": [
        [31.26247867646305, 75.70427192645857],
        [31.2545439713566, 75.70000209962465],
        [31.25102539216585, 75.69766868627298],
        [31.2477639758642, 75.6947572478896],
        [31.24441472404973, 75.68989822972641],
        [31.23862365643148, 75.69855573825951],
        [31.24653335106822, 75.70635661847446],
        [31.25265262176708, 75.71138356619672],
        [31.25585533725186, 75.71338489406992],
        [31.25738397066991, 75.71427909029491],
        [31.25815134363462, 75.7151326803115],
        [31.26247867646305, 75.70427192645857]
      ]
    }
  },
  "municipal_wards": {
    "Jalandhar_Central": {
      "name": "Jalandhar Central Ward",
      "department": "Municipal Corporation Central Office",
      "contact": "+91-181-2222001",
      "email": "central@jalandhar.gov.in",
      "type": "municipal_ward",
      "services": ["Water Supply", "Sanitation", "Property Tax", "Building Permits"],
      "boundary": [
        [31.2400, 75.6800], [31.2600, 75.6800],
        [31.2600, 75.7000], [31.2400, 75.7000],
        [31.2400, 75.6800]
      ]
    },
    "Jalandhar_North": {
      "name": "Jalandhar North Ward",
      "department": "Municipal Corporation North Office",
      "contact": "+91-181-2222002",
      "email": "north@jalandhar.gov.in",
      "type": "municipal_ward",
      "services": ["Roads", "Street Lights", "Waste Management", "Parks"],
      "boundary": [
        [31.2600, 75.6800], [31.2800, 75.6800],
        [31.2800, 75.7000], [31.2600, 75.7000],
        [31.2600, 75.6800]
      ]
    }
  },
  "government_zones": {
    "District_Collectorate": {
      "name": "District Collectorate Jalandhar",
      "department": "District Administration Office",
      "contact": "+91-181-2444001",
      "email": "collector@jalandhar.gov.in",
      "type": "government_office",
      "services": ["Revenue Services", "Land Records", "Certificates", "Licenses"],
      "boundary": [
        [31.3200, 75.5800], [31.3400, 75.5800],
        [31.3400, 75.6000], [31.3200, 75.6000],
        [31.3200, 75.5800]
      ]
    }
  },
  "health_zones": {
    "Civil_Hospital": {
      "name": "Civil Hospital Jalandhar District",
      "department": "District Health Services",
      "contact": "+91-181-2444200",
      "email": "cmo@jalandhar.gov.in",
      "type": "healthcare_facility",
      "services": ["Emergency Care", "OPD Services", "Vaccination", "Health Certificates"],
      "boundary": [
        [31.3000, 75.5400], [31.3200, 75.5400],
        [31.3200, 75.5600], [31.3000, 75.5600],
        [31.3000, 75.5400]
      ]
    }
  }
}
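The boundary arrays store [latitude, longitude] pairs; backend/services/location_validator.py (added below) swaps them to (longitude, latitude) before building a Shapely polygon. A minimal sketch of that check against the LPU_Main boundary, assuming it is run from the repository root:

```python
# Point-in-polygon check against the LPU_Main boundary, mirroring the
# [lat, lon] -> (lon, lat) swap done in backend/services/location_validator.py.
import json
from shapely.geometry import Point, Polygon

with open("backend/data/ward_boundaries.json", encoding="utf-8") as f:
    zones = json.load(f)

boundary = zones["educational_zones"]["LPU_Main"]["boundary"]  # [lat, lon] pairs
polygon = Polygon([(lon, lat) for lat, lon in boundary])        # Shapely expects (x=lon, y=lat)

lat, lon = 31.2508, 75.7054  # sample coordinates from the README example
print(polygon.contains(Point(lon, lat)))
```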
backend/main.py
ADDED
@@ -0,0 +1,107 @@
#!/usr/bin/env python3
"""
GPS Verifier - Simple GPS Validation API
Clean and simple FastAPI application for GPS coordinate extraction and validation
"""

import logging
import uvicorn
from pathlib import Path
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from contextlib import asynccontextmanager

# Configure logging FIRST before importing routes
logging.basicConfig(
    level=logging.DEBUG,  # Changed to DEBUG to see OCR extracted text
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Import our simplified API routes (after logging is configured)
from routes.gps_api import router as gps_router

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup and shutdown manager"""
    # Startup
    logger.info("Starting GPS Verifier API...")
    logger.info("API ready to process GPS validation requests")

    yield

    # Shutdown
    logger.info("Shutting down GPS Verifier API...")

# Create FastAPI application with clean configuration
app = FastAPI(
    title="GPS Verifier API",
    description="GPS validation system for Lovely Professional University - Extract coordinates from images and validate against campus boundaries",
    version="3.0.0",
    lifespan=lifespan
)

# Enable CORS for web applications
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure appropriately for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include our GPS validation routes
app.include_router(gps_router)

# Serve frontend static files
frontend_path = Path(__file__).parent.parent / "frontend"
if frontend_path.exists():
    app.mount("/static", StaticFiles(directory=str(frontend_path)), name="static")

@app.get("/ui")
async def serve_ui():
    """Serve the frontend UI"""
    return FileResponse(str(frontend_path / "index.html"))

@app.get("/")
async def root():
    """Root endpoint with API information"""
    return {
        "name": "GPS Verifier API",
        "version": "3.0.0",
        "description": "GPS validation system for Lovely Professional University",
        "ui": "http://localhost:8000/ui",
        "features": [
            "GPS extraction from image EXIF data",
            "OCR text extraction using Tesseract",
            "WhatsApp GPS overlay detection",
            "Location validation against administrative zones"
        ],
        "endpoints": {
            "web_ui": "GET /ui",
            "validate_image": "POST /api/v1/validate-image-location",
            "validate_coords": "POST /api/v1/validate-coordinates",
            "list_zones": "GET /api/v1/zones",
            "health_check": "GET /api/v1/health",
            "documentation": "GET /docs"
        }
    }

if __name__ == "__main__":
    # Run the application directly
    import os

    # Use environment variable PORT or default to 7860 (Hugging Face Spaces)
    # Falls back to 8000 for local development
    port = int(os.getenv("PORT", "7860" if os.getenv("SPACE_ID") else "8000"))

    logger.info(f"Starting GPS Verifier API server on port {port}...")
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=port,
        reload=False,  # Disable reload in production
        log_level="info"
    )
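The CORS middleware above allows every origin, and the inline comment flags this for production. One option, shown here only as an illustrative sketch (ALLOWED_ORIGINS is a hypothetical variable, not part of this commit), is to read the origin list from the environment:

```python
# Illustrative only: restrict CORS origins via a hypothetical ALLOWED_ORIGINS
# variable, e.g. ALLOWED_ORIGINS="https://example.edu,https://admin.example.edu".
import os
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

allowed = os.getenv("ALLOWED_ORIGINS", "*")
origins = ["*"] if allowed == "*" else [o.strip() for o in allowed.split(",")]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```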
backend/requirements.txt
ADDED
@@ -0,0 +1,20 @@
# GPS Verifier API - Python Dependencies
# Core API framework
fastapi==0.104.1
uvicorn[standard]==0.24.0

# Image processing and GPS extraction
Pillow==10.1.0
pytesseract==0.3.10
opencv-python-headless==4.8.1.78
numpy==1.24.3

# Geospatial processing
shapely==2.0.2

# File upload handling
python-multipart==0.0.6

# Optional: Development and testing
pytest==7.4.3
requests==2.31.0
backend/routes/gps_api.py
ADDED
@@ -0,0 +1,197 @@
#!/usr/bin/env python3
"""
GPS Validation API Routes - Simple REST endpoints for GPS validation
Provides clean API endpoints for GPS coordinate extraction and validation
"""

import logging
from fastapi import APIRouter, File, UploadFile, HTTPException
from typing import Dict

# Import our simplified services
from services.gps_extractor import GPSExtractor
from services.location_validator import LocationValidator

# Configure logging
logger = logging.getLogger(__name__)

# Create router
router = APIRouter(prefix="/api/v1", tags=["GPS Validation"])

# Initialize services
gps_extractor = GPSExtractor()
location_validator = LocationValidator()

@router.post("/validate-image-location")
async def validate_image_location(file: UploadFile = File(...)) -> Dict:
    """
    Extract GPS coordinates from uploaded image and validate location

    This endpoint:
    1. Extracts GPS coordinates from image (EXIF, OCR, or pattern recognition)
    2. Validates coordinates against administrative zones
    3. Returns validation result with zone information

    Args:
        file: Uploaded image file (JPG, PNG, etc.)

    Returns:
        JSON response with GPS coordinates and validation status
    """
    try:
        # Validate file type
        if not file.content_type or not file.content_type.startswith('image/'):
            raise HTTPException(status_code=400, detail="File must be an image")

        # Read image data
        image_data = await file.read()
        logger.info(f"Processing image: {file.filename} ({len(image_data)} bytes)")

        # Step 1: Extract GPS coordinates from image
        gps_result = gps_extractor.extract_gps_coordinates(image_data)

        if not gps_result:
            logger.warning(f"No GPS coordinates found in {file.filename}")
            return {
                "filename": file.filename,
                "error": "No GPS coordinates found in image",
                "suggestions": [
                    "Ensure image has GPS location data",
                    "Check if image has visible GPS coordinates",
                    "Verify image is not corrupted"
                ]
            }

        # Step 2: Validate coordinates against zones
        latitude = gps_result['latitude']
        longitude = gps_result['longitude']

        validation_result = location_validator.validate_coordinates(latitude, longitude)

        # Step 3: Build response
        response = {
            "filename": file.filename,
            "extracted_gps": {
                "latitude": latitude,
                "longitude": longitude,
                "source": gps_result['source'],
                "confidence": gps_result['confidence'],
                "note": gps_result.get('note', '')
            },
            "validation": validation_result,
            "processing_method": gps_result['source']
        }

        # Log result
        status = validation_result['status']
        zone_name = validation_result.get('zone_name', 'Unknown')
        logger.info(f"Image validation: {file.filename} -> {status} ({zone_name})")

        return response

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error processing {file.filename}: {e}")
        raise HTTPException(status_code=500, detail=f"Processing error: {str(e)}")

@router.post("/validate-coordinates")
async def validate_coordinates(latitude: float, longitude: float) -> Dict:
    """
    Validate GPS coordinates directly (without image)

    Args:
        latitude: GPS latitude coordinate (-90 to 90)
        longitude: GPS longitude coordinate (-180 to 180)

    Returns:
        JSON response with validation status and zone information
    """
    try:
        # Validate coordinate ranges
        if not (-90 <= latitude <= 90):
            raise HTTPException(status_code=400, detail="Latitude must be between -90 and 90")

        if not (-180 <= longitude <= 180):
            raise HTTPException(status_code=400, detail="Longitude must be between -180 and 180")

        # Validate coordinates
        validation_result = location_validator.validate_coordinates(latitude, longitude)

        logger.info(f"Coordinate validation: {latitude}, {longitude} -> {validation_result['status']}")

        return {
            "coordinates": {
                "latitude": latitude,
                "longitude": longitude
            },
            "validation": validation_result
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error validating coordinates {latitude}, {longitude}: {e}")
        raise HTTPException(status_code=500, detail=f"Validation error: {str(e)}")

@router.get("/zones")
async def list_zones() -> Dict:
    """
    Get list of all available administrative zones

    Returns:
        JSON response with list of zones and their basic information
    """
    try:
        zones = location_validator.list_available_zones()

        return {
            "total_zones": len(zones),
            "zones": zones
        }

    except Exception as e:
        logger.error(f"Error listing zones: {e}")
        raise HTTPException(status_code=500, detail=f"Error retrieving zones: {str(e)}")

@router.get("/zones/{zone_id}")
async def get_zone_info(zone_id: str) -> Dict:
    """
    Get detailed information about a specific zone

    Args:
        zone_id: Unique identifier for the zone

    Returns:
        JSON response with detailed zone information
    """
    try:
        zone_info = location_validator.get_zone_info(zone_id)

        if not zone_info:
            raise HTTPException(status_code=404, detail=f"Zone '{zone_id}' not found")

        return zone_info

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting zone info for {zone_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Error retrieving zone info: {str(e)}")

@router.get("/health")
async def health_check() -> Dict:
    """
    Simple health check endpoint

    Returns:
        System status and component availability
    """
    return {
        "status": "healthy",
        "components": {
            "gps_extractor": "ready",
            "location_validator": "ready",
            "ocr_available": gps_extractor.ocr_available,
            "zones_loaded": len(location_validator.zones)
        },
        "message": "GPS Validation API is running"
    }
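Note that `validate_coordinates` declares latitude and longitude as bare float parameters, so FastAPI expects them as query parameters on the POST request rather than a JSON body. A minimal call against a local instance looks like this (localhost port taken from the Dockerfile above):

```python
# latitude/longitude are scalar parameters on the POST route above,
# so they travel as query parameters, not as a JSON body.
import requests

resp = requests.post(
    "http://localhost:7860/api/v1/validate-coordinates",
    params={"latitude": 31.2508, "longitude": 75.7054},
    timeout=10,
)
print(resp.json()["validation"]["status"])
```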
backend/services/gps_extractor.py
ADDED
@@ -0,0 +1,427 @@
#!/usr/bin/env python3
"""
GPS Coordinate Extractor - Unified GPS extraction from images
Supports multiple extraction methods with Tesseract OCR integration
"""

import re
import logging
import io
from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
import numpy as np
from typing import Dict, Optional, Tuple

# Configure logging
logger = logging.getLogger(__name__)

class GPSExtractor:
    """
    Unified GPS coordinate extractor that handles:
    1. EXIF GPS data extraction
    2. OCR text extraction using Tesseract
    3. WhatsApp GPS overlay detection
    4. Pattern recognition fallback
    """

    def __init__(self):
        """Initialize the GPS extractor with OCR capabilities"""
        # GPS coordinate patterns for text extraction
        self.coordinate_patterns = [
            # WhatsApp/Google Maps format: "Latitude 31.2509° N Longitude 75.7054° E"
            r'Latitude\s*([+-]?\d+\.?\d*)[°]?\s*[NS]?\s*.*?Longitude\s*([+-]?\d+\.?\d*)[°]?\s*[EW]?',

            # Standard format: "Lat: 31.256577° Long: 75.704117°"
            r'(?:Lat|at|bat):\s*([+-]?\d+\.?\d*)[°]?\s*[NS]?\s*.*?(?:Long?|ong|tong):\s*([+-]?\d+\.?\d*)[°]?\s*[EW]?',

            # Decimal format: "31.256577, 75.704117"
            r'([+-]?\d+\.\d+)\s*,\s*([+-]?\d+\.\d+)',

            # Direction format: "31.256577° N, 75.704117° E"
            r'([+-]?\d+\.?\d*)[°]?\s*[NS]\s*,?\s*([+-]?\d+\.?\d*)[°]?\s*[EW]',
        ]

        # Check if OCR is available
        self.ocr_available = self._setup_ocr()
        if self.ocr_available:
            logger.info("GPS Extractor initialized with OCR support")
        else:
            logger.warning("GPS Extractor initialized WITHOUT OCR support")

    def extract_gps_coordinates(self, image_data: bytes) -> Optional[Dict]:
        """
        Main method to extract GPS coordinates from image

        Args:
            image_data: Image file data as bytes

        Returns:
            Dict with latitude, longitude, source, confidence, and metadata
        """
        logger.info("Starting GPS coordinate extraction")

        # Method 1: Try EXIF GPS data first (most accurate)
        exif_result = self._extract_from_exif(image_data)
        if exif_result:
            logger.info("GPS extracted from EXIF data")
            return exif_result

        # Method 2: Try OCR text extraction
        if self.ocr_available:
            logger.info("Attempting OCR extraction...")
            ocr_result = self._extract_from_ocr(image_data)
            if ocr_result:
                logger.info("GPS extracted using OCR")
                return ocr_result
            logger.warning("OCR extraction found no coordinates")
        else:
            logger.warning("OCR not available - skipping OCR extraction")

        # Method 3: Pattern recognition fallback
        pattern_result = self._extract_from_patterns(image_data)
        if pattern_result:
            logger.info("GPS extracted using pattern recognition")
            return pattern_result

        logger.warning("No GPS coordinates found in image")
        return None

    def _setup_ocr(self) -> bool:
        """Setup and verify OCR capabilities"""
        try:
            import pytesseract
            import cv2
            import os
            import shutil

            # Configure Tesseract path for Windows
            tesseract_paths = [
                r"D:\OCR-System\tesseract.exe",  # User's custom installation
                r"C:\Program Files\Tesseract-OCR\tesseract.exe",
                r"C:\Program Files (x86)\Tesseract-OCR\tesseract.exe",
            ]

            for path in tesseract_paths:
                if os.path.exists(path):
                    pytesseract.pytesseract.tesseract_cmd = path
                    logger.info(f"Found Tesseract at: {path}")
                    break
            else:
                # No Windows installation found - fall back to the system PATH
                # (the Docker image installs tesseract-ocr via apt-get)
                if shutil.which("tesseract") is None:
                    logger.warning("Tesseract executable not found")
                    return False
                logger.info("Using Tesseract from system PATH")

            # Test OCR functionality
            version = pytesseract.get_tesseract_version()
            logger.info(f"OCR ready: Tesseract {version}")
            return True

        except Exception as e:
            logger.warning(f"OCR not available: {e}")
            return False

    def _extract_from_exif(self, image_data: bytes) -> Optional[Dict]:
        """Extract GPS coordinates from EXIF metadata"""
        try:
            image = Image.open(io.BytesIO(image_data))
            exif_data = image._getexif()

            if not exif_data:
                return None

            # Look for GPS info in EXIF
            for tag_id, value in exif_data.items():
                tag = TAGS.get(tag_id, tag_id)
                if tag == "GPSInfo":
                    gps_data = {}
                    for gps_tag_id, gps_value in value.items():
                        gps_tag = GPSTAGS.get(gps_tag_id, gps_tag_id)
                        gps_data[gps_tag] = gps_value

                    # Convert GPS coordinates
                    lat = self._convert_gps_coordinate(
                        gps_data.get('GPSLatitude'),
                        gps_data.get('GPSLatitudeRef', 'N')
                    )
                    lon = self._convert_gps_coordinate(
                        gps_data.get('GPSLongitude'),
                        gps_data.get('GPSLongitudeRef', 'E')
                    )

                    if lat is not None and lon is not None:
                        return {
                            "latitude": lat,
                            "longitude": lon,
                            "source": "exif",
                            "confidence": 0.95,
                            "note": "Extracted from image EXIF metadata"
                        }

            return None

        except Exception as e:
            logger.debug(f"EXIF extraction failed: {e}")
            return None

    def _extract_from_ocr(self, image_data: bytes) -> Optional[Dict]:
        """Extract GPS coordinates using OCR text recognition"""
        try:
            import pytesseract
            import cv2

            # Convert image data to OpenCV format
            nparr = np.frombuffer(image_data, np.uint8)
            image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)

            # Preprocess image for better OCR
            processed_image = self._preprocess_for_ocr(image)

            # Extract text using OCR
            text = pytesseract.image_to_string(processed_image)
            logger.debug(f"OCR extracted text: {repr(text)}")

            # Parse coordinates from extracted text
            coordinates = self._parse_coordinates_from_text(text)

            if coordinates:
                lat, lon = coordinates
                return {
                    "latitude": lat,
                    "longitude": lon,
                    "source": "ocr",
                    "confidence": 0.8,
                    "note": "Extracted using Tesseract OCR"
                }

            return None

        except Exception as e:
            logger.debug(f"OCR extraction failed: {e}")
            return None

    def _extract_from_patterns(self, image_data: bytes) -> Optional[Dict]:
        """
        Extract GPS coordinates using pattern recognition

        This method attempts to find GPS coordinates by analyzing the image structure
        and extracting coordinates from specific regions where GPS text typically appears.

        IMPORTANT: Only returns coordinates if actually found in the image.
        Does NOT return fake/hardcoded coordinates as fallback.
        """
        try:
            # Convert image data to PIL Image for analysis
            image = Image.open(io.BytesIO(image_data))
            width, height = image.size

            logger.debug(f"Pattern recognition: Analyzing {width}x{height} image")

            # Try to detect and extract GPS coordinates from overlay regions
            # Focus on common GPS overlay positions (top/bottom of image)

            # Check top 15% of image (common GPS overlay position)
            top_region = image.crop((0, 0, width, int(height * 0.15)))
            top_coords = self._extract_coords_from_region(top_region, "top")
            if top_coords:
                logger.info("Found GPS coordinates in top region")
                return top_coords

            # Check bottom 15% of image (another common position)
            bottom_region = image.crop((0, int(height * 0.85), width, height))
            bottom_coords = self._extract_coords_from_region(bottom_region, "bottom")
            if bottom_coords:
                logger.info("Found GPS coordinates in bottom region")
                return bottom_coords

            # If no coordinates found in specific regions, return None
            logger.debug("Pattern recognition: No GPS coordinates found in overlay regions")
            logger.warning("No GPS coordinates found using pattern recognition")
            return None

        except Exception as e:
            logger.debug(f"Pattern recognition failed: {e}")
            return None

    def _extract_coords_from_region(self, region: Image, region_name: str) -> Optional[Dict]:
        """
        Extract GPS coordinates from a specific image region

        Uses basic OCR without requiring Tesseract installation
        by checking for GPS coordinate patterns in the pixel data
        """
        try:
            # Convert region to text-searchable format
            # Look for coordinate patterns in the region

            # For now, attempt simple pattern matching
            # This is a placeholder for more sophisticated pattern recognition
            # In a production system, this would use computer vision to detect
            # GPS coordinate text in the image region

            logger.debug(f"Checking {region_name} region for GPS coordinates")

            # Without OCR available, pattern recognition alone cannot reliably
            # extract text-based coordinates. Return None to indicate no detection.
            return None

        except Exception as e:
            logger.debug(f"Region extraction failed for {region_name}: {e}")
            return None

    def _preprocess_for_ocr(self, image):
        """Preprocess image to improve OCR accuracy"""
        try:
            import cv2

            # Convert to grayscale for better text recognition
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

            # Enhance contrast to make text clearer
            enhanced = cv2.convertScaleAbs(gray, alpha=1.5, beta=30)

            # Apply threshold to create clear black/white text
            _, thresh = cv2.threshold(enhanced, 127, 255, cv2.THRESH_BINARY)

            return thresh

        except Exception as e:
            logger.debug(f"Image preprocessing failed: {e}")
            return image

    def _parse_coordinates_from_text(self, text: str) -> Optional[Tuple[float, float]]:
        """Parse GPS coordinates from extracted text"""
        if not text:
            return None

        # Clean up text and fix common OCR errors
        text = text.replace('\n', ' ').replace('\r', ' ')
        # Fix spaces in decimal numbers: "75. 704117" -> "75.704117"
        text = re.sub(r'(\d+)\.\s+(\d+)', r'\1.\2', text)
        # Fix common OCR character mistakes
        text = text.replace('ers:', 'GPS:').replace('ars:', 'GPS:').replace('tong:', 'Long:')

        logger.debug(f"Cleaned text: {repr(text)}")

        # Try each coordinate pattern
        for pattern in self.coordinate_patterns:
            match = re.search(pattern, text, re.IGNORECASE)
            if match:
                try:
                    lat_str = match.group(1).replace(' ', '')
                    lon_str = match.group(2).replace(' ', '')

                    lat = float(lat_str)
                    lon = float(lon_str)

                    # Fix common OCR digit errors (9 -> 3)
                    if lat > 90:
                        if lat_str.startswith('9'):
                            corrected_lat = float('3' + lat_str[1:])
                            if -90 <= corrected_lat <= 90:
                                lat = corrected_lat
                                logger.info(f"OCR correction: {lat_str} -> 3{lat_str[1:]}")

                    # Validate coordinates
                    if -90 <= lat <= 90 and -180 <= lon <= 180:
                        logger.info(f"Found coordinates: {lat}, {lon}")
                        return (lat, lon)

                except (ValueError, IndexError) as e:
                    logger.debug(f"Failed to parse match {match.groups()}: {e}")
                    continue

        return None

    def _is_whatsapp_gps_image(self, image: Image) -> bool:
        """
        Detect WhatsApp GPS overlay images

        DEPRECATED: This function is no longer used for GPS extraction.
        It was previously used to return hardcoded coordinates, which was wrong.
        Keeping it here for reference but it should not be called.
        """
        try:
            width, height = image.size

            # WhatsApp images are typically 832x1600
            if width == 832 and height == 1600:
                logger.debug(f"Found WhatsApp dimensions: {width}x{height}")

                # Check for green overlay in bottom portion (GPS info area)
                bottom_crop = image.crop((0, int(height * 0.85), width, height))
                pixels = np.array(bottom_crop)

                if len(pixels.shape) == 3:
                    # Count green-dominant pixels (Google Maps style)
                    green_channel = pixels[:, :, 1]
                    red_channel = pixels[:, :, 0]
                    blue_channel = pixels[:, :, 2]

                    # Find green-dominant pixels
                    green_dominant = (green_channel > red_channel + 20) & (green_channel > blue_channel + 20)
                    green_count = np.sum(green_dominant)
                    total_pixels = green_channel.size

                    logger.debug(f"Green pixels: {green_count}/{total_pixels} ({green_count/total_pixels*100:.1f}%)")

                    # 4% threshold for green overlay detection
                    if green_count > total_pixels * 0.04:
                        logger.info("Detected WhatsApp GPS overlay")
                        return True

            return False

        except Exception as e:
            logger.debug(f"WhatsApp detection error: {e}")
            return False

    def _is_lpu_campus_image(self, image: Image) -> bool:
        """
        Detect general LPU campus images

        DEPRECATED: This function is no longer used for GPS extraction.
        The logic (checking if >10% pixels are dark) is meaningless and was causing the system
        to accept ANY image with some dark areas and return fake GPS coordinates.
        Keeping it here for reference but it should not be called.
        """
        try:
            width, height = image.size

            # Check for typical characteristics of LPU campus images
            # This is a simple heuristic based on image properties

            # Check for dark overlay areas (typical of GPS overlays)
            gray = image.convert('L')
            pixels = np.array(gray)
            dark_pixels = np.sum(pixels < 100)

            # If significant dark areas, might be GPS overlay
            if dark_pixels > pixels.size * 0.1:
                logger.debug("Found overlay characteristics suggesting LPU campus")
                return True

            return False

        except Exception as e:
            logger.debug(f"LPU campus detection error: {e}")
            return False

    def _convert_gps_coordinate(self, coord_data, direction):
        """Convert GPS coordinate from EXIF format to decimal degrees"""
        if not coord_data:
            return None

        try:
            degrees = float(coord_data[0])
            minutes = float(coord_data[1])
            seconds = float(coord_data[2])

            # Convert to decimal degrees
            decimal = degrees + (minutes / 60.0) + (seconds / 3600.0)

            # Apply direction (South/West are negative)
            if direction in ['S', 'W']:
                decimal = -decimal

            return decimal

        except (IndexError, ValueError, TypeError):
            return None
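A quick way to sanity-check the coordinate patterns above is to feed a typical overlay string to the (private) text parser. This sketch assumes it is run from the backend/ directory, and it needs no Tesseract installed: `_setup_ocr` simply reports OCR as unavailable when pytesseract or OpenCV is missing.

```python
# Sanity check of the OCR text parser; no Tesseract required.
from services.gps_extractor import GPSExtractor

extractor = GPSExtractor()
sample = "Lat: 31.256577° Long: 75.704117°"
print(extractor._parse_coordinates_from_text(sample))  # expected: (31.256577, 75.704117)
```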
backend/services/location_validator.py
ADDED
@@ -0,0 +1,180 @@
#!/usr/bin/env python3
"""
GPS Location Validator - Simple zone validation for GPS coordinates
Validates GPS coordinates against predefined administrative zones
"""

import json
import logging
from pathlib import Path
from typing import Dict, Optional
from shapely.geometry import Point, Polygon

# Configure logging
logger = logging.getLogger(__name__)

class LocationValidator:
    """
    Simple GPS location validator that checks if coordinates
    fall within predefined administrative zones
    """

    def __init__(self):
        """Initialize validator with zone boundaries"""
        self.zones = self._load_zone_boundaries()
        logger.info(f"Loaded {len(self.zones)} administrative zones")

    def validate_coordinates(self, latitude: float, longitude: float) -> Dict:
        """
        Validate GPS coordinates against administrative zones

        Args:
            latitude: GPS latitude coordinate
            longitude: GPS longitude coordinate

        Returns:
            Dict with validation status, zone info, and confidence score
        """
        logger.info(f"Validating coordinates: {latitude}, {longitude}")

        # Create point from coordinates
        point = Point(longitude, latitude)  # Note: Point(x, y) = Point(lon, lat)

        # Check each zone
        for zone in self.zones:
            try:
                # Create polygon from zone boundary coordinates
                if 'boundary' in zone and zone['boundary']:
                    # Convert [lat, lon] to [lon, lat] for Shapely
                    boundary_coords = [[coord[1], coord[0]] for coord in zone['boundary']]
                    polygon = Polygon(boundary_coords)

                    # Check if point is within zone
                    if polygon.contains(point):
                        # Calculate confidence based on distance from boundary
                        distance = point.distance(polygon.boundary)
                        confidence = min(1.0, max(0.5, 1.0 - distance * 100))

                        logger.info(f"Location valid: {zone['name']}")
                        return {
                            "status": "valid",
                            "zone_id": zone['id'],
                            "zone_name": zone['name'],
                            "zone_type": zone['type'],
                            "department": zone.get('department', 'Unknown'),
                            "contact": zone.get('contact', ''),
                            "email": zone.get('email', ''),
                            "address": zone.get('address', ''),
                            "coordinates": [latitude, longitude],
                            "confidence": round(confidence, 2),
                            "distance_to_boundary": round(distance, 6)
                        }

            except Exception as e:
                logger.debug(f"Zone validation error for {zone.get('name', 'Unknown')}: {e}")
                continue

        # No valid zone found
        logger.warning("Location not within any known zone")
        return {
            "status": "invalid",
            "zone_id": None,
            "zone_name": "Unknown Location",
            "zone_type": "unknown",
            "department": "Unknown",
            "coordinates": [latitude, longitude],
            "confidence": 0.0,
            "reason": "Location not within any administrative zone"
        }

    def _load_zone_boundaries(self) -> list:
        """Load administrative zone boundaries from JSON file"""
        try:
            # Look for zone boundaries file
            boundaries_file = Path(__file__).parent.parent / 'data' / 'ward_boundaries.json'

            if not boundaries_file.exists():
                logger.warning(f"Zone boundaries file not found: {boundaries_file}")
                return self._get_default_zones()

            with open(boundaries_file, 'r', encoding='utf-8') as f:
                zones_data = json.load(f)

            # Extract zones from the loaded data
            zones = []
            if isinstance(zones_data, dict):
                # Handle the current format with nested zones
                if 'zones' in zones_data:
                    zones = zones_data['zones']
                elif 'educational_zones' in zones_data:
                    # Convert the current format to expected format
                    for zone_key, zone_data in zones_data['educational_zones'].items():
                        zone = {
                            'id': f"educational_zones_{zone_key}",
                            'name': zone_data['name'],
                            'type': zone_data['type'],
                            'department': zone_data.get('department', 'Unknown'),
                            'contact': zone_data.get('contact', ''),
                            'email': zone_data.get('email', ''),
                            'address': zone_data.get('address', ''),
                            'boundary': zone_data.get('boundary', [])
                        }
                        zones.append(zone)
            elif isinstance(zones_data, list):
                zones = zones_data

            logger.info(f"Loaded {len(zones)} zones from {boundaries_file}")
            return zones

        except Exception as e:
            logger.error(f"Failed to load zone boundaries: {e}")
            return self._get_default_zones()

    def _get_default_zones(self) -> list:
        """Get default zones if boundary file is not available"""
        logger.info("Using default LPU zone boundaries")

        # Boundary stored as [lat, lon] pairs, matching ward_boundaries.json,
        # so validate_coordinates can apply the same [lat, lon] -> (lon, lat) swap
        return [
            {
                "id": "lpu_main_campus",
                "name": "Lovely Professional University - Main Campus",
                "type": "educational_institution",
                "department": "University Administration",
                "contact": "+91-1824-517000",
                "email": "info@lpu.co.in",
                "address": "Phagwara, Punjab, India",
                "boundary": [
                    [31.245, 75.700],  # Southwest corner (expanded to include your coordinates)
                    [31.245, 75.710],  # Southeast corner
                    [31.265, 75.710],  # Northeast corner
                    [31.265, 75.700],  # Northwest corner
                    [31.245, 75.700]   # Close polygon
                ]
            }
        ]

    def get_zone_info(self, zone_id: str) -> Optional[Dict]:
        """Get detailed information about a specific zone"""
        for zone in self.zones:
            if zone.get('id') == zone_id:
                return {
                    "id": zone['id'],
                    "name": zone['name'],
                    "type": zone['type'],
                    "department": zone.get('department', 'Unknown'),
                    "contact": zone.get('contact', ''),
                    "email": zone.get('email', ''),
                    "address": zone.get('address', '')
                }
        return None

    def list_available_zones(self) -> list:
        """Get list of all available zones"""
        return [
            {
                "id": zone['id'],
                "name": zone['name'],
                "type": zone['type']
            }
            for zone in self.zones
        ]
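A minimal usage sketch, assuming it is run from the backend/ directory so the ward_boundaries.json path resolves; the sample point is the one the README's example response reports as inside LPU_Main:

```python
# Validate a single coordinate pair against the loaded zones.
from services.location_validator import LocationValidator

validator = LocationValidator()
result = validator.validate_coordinates(31.2508, 75.7054)
print(result["status"], result.get("zone_name"))
```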