Upload 8 files
Browse files- .gitattributes +1 -0
- __init__.py +18 -0
- app.py +106 -0
- blood_group.h5 +3 -0
- gender_model.keras +3 -0
- model_loader.py +108 -0
- predictor.py +146 -0
- preprocessing.py +103 -0
- requirements.txt +9 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
gender_model.keras filter=lfs diff=lfs merge=lfs -text
|
__init__.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utilities package for fingerprint analysis
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from .model_loader import load_blood_group_model, load_gender_model, set_model_paths
|
| 6 |
+
from .predictor import predict_blood_group, predict_gender
|
| 7 |
+
from .preprocessing import preprocess_fingerprint, enhance_fingerprint, normalize_fingerprint
|
| 8 |
+
|
| 9 |
+
__all__ = [
|
| 10 |
+
'load_blood_group_model',
|
| 11 |
+
'load_gender_model',
|
| 12 |
+
'set_model_paths',
|
| 13 |
+
'predict_blood_group',
|
| 14 |
+
'predict_gender',
|
| 15 |
+
'preprocess_fingerprint',
|
| 16 |
+
'enhance_fingerprint',
|
| 17 |
+
'normalize_fingerprint'
|
| 18 |
+
]
|
app.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
import cv2
import numpy as np
from PIL import Image
import os

from utils.model_loader import load_blood_group_model, load_gender_model
from utils.predictor import predict_blood_group, predict_gender
from utils.preprocessing import preprocess_fingerprint

# Module-level model caches, populated lazily by load_models() on the first
# request so the app can start even when the weight files are missing.
blood_model = None
gender_model = None
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def load_models():
    """Load both models once, caching them in module globals.

    Each loader failure is tolerated independently: the offending slot is
    left as None and a warning is printed, so the UI can report exactly
    which model is unavailable.

    Returns:
        Tuple (blood_model, gender_model); either element may be None.
    """
    global blood_model, gender_model

    if blood_model is None:
        try:
            blood_model = load_blood_group_model()
        except Exception as exc:
            blood_model = None
            print(f"Warning: Blood group model not loaded: {exc}")

    if gender_model is None:
        try:
            gender_model = load_gender_model()
        except Exception as exc:
            gender_model = None
            print(f"Warning: Gender model not loaded: {exc}")

    return blood_model, gender_model
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def analyze_fingerprint(image):
    """Analyze an uploaded fingerprint image.

    Args:
        image: PIL image from the Gradio input component, or None when the
            user clicked Analyze without uploading anything.

    Returns:
        Tuple of four strings: (blood group summary, per-class blood group
        scores, gender summary, placeholder for the hidden 4th output).
    """
    if image is None:
        return "Please upload a fingerprint image.", "", "", ""

    try:
        # Load models lazily; either may come back None if its file is missing.
        blood_model, gender_model = load_models()
        if blood_model is None or gender_model is None:
            missing = []
            if blood_model is None:
                missing.append("blood group")
            if gender_model is None:
                missing.append("gender")
            missing_text = ", ".join(missing)
            msg = f"Model files not deployed on Vercel. Missing: {missing_text}."
            return msg, "", msg, ""

        # Each model expects a different input shape/colour mode.
        preprocessed_blood = preprocess_fingerprint(image, model_type="blood_group")
        preprocessed_gender = preprocess_fingerprint(image, model_type="gender")

        # Get predictions (each returns a dict with a "success" flag).
        blood_result = predict_blood_group(blood_model, preprocessed_blood)
        gender_result = predict_gender(gender_model, preprocessed_gender)

        # Format results
        if blood_result["success"]:
            blood_text = f"**Predicted Blood Group:** {blood_result['blood_group']}\n**Confidence:** {blood_result['confidence']:.2%}"
            blood_details = "\n".join(f"{k}: {v:.2%}" for k, v in blood_result['all_scores'].items())
        else:
            blood_text = "Error predicting blood group"
            blood_details = str(blood_result.get('error', 'Unknown error'))

        if gender_result["success"]:
            gender_text = f"**Predicted Gender:** {gender_result['gender']}\n**Confidence:** {gender_result['confidence']:.2%}"
        else:
            # Fix: the gender error detail was previously discarded, unlike
            # the blood-group branch which reports it — surface it here too.
            gender_text = f"Error predicting gender: {gender_result.get('error', 'Unknown error')}"

        return blood_text, blood_details, gender_text, ""

    except Exception as e:
        return f"Error processing image: {str(e)}", "", "", ""
|
| 76 |
+
|
| 77 |
+
# Create Gradio interface: two-column layout with the upload controls on the
# left and the three result text boxes on the right.
with gr.Blocks(title="Forensic Fingerprint Analysis", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🔍 Forensic Fingerprint Analysis System")
    gr.Markdown("Upload a fingerprint image to predict blood group and gender using machine learning models.")

    with gr.Row():
        # Left column: input image and trigger button.
        with gr.Column():
            image_input = gr.Image(label="Upload Fingerprint Image", type="pil")
            analyze_btn = gr.Button("Analyze Fingerprint", variant="primary")

        # Right column: prediction outputs filled by analyze_fingerprint.
        with gr.Column():
            gr.Markdown("### Results")
            blood_output = gr.Textbox(label="Blood Group Prediction", lines=2)
            blood_scores = gr.Textbox(label="All Blood Group Scores", lines=8)
            gender_output = gr.Textbox(label="Gender Prediction", lines=2)

    # Connect the function.
    # NOTE(review): analyze_fingerprint returns a 4-tuple, so a throwaway
    # hidden Textbox absorbs the unused 4th value — consider trimming the
    # handler to 3 outputs instead.
    analyze_btn.click(
        analyze_fingerprint,
        inputs=image_input,
        outputs=[blood_output, blood_scores, gender_output, gr.Textbox(visible=False)]
    )

    gr.Markdown("---")
    gr.Markdown("**About:** 4th Year Forensic Research Project - Analyzing fingerprint patterns for biometric identification.")
|
| 102 |
+
|
| 103 |
+
# Launch the app
if __name__ == "__main__":
    # Honour a platform-assigned PORT, falling back to Gradio's default 7860.
    server_port = int(os.environ.get("PORT", 7860))
    demo.launch(server_name="0.0.0.0", server_port=server_port)
|
blood_group.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3ea178e44a802816aed922387593bb75fc087bd3096ca49aad3684fbf5b8be6d
|
| 3 |
+
size 228500768
|
gender_model.keras
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b66ad937568293257a76d3a80f561a8f72e092103d8c98f687398779365fef06
|
| 3 |
+
size 53635603
|
model_loader.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Model loading utilities for fingerprint analysis models
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import pickle
|
| 7 |
+
import joblib
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
import warnings
|
| 10 |
+
warnings.filterwarnings('ignore')
|
| 11 |
+
|
| 12 |
+
# Import TensorFlow
|
| 13 |
+
import tensorflow as tf
|
| 14 |
+
|
| 15 |
+
# Configure paths
|
| 16 |
+
BASE_DIR = Path(__file__).parent.parent
|
| 17 |
+
MODELS_DIR = BASE_DIR / "models"
|
| 18 |
+
|
| 19 |
+
# Model paths
|
| 20 |
+
BLOOD_GROUP_MODEL_PATH = os.environ.get(
|
| 21 |
+
"BLOOD_GROUP_MODEL_PATH",
|
| 22 |
+
str(MODELS_DIR / "blood_group.h5")
|
| 23 |
+
)
|
| 24 |
+
GENDER_MODEL_PATH = os.environ.get(
|
| 25 |
+
"GENDER_MODEL_PATH",
|
| 26 |
+
str(MODELS_DIR / "gender_model.keras")
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
# Global variables to cache loaded models
|
| 30 |
+
_blood_group_model = None
|
| 31 |
+
_gender_model = None
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def load_blood_group_model():
    """Load and cache the blood group prediction model.

    Dispatches on the extension of BLOOD_GROUP_MODEL_PATH: Keras
    (.h5/.keras), pickle (.pkl) or joblib (.joblib). The loaded model is
    cached in a module global so subsequent calls are free.

    Returns:
        The loaded model object.

    Raises:
        RuntimeError: if the file cannot be loaded in any supported way
            (the original cause is chained).
    """
    global _blood_group_model

    if _blood_group_model is not None:
        return _blood_group_model

    try:
        if BLOOD_GROUP_MODEL_PATH.endswith(('.h5', '.keras')):
            try:
                _blood_group_model = tf.keras.models.load_model(BLOOD_GROUP_MODEL_PATH)
            except Exception:
                # Fallback: some saved models fail to rebuild their training
                # config — load architecture/weights only, then re-compile.
                print(f"⚠️ Trying compile=False...")
                _blood_group_model = tf.keras.models.load_model(BLOOD_GROUP_MODEL_PATH, compile=False)
                _blood_group_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
        elif BLOOD_GROUP_MODEL_PATH.endswith('.pkl'):
            # SECURITY: pickle.load executes arbitrary code — only use with
            # trusted, locally produced model files.
            with open(BLOOD_GROUP_MODEL_PATH, 'rb') as f:
                _blood_group_model = pickle.load(f)
        elif BLOOD_GROUP_MODEL_PATH.endswith('.joblib'):
            _blood_group_model = joblib.load(BLOOD_GROUP_MODEL_PATH)
        else:
            raise ValueError(f"Unsupported format: {BLOOD_GROUP_MODEL_PATH}")

        print(f"✓ Blood group model loaded")
        return _blood_group_model

    except Exception as e:
        # Fix: raise a specific type and chain the cause instead of the
        # original bare `raise Exception(...)` that discarded the traceback.
        raise RuntimeError(f"Error loading blood group model: {str(e)}") from e
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def load_gender_model():
    """Load and cache the gender classification model.

    Dispatches on the extension of GENDER_MODEL_PATH: Keras (.h5/.keras),
    pickle (.pkl) or joblib (.joblib). The loaded model is cached in a
    module global so subsequent calls are free.

    Returns:
        The loaded model object.

    Raises:
        RuntimeError: if the file cannot be loaded in any supported way
            (the original cause is chained).
    """
    global _gender_model

    if _gender_model is not None:
        return _gender_model

    try:
        if GENDER_MODEL_PATH.endswith(('.h5', '.keras')):
            try:
                _gender_model = tf.keras.models.load_model(GENDER_MODEL_PATH)
            except Exception:
                # Fallback: some saved models fail to rebuild their training
                # config — load architecture/weights only, then re-compile.
                print(f"⚠️ Trying compile=False...")
                _gender_model = tf.keras.models.load_model(GENDER_MODEL_PATH, compile=False)
                _gender_model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
        elif GENDER_MODEL_PATH.endswith('.pkl'):
            # SECURITY: pickle.load executes arbitrary code — only use with
            # trusted, locally produced model files.
            with open(GENDER_MODEL_PATH, 'rb') as f:
                _gender_model = pickle.load(f)
        elif GENDER_MODEL_PATH.endswith('.joblib'):
            _gender_model = joblib.load(GENDER_MODEL_PATH)
        else:
            raise ValueError(f"Unsupported format: {GENDER_MODEL_PATH}")

        print(f"✓ Gender model loaded")
        return _gender_model

    except Exception as e:
        # Fix: raise a specific type and chain the cause instead of the
        # original bare `raise Exception(...)` that discarded the traceback.
        raise RuntimeError(f"Error loading gender model: {str(e)}") from e
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def set_model_paths(blood_group_path, gender_path):
    """Point the loaders at new model files and invalidate the caches.

    Args:
        blood_group_path: Filesystem path to the blood group model file.
        gender_path: Filesystem path to the gender model file.
    """
    global BLOOD_GROUP_MODEL_PATH, GENDER_MODEL_PATH
    global _blood_group_model, _gender_model

    BLOOD_GROUP_MODEL_PATH, GENDER_MODEL_PATH = blood_group_path, gender_path

    # Drop any previously loaded models so the next load_* call re-reads
    # from the new locations.
    _blood_group_model = None
    _gender_model = None

    print(f"Model paths updated successfully")
|
predictor.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Prediction functions for fingerprint analysis models
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import numpy as np
|
| 6 |
+
from typing import Dict, Any
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# Blood group classes (index order must match the model's output units)
BLOOD_GROUPS = ['A+', 'A-', 'B+', 'B-', 'AB+', 'AB-', 'O+', 'O-']


def predict_blood_group(model, image_array: np.ndarray) -> Dict[str, Any]:
    """
    Predict blood group from fingerprint image

    Args:
        model: Loaded blood group model
        image_array: Preprocessed image array (150x150x3)

    Returns:
        Dictionary with keys "blood_group", "confidence", "all_scores" and
        "success" (plus "error" when success is False).
    """
    try:
        # Ensure image has batch dimension
        if len(image_array.shape) == 3:
            image_array = np.expand_dims(image_array, axis=0)

        print(f"DEBUG: Blood group input shape: {image_array.shape}")
        print(f"DEBUG: Blood group input min/max: {image_array.min()}/{image_array.max()}")

        # Get predictions
        predictions = model.predict(image_array, verbose=0)

        print(f"DEBUG: Raw predictions shape: {predictions.shape}")
        print(f"DEBUG: Raw predictions: {predictions[0]}")
        print(f"DEBUG: Raw predictions min/max: {predictions.min()}/{predictions.max()}")

        # Apply softmax if values look like logits (large range)
        if np.max(np.abs(predictions)) > 50:
            print("DEBUG: Applying softmax...")
            # Fix: subtract the per-row max before exponentiating. The old
            # np.exp(predictions) overflowed to inf for large logits (the
            # only case this branch handles) and produced NaN probabilities.
            shifted = predictions - np.max(predictions, axis=1, keepdims=True)
            exps = np.exp(shifted)
            predictions = exps / np.sum(exps, axis=1, keepdims=True)

        # Handle different output shapes
        if predictions.shape[1] == len(BLOOD_GROUPS):
            # Multiple classes (classification)
            confidence = np.max(predictions[0])
            predicted_class = np.argmax(predictions[0])
            predicted_blood_group = BLOOD_GROUPS[predicted_class]

            # Get all scores
            all_scores = {BLOOD_GROUPS[i]: float(predictions[0][i])
                          for i in range(len(BLOOD_GROUPS))}
        else:
            # Different output size, map to blood groups (clamped to range)
            confidence = np.max(predictions[0])
            predicted_class = np.argmax(predictions[0])
            predicted_blood_group = BLOOD_GROUPS[min(predicted_class, len(BLOOD_GROUPS) - 1)]

            all_scores = {BLOOD_GROUPS[i]: float(predictions[0][i])
                          for i in range(min(len(predictions[0]), len(BLOOD_GROUPS)))}

        print(f"DEBUG: Final confidence: {confidence}")

        return {
            "blood_group": predicted_blood_group,
            "confidence": float(confidence),
            "all_scores": all_scores,
            "success": True
        }

    except Exception as e:
        print(f"ERROR in predict_blood_group: {str(e)}")
        import traceback
        traceback.print_exc()
        return {
            "blood_group": None,
            "confidence": 0.0,
            "all_scores": {},
            "success": False,
            "error": str(e)
        }
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def predict_gender(model, image_array: np.ndarray) -> Dict[str, Any]:
    """
    Predict gender from fingerprint image

    Args:
        model: Loaded gender classification model
        image_array: Preprocessed image array (224x224x1)

    Returns:
        Dictionary with keys "gender", "confidence" and "success" (plus
        "error" when success is False).
    """
    try:
        # Ensure image has batch dimension
        if len(image_array.shape) == 3:
            image_array = np.expand_dims(image_array, axis=0)

        print(f"DEBUG (gender): Input shape: {image_array.shape}")
        print(f"DEBUG (gender): Input min/max: {image_array.min()}/{image_array.max()}")

        # Get prediction
        prediction = model.predict(image_array, verbose=0)

        print(f"DEBUG (gender): Raw predictions shape: {prediction.shape}")
        print(f"DEBUG (gender): Raw predictions: {prediction[0]}")
        print(f"DEBUG (gender): Raw predictions min/max: {prediction.min()}/{prediction.max()}")

        # Apply softmax if values look like logits
        if np.max(np.abs(prediction)) > 50:
            print("DEBUG (gender): Applying softmax...")
            # Fix: subtract the per-row max before exponentiating. The old
            # np.exp(prediction) overflowed to inf for large logits (the
            # only case this branch handles) and produced NaN probabilities.
            shifted = prediction - np.max(prediction, axis=1, keepdims=True)
            exps = np.exp(shifted)
            prediction = exps / np.sum(exps, axis=1, keepdims=True)

        # Handle different output shapes
        if prediction.shape[1] == 1:
            # Binary classification: single output (sigmoid)
            confidence = float(prediction[0][0])
            predicted_gender = "Female" if confidence < 0.5 else "Male"
            # Adjust confidence to be distance from 0.5
            confidence = abs(confidence - 0.5) * 2
        else:
            # Binary classification: two outputs (softmax)
            confidence = float(np.max(prediction[0]))
            predicted_class = np.argmax(prediction[0])
            predicted_gender = "Female" if predicted_class == 0 else "Male"

        print(f"DEBUG (gender): Final confidence: {confidence}")

        return {
            "gender": predicted_gender,
            "confidence": confidence,
            "success": True
        }

    except Exception as e:
        print(f"ERROR in predict_gender: {str(e)}")
        import traceback
        traceback.print_exc()
        return {
            "gender": None,
            "confidence": 0.0,
            "success": False,
            "error": str(e)
        }
|
preprocessing.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Image preprocessing utilities for fingerprint images
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import cv2
|
| 6 |
+
import numpy as np
|
| 7 |
+
from PIL import Image
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def preprocess_fingerprint(image: Image.Image, model_type: str = "gender", target_size: tuple = None) -> np.ndarray:
    """Preprocess a fingerprint image for model prediction.

    Args:
        image: Source PIL image.
        model_type: "blood_group" selects 150x150 RGB output; any other
            value selects the gender path (224x224, single channel).
        target_size: Optional (width, height) override for the resize.

    Returns:
        float32 array scaled to [0, 1], shaped (H, W, 3) for blood group
        or (H, W, 1) for gender.
    """
    # Per-model geometry and channel count.
    if model_type == "blood_group":
        size = target_size or (150, 150)
        channels = 3  # RGB
    else:  # gender
        size = target_size or (224, 224)
        channels = 1  # Grayscale

    # PIL -> numpy, collapsing any colour input to a single grayscale plane.
    arr = np.array(image)
    if arr.ndim == 3:
        arr = cv2.cvtColor(arr, cv2.COLOR_RGB2GRAY)

    resized = cv2.resize(arr, size, interpolation=cv2.INTER_AREA)

    # Scale to [0, 1].
    scaled = resized.astype('float32') / 255.0

    # Histogram equalization for contrast; equalizeHist needs uint8, so we
    # round-trip through the 0-255 range and rescale afterwards.
    equalized = cv2.equalizeHist((scaled * 255).astype(np.uint8)).astype(np.float32) / 255.0

    # Expand to the channel layout the target model expects.
    if channels == 3:
        result = np.stack([equalized, equalized, equalized], axis=-1)
    else:
        result = np.expand_dims(equalized, axis=-1)

    print(f"DEBUG preprocessing ({model_type}): Output shape = {result.shape}, min/max = {result.min()}/{result.max()}")

    return result
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def enhance_fingerprint(image: np.ndarray) -> np.ndarray:
    """Enhance fingerprint ridge detail with morphology and denoising.

    Args:
        image: Fingerprint image as a numpy array (RGB or grayscale).

    Returns:
        Grayscale image after close/open morphology and a median filter.
    """
    # Work on a single grayscale plane.
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) if image.ndim == 3 else image

    # Close then open with an elliptical kernel: closing bridges small ridge
    # gaps, opening removes isolated speckles.
    ellipse = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
    result = cv2.morphologyEx(gray, cv2.MORPH_CLOSE, ellipse)
    result = cv2.morphologyEx(result, cv2.MORPH_OPEN, ellipse)

    # A median filter suppresses remaining salt-and-pepper noise.
    return cv2.medianBlur(result, 5)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def normalize_fingerprint(image: np.ndarray) -> np.ndarray:
    """Standard-normalize a fingerprint image.

    Three-channel input is normalized with the ImageNet per-channel
    statistics; anything else is treated as grayscale with mean = std = 0.5.

    Args:
        image: Image array (values presumably already scaled to [0, 1] —
            callers should confirm).

    Returns:
        Normalized array, computed as (image - mean) / std.
    """
    if image.ndim == 3 and image.shape[-1] == 3:
        # RGB: ImageNet channel statistics.
        mean = np.array([0.485, 0.456, 0.406])
        std = np.array([0.229, 0.224, 0.225])
    else:
        # Grayscale: symmetric normalization around 0.5.
        mean, std = 0.5, 0.5
    return (image - mean) / std
|
requirements.txt
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
gradio==6.13.0
|
| 2 |
+
opencv-python==4.8.1.78
|
| 3 |
+
numpy==1.26.4
|
| 4 |
+
Pillow==12.2.0
|
| 5 |
+
tensorflow-cpu==2.21.0
|
| 6 |
+
scikit-learn==1.8.0
|
| 7 |
+
pandas==2.2.3
|
| 8 |
+
fastapi>=0.115.2
|
| 9 |
+
uvicorn==0.24.0
|