import cv2
import numpy as np
from tensorflow.keras.models import load_model
# Load the pre-trained gender-classification model from Google Drive.
MODEL_PATH = '/content/drive/MyDrive/saved_models/gender_model/gender_compiled_model'
model = load_model(MODEL_PATH)
# Load and preprocess an image (assuming 'image_path' is the path to your image)
def preprocess_image(image_path: str) -> np.ndarray:
    """Load an image from disk and prepare it as a model input batch.

    Reads the image with OpenCV, resizes it to 224x224 (the input size
    the resize call below actually uses), converts BGR -> RGB, scales
    pixel values to [-1, 1], and adds a leading batch dimension.

    Args:
        image_path: Path to the image file on disk.

    Returns:
        A float32 numpy array of shape (1, 224, 224, 3).

    Raises:
        FileNotFoundError: If the image cannot be read from image_path.
    """
    img = cv2.imread(image_path)
    # cv2.imread returns None (it does not raise) on a missing or
    # unreadable file; fail loudly here instead of crashing later with a
    # cryptic error inside cv2.resize.
    if img is None:
        raise FileNotFoundError(f"Could not read image: {image_path}")
    # NOTE(review): the original comment claimed 160x160 for
    # InceptionResNetV1, contradicting the code; the code's 224x224 is
    # kept as the source of truth.
    img = cv2.resize(img, (224, 224))
    # OpenCV loads images as BGR; convert to the RGB ordering the model
    # presumably expects — confirm against training pipeline.
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    # Scale pixel values from [0, 255] into [-1, 1].
    img = img.astype('float32') / 127.5 - 1
    # Add batch dimension: (H, W, C) -> (1, H, W, C).
    img = np.expand_dims(img, axis=0)
    return img
# Image to classify.
image_path = '/content/pic.jpeg'

# Build the model input and run a forward pass.
input_image = preprocess_image(image_path)
pred = model.predict(input_image)

# Map the highest-scoring class index to its gender label.
# NOTE(review): assumes the model outputs two class scores in the order
# [Woman, Man] — confirm against the training labels.
labels = ["Woman", "Man"]
predicted_label_index = np.argmax(pred)
predicted_gender = labels[predicted_label_index]
print(f"Predicted Gender: {predicted_gender}")