# face_emotion / app.py
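# Gradio demo: a DAN-style attention model on top of a frozen ImageNet ResNet50
# extracts a 512-d feature vector from a face image, and an XGBoost classifier
# maps that vector to one of eight emotion labels.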
import gradio as gr
import tensorflow as tf
import keras
import numpy as np
import sklearn
from sklearnex import patch_sklearn, unpatch_sklearn
patch_sklearn()
import xgboost as xgb
from keras import Model, Sequential
from keras.layers import Layer
import keras.layers as nn
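
# Channel attention: global-average-pool the feature map, then gate the flattened
# 512-d vector with a small Dense -> BatchNorm -> ReLU -> sigmoid MLP.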
class ChannelAttn(Layer):
    def __init__(self, c=512) -> None:
        super().__init__()
        self.gap = nn.AveragePooling2D(7)
        self.attention = Sequential([
            nn.Dense(32),
            nn.BatchNormalization(),
            nn.ReLU(),
            nn.Dense(c, activation='sigmoid')
        ])

    def call(self, x):
        x = self.gap(x)        # (B, 7, 7, c) -> (B, 1, 1, c)
        x = nn.Flatten()(x)    # (B, c)
        y = self.attention(x)  # per-channel gate in [0, 1]
        return x * y
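
# Spatial attention: a 1x1 bottleneck followed by parallel 3x3, 1x3 and 3x1 conv
# branches; their summed response is reduced to a broadcastable map that rescales x.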
class SpatialAttn(Layer):
    def __init__(self, c=512):
        super().__init__()
        self.conv1x1 = Sequential([
            nn.Conv2D(256, 1),
            nn.BatchNormalization()
        ])
        self.conv_3x3 = Sequential([
            nn.ZeroPadding2D(padding=(1, 1)),
            nn.Conv2D(512, 3, 1),
            nn.BatchNormalization()
        ])
        self.conv_1x3 = Sequential([
            nn.ZeroPadding2D(padding=(0, 1)),
            nn.Conv2D(512, (1, 3)),
            nn.BatchNormalization()
        ])
        self.conv_3x1 = Sequential([
            nn.ZeroPadding2D(padding=(1, 0)),
            nn.Conv2D(512, (3, 1)),
            nn.BatchNormalization()
        ])
        self.norm = nn.ReLU()

    def call(self, x):
        y = self.conv1x1(x)
        y = self.norm(self.conv_3x3(y) + self.conv_1x3(y) + self.conv_3x1(y))
        y = tf.math.reduce_sum(y, axis=1, keepdims=True)
        return x * y
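
# One attention head: spatial attention followed by channel attention.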
class CrossAttnHead(Layer):
    def __init__(self, c=512):
        super().__init__()
        self.sa = SpatialAttn(c)
        self.ca = ChannelAttn(c)

    def call(self, x):
        return self.ca(self.sa(x))
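
# DAN feature extractor: frozen ResNet50 features are projected to 512 channels and
# passed through several cross-attention heads; the log-softmax over heads is summed
# into a single 512-d descriptor.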
@keras.saving.register_keras_serializable(package='custom')
class DAN(Model):
    def __init__(self, num_classes=8, trainable=True, dtype='float32'):  # trainable/dtype accepted but unused
        super().__init__()
        self.mod = keras.applications.ResNet50(
            include_top=False,
            weights="imagenet",
            input_shape=(224, 224, 3)
        )
        self.mod.trainable = False
        self.num_head = 4
        self.hd = [CrossAttnHead() for _ in range(self.num_head)]
        self.features = nn.Conv2D(512, 1, padding='same')
        # fc and bn are defined but never used in call(); classification happens downstream in XGBoost.
        self.fc = nn.Dense(num_classes)
        self.bn = nn.BatchNormalization()

    def call(self, x):
        x = self.mod(x)                                        # (B, 7, 7, 2048)
        x = self.features(x)                                   # (B, 7, 7, 512)
        heads = [h(x) for h in self.hd]                        # num_head tensors of shape (B, 512)
        heads = tf.transpose(tf.stack(heads), perm=(1, 0, 2))  # (B, num_head, 512)
        heads = keras.ops.log_softmax(heads, axis=1)           # normalize across heads
        return tf.math.reduce_sum(heads, axis=1)               # (B, 512) descriptor
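
# Instantiate the feature extractor and the XGBoost emotion classifier from local files.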
backbone = DAN()
backbone.load_weights('weights.keras')
xgb_params = {
'objective': 'binary:logistic',
'predictor': 'cpu_predictor',
'disable_default_eval_metric': 1,
}
model_xgb = xgb.XGBClassifier(**xgb_params)
model_xgb.load_model('xgb.json')
emotions = ['anger', 'contempt', 'disgust', 'fear', 'happy', 'neutral', 'sad', 'surprise']
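
# Gradio callback: map a numpy image to an emotion label.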
def fn(image):
    # Build a (1, 224, 224, 3) batch with three channels. Note that np.resize
    # repeats or truncates the flattened data rather than interpolating pixels.
    if len(image.shape) == 2:
        img = np.stack([image, image, image], axis=2)
        img = np.resize(img, (224, 224, 3))
    elif len(image.shape) == 3 and image.shape[2] == 1:
        img = np.stack([image[:, :, 0], image[:, :, 0], image[:, :, 0]], axis=2)
        img = np.resize(img, (224, 224, 3))
    else:
        img = np.resize(image, (224, 224, 3))
    img = np.expand_dims(img, axis=0)
    feats = backbone.predict(img)    # (1, 512) DAN features
    pred = model_xgb.predict(feats)  # predicted emotion index
    print(pred)
    return emotions[pred[0]]
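
# One image input, one text output (the predicted emotion label).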
demo = gr.Interface(fn, ['image'], "text")

if __name__ == "__main__":
    demo.launch()