Spaces: nehapasricha94 • Sleeping

Update app.py
nehapasricha94 committed
Commit 785de28 • Parent(s): 74fb5a4
app.py CHANGED
@@ -12,67 +12,84 @@ emotion_classifier = pipeline("text-classification", model="j-hartmann/emotion-e
 
 # Function to analyze colors in an image
 def analyze_colors(image):
- (old lines 15-35: previous analyze_colors body, removed; content not captured in this view)
+    try:
+        # Convert image to RGB if not already in that mode
+        if image.mode != "RGB":
+            image = image.convert("RGB")
+        # Resize image for faster processing
+        image = image.resize((150, 150))
+        # Convert to numpy array
+        img_array = np.array(image)
+        # Reshape image to be a list of pixels
+        pixels = img_array.reshape((-1, 3))
+
+        # Use KMeans to find the dominant colors
+        kmeans = KMeans(n_clusters=5)
+        kmeans.fit(pixels)
+        dominant_colors = kmeans.cluster_centers_
+
+        # Plot the colors for visualization
+        plt.figure(figsize=(8, 6))
+        plt.imshow([dominant_colors.astype(int)])
+        plt.axis('off')
+        plt.show()
+
+        return dominant_colors
+    except Exception as e:
+        print(f"Error in analyze_colors: {e}")
+        return None
 
 # Function to detect emotions from colors (simplified emotion-color mapping)
 def color_emotion_analysis(dominant_colors):
- (old lines 39-50: previous color_emotion_analysis body, removed; content not captured in this view)
+    try:
+        emotions = []
+        for color in dominant_colors:
+            # Simple logic: darker tones could indicate sadness
+            if np.mean(color) < 85:
+                emotions.append("Sadness")
+            elif np.mean(color) > 170:
+                emotions.append("Happiness")
+            else:
+                emotions.append("Neutral")
+        return emotions
+    except Exception as e:
+        print(f"Error in color_emotion_analysis: {e}")
+        return ["Error analyzing emotions"]
 
 # Function to analyze patterns and shapes using OpenCV
 def analyze_patterns(image):
- (old lines 54-64: previous analyze_patterns body, removed; content not captured in this view)
+    try:
+        # Convert to grayscale for edge detection
+        gray_image = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2GRAY)
+        edges = cv2.Canny(gray_image, 100, 200)
+
+        # Calculate the number of edges (chaos metric)
+        num_edges = np.sum(edges > 0)
+
+        if num_edges > 10000:  # Arbitrary threshold for "chaos"
+            return "Chaotic patterns - possibly distress"
+        else:
+            return "Orderly patterns - possibly calm"
+    except Exception as e:
+        print(f"Error in analyze_patterns: {e}")
+        return "Error analyzing patterns"
 
 # Main function to process image and analyze emotional expression
 def analyze_emotion_from_image(image):
- (old lines 68-75: previous analyze_emotion_from_image body, removed; content not captured in this view)
+    try:
+        # Analyze colors
+        dominant_colors = analyze_colors(image)
+        if dominant_colors is None:
+            return "Error analyzing colors"
+
+        color_emotions = color_emotion_analysis(dominant_colors)
+
+        # Analyze patterns
+        pattern_analysis = analyze_patterns(image)
+
+        return f"Color-based emotions: {color_emotions}\nPattern analysis: {pattern_analysis}"
+    except Exception as e:
+        return f"Error processing image: {str(e)}"
 
 # Gradio interface to upload image files and perform analysis
 iface = gr.Interface(fn=analyze_emotion_from_image, inputs="image", outputs="text")
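This hunk only covers lines 12-95 of app.py, so the file's import block and the call that actually starts the Gradio app fall outside the view above. As a rough sketch inferred only from the names the diff uses (np, plt, KMeans, cv2, gr, pipeline), not from the real file, the surrounding parts of app.py would presumably look something like this:

    # Imports inferred from the calls made in the diff; the actual import block is outside this hunk,
    # so treat these as assumptions rather than the file's real preamble.
    import numpy as np
    import matplotlib.pyplot as plt
    import cv2
    import gradio as gr
    from sklearn.cluster import KMeans
    from transformers import pipeline

    # ... emotion_classifier = pipeline(...) at line 12 (model id truncated in the hunk header),
    # followed by the four function definitions and the gr.Interface(...) call shown in the diff ...

    # The diff stops after constructing iface; a Space would normally start the app with:
    iface.launch()

For a quick check outside Gradio, the main function can also be called directly on a PIL image, e.g. print(analyze_emotion_from_image(Image.open("test.jpg"))) with a hypothetical test.jpg, since the helpers expect an object with .mode, .convert, and .resize.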