.gitattributes CHANGED
@@ -33,7 +33,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
- nutri/images/*.jpg filter=lfs diff=lfs merge=lfs -text
37
- images/*.jpg filter=lfs diff=lfs merge=lfs -text
38
- images/*.png filter=lfs diff=lfs merge=lfs -text
39
- images/*.avif filter=lfs diff=lfs merge=lfs -text
 
app.py CHANGED
@@ -2,8 +2,7 @@ import os
2
  import base64
3
  from io import BytesIO
4
  from PIL import Image
5
- from fastapi import FastAPI, HTTPException, Security
6
- from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
7
  from pydantic import BaseModel
8
  from transformers import pipeline
9
  from ultralytics import YOLO
@@ -13,37 +12,30 @@ import threading
13
  import logging
14
  import requests
15
  import asyncio
16
- import math
17
 
 
18
  # Logging
 
19
  logging.basicConfig(level=logging.INFO)
20
  logger = logging.getLogger(__name__)
21
 
 
22
  # Models
 
23
  food_classifier = pipeline("image-classification", model="nateraw/food")
24
  yolo_model = YOLO("yolov8n.pt")
25
 
26
  # USDA API config
27
  USDA_API_URL = "https://api.nal.usda.gov/fdc/v1/foods/search"
28
- USDA_API_KEY = os.getenv("USDA_API_KEY", "DEMO_KEY") # Replace with your USDA API key
29
 
30
  # FastAPI app
31
  app = FastAPI()
32
- security = HTTPBearer()
33
-
34
- # Debug endpoint to verify secrets
35
- @app.get("/debug_secrets")
36
- async def debug_secrets():
37
- return {
38
- "HUGGINGFACE_API_TOKEN": bool(os.getenv("HUGGINGFACE_API_TOKEN")),
39
- "USDA_API_KEY": bool(os.getenv("USDA_API_KEY"))
40
- }
41
 
42
  # Request schema
43
  class ImageRequest(BaseModel):
44
- image: str
45
- portion_size: float | None = None
46
- reference_object_size: float | None = None
47
 
48
  # Decode base64 image
49
  def decode_base64_image(base64_string):
@@ -55,109 +47,9 @@ def decode_base64_image(base64_string):
55
  logger.error(f"Image decoding failed: {str(e)}")
56
  raise HTTPException(status_code=400, detail="Invalid base64 image")
57
 
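The body of decode_base64_image is elided by this hunk; only its error path is shown. A minimal sketch of what such a decoder usually does (an assumption, not code taken from this commit):

import base64
from io import BytesIO
from PIL import Image

def decode_base64_image_sketch(base64_string: str) -> Image.Image:
    # Strip an optional data-URL prefix such as "data:image/jpeg;base64,"
    if "," in base64_string:
        base64_string = base64_string.split(",", 1)[1]
    image_bytes = base64.b64decode(base64_string)
    return Image.open(BytesIO(image_bytes)).convert("RGB")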
58
- # Estimate portion size based on bounding box area
59
- def estimate_portion_size(yolo_results, img_width, img_height, reference_object_size=None, food_labels=None, container_labels=None, top_food=None):
60
- try:
61
- food_area_pixels = 0
62
- scaling_factor = None
63
-
64
- # Default plate area (25 cm diameter -> ~490 cm²)
65
- default_ref_area_cm2 = math.pi * (25 / 2) ** 2 # Area of a 25 cm diameter plate
66
-
67
- # Validate reference_object_size
68
- if reference_object_size is not None and reference_object_size <= 0:
69
- logger.warning(f"Invalid reference_object_size ({reference_object_size} cm), using default plate size")
70
- reference_object_size = None
71
-
72
- # Check for reference object (e.g., plate, bowl)
73
- if reference_object_size and container_labels:
74
- for result in yolo_results:
75
- for box, cls in zip(result.boxes.xyxy, result.boxes.cls):
76
- label = result.names[int(cls)]
77
- if label in container_labels:
78
- x1, y1, x2, y2 = map(int, box)
79
- container_area_pixels = (x2 - x1) * (y2 - y1)
80
- if container_area_pixels > 0:
81
- ref_area_cm2 = math.pi * (reference_object_size / 2) ** 2
82
- scaling_factor = ref_area_cm2 / container_area_pixels
83
- logger.info(f"Container detected: {label}, area={container_area_pixels} pixels, scaling_factor={scaling_factor}")
84
- break
85
-
86
- # If no reference object or size, use default scaling
87
- if not scaling_factor:
88
- image_area_pixels = img_width * img_height
89
- scaling_factor = default_ref_area_cm2 / (image_area_pixels * 0.5)
90
- logger.info(f"No reference object, using default scaling: image_area={image_area_pixels} pixels, scaling_factor={scaling_factor}")
91
-
92
- # Calculate food area
93
- for result in yolo_results:
94
- for box, cls in zip(result.boxes.xyxy, result.boxes.cls):
95
- label = result.names[int(cls)]
96
- if label in food_labels:
97
- x1, y1, x2, y2 = map(int, box)
98
- food_area_pixels += (x2 - x1) * (y2 - y1)
99
-
100
- if food_area_pixels == 0:
101
- logger.warning("No food detected for portion size estimation")
102
- return 100 # Default portion size in grams
103
-
104
- # Convert pixel area to cm²
105
- food_area_cm2 = food_area_pixels * scaling_factor
106
- logger.info(f"Food area: {food_area_pixels} pixels, {food_area_cm2} cm²")
107
-
108
- # Adjust density for specific foods (e.g., akpu/fufu ~0.8 g/cm³)
109
- density = 0.8 if top_food in ["akpu", "fufu"] else 1.0
110
- portion_size_grams = food_area_cm2 * density
111
-
112
- # Cap portion size to realistic range (50-500 g)
113
- portion_size_grams = max(min(portion_size_grams, 500), 50)
114
- logger.info(f"Estimated portion size: {portion_size_grams} grams (density={density} g/cm³)")
115
-
116
- return portion_size_grams
117
- except Exception as e:
118
- logger.error(f"Portion size estimation failed: {str(e)}")
119
- return 100 # Fallback to default portion size
120
-
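The estimator removed above turns bounding-box pixel areas into grams via a pixels-to-cm² scaling factor taken from a detected container (assumed to be a 25 cm plate by default) and a per-food density, then clamps the result. A condensed sketch of that conversion, with hypothetical names but the same constants:

import math

def portion_grams_sketch(food_area_px, img_area_px, container_area_px=None,
                         plate_diameter_cm=25.0, density=1.0):
    ref_area_cm2 = math.pi * (plate_diameter_cm / 2) ** 2   # ~490 cm² for a 25 cm plate
    if container_area_px:
        scale = ref_area_cm2 / container_area_px            # cm² per pixel from the reference object
    else:
        scale = ref_area_cm2 / (img_area_px * 0.5)          # fallback: assume the plate fills half the image
    grams = food_area_px * scale * density                  # food area in cm², scaled by density
    return max(min(grams, 500), 50)                         # clamp to the 50-500 g window used above

For instance, a 40,000-pixel food box against a 100,000-pixel plate gives about 0.0049 cm² per pixel and roughly 196 g at density 1.0.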
121
  # Crop image to food or container
122
  def crop_image_to_food(img, yolo_results,
123
- food_labels=[
124
- "chicken_curry", "pizza", "salad", "lasagna", "risotto",
125
- "akpu", "fufu", "egusi_soup", "jollof_rice", "pounded_yam",
126
- "banga_soup", "bitterleaf_soup", "edikaikong_soup", "okra_soup",
127
- "ogbono_soup", "nkwobi", "moin_moin", "puff_puff", "suya",
128
- "abacha", "amala", "ewa_agoyin", "ofada_stew", "pepper_soup",
129
- "kimchi", "bibimbap", "bulgogi", "japchae", "tteokbokki",
130
- "samgyeopsal", "kimchi_jjigae", "doenjang_jjigae", "sundubu_jjigae",
131
- "galbi", "kimbap", "jajangmyeon", "naengmyeon", "dakgalbi",
132
- "haemul_pajeon", "samgyetang", "bossam", "seolleongtang",
133
- "mandu", "yangnyeom_chicken", "gamjatang", "jokbal", "budae_jjigae",
134
- "haemul_tang", "dongtae_jjigae", "kongguksu", "mul_naengmyeon",
135
- "tteokguk", "miyeokguk",
136
- "sushi", "ramen", "udon", "tempura", "sashimi", "onigiri",
137
- "yakitori", "miso_soup", "okonomiyaki", "takoyaki",
138
- "donburi", "gyudon", "katsu_curry", "soba", "tonkatsu",
139
- "shabu_shabu", "natto", "unagi", "chawanmushi", "tamagoyaki",
140
- "yakisoba", "omurice", "kare_raisu", "oyakodon", "gyoza",
141
- "fried_rice", "dumplings", "mapo_tofu", "kung_pao_chicken",
142
- "sweet_sour_pork", "chow_mein", "spring_rolls", "peking_duck",
143
- "dim_sum", "hot_pot", "xiaolongbao", "char_siu", "wonton_soup",
144
- "egg_foo_young", "beef_broccoli", "szechuan_chicken", "lo_mein",
145
- "hunan_pork", "crispy_duck", "ma_la_tang", "dan_dan_noodles",
146
- "zha_jiang_mian", "lion_head_meatballs",
147
- "pad_thai", "tom_yum", "green_curry", "red_curry", "som_tam",
148
- "massaman_curry", "khao_soi", "pad_kra_pao", "tom_kha_gai",
149
- "larb", "panang_curry", "pad_see_ew", "khao_man_gai",
150
- "nam_tok", "gaeng_som", "khao_pad", "mango_sticky_rice",
151
- "satay_chicken", "thai_fried_rice", "tod_man_pla", "kuay_teow",
152
- "butter_chicken", "biryani", "paneer_tikka", "dal", "naan",
153
- "rogan_josh", "palak_paneer", "samosa", "chole", "tandoori_chicken",
154
- "aloo_gobi", "vindaloo", "dosa", "idli", "vada",
155
- "rajma", "pav_bhaji", "korma", "malai_kofta", "jalebi",
156
- "paratha", "bhindi_masala", "chicken_tikka_masala",
157
- "pho", "banh_mi", "laksa", "nasi_goreng", "rendang", "satay",
158
- "adobo", "sinigang", "hainan_chicken_rice", "char_kway_teow",
159
- "lechon", "soto_ayam", "bubur_ayam", "nasi_lemak", "mee_goreng"
160
- ],
161
  container_labels=["bowl", "plate", "dish"]):
162
  try:
163
  for result in yolo_results:
@@ -177,19 +69,15 @@ def crop_image_to_food(img, yolo_results,
177
  logger.error(f"Cropping failed: {str(e)}")
178
  return img, False
179
 
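The cropping body itself falls outside this hunk; conceptually it crops to the first detected food or container box and reports whether a crop happened, matching the (img, False) return seen in the error path. A small sketch under that assumption:

def crop_to_first_box_sketch(img, yolo_results, wanted_labels):
    for result in yolo_results:
        for box, cls in zip(result.boxes.xyxy, result.boxes.cls):
            if result.names[int(cls)] in wanted_labels:
                x1, y1, x2, y2 = map(int, box)
                return img.crop((x1, y1, x2, y2)), True   # PIL crop takes (left, upper, right, lower)
    return img, False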
180
- # Calculate nutrients using USDA API with fallback for Nigerian foods
181
  def calculate_nutrients(food_items, portion_size):
182
  nutrients = {"protein": 0, "carbs": 0, "fat": 0, "fiber": 0, "sodium": 0}
183
  micronutrients = {"vitamin_c": 0, "calcium": 0, "iron": 0}
184
- calories = 0
185
  top_food = max(food_items, key=food_items.get, default=None)
186
  if not top_food:
187
- logger.warning("No food items detected")
188
- return nutrients, micronutrients, calories
189
 
190
  query_food = top_food.replace("_", " ")
191
- if query_food == "akpu":
192
- query_food = "fufu" # Map akpu to fufu for USDA API
193
  try:
194
  response = requests.get(USDA_API_URL, params={
195
  "api_key": USDA_API_KEY,
@@ -199,24 +87,7 @@ def calculate_nutrients(food_items, portion_size):
199
  response.raise_for_status()
200
  data = response.json()
201
  if not data.get("foods"):
202
- logger.warning(f"No USDA data found for {query_food}")
203
- # Fallback for Nigerian foods like akpu/fufu
204
- if query_food == "fufu":
205
- nutrients = {
206
- "protein": 1.1 * (portion_size / 100),
207
- "carbs": 38.1 * (portion_size / 100),
208
- "fat": 0.2 * (portion_size / 100),
209
- "fiber": 1.6 * (portion_size / 100),
210
- "sodium": 0.02 * (portion_size / 100),
211
- }
212
- micronutrients = {
213
- "vitamin_c": 20.6 * (portion_size / 100),
214
- "calcium": 12 * (portion_size / 100),
215
- "iron": 0.7 * (portion_size / 100),
216
- }
217
- calories = (nutrients["protein"] * 4) + (nutrients["carbs"] * 4) + (nutrients["fat"] * 9)
218
- return nutrients, micronutrients, round(calories, 2)
219
- return nutrients, micronutrients, calories
220
 
221
  food_data = data["foods"][0]
222
  food_nutrients = {n["nutrientName"]: n["value"] for n in food_data["foodNutrients"]}
@@ -226,7 +97,7 @@ def calculate_nutrients(food_items, portion_size):
226
  "carbs": food_nutrients.get("Carbohydrate, by difference", 0) * (portion_size / 100),
227
  "fat": food_nutrients.get("Total lipid (fat)", 0) * (portion_size / 100),
228
  "fiber": food_nutrients.get("Fiber, total dietary", 0) * (portion_size / 100),
229
- "sodium": food_nutrients.get("Sodium, Na", 0) * (portion_size / 100) / 1000,
230
  }
231
  micronutrients = {
232
  "vitamin_c": food_nutrients.get("Vitamin C, total ascorbic acid", 0) * (portion_size / 100),
@@ -234,146 +105,30 @@ def calculate_nutrients(food_items, portion_size):
234
  "iron": food_nutrients.get("Iron, Fe", 0) * (portion_size / 100),
235
  }
236
  calories = (nutrients["protein"] * 4) + (nutrients["carbs"] * 4) + (nutrients["fat"] * 9)
 
237
  except Exception as e:
238
  logger.error(f"USDA API request failed: {str(e)}")
239
- # Fallback for Nigerian foods if API fails
240
- if query_food == "fufu":
241
- nutrients = {
242
- "protein": 1.1 * (portion_size / 100),
243
- "carbs": 38.1 * (portion_size / 100),
244
- "fat": 0.2 * (portion_size / 100),
245
- "fiber": 1.6 * (portion_size / 100),
246
- "sodium": 0.02 * (portion_size / 100),
247
- }
248
- micronutrients = {
249
- "vitamin_c": 20.6 * (portion_size / 100),
250
- "calcium": 12 * (portion_size / 100),
251
- "iron": 0.7 * (portion_size / 100),
252
- }
253
- calories = (nutrients["protein"] * 4) + (nutrients["carbs"] * 4) + (nutrients["fat"] * 9)
254
- else:
255
- raise HTTPException(status_code=500, detail=f"Failed to fetch nutrient data: {str(e)}")
256
-
257
- return nutrients, micronutrients, round(calories, 2)
258
 
 
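Calories come from the macros with the standard 4/4/9 kcal-per-gram factors. A quick worked example, assuming a 150 g portion of a food with per-100 g values of 12 g protein, 30 g carbs and 8 g fat:

portion_size = 150.0
protein = 12.0 * (portion_size / 100)          # 18.0 g
carbs = 30.0 * (portion_size / 100)            # 45.0 g
fat = 8.0 * (portion_size / 100)               # 12.0 g
calories = protein * 4 + carbs * 4 + fat * 9   # 72 + 180 + 108 = 360.0 kcal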
259
  # FastAPI endpoint
 
260
  @app.post("/analyze_food")
261
- async def analyze_food(request: ImageRequest, credentials: HTTPAuthorizationCredentials = Security(security)):
262
  try:
263
- logger.info(f"Received request: portion_size={request.portion_size}, reference_object_size={request.reference_object_size}")
264
-
265
- # Validate API token
266
- expected_token = os.getenv("HUGGINGFACE_API_TOKEN")
267
- if not expected_token:
268
- logger.error("Hugging Face API token not configured")
269
- raise HTTPException(status_code=500, detail="Hugging Face API token not configured")
270
- if credentials.credentials != expected_token:
271
- raise HTTPException(status_code=401, detail="Invalid API token")
272
-
273
  img = decode_base64_image(request.image)
274
  yolo_results = yolo_model(img)
275
-
276
- # Food classification to get top_food for density adjustment
277
  cropped_img, was_cropped = crop_image_to_food(img, yolo_results)
 
 
278
  food_results = food_classifier(cropped_img)
279
  food_items = {r["label"]: r["score"] for r in food_results if r["score"] >= 0.3}
280
- top_food = max(food_items, key=food_items.get, default=None)
281
-
282
- # Estimate portion size if not provided
283
- food_labels = [
284
- "chicken_curry", "pizza", "salad", "lasagna", "risotto",
285
- "akpu", "fufu", "egusi_soup", "jollof_rice", "pounded_yam",
286
- "banga_soup", "bitterleaf_soup", "edikaikong_soup", "okra_soup",
287
- "ogbono_soup", "nkwobi", "moin_moin", "puff_puff", "suya",
288
- "abacha", "amala", "ewa_agoyin", "ofada_stew", "pepper_soup",
289
- "kimchi", "bibimbap", "bulgogi", "japchae", "tteokbokki",
290
- "samgyeopsal", "kimchi_jjigae", "doenjang_jjigae", "sundubu_jjigae",
291
- "galbi", "kimbap", "jajangmyeon", "naengmyeon", "dakgalbi",
292
- "haemul_pajeon", "samgyetang", "bossam", "seolleongtang",
293
- "mandu", "yangnyeom_chicken", "gamjatang", "jokbal", "budae_jjigae",
294
- "haemul_tang", "dongtae_jjigae", "kongguksu", "mul_naengmyeon",
295
- "tteokguk", "miyeokguk",
296
- "sushi", "ramen", "udon", "tempura", "sashimi", "onigiri",
297
- "yakitori", "miso_soup", "okonomiyaki", "takoyaki",
298
- "donburi", "gyudon", "katsu_curry", "soba", "tonkatsu",
299
- "shabu_shabu", "natto", "unagi", "chawanmushi", "tamagoyaki",
300
- "yakisoba", "omurice", "kare_raisu", "oyakodon", "gyoza",
301
- "fried_rice", "dumplings", "mapo_tofu", "kung_pao_chicken",
302
- "sweet_sour_pork", "chow_mein", "spring_rolls", "peking_duck",
303
- "dim_sum", "hot_pot", "xiaolongbao", "char_siu", "wonton_soup",
304
- "egg_foo_young", "beef_broccoli", "szechuan_chicken", "lo_mein",
305
- "hunan_pork", "crispy_duck", "ma_la_tang", "dan_dan_noodles",
306
- "zha_jiang_mian", "lion_head_meatballs",
307
- "pad_thai", "tom_yum", "green_curry", "red_curry", "som_tam",
308
- "massaman_curry", "khao_soi", "pad_kra_pao", "tom_kha_gai",
309
- "larb", "panang_curry", "pad_see_ew", "khao_man_gai",
310
- "nam_tok", "gaeng_som", "khao_pad", "mango_sticky_rice",
311
- "satay_chicken", "thai_fried_rice", "tod_man_pla", "kuay_teow",
312
- "butter_chicken", "biryani", "paneer_tikka", "dal", "naan",
313
- "rogan_josh", "palak_paneer", "samosa", "chole", "tandoori_chicken",
314
- "aloo_gobi", "vindaloo", "dosa", "idli", "vada",
315
- "rajma", "pav_bhaji", "korma", "malai_kofta", "jalebi",
316
- "paratha", "bhindi_masala", "chicken_tikka_masala",
317
- "pho", "banh_mi", "laksa", "nasi_goreng", "rendang", "satay",
318
- "adobo", "sinigang", "hainan_chicken_rice", "char_kway_teow",
319
- "lechon", "soto_ayam", "bubur_ayam", "nasi_lemak", "mee_goreng"
320
- ]
321
- container_labels = ["bowl", "plate", "dish"]
322
-
323
- portion_size = request.portion_size
324
- if portion_size is None or portion_size <= 0:
325
- portion_size = estimate_portion_size(
326
- yolo_results,
327
- img_width=img.width,
328
- img_height=img.height,
329
- reference_object_size=request.reference_object_size,
330
- food_labels=food_labels,
331
- container_labels=container_labels,
332
- top_food=top_food
333
- )
334
- logger.info(f"Estimated portion size: {portion_size} grams")
335
 
336
- # Whitelist food labels
337
  food_label_whitelist = [
338
  "pizza", "salad", "chicken", "chicken_wings", "shrimp_and_grits",
339
- "lasagna", "risotto", "burger", "sandwich", "pasta",
340
- "akpu", "fufu", "egusi_soup", "jollof_rice", "pounded_yam",
341
- "banga_soup", "bitterleaf_soup", "edikaikong_soup", "okra_soup",
342
- "ogbono_soup", "nkwobi", "moin_moin", "puff_puff", "suya",
343
- "abacha", "amala", "ewa_agoyin", "ofada_stew", "pepper_soup",
344
- "kimchi", "bibimbap", "bulgogi", "japchae", "tteokbokki",
345
- "samgyeopsal", "kimchi_jjigae", "doenjang_jjigae", "sundubu_jjigae",
346
- "galbi", "kimbap", "jajangmyeon", "naengmyeon", "dakgalbi",
347
- "haemul_pajeon", "samgyetang", "bossam", "seolleongtang",
348
- "mandu", "yangnyeom_chicken", "gamjatang", "jokbal", "budae_jjigae",
349
- "haemul_tang", "dongtae_jjigae", "kongguksu", "mul_naengmyeon",
350
- "tteokguk", "miyeokguk",
351
- "sushi", "ramen", "udon", "tempura", "sashimi", "onigiri",
352
- "yakitori", "miso_soup", "okonomiyaki", "takoyaki",
353
- "donburi", "gyudon", "katsu_curry", "soba", "tonkatsu",
354
- "shabu_shabu", "natto", "unagi", "chawanmushi", "tamagoyaki",
355
- "yakisoba", "omurice", "kare_raisu", "oyakodon", "gyoza",
356
- "fried_rice", "dumplings", "mapo_tofu", "kung_pao_chicken",
357
- "sweet_sour_pork", "chow_mein", "spring_rolls", "peking_duck",
358
- "dim_sum", "hot_pot", "xiaolongbao", "char_siu", "wonton_soup",
359
- "egg_foo_young", "beef_broccoli", "szechuan_chicken", "lo_mein",
360
- "hunan_pork", "crispy_duck", "ma_la_tang", "dan_dan_noodles",
361
- "zha_jiang_mian", "lion_head_meatballs",
362
- "pad_thai", "tom_yum", "green_curry", "red_curry", "som_tam",
363
- "massaman_curry", "khao_soi", "pad_kra_pao", "tom_kha_gai",
364
- "larb", "panang_curry", "pad_see_ew", "khao_man_gai",
365
- "nam_tok", "gaeng_som", "khao_pad", "mango_sticky_rice",
366
- "satay_chicken", "thai_fried_rice", "tod_man_pla", "kuay_teow",
367
- "butter_chicken", "biryani", "paneer_tikka", "dal", "naan",
368
- "rogan_josh", "palak_paneer", "samosa", "chole", "tandoori_chicken",
369
- "aloo_gobi", "vindaloo", "dosa", "idli", "vada",
370
- "rajma", "pav_bhaji", "korma", "malai_kofta", "jalebi",
371
- "paratha", "bhindi_masala", "chicken_tikka_masala",
372
- "pho", "banh_mi", "laksa", "nasi_goreng", "rendang", "satay",
373
- "adobo", "sinigang", "hainan_chicken_rice", "char_kway_teow",
374
- "lechon", "soto_ayam", "bubur_ayam", "nasi_lemak", "mee_goreng"
375
  ]
376
-
377
  non_food_items = [
378
  r.names[int(cls)]
379
  for r in yolo_results
@@ -382,171 +137,16 @@ async def analyze_food(request: ImageRequest, credentials: HTTPAuthorizationCred
382
  ]
383
 
384
  is_non_food = len(non_food_items) > len(food_items) and max(food_items.values(), default=0) < 0.5
385
- nutrients, micronutrients, calories = calculate_nutrients(food_items, portion_size)
 
386
 
387
  ingredient_map = {
388
  "pizza": ["dough", "tomato sauce", "cheese"],
389
  "salad": ["lettuce", "tomato", "cucumber"],
390
  "chicken_curry": ["chicken", "curry sauce", "spices"],
391
  "lasagna": ["pasta", "tomato sauce", "cheese", "meat"],
392
- "risotto": ["rice", "broth", "parmesan"],
393
- "akpu": ["cassava", "water"],
394
- "fufu": ["cassava", "water"],
395
- "egusi_soup": ["egusi seeds", "vegetables", "palm oil", "meat"],
396
- "jollof_rice": ["rice", "tomato", "pepper", "onion"],
397
- "pounded_yam": ["yam", "water"],
398
- "banga_soup": ["palm fruit", "fish", "spices"],
399
- "bitterleaf_soup": ["bitterleaf", "meat", "palm oil"],
400
- "edikaikong_soup": ["waterleaf", "ugwu", "meat", "periwinkle"],
401
- "okra_soup": ["okra", "meat", "palm oil"],
402
- "ogbono_soup": ["ogbono seeds", "meat", "palm oil"],
403
- "nkwobi": ["cow foot", "palm oil", "spices"],
404
- "moin_moin": ["beans", "pepper", "oil"],
405
- "puff_puff": ["flour", "sugar", "yeast"],
406
- "suya": ["beef", "peanut spice", "onion"],
407
- "abacha": ["cassava", "palm oil", "fish"],
408
- "amala": ["yam flour", "water"],
409
- "ewa_agoyin": ["beans", "palm oil", "pepper"],
410
- "ofada_stew": ["pepper", "locust beans", "meat"],
411
- "pepper_soup": ["meat", "pepper", "spices"],
412
- "kimchi": ["napa cabbage", "chili powder", "garlic", "ginger"],
413
- "bibimbap": ["rice", "mixed vegetables", "gochujang", "egg"],
414
- "bulgogi": ["beef", "soy sauce", "garlic", "sesame oil"],
415
- "japchae": ["sweet potato noodles", "vegetables", "soy sauce", "beef"],
416
- "tteokbokki": ["rice cakes", "red chili paste", "fish cakes"],
417
- "samgyeopsal": ["pork belly", "garlic", "sesame oil"],
418
- "kimchi_jjigae": ["kimchi", "pork", "tofu", "green onions"],
419
- "doenjang_jjigae": ["soybean paste", "tofu", "vegetables", "mushrooms"],
420
- "sundubu_jjigae": ["soft tofu", "seafood", "chili paste", "egg"],
421
- "galbi": ["short ribs", "soy sauce", "garlic", "sugar"],
422
- "kimbap": ["rice", "seaweed", "vegetables", "meat"],
423
- "jajangmyeon": ["noodles", "black bean sauce", "pork", "vegetables"],
424
- "naengmyeon": ["buckwheat noodles", "beef broth", "cucumber", "egg"],
425
- "dakgalbi": ["chicken", "gochujang", "cabbage", "sweet potato"],
426
- "haemul_pajeon": ["seafood", "green onions", "flour", "egg"],
427
- "samgyetang": ["chicken", "ginseng", "jujube", "rice"],
428
- "bossam": ["pork belly", "cabbage", "garlic", "ssamjang"],
429
- "seolleongtang": ["beef", "bone broth", "noodles", "green onions"],
430
- "mandu": ["dumpling wrapper", "pork", "cabbage", "garlic"],
431
- "yangnyeom_chicken": ["chicken", "gochujang", "soy sauce", "honey"],
432
- "gamjatang": ["pork spine", "potato", "perilla leaves", "chili"],
433
- "jokbal": ["pig's feet", "soy sauce", "ginger", "garlic"],
434
- "budae_jjigae": ["sausage", "spam", "kimchi", "noodles"],
435
- "haemul_tang": ["seafood", "radish", "chili", "broth"],
436
- "dongtae_jjigae": ["pollack", "tofu", "radish", "chili"],
437
- "kongguksu": ["soybean noodles", "soy milk", "cucumber", "sesame"],
438
- "mul_naengmyeon": ["buckwheat noodles", "cold broth", "beef", "egg"],
439
- "tteokguk": ["rice cake", "beef broth", "egg", "seaweed"],
440
- "miyeokguk": ["seaweed", "beef", "soy sauce", "garlic"],
441
- "sushi": ["rice", "raw fish", "seaweed", "vinegar"],
442
- "ramen": ["noodles", "broth", "pork", "seaweed"],
443
- "udon": ["thick noodles", "broth", "green onions", "fish cake"],
444
- "tempura": ["shrimp", "vegetables", "batter", "soy dipping sauce"],
445
- "sashimi": ["raw fish", "soy sauce", "wasabi"],
446
- "onigiri": ["rice", "seaweed", "fish", "pickled plum"],
447
- "yakitori": ["chicken", "skewers", "soy sauce", "mirin"],
448
- "miso_soup": ["miso paste", "tofu", "seaweed", "green onions"],
449
- "okonomiyaki": ["cabbage", "batter", "sauce", "bonito flakes"],
450
- "takoyaki": ["octopus", "batter", "sauce", "bonito flakes"],
451
- "donburi": ["rice", "meat", "egg", "onion"],
452
- "gyudon": ["beef", "rice", "onion", "soy sauce"],
453
- "katsu_curry": ["breaded cutlet", "curry sauce", "rice"],
454
- "soba": ["buckwheat noodles", "soy dipping sauce", "green onions"],
455
- "tonkatsu": ["pork cutlet", "bread crumbs", "cabbage", "sauce"],
456
- "shabu_shabu": ["beef", "vegetables", "broth", "dipping sauce"],
457
- "natto": ["fermented soybeans", "soy sauce", "mustard"],
458
- "unagi": ["eel", "soy sauce", "mirin", "rice"],
459
- "chawanmushi": ["egg custard", "shrimp", "mushrooms", "gingko"],
460
- "tamagoyaki": ["egg", "soy sauce", "mirin", "sugar"],
461
- "yakisoba": ["noodles", "pork", "cabbage", "sauce"],
462
- "omurice": ["rice", "egg", "ketchup", "chicken"],
463
- "kare_raisu": ["curry", "rice", "carrot", "potato"],
464
- "oyakodon": ["chicken", "egg", "onion", "rice"],
465
- "gyoza": ["dumpling wrapper", "pork", "cabbage", "garlic"],
466
- "fried_rice": ["rice", "egg", "vegetables", "soy sauce"],
467
- "dumplings": ["pork", "cabbage", "wrapper", "ginger"],
468
- "mapo_tofu": ["tofu", "ground pork", "sichuan pepper", "chili oil"],
469
- "kung_pao_chicken": ["chicken", "peanuts", "chili peppers", "soy sauce"],
470
- "sweet_sour_pork": ["pork", "pineapple", "bell peppers", "sweet sour sauce"],
471
- "chow_mein": ["noodles", "vegetables", "meat", "soy sauce"],
472
- "spring_rolls": ["wrapper", "cabbage", "carrot", "pork"],
473
- "peking_duck": ["duck", "pancakes", "hoisin sauce", "cucumber"],
474
- "dim_sum": ["various fillings", "wrapper", "bamboo steamer"],
475
- "hot_pot": ["broth", "beef", "vegetables", "tofu"],
476
- "xiaolongbao": ["pork", "soup", "wrapper", "ginger"],
477
- "char_siu": ["pork", "honey", "soy sauce", "hoisin"],
478
- "wonton_soup": ["wontons", "broth", "shrimp", "pork"],
479
- "egg_foo_young": ["egg", "vegetables", "meat", "gravy"],
480
- "beef_broccoli": ["beef", "broccoli", "soy sauce", "garlic"],
481
- "szechuan_chicken": ["chicken", "sichuan pepper", "chili", "peanuts"],
482
- "lo_mein": ["noodles", "vegetables", "meat", "soy sauce"],
483
- "hunan_pork": ["pork", "chili", "garlic", "soy sauce"],
484
- "crispy_duck": ["duck", "soy sauce", "spices", "hoisin"],
485
- "ma_la_tang": ["broth", "noodles", "vegetables", "spices"],
486
- "dan_dan_noodles": ["noodles", "pork", "sichuan pepper", "peanut sauce"],
487
- "zha_jiang_mian": ["noodles", "pork", "bean sauce", "cucumber"],
488
- "lion_head_meatballs": ["pork", "water chestnuts", "egg", "broth"],
489
- "pad_thai": ["rice noodles", "shrimp", "tamarind paste", "peanuts"],
490
- "tom_yum": ["shrimp", "lemongrass", "chili", "galangal"],
491
- "green_curry": ["coconut milk", "green chili", "chicken", "bamboo shoots"],
492
- "red_curry": ["coconut milk", "red chili", "chicken", "basil"],
493
- "som_tam": ["green papaya", "chili", "lime", "fish sauce"],
494
- "massaman_curry": ["coconut milk", "beef", "potatoes", "peanuts"],
495
- "khao_soi": ["egg noodles", "coconut curry", "chicken", "chili"],
496
- "pad_kra_pao": ["basil", "chicken", "chili", "fish sauce"],
497
- "tom_kha_gai": ["coconut milk", "chicken", "galangal", "lemongrass"],
498
- "larb": ["minced pork", "lime", "fish sauce", "chili"],
499
- "panang_curry": ["coconut milk", "peanut", "chicken", "chili"],
500
- "pad_see_ew": ["wide noodles", "soy sauce", "chicken", "broccoli"],
501
- "khao_man_gai": ["chicken", "rice", "cucumber", "chili sauce"],
502
- "nam_tok": ["beef", "lime", "fish sauce", "chili"],
503
- "gaeng_som": ["fish", "tamarind", "chili", "vegetables"],
504
- "khao_pad": ["rice", "shrimp", "egg", "soy sauce"],
505
- "mango_sticky_rice": ["sticky rice", "mango", "coconut milk"],
506
- "satay_chicken": ["chicken", "peanut sauce", "skewers"],
507
- "thai_fried_rice": ["rice", "shrimp", "egg", "fish sauce"],
508
- "tod_man_pla": ["fish cakes", "chili paste", "lime leaves"],
509
- "kuay_teow": ["rice noodles", "broth", "beef", "herbs"],
510
- "butter_chicken": ["chicken", "tomato", "butter", "cream"],
511
- "biryani": ["rice", "chicken", "spices", "yogurt"],
512
- "paneer_tikka": ["paneer", "spices", "yogurt", "bell peppers"],
513
- "dal": ["lentils", "spices", "tomato", "ghee"],
514
- "naan": ["flour", "yeast", "butter", "yogurt"],
515
- "rogan_josh": ["lamb", "yogurt", "spices", "tomato"],
516
- "palak_paneer": ["spinach", "paneer", "spices", "cream"],
517
- "samosa": ["pastry", "potato", "peas", "spices"],
518
- "chole": ["chickpeas", "tomato", "spices", "onion"],
519
- "tandoori_chicken": ["chicken", "yogurt", "spices", "lemon"],
520
- "aloo_gobi": ["potato", "cauliflower", "spices", "tomato"],
521
- "vindaloo": ["pork", "vinegar", "chili", "spices"],
522
- "dosa": ["rice batter", "lentils", "potato filling"],
523
- "idli": ["rice", "lentils", "steamed"],
524
- "vada": ["lentils", "spices", "fried"],
525
- "rajma": ["kidney beans", "tomato", "spices", "onion"],
526
- "pav_bhaji": ["mixed vegetables", "spices", "butter", "bun"],
527
- "korma": ["chicken", "yogurt", "cream", "spices"],
528
- "malai_kofta": ["paneer balls", "cream", "tomato", "spices"],
529
- "jalebi": ["flour", "sugar syrup", "saffron"],
530
- "paratha": ["flour", "ghee", "stuffed vegetables"],
531
- "bhindi_masala": ["okra", "spices", "tomato", "onion"],
532
- "chicken_tikka_masala": ["chicken", "tomato", "cream", "spices"],
533
- "pho": ["rice noodles", "beef", "broth", "herbs"],
534
- "banh_mi": ["baguette", "pork", "pickled vegetables", "cilantro"],
535
- "laksa": ["coconut milk", "noodles", "chicken", "chili"],
536
- "nasi_goreng": ["rice", "chicken", "shrimp paste", "egg"],
537
- "rendang": ["beef", "coconut milk", "lemongrass", "spices"],
538
- "satay": ["chicken", "peanut sauce", "skewers", "soy sauce"],
539
- "adobo": ["chicken", "soy sauce", "vinegar", "garlic"],
540
- "sinigang": ["pork", "tamarind", "vegetables", "broth"],
541
- "hainan_chicken_rice": ["chicken", "rice", "cucumber", "chili sauce"],
542
- "char_kway_teow": ["flat noodles", "shrimp", "soy sauce", "egg"],
543
- "lechon": ["roast pig", "garlic", "lemongrass"],
544
- "soto_ayam": ["chicken", "noodles", "turmeric", "broth"],
545
- "bubur_ayam": ["rice porridge", "chicken", "ginger", "green onions"],
546
- "nasi_lemak": ["coconut rice", "sambal", "egg", "anchovies"],
547
- "mee_goreng": ["noodles", "chicken", "soy sauce", "chili"]
548
  }
549
-
550
  ingredients = [
551
  {"name": food, "probability": prob, "subclasses": ingredient_map.get(food.lower(), [])}
552
  for food, prob in food_items.items()
@@ -566,45 +166,40 @@ async def analyze_food(request: ImageRequest, credentials: HTTPAuthorizationCred
566
  "micronutrients": micronutrients,
567
  "calories": calories,
568
  "source": "huggingface",
569
- "was_cropped": was_cropped,
570
- "estimated_portion_size": portion_size,
571
- "reference_object_size": request.reference_object_size
572
  }
573
  except Exception as e:
574
  logger.error(f"Analysis failed: {str(e)}")
575
  raise HTTPException(status_code=500, detail=str(e))
576
 
 
577
  # Gradio interface
578
- def gradio_analyze(image, portion_size=None, reference_object_size=None):
 
579
  try:
580
- if image is None:
581
- return {"error": "Please upload a valid image."}
582
-
583
  buffered = BytesIO()
584
  image.save(buffered, format="JPEG")
585
  base64_image = base64.b64encode(buffered.getvalue()).decode()
586
- request = ImageRequest(image=base64_image, portion_size=portion_size, reference_object_size=reference_object_size)
587
 
 
588
  loop = asyncio.new_event_loop()
589
  asyncio.set_event_loop(loop)
590
- result = loop.run_until_complete(analyze_food(request, HTTPAuthorizationCredentials(scheme="Bearer", credentials=os.getenv("HUGGINGFACE_API_TOKEN"))))
591
  loop.close()
592
  return result
 
593
  except Exception as e:
594
  return {"error": str(e)}
595
 
596
  iface = gr.Interface(
597
  fn=gradio_analyze,
598
- inputs=[
599
- gr.Image(type="pil", label="Upload Food Image"),
600
- gr.Number(label="Portion Size (grams, optional, leave blank to estimate)", value=None),
601
- gr.Number(label="Reference Object Size (cm, e.g., plate diameter, optional)", value=None)
602
- ],
603
  outputs="json",
604
  title="Food Analysis API",
605
- description="Upload an image to analyze food items and nutritional content, including Nigerian foods like akpu, egusi, and jollof rice. Portion size is optional; if not provided, it will be estimated based on the food's area in the image. Provide a reference object size (e.g., plate diameter in cm) for better accuracy."
606
  )
607
 
608
  if __name__ == "__main__":
609
  threading.Thread(target=lambda: uvicorn.run(app, host="0.0.0.0", port=8000)).start()
610
- iface.launch(server_name="0.0.0.0", server_port=7860, share=False)
 
2
  import base64
3
  from io import BytesIO
4
  from PIL import Image
5
+ from fastapi import FastAPI, HTTPException
 
6
  from pydantic import BaseModel
7
  from transformers import pipeline
8
  from ultralytics import YOLO
 
12
  import logging
13
  import requests
14
  import asyncio
 
15
 
16
+ # ==============================
17
  # Logging
18
+ # ==============================
19
  logging.basicConfig(level=logging.INFO)
20
  logger = logging.getLogger(__name__)
21
 
22
+ # ==============================
23
  # Models
24
+ # ==============================
25
  food_classifier = pipeline("image-classification", model="nateraw/food")
26
  yolo_model = YOLO("yolov8n.pt")
27
 
28
  # USDA API config
29
  USDA_API_URL = "https://api.nal.usda.gov/fdc/v1/foods/search"
30
+ USDA_API_KEY = os.getenv("USDA_API_KEY", "qktfia6caeuBSww2A5SYns8NaLlE2OuozHaEASzw")
31
 
32
  # FastAPI app
33
  app = FastAPI()
34
 
35
  # Request schema
36
  class ImageRequest(BaseModel):
37
+ image: str # Base64 image
38
+ portion_size: float = 100.0
 
39
 
40
  # Decode base64 image
41
  def decode_base64_image(base64_string):
 
47
  logger.error(f"Image decoding failed: {str(e)}")
48
  raise HTTPException(status_code=400, detail="Invalid base64 image")
49
 
50
  # Crop image to food or container
51
  def crop_image_to_food(img, yolo_results,
52
+ food_labels=["chicken_curry", "pizza", "salad", "lasagna", "risotto"],
53
  container_labels=["bowl", "plate", "dish"]):
54
  try:
55
  for result in yolo_results:
 
69
  logger.error(f"Cropping failed: {str(e)}")
70
  return img, False
71
 
72
+ # Calculate nutrients
73
  def calculate_nutrients(food_items, portion_size):
74
  nutrients = {"protein": 0, "carbs": 0, "fat": 0, "fiber": 0, "sodium": 0}
75
  micronutrients = {"vitamin_c": 0, "calcium": 0, "iron": 0}
 
76
  top_food = max(food_items, key=food_items.get, default=None)
77
  if not top_food:
78
+ return nutrients, micronutrients, 0
 
79
 
80
  query_food = top_food.replace("_", " ")
 
 
81
  try:
82
  response = requests.get(USDA_API_URL, params={
83
  "api_key": USDA_API_KEY,
 
87
  response.raise_for_status()
88
  data = response.json()
89
  if not data.get("foods"):
90
+ return nutrients, micronutrients, 0
91
 
92
  food_data = data["foods"][0]
93
  food_nutrients = {n["nutrientName"]: n["value"] for n in food_data["foodNutrients"]}
 
97
  "carbs": food_nutrients.get("Carbohydrate, by difference", 0) * (portion_size / 100),
98
  "fat": food_nutrients.get("Total lipid (fat)", 0) * (portion_size / 100),
99
  "fiber": food_nutrients.get("Fiber, total dietary", 0) * (portion_size / 100),
100
+ "sodium": food_nutrients.get("Sodium, Na", 0) * (portion_size / 100),
101
  }
102
  micronutrients = {
103
  "vitamin_c": food_nutrients.get("Vitamin C, total ascorbic acid", 0) * (portion_size / 100),
 
105
  "iron": food_nutrients.get("Iron, Fe", 0) * (portion_size / 100),
106
  }
107
  calories = (nutrients["protein"] * 4) + (nutrients["carbs"] * 4) + (nutrients["fat"] * 9)
108
+ return nutrients, micronutrients, calories
109
  except Exception as e:
110
  logger.error(f"USDA API request failed: {str(e)}")
111
+ return nutrients, micronutrients, 0
112
 
113
+ # ==============================
114
  # FastAPI endpoint
115
+ # ==============================
116
  @app.post("/analyze_food")
117
+ async def analyze_food(request: ImageRequest):
118
  try:
119
  img = decode_base64_image(request.image)
120
  yolo_results = yolo_model(img)
 
 
121
  cropped_img, was_cropped = crop_image_to_food(img, yolo_results)
122
+
123
+ # Food classification
124
  food_results = food_classifier(cropped_img)
125
  food_items = {r["label"]: r["score"] for r in food_results if r["score"] >= 0.3}
126
 
127
+ # Fix: whitelist food labels
128
  food_label_whitelist = [
129
  "pizza", "salad", "chicken", "chicken_wings", "shrimp_and_grits",
130
+ "lasagna", "risotto", "burger", "sandwich", "pasta"
131
  ]
 
132
  non_food_items = [
133
  r.names[int(cls)]
134
  for r in yolo_results
 
137
  ]
138
 
139
  is_non_food = len(non_food_items) > len(food_items) and max(food_items.values(), default=0) < 0.5
140
+
141
+ nutrients, micronutrients, calories = calculate_nutrients(food_items, request.portion_size)
142
 
143
  ingredient_map = {
144
  "pizza": ["dough", "tomato sauce", "cheese"],
145
  "salad": ["lettuce", "tomato", "cucumber"],
146
  "chicken_curry": ["chicken", "curry sauce", "spices"],
147
  "lasagna": ["pasta", "tomato sauce", "cheese", "meat"],
148
+ "risotto": ["rice", "broth", "parmesan"]
149
  }
 
150
  ingredients = [
151
  {"name": food, "probability": prob, "subclasses": ingredient_map.get(food.lower(), [])}
152
  for food, prob in food_items.items()
 
166
  "micronutrients": micronutrients,
167
  "calories": calories,
168
  "source": "huggingface",
169
+ "was_cropped": was_cropped
 
 
170
  }
171
  except Exception as e:
172
  logger.error(f"Analysis failed: {str(e)}")
173
  raise HTTPException(status_code=500, detail=str(e))
174
 
175
+ # ==============================
176
  # Gradio interface
177
+ # ==============================
178
+ def gradio_analyze(image):
179
  try:
180
  buffered = BytesIO()
181
  image.save(buffered, format="JPEG")
182
  base64_image = base64.b64encode(buffered.getvalue()).decode()
183
+ request = ImageRequest(image=base64_image, portion_size=100.0)
184
 
185
+ # Always create a new event loop
186
  loop = asyncio.new_event_loop()
187
  asyncio.set_event_loop(loop)
188
+ result = loop.run_until_complete(analyze_food(request))
189
  loop.close()
190
  return result
191
+
192
  except Exception as e:
193
  return {"error": str(e)}
194
 
195
  iface = gr.Interface(
196
  fn=gradio_analyze,
197
+ inputs=gr.Image(type="pil"),
198
  outputs="json",
199
  title="Food Analysis API",
200
+ description="Upload an image to analyze food items, non-food items, and nutritional content."
201
  )
202
 
203
  if __name__ == "__main__":
204
  threading.Thread(target=lambda: uvicorn.run(app, host="0.0.0.0", port=8000)).start()
205
+ iface.launch(server_name="0.0.0.0", server_port=7860, share=True)
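For reference, a minimal client for the rewritten endpoint mirrors the payload batch_analyze.py builds; this sketch assumes the API is reachable on localhost:8000 (or the Space URL) and uses a hypothetical image path:

import base64
import requests

with open("sample.jpg", "rb") as f:
    payload = {
        "image": base64.b64encode(f.read()).decode("utf-8"),
        "portion_size": 100.0,
    }
resp = requests.post("http://localhost:8000/analyze_food", json=payload, timeout=60)
print(resp.json())  # food_items, nutrients, micronutrients, calories, was_cropped, ...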
batch_analyze.py CHANGED
@@ -10,9 +10,9 @@ from oauth2client.service_account import ServiceAccountCredentials
10
  # =========================
11
  # CONFIG
12
  # =========================
13
- ANALYZE_URL = "https://versatile153-nutri.hf.space/analyze_food" # HF Space API URL
14
- IMAGE_DIR = "images" # Folder containing images
15
- SHEET_ID = "1N4I1HzxycvDvD7Tm4f7m-4ppp7yy46SQzJ1ALEOOZl0"
16
 
17
  # Google Sheets credentials
18
  scope = ["https://spreadsheets.google.com/feeds",
@@ -30,12 +30,14 @@ except Exception as e:
30
  # HELPERS
31
  # =========================
32
  def encode_image(image_path):
 
33
  img = Image.open(image_path).convert("RGB")
34
  buffered = BytesIO()
35
  img.save(buffered, format="JPEG")
36
  return base64.b64encode(buffered.getvalue()).decode("utf-8")
37
 
38
  def analyze_image(image_path):
 
39
  try:
40
  base64_img = encode_image(image_path)
41
  payload = {"image": base64_img, "portion_size": 100.0}
@@ -46,11 +48,12 @@ def analyze_image(image_path):
46
  return {"error": str(e)}
47
 
48
  # =========================
49
- # MAIN PROCESS
50
  # =========================
51
  def process_images():
52
  files = [f for f in os.listdir(IMAGE_DIR) if f.lower().endswith((".jpg", ".png", ".jpeg"))][:500]
53
 
 
54
  if len(sheet.get_all_values()) == 0:
55
  sheet.insert_row([
56
  "Filename", "Food Items", "Non-Food Items",
@@ -62,9 +65,10 @@ def process_images():
62
  path = os.path.join(IMAGE_DIR, file)
63
  result = analyze_image(path)
64
 
 
 
65
  if "error" in result:
66
  row = [file, "ERROR", result["error"], "", "", "", "", "", ""]
67
- print(f"[{i}/{len(files)}] ❌ {file}: {result['error']}")
68
  else:
69
  food_items = json.dumps(result.get("food_items", {}))
70
  non_food = json.dumps(result.get("non_food_items", []))
@@ -80,15 +84,12 @@ def process_images():
80
  nutrients.get("sodium", 0),
81
  result.get("calories", 0)
82
  ]
83
- print(f"[{i}/{len(files)}] ✅ Added {file}")
84
 
85
  try:
86
  sheet.append_row(row)
 
87
  except Exception as e:
88
  print(f"[{i}/{len(files)}] ❌ Failed to add {file}: {e}")
89
 
90
- # =========================
91
- # RUN AUTOMATICALLY
92
- # =========================
93
  if __name__ == "__main__":
94
  process_images()
 
10
  # =========================
11
  # CONFIG
12
  # =========================
13
+ ANALYZE_URL = "http://localhost:8000/analyze_food" # Or HuggingFace Space URL
14
+ IMAGE_DIR = "images_batch" # Folder for test images
15
+ SHEET_ID = "1N4I1HzxycvDvD7Tm4f7m-4ppp7yy46SQzJ1ALEOOZl0" # <-- Your sheet ID
16
 
17
  # Google Sheets credentials
18
  scope = ["https://spreadsheets.google.com/feeds",
 
30
  # HELPERS
31
  # =========================
32
  def encode_image(image_path):
33
+ """Convert image to base64 for sending to API."""
34
  img = Image.open(image_path).convert("RGB")
35
  buffered = BytesIO()
36
  img.save(buffered, format="JPEG")
37
  return base64.b64encode(buffered.getvalue()).decode("utf-8")
38
 
39
  def analyze_image(image_path):
40
+ """Send image to analysis API."""
41
  try:
42
  base64_img = encode_image(image_path)
43
  payload = {"image": base64_img, "portion_size": 100.0}
 
48
  return {"error": str(e)}
49
 
50
  # =========================
51
+ # MAIN LOOP
52
  # =========================
53
  def process_images():
54
  files = [f for f in os.listdir(IMAGE_DIR) if f.lower().endswith((".jpg", ".png", ".jpeg"))][:500]
55
 
56
+ # Add headers only if sheet is empty
57
  if len(sheet.get_all_values()) == 0:
58
  sheet.insert_row([
59
  "Filename", "Food Items", "Non-Food Items",
 
65
  path = os.path.join(IMAGE_DIR, file)
66
  result = analyze_image(path)
67
 
68
+ print(f"Result for {file}: {result}") # Debug print
69
+
70
  if "error" in result:
71
  row = [file, "ERROR", result["error"], "", "", "", "", "", ""]
 
72
  else:
73
  food_items = json.dumps(result.get("food_items", {}))
74
  non_food = json.dumps(result.get("non_food_items", []))
 
84
  nutrients.get("sodium", 0),
85
  result.get("calories", 0)
86
  ]
 
87
 
88
  try:
89
  sheet.append_row(row)
90
+ print(f"[{i}/{len(files)}] ✅ Added {file} to Google Sheet")
91
  except Exception as e:
92
  print(f"[{i}/{len(files)}] ❌ Failed to add {file}: {e}")
93
 
94
  if __name__ == "__main__":
95
  process_images()
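The Google Sheets credential setup that this diff only references (the scope list plus ServiceAccountCredentials, then a sheet handle) typically looks like the sketch below; the key-file name and the drive scope are assumptions, not taken from this commit:

import gspread
from oauth2client.service_account import ServiceAccountCredentials

SHEET_ID = "1N4I1HzxycvDvD7Tm4f7m-4ppp7yy46SQzJ1ALEOOZl0"
scope = ["https://spreadsheets.google.com/feeds",
         "https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name("service_account.json", scope)  # hypothetical file name
client = gspread.authorize(creds)
sheet = client.open_by_key(SHEET_ID).sheet1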
images/1.jpg DELETED

Git LFS Details

  • SHA256: e060bbafe750466dee230ad9852dc12aa00616218ee5f29c212f2dd0dcc62a2d
  • Pointer size: 130 Bytes
  • Size of remote file: 32.5 kB
images/54156table-with-food-for-usa-4th-july-independence-day-2023-11-27-05-37-19-utc.avif DELETED

Git LFS Details

  • SHA256: 41bc5f289a6a9d6498ac1315e56f5172d0d49f6a40066c924765b8002c6c04e3
  • Pointer size: 131 Bytes
  • Size of remote file: 112 kB
images/General-Tsos-Chicken-Chinese-Foods.jpg DELETED

Git LFS Details

  • SHA256: 8ab7f777252c59c3b08cb8234cea763a01b7a11d60adbc1beffa5792bc8e77c3
  • Pointer size: 131 Bytes
  • Size of remote file: 494 kB
images/Hamburger_(12164386105).jpg DELETED

Git LFS Details

  • SHA256: 209cfdd79dbe53c9f320f20b72d310f2de3848d03f34a04c5f5f26612276d54d
  • Pointer size: 131 Bytes
  • Size of remote file: 111 kB
images/dashboard-design-example-hcare.png DELETED

Git LFS Details

  • SHA256: 9a7be46f9d54a22f322e3212ec34164fe8fc49d4d9b666d412bb115dd83722c5
  • Pointer size: 131 Bytes
  • Size of remote file: 144 kB
images/high-quality-food-stock-photos-thumbnail.jpg DELETED

Git LFS Details

  • SHA256: d3762a2d921fb82c3dfba8bd5227f6df3f7b6d0a226745f705868f1d07794a40
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
images/images (2).jpg DELETED

Git LFS Details

  • SHA256: 0371c75701ce35bccc2a77fb8e9c308ea76ac3b9ddb4c8d53e5b564b64650684
  • Pointer size: 130 Bytes
  • Size of remote file: 11.7 kB
images/images (3).jpg DELETED

Git LFS Details

  • SHA256: 9c35c066527cad54cbe5d36fbcef23d7c979744a2c6a05e8b266f4cc73e514a0
  • Pointer size: 130 Bytes
  • Size of remote file: 10.6 kB
images/images (4).jpg DELETED

Git LFS Details

  • SHA256: 52a8caaf10aa412c854882048f532fc4bc1ec1aab065f9f6a4de76ca892b72e4
  • Pointer size: 130 Bytes
  • Size of remote file: 12.5 kB
images/istockphoto-628650996-612x612.jpg DELETED

Git LFS Details

  • SHA256: d1651f9a86f3e114b8172f3abd02d9ea81d0b17054b1ce07793c6ee2b861137c
  • Pointer size: 130 Bytes
  • Size of remote file: 56.8 kB
images/lemonade-fried-chicken-1022FOO-2000-0912e05702e64dac9bfeffbd9199aee5.jpg DELETED

Git LFS Details

  • SHA256: 6bf42447dc66de88dc52dcf991e6dc8799098af7c8469d8e314676fd1cdabfd3
  • Pointer size: 131 Bytes
  • Size of remote file: 270 kB
images/pexels-robinstickel-70497.jpg DELETED

Git LFS Details

  • SHA256: b9119b40f935b9791d8932b14732f7fb6fca8e6641d75aad10ade68b039428b4
  • Pointer size: 131 Bytes
  • Size of remote file: 354 kB
images/shutterstock_2495596739-(1)@2x.jpg DELETED

Git LFS Details

  • SHA256: e9746524ae8175b99ab0acffa0e6ef722d215f6aa59b678d37f4fa5353a4494d
  • Pointer size: 131 Bytes
  • Size of remote file: 565 kB
images_batch ADDED
File without changes