import uuid
import os
import shutil
import cv2
import numpy as np
import torch
import torch.nn.functional as F
# --- CHANGE 1: Import APIRouter ---
from fastapi import FastAPI, File, Form, UploadFile, APIRouter
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
from PIL import Image
from transformers import SegformerImageProcessor, SegformerForSemanticSegmentation
from typing import Optional
import json
# (Your CONFIGURATION, MODEL LOADING, and HELPER FUNCTIONS stay exactly the same)
# ...
# LULC class index -> (name, BGR color); BGR because the mask is written with cv2.imwrite
COLOR_MAP = {
    0: ("Built Area", [128, 128, 128]),
    1: ("Vegetation", [0, 255, 0]),
    2: ("Water", [255, 0, 0]),
    3: ("Barren Land", [135, 184, 222]),
}
# ADE20K class index -> simplified LULC class index
ADE20K_TO_LULC = {
    # Built Area
    0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 11: 0, 15: 0, 16: 0, 18: 0, 29: 0, 30: 0,
    34: 0, 41: 0, 42: 0, 54: 0, 55: 0, 56: 0, 60: 0, 64: 0, 67: 0, 71: 0, 84: 0, 91: 0,
    92: 0, 96: 0, 98: 0, 100: 0, 102: 0, 107: 0, 112: 0, 118: 0, 119: 0, 120: 0, 122: 0,
    125: 0, 129: 0, 134: 0, 138: 0, 148: 0,
    # Vegetation
    9: 1, 13: 1, 22: 1, 33: 1, 45: 1,
    # Water
    14: 2, 25: 2, 89: 2,
    # Barren Land
    12: 3, 20: 3, 44: 3, 70: 3,
}
PROCESSOR = SegformerImageProcessor.from_pretrained("nvidia/segformer-b1-finetuned-ade-512-512")
MODEL = SegformerForSemanticSegmentation.from_pretrained("nvidia/segformer-b1-finetuned-ade-512-512")
def postprocess_output(mask_2d):
    """Colorize a simplified LULC mask (BGR, for cv2.imwrite) and compute per-class pixel percentages."""
    height, width = mask_2d.shape
    color_mask_bgr = np.zeros((height, width, 3), dtype=np.uint8)
    total_pixels = height * width
    percentages = {}
    for lulc_index, (class_name, color) in COLOR_MAP.items():
        pixels = (mask_2d == lulc_index)
        color_mask_bgr[pixels] = color
        pixel_count = np.sum(pixels)
        percentage = (pixel_count / total_pixels) * 100
        percentages[class_name] = f"{percentage:.2f}%"
    return color_mask_bgr, percentages
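# Optional: a small, self-contained sanity check for postprocess_output. This is only an
# illustrative sketch (the 2x2 toy mask is made up); it is never called by the API and can
# be deleted without affecting anything.
def _example_postprocess_usage():
    # Toy mask with one pixel per LULC class -> every class should report 25.00%
    toy_mask = np.array([[0, 1], [2, 3]], dtype=np.uint8)
    colored, percentages = postprocess_output(toy_mask)
    assert colored.shape == (2, 2, 3)
    assert percentages["Water"] == "25.00%"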
# ...
app = FastAPI()
# --- CHANGE 2: Create a router object ---
router = APIRouter()
# Make sure the upload/output folders exist (skip these two lines if your configuration section
# already creates them); StaticFiles fails at startup if the "processed" directory is missing.
os.makedirs("uploads", exist_ok=True)
os.makedirs("processed", exist_ok=True)
app.mount("/processed", StaticFiles(directory="processed"), name="processed_files")
# --- CHANGE 3: Use @router instead of @app for the endpoints ---
@router.post("/process-lulc")  # adjust the path to whatever your original @app.post(...) used
async def process_lulc_image(
    file: UploadFile = File(...),
    labels_json: Optional[str] = Form(None)
):
    # --- YOUR EXISTING FUNCTION LOGIC IS UNCHANGED ---
    # (Just copy-paste your whole function here)
    try:
        if labels_json:
            print("Received Optional Labels:", json.loads(labels_json))
        # Save the upload to disk under a unique name
        upload_filename = f"uploads/{uuid.uuid4()}_{file.filename}"
        with open(upload_filename, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        image = Image.open(upload_filename).convert("RGB")
        original_size = image.size[::-1]  # PIL gives (width, height); F.interpolate expects (height, width)
        inputs = PROCESSOR(images=image, return_tensors="pt")
        with torch.no_grad():
            outputs = MODEL(**inputs)
        logits = outputs.logits.cpu()
        # Upsample the low-resolution logits back to the original image size
        interpolated_logits = F.interpolate(logits, size=original_size, mode="bilinear", align_corners=False)
        prediction_mask_150_classes = interpolated_logits.argmax(dim=1)[0].numpy().astype(np.uint8)
        # Collapse the 150 ADE20K classes into the 4 LULC classes; 255 marks unmapped pixels
        simplified_mask = np.full(prediction_mask_150_classes.shape, 255, dtype=np.uint8)
        for original_class, lulc_class in ADE20K_TO_LULC.items():
            simplified_mask[prediction_mask_150_classes == original_class] = lulc_class
        colored_mask, class_percentages = postprocess_output(simplified_mask)
        processed_filename_base = f"{uuid.uuid4()}.png"
        processed_filename_path = f"processed/{processed_filename_base}"
        cv2.imwrite(processed_filename_path, colored_mask)
        # Served by the StaticFiles mount above
        processed_image_url = f"/processed/{processed_filename_base}"
        return JSONResponse(content={"message": "Processing successful", "processed_image_url": processed_image_url, "labels": class_percentages})
    except Exception as e:
        import traceback
        print(traceback.format_exc())
        return JSONResponse(content={"error": str(e)}, status_code=500)
# The root endpoint moves onto the router as well (assumed to be GET "/", matching its health-check message)
@router.get("/")
def read_root():
    return {"message": "LULC Model API is running."}
# --- CHANGE 4: Include the router in the main app ---
app.include_router(router)
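# Optional: launch the API directly with uvicorn. This is a minimal sketch; host and port are
# placeholders (Hugging Face Spaces conventionally expects 7860), so adjust them to your setup.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)

# Example request against the running server (the /process-lulc path is the assumption from CHANGE 3):
#   curl -X POST -F "file=@satellite.png" -F 'labels_json={"note": "optional"}' http://localhost:7860/process-lulc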