import base64
import json
import os
import os.path as osp

import numpy as np
import PIL.Image
from labelme import utils

def ensure_directory(directory):
    """Create *directory* (including parents) if it does not already exist.

    Uses ``exist_ok=True`` instead of an ``os.path.exists`` pre-check, which
    avoids the check-then-create race when two processes run concurrently.
    """
    os.makedirs(directory, exist_ok=True)

def process_json_file(path, jpgs_path, pngs_path, classes):
    """Convert one labelme JSON annotation into a JPEG image and a PNG label map.

    Parameters
    ----------
    path : str
        Path to the labelme ``.json`` annotation file.
    jpgs_path : str
        Directory that receives the extracted ``.jpg`` image.
    pngs_path : str
        Directory that receives the ``.png`` class-index label map.
    classes : list[str]
        Global class-name order; a label's position in this list becomes its
        pixel value in the saved PNG. Labels not present in ``classes`` are
        left as background (value 0).

    Raises
    ------
    ValueError
        If the per-file label indices are not the dense range 0..N.
    """
    with open(path) as json_file:
        data = json.load(json_file)

    # Prefer the image embedded in the JSON; otherwise read it from
    # `imagePath` relative to the JSON file. `.get` avoids a KeyError
    # when the 'imageData' key is missing entirely.
    imageData = data.get('imageData')
    if not imageData:
        imagePath = os.path.join(os.path.dirname(path), data['imagePath'])
        with open(imagePath, 'rb') as f:
            imageData = base64.b64encode(f.read()).decode('utf-8')

    img = utils.img_b64_to_arr(imageData)

    # Map label names to per-file integer values; background is always 0.
    label_name_to_value = {'_background_': 0}
    for shape in data['shapes']:
        label_name = shape['label']
        if label_name not in label_name_to_value:
            label_name_to_value[label_name] = len(label_name_to_value)

    # Label values must be dense: 0, 1, 2, ..., N. Raise explicitly instead
    # of using `assert`, which is stripped when running under `python -O`.
    label_values = list(label_name_to_value.values())
    if label_values != list(range(len(label_values))):
        raise ValueError(f"Expected label values to be continuous: {label_values}")

    # Rasterize each shape into the per-file label image.
    lbl = np.zeros(img.shape[:2], dtype=np.int32)
    for shape in data['shapes']:
        points = shape['points']
        shape_type = shape.get('shape_type', 'polygon')

        # A polygon needs at least 3 points to enclose any area.
        if shape_type == 'polygon' and len(points) <= 2:
            print(f"Skipping invalid polygon {shape}")
            continue

        mask = utils.shape_to_mask(img.shape[:2], points, shape_type)
        lbl[mask] = label_name_to_value[shape['label']]

    # Compute the output file stem once instead of repeating the
    # splitext/basename chain for every output path.
    stem = osp.splitext(osp.basename(path))[0]
    PIL.Image.fromarray(img).save(osp.join(jpgs_path, stem + '.jpg'))

    # Remap per-file label indices onto the global `classes` order so the
    # saved PNG uses the same pixel value for a class across all files.
    new = np.zeros((img.shape[0], img.shape[1]), dtype=np.int32)
    for name, index_json in label_name_to_value.items():
        if name in classes:
            index_all = classes.index(name)
            new[lbl == index_json] = index_all
            print(f"Processing {name}: JSON index {index_json}, Classes index {index_all}")

    utils.lblsave(osp.join(pngs_path, stem + '.png'), new.astype(np.uint8))

    unique_values = np.unique(new)
    print("Unique label values in 'new':", unique_values)
    print('Saved ' + stem + '.jpg and ' + stem + '.png')

if __name__ == '__main__':
    # Output locations for the converted image / label-map pairs.
    jpgs_path = "datasets/JPEGImages"
    pngs_path = "datasets/SegmentationClass"
    # Class order defines the pixel value written to the PNG label maps.
    classes = ["_background_", "leaf", "point"]

    ensure_directory(jpgs_path)
    ensure_directory(pngs_path)

    base_dir = "D:/AI_PYCODE/deeplabv3-plus-pytorch-main/bingdubing2"
    for filename in os.listdir(base_dir):
        file_path = os.path.join(base_dir, filename)
        # Match '.json' with the dot so names that merely end in the
        # letters 'json' (e.g. 'foojson') are not picked up.
        if os.path.isfile(file_path) and file_path.endswith('.json'):
            process_json_file(file_path, jpgs_path, pngs_path, classes)
