#!/usr/bin/env python3
import argparse, os, sys, zarr, numpy as np
from termcolor import cprint

# Groups to exclude from merging. The data/* entries are recreated afterwards
# as symbolic links by create_symlinks(); qpos/* and cameras are presumably
# skipped intentionally as redundant/raw sources — TODO confirm with callers.
excluded_groups = ['qpos/state','qpos/action','cameras','data/action','data/state','data/point_cloud']

def check_structure(zarr_path):
    """Return a mapping {array_path: length of first dimension} for every
    non-scalar array reachable in the zarr store at *zarr_path*."""
    root = zarr.open(zarr_path, mode='r')
    sizes = {}

    def walk(node, prefix=""):
        # Arrays expose a non-empty .shape; sub-groups expose .keys() and
        # are descended into recursively.
        for name, child in node.items():
            full_path = f"{prefix}/{name}" if prefix else name
            if hasattr(child, 'shape') and len(child.shape) > 0:
                sizes[full_path] = child.shape[0]
            elif hasattr(child, 'keys'):
                walk(child, full_path)

    walk(root)
    return sizes

def create_single_symlink(zarr_path, source_path, dest_path, dest_name):
    """Create one symbolic link, prompting the user when the destination exists.

    Args:
        zarr_path: Root path of the zarr store (unused here; kept for
            interface compatibility with existing callers).
        source_path: pathlib.Path the link should point at.
        dest_path: pathlib.Path where the link is created.
        dest_name: Human-readable name (e.g. "data/action") used in messages.

    Returns:
        True if the link was created, False if skipped or on failure.
    """
    print(f"  Creating symlink: {source_path.name} -> {dest_name}")
    try:
        # is_symlink() is checked in addition to exists() because exists()
        # follows links and returns False for a broken symlink.
        if dest_path.is_symlink() or dest_path.exists():
            if dest_path.is_symlink():
                action, past_tense, item_type = "unlink", "Unlinked", "symbolic link"
            elif dest_path.is_dir():
                action, past_tense, item_type = "remove", "Removed", "directory"
            else:
                action, past_tense, item_type = "remove", "Removed", "file"

            ans = input(f"  {dest_name} already exists ({item_type}). {action.capitalize()} existing? (y/n): ").strip().lower()
            if ans == 'y':
                # BUGFIX: unlink() cannot remove a real directory (zarr arrays
                # are directories on disk) — fall back to shutil.rmtree.
                if dest_path.is_dir() and not dest_path.is_symlink():
                    import shutil
                    shutil.rmtree(dest_path)
                else:
                    dest_path.unlink()
                # BUGFIX: was f"{action.capitalize()}ed", which printed
                # "Removeed" for the remove branch.
                print(f"  {past_tense} existing {dest_name}")
            else:
                print(f"  Skipping {dest_name} symlink creation")
                return False

        # Ensure the parent directory (e.g. "data/") exists before linking.
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        # target_is_directory matters on Windows; derive it from the actual
        # target instead of hard-coding False (zarr targets are directories).
        dest_path.symlink_to(source_path, target_is_directory=source_path.is_dir())
        print(f"✓ Created symlink: {dest_name} -> {source_path.name}")
        return True
    except Exception as e:
        cprint(f"WARNING: Failed to create {dest_name} symlink: {e}", 'yellow')
        return False

def create_symlinks(output_store, pcd_type):
    """Create the data/* symbolic links (action, state, point cloud) inside
    the merged zarr directory, pointing at the ee/* and pcd/* arrays."""
    print("Creating symbolic links...")

    # Resolve the on-disk directory backing the zarr store.
    from pathlib import Path
    root = Path(output_store.store.path)

    # Alias links: create dest only when the source exists in the store and
    # the destination does not.
    alias_specs = (
        ('ee/action', 'data/action'),
        ('ee/state', 'data/state'),
    )
    for source_key, dest_key in alias_specs:
        if source_key in output_store and dest_key not in output_store:
            create_single_symlink(root, root / source_key, root / dest_key, dest_key)

    # Point-cloud link is selected by pcd_type (one of the pcd/* variants).
    valid_pcd_types = ('pcd/pointcloud', 'pcd/segpointcloud', 'pcd/repointcloud')
    if pcd_type and pcd_type in valid_pcd_types:
        if pcd_type in output_store and 'data/point_cloud' not in output_store:
            create_single_symlink(root, root / pcd_type,
                                  root / "data" / "point_cloud", "data/point_cloud")
        elif pcd_type not in output_store:
            cprint(f"WARNING: Specified point cloud type '{pcd_type}' not found in merged data", 'yellow')

    print("Symbolic links created successfully!")

def main():
    """Merge multiple zarr stores into one output store and create symlinks.

    Workflow:
      1. Parse CLI args (input zarr paths, output path, point-cloud type).
      2. If the output already exists, either remove it (prompted) or skip
         merging and only (re)create the symbolic links.
      3. Validate that all inputs share the same group structure (excluding
         excluded_groups), merge meta/episode_ends with cumulative offsets,
         then concatenate every remaining array group along axis 0.
      4. Create the data/* symlinks via create_symlinks().
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-z', '--zarr-files', nargs='+', required=True)
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('--pcd', choices=['pcd/pointcloud', 'pcd/segpointcloud', 'pcd/repointcloud'], 
                       default='pcd/pointcloud',
                       help='Specify which point cloud type to link to data/point_cloud (default: pcd/pointcloud)')
    args = parser.parse_args()
    
    # Check if output zarr already exists
    should_merge = True
    if os.path.exists(args.output):
        ans = input(f"Output zarr {args.output} already exists. Remove it? (y/n): ").strip().lower()
        if ans == 'y':
            import shutil
            shutil.rmtree(args.output)
            print(f"Removed existing output zarr: {args.output}")
        else:
            # Existing store is kept; only the symlink step runs below.
            print("Keeping existing zarr file. Will only create symbolic links.")
            should_merge = False
    
    for zarr_path in args.zarr_files:
        assert os.path.exists(zarr_path), f"Zarr file {zarr_path} does not exist"
    
    if should_merge:
        first_structure = check_structure(args.zarr_files[0])
        # Filter out excluded groups from the groups to process
        def should_exclude_group(group_name):
            # Excluded if the path matches an excluded entry exactly or is
            # nested under one (prefix + '/').
            for excluded in excluded_groups:
                if group_name == excluded or group_name.startswith(excluded + '/'):
                    return True
            return False
        
        group_names = set(key for key in first_structure.keys() if not should_exclude_group(key))
        total_sizes = {group: 0 for group in group_names}
        
        # Collect episode_ends from each zarr file
        episode_ends_list = []
        # One entry per input file: the axis-0 length of a sampled data group,
        # used as the per-file offset when merging episode_ends.
        data_group_sizes = []
        
        # --- Process episode_ends first and check for discrepancy ---
        for i, zarr_path in enumerate(args.zarr_files):
            print(f"Checking zarr file {i+1}/{len(args.zarr_files)}: {zarr_path}")
            structure = check_structure(zarr_path)
            # Filter out excluded groups from current structure
            current_groups = set(key for key in structure.keys() if not should_exclude_group(key))
            assert current_groups == group_names, f"Zarr file {zarr_path} has different groups (excluding excluded_groups) {current_groups - group_names}"
            for group in group_names:
                total_sizes[group] += structure[group]
            
            # Load episode_ends from this zarr file
            store = zarr.open(zarr_path, mode='r')
            assert 'meta/episode_ends' in store, f"No episode_ends found in {zarr_path}"
            episode_ends = store['meta/episode_ends'][:]
            episode_ends_list.append(episode_ends)
            print(f"  Found {len(episode_ends)} episodes in {zarr_path}")
            # Get the size of a data group for discrepancy check.
            # NOTE(review): group_names is a set, so which group is sampled
            # here is arbitrary — assumes all data groups share the same
            # axis-0 length; confirm that holds for these stores.
            for group_name in group_names:
                if group_name != 'meta/episode_ends':
                    data_group_size = store[group_name].shape[0]
                    data_group_sizes.append(data_group_size)
                    # Check for discrepancy between episode bookkeeping and
                    # actual array length (padding / incomplete episodes).
                    if len(episode_ends) > 0 and episode_ends[-1] != data_group_size:
                        cprint(f"WARNING: episode_ends[-1]={episode_ends[-1]} does not match data array length={data_group_size} in {zarr_path}. This may indicate padding or incomplete episodes! Offsets will use data array length.", 'yellow')
                    break
        
        print(f"Validated {len(args.zarr_files)} zarr files")
        for group, size in total_sizes.items():
            print(f"  {group}: {size}")
        
        output_store = zarr.open(args.output, mode='w')
        
        # --- Handle episode_ends merging and offsetting first ---
        if episode_ends_list:
            print("Processing episode_ends with proper offsetting...")
            
            cumulative_offset = 0
            merged_episode_ends = []
            for i, episode_ends in enumerate(episode_ends_list):
                print(f"  Zarr {i+1}: offsetting {len(episode_ends)} episodes by {cumulative_offset}")
                offset_episode_ends = episode_ends + cumulative_offset
                merged_episode_ends.extend(offset_episode_ends)
                # Use actual data array length for offset (not episode_ends[-1]),
                # matching the warning emitted during validation above.
                cumulative_offset += data_group_sizes[i]
            merged_episode_ends = np.array(merged_episode_ends, dtype='int64')
            output_store.create_dataset('meta/episode_ends', data=merged_episode_ends, dtype='int64')
            print(f"  Merged episode_ends: {len(merged_episode_ends)} episodes total")
            print(f"  Final episode_ends shape: {merged_episode_ends.shape}")
        
        # --- Process normal groups after episode_ends for faster debug ---
        for group_name in group_names:
            if group_name == 'meta/episode_ends':
                continue  # Already handled
            print(f"Processing group: {group_name}")
            # Shape/dtype/chunks of the merged array are taken from the first
            # file; only the leading dimension is summed across files.
            first_store = zarr.open(args.zarr_files[0], mode='r')
            first_array = first_store[group_name]
            output_shape = (total_sizes[group_name],) + first_array.shape[1:]
            output_array = output_store.create_dataset(group_name, shape=output_shape, dtype=first_array.dtype, chunks=first_array.chunks)
            current_offset = 0
            for zarr_path in args.zarr_files:
                print(f"  Copying from {zarr_path}...")
                source_store = zarr.open(zarr_path, mode='r')
                source_array = source_store[group_name]
                output_array[current_offset:current_offset + source_array.shape[0]] = source_array[:]
                current_offset += source_array.shape[0]
    else:
        # Just open the existing output store for creating symbolic links
        output_store = zarr.open(args.output, mode='r+')
        print("Skipping merge process, only creating symbolic links...")
    
    # --- Create symbolic links ---
    create_symlinks(output_store, args.pcd)
    
    if should_merge:
        print(f"Successfully merged zarr files to: {args.output}")
    else:
        print(f"Successfully created symbolic links in existing zarr file: {args.output}")

# Script entry point.
if __name__ == "__main__":
    main()