#!/usr/bin/env python3

import argparse
import json
import logging
import sys
from pathlib import Path
import time
import torch

from octo.model.octo_model_pt import OctoModelPt


def setup_logging(verbose: bool = False):
    """Configure root logging to write to stdout.

    Args:
        verbose: When True log at DEBUG level, otherwise INFO.
    """
    logging.basicConfig(
        level=logging.DEBUG if verbose else logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        handlers=[logging.StreamHandler(sys.stdout)],
    )


def validate_model(model: OctoModelPt, device: str = "cpu") -> bool:
    """Sanity-check a converted model with one dummy forward pass.

    Builds a batch-of-one observation dict shaped after the model's own
    ``example_batch``, creates a single language task, and runs the
    transformer (no action head) under ``torch.no_grad()``.

    Args:
        model: The ``OctoModelPt`` instance to check.
        device: Torch device string to run the check on.

    Returns:
        True when the forward pass completes, False on any error
        (the exception is logged, not raised).
    """
    try:
        model.eval()
        model = model.to(device)

        n_batch = 1
        tasks = model.create_tasks(
            texts=["pick up the red block"],
            device=device
        )

        def _dummy_obs(name, template):
            # The pad mask must be all-True booleans; every other
            # observation entry is zeros with the example's trailing shape.
            if name == "timestep_pad_mask":
                return torch.ones((n_batch, template.shape[1]), dtype=torch.bool, device=device)
            return torch.zeros((n_batch,) + template.shape[1:], dtype=template.dtype, device=device)

        observations = {
            name: _dummy_obs(name, template)
            for name, template in model.example_batch["observation"].items()
        }

        with torch.no_grad():
            # Unpack deliberately: a non-2-tuple return counts as a failure.
            transformer_outputs, _ = model(
                observations=observations,
                tasks=tasks,
                timestep_pad_mask=observations["timestep_pad_mask"],
                train=False,
                transformer_only=True
            )

        logging.info("模型验证成功！")
        return True

    except Exception as e:
        logging.error(f"模型验证失败: {e}")
        return False


def convert_jax_to_pth(
    jax_checkpoint_path: str,
    output_path: str,
    model_name: str = "octo_model",
    skip_validation: bool = False,
    device: str = "cpu"
) -> bool:
    """Convert a JAX Octo checkpoint into a PyTorch checkpoint directory.

    Loads the JAX weights via ``OctoModelPt.load_pretrained_from_jax``,
    optionally validates the model with a dummy forward pass, saves the
    PyTorch checkpoint under ``output_path`` and writes a
    ``conversion_info.json`` alongside it describing the conversion.

    Args:
        jax_checkpoint_path: Source checkpoint (local path or e.g.
            ``hf://rail-berkeley/octo-small-1.5``).
        output_path: Directory the PyTorch checkpoint is written into
            (created if missing).
        model_name: Human-readable model name recorded in the info file.
        skip_validation: When True, skip the dummy forward-pass check.
        device: Torch device used for validation.

    Returns:
        True on success, False on any failure (errors are logged with
        their traceback, never raised).
    """
    try:
        logging.info(f"正在从JAX checkpoint加载模型: {jax_checkpoint_path}")
        start_time = time.time()

        # Skip HF language-model weights; they are reloaded from
        # HuggingFace on the PyTorch side (presumably — confirm against
        # OctoModelPt.load_pretrained_from_jax).
        loaded_dict = OctoModelPt.load_pretrained_from_jax(
            jax_checkpoint_path,
            skip_keys_regex='.*hf_model'
        )

        model = loaded_dict['octo_model']
        missing_keys = loaded_dict.get('missing_keys', [])
        skipped_keys = loaded_dict.get('skipped_keys', [])

        load_time = time.time() - start_time
        logging.info(f"JAX模型加载完成，耗时: {load_time:.2f}秒")

        if missing_keys:
            logging.warning(f"缺失的参数: {missing_keys}")
        if skipped_keys:
            logging.info(f"跳过的参数: {len(skipped_keys)}个")

        if not skip_validation:
            logging.info("正在验证模型...")
            if not validate_model(model, device):
                logging.error("模型验证失败，转换中止")
                return False

        logging.info(f"正在保存PyTorch checkpoint到: {output_path}")
        # Use a separate name instead of rebinding the str parameter to a Path.
        out_dir = Path(output_path)
        out_dir.mkdir(parents=True, exist_ok=True)

        model.save_pretrained(
            step=0,
            checkpoint_path=str(out_dir),
            optimizer=None
        )

        model_cfg = model.config["model"]
        conversion_info = {
            "source_checkpoint": jax_checkpoint_path,
            "model_name": model_name,
            # Unix timestamp of the conversion (kept as a float for
            # compatibility with existing consumers of this file).
            "conversion_time": time.time(),
            "pytorch_version": torch.__version__,
            "missing_keys": missing_keys,
            "skipped_keys_count": len(skipped_keys),
            "model_config": {
                "max_horizon": model_cfg["max_horizon"],
                "token_embedding_size": model_cfg["token_embedding_size"],
                "heads": list(model_cfg["heads"].keys()) if "heads" in model_cfg else []
            }
        }

        with open(out_dir / "conversion_info.json", 'w') as f:
            json.dump(conversion_info, f, indent=2)

        total_time = time.time() - start_time
        logging.info(f"转换完成！总耗时: {total_time:.2f}秒")
        logging.info("输出文件:")
        logging.info(f"  - 模型权重: {out_dir}/0/weights.pth")
        logging.info(f"  - 配置文件: {out_dir}/config.json")
        logging.info(f"  - 转换信息: {out_dir}/conversion_info.json")
        logging.info(f"  - 加载示例: {out_dir}/load_model_example.py")

        return True

    except Exception as e:
        # logging.exception records the full traceback through the
        # configured logging handlers, instead of the original local
        # `import traceback; traceback.print_exc()` which bypassed
        # logging and wrote to stderr.
        logging.exception(f"转换失败: {e}")
        return False


def test_converted_model(checkpoint_path: str, device: str = "cpu") -> bool:
    """Reload a converted checkpoint and validate it end-to-end.

    Args:
        checkpoint_path: Directory of the PyTorch checkpoint to reload.
        device: Torch device used for the validation forward pass.

    Returns:
        True when the reloaded model passes validation, False otherwise.
    """
    try:
        logging.info(f"正在测试转换后的模型: {checkpoint_path}")

        model = OctoModelPt.load_pretrained(checkpoint_path)['octo_model']

        passed = validate_model(model, device)
        if passed:
            logging.info("转换后的模型测试通过！")
        else:
            logging.error("转换后的模型测试失败！")
        return passed

    except Exception as e:
        logging.error(f"测试失败: {e}")
        return False


def _build_parser() -> argparse.ArgumentParser:
    """Build the command-line argument parser for the converter."""
    parser = argparse.ArgumentParser(description="将JAX Octo模型转换为PyTorch .pth格式")
    parser.add_argument(
        "jax_checkpoint",
        type=str,
        help="JAX checkpoint路径 (例如: hf://rail-berkeley/octo-small-1.5)"
    )
    parser.add_argument(
        "output_path",
        type=str,
        help="PyTorch checkpoint输出路径"
    )
    parser.add_argument(
        "--model-name",
        type=str,
        default="octo_model",
        help="模型名称 (默认: octo_model)"
    )
    parser.add_argument(
        "--skip-validation",
        action="store_true",
        help="跳过模型验证"
    )
    parser.add_argument(
        "--device",
        type=str,
        default="cpu",
        choices=["cpu", "cuda"],
        help="验证时使用的设备 (默认: cpu)"
    )
    parser.add_argument(
        "--test-converted",
        action="store_true",
        help="转换完成后测试模型"
    )
    parser.add_argument(
        "--verbose", "-v",
        action="store_true",
        help="详细输出"
    )
    return parser


def main():
    """CLI entry point: parse arguments, convert, optionally test."""
    args = _build_parser().parse_args()

    setup_logging(args.verbose)

    # Fall back to CPU when CUDA was requested but is unavailable.
    if args.device == "cuda" and not torch.cuda.is_available():
        logging.warning("CUDA不可用，使用CPU")
        args.device = "cpu"

    if not convert_jax_to_pth(
        jax_checkpoint_path=args.jax_checkpoint,
        output_path=args.output_path,
        model_name=args.model_name,
        skip_validation=args.skip_validation,
        device=args.device
    ):
        sys.exit(1)

    if args.test_converted and not test_converted_model(args.output_path, args.device):
        sys.exit(1)

    logging.info("所有操作完成！")


# Run the converter only when executed as a script, not when imported.
if __name__ == "__main__":
    main()

    


