{ "_name_or_path": "/home/robo/Projects/p7_vla/openvla_hand_bin_model_v1/runs/openvla-7b+adapthand_dataset+b8+lr-2e-05+lora-r32+dropout-0.0", "arch_specifier": "no-align+fused-gelu-mlp", "architectures": [ "OpenVLAForActionPrediction" ], "auto_map": { "AutoConfig": "configuration_prismatic.OpenVLAConfig", "AutoModelForVision2Seq": "modeling_prismatic.OpenVLAForActionPrediction" }, "hf_llm_id": "meta-llama/Llama-2-7b-hf", "image_resize_strategy": "resize-naive", "image_sizes": [ 224, 224 ], "llm_backbone_id": "llama2-7b-pure", "llm_max_length": 2048, "model_type": "openvla", "n_action_bins": 256, "norm_stats": { "adapthand_dataset": { "action": { "mask": [ true, true, true, true, true, true, false ], "max": [ 0.0934932678937912, 0.07073106616735458, 0.13955646753311157, 25.86539649963379, 28.09152603149414, 22.63760757446289, 1.0 ], "mean": [ -0.0004674541705753654, -7.014157858975523e-07, -0.0026117716915905476, -0.9660947918891907, 0.0769726037979126, 0.23577763140201569, 0.533703625202179 ], "min": [ -0.11865614354610443, -0.06380447000265121, -0.12263807654380798, -18.1720027923584, -11.647391319274902, -20.10640525817871, -0.0 ], "q01": [ -0.06869871757924557, -0.04083644054830075, -0.09486489862203598, -12.397282724380494, -6.393810477256775, -10.171583023071289, -0.0 ], "q99": [ 0.06211262740194787, 0.04004575975239276, 0.12924241289496421, 14.330417251586896, 6.437622961997966, 9.655190410613951, 1.0 ], "std": [ 0.024569831788539886, 0.015697479248046875, 0.04801836237311363, 5.1880879402160645, 2.133551836013794, 3.278003454208374, 0.33055374026298523 ] }, "num_trajectories": 20, "num_transitions": 1864, "proprio": { "max": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "mean": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "min": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "q01": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "q99": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "std": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] } } }, "output_projector_states": false, "pad_to_multiple_of": 64, "pad_token_id": 32000, "text_config": { "model_type": "llama", "pad_token_id": 32000, "torch_dtype": "bfloat16", "vocab_size": 32064 }, "timm_model_ids": [ "vit_large_patch14_reg4_dinov2.lvd142m", "vit_so400m_patch14_siglip_224" ], "timm_override_act_layers": [ null, null ], "torch_dtype": "bfloat16", "transformers_version": "4.40.1", "use_fused_vision_backbone": true, "vision_backbone_id": "dinosiglip-vit-so-224px" }