{
"module": "keras_hub.src.models.sam.sam_image_segmenter",
"class_name": "SAMImageSegmenter",
"config": {
"backbone": {
"module": "keras_hub.src.models.sam.sam_backbone",
"class_name": "SAMBackbone",
"config": {
"name": "sam_backbone",
"trainable": true,
"image_encoder": {
"module": "keras_hub.src.models.vit_det.vit_det_backbone",
"class_name": "ViTDetBackbone",
"config": {
"name": "vi_t_det_backbone",
"trainable": true,
"image_shape": [
1024,
1024,
3
],
"patch_size": 16,
"hidden_size": 1024,
"num_layers": 24,
"intermediate_dim": 4096,
"num_heads": 16,
"num_output_channels": 256,
"use_bias": true,
"use_abs_pos": true,
"use_rel_pos": true,
"window_size": 14,
"global_attention_layer_indices": [
5,
11,
17,
23
],
"layer_norm_epsilon": 1e-06
},
"registered_name": "keras_hub>ViTDetBackbone"
},
"prompt_encoder": {
"module": "keras_hub.src.models.sam.sam_prompt_encoder",
"class_name": "SAMPromptEncoder",
"config": {
"name": "sam_prompt_encoder",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"hidden_size": 256,
"image_embedding_size": [
64,
64
],
"input_image_size": [
1024,
1024
],
"mask_in_channels": 16,
"activation": "gelu"
},
"registered_name": "keras_hub>SAMPromptEncoder"
},
"mask_decoder": {
"module": "keras_hub.src.models.sam.sam_mask_decoder",
"class_name": "SAMMaskDecoder",
"config": {
"name": "sam_mask_decoder",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"hidden_size": 256,
"num_layers": 2,
"intermediate_dim": 2048,
"num_heads": 8,
"embedding_dim": 256,
"num_multimask_outputs": 3,
"iou_head_depth": 3,
"iou_head_hidden_dim": 256,
"activation": "gelu"
},
"registered_name": "keras_hub>SAMMaskDecoder"
}
},
"registered_name": "keras_hub>SAMBackbone"
},
"preprocessor": {
"module": "keras_hub.src.models.sam.sam_image_segmenter_preprocessor",
"class_name": "SAMImageSegmenterPreprocessor",
"config": {
"name": "sam_image_segmenter_preprocessor",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_converter": {
"module": "keras_hub.src.models.sam.sam_image_converter",
"class_name": "SAMImageConverter",
"config": {
"name": "sam_image_converter",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_size": [
1024,
1024
],
"scale": 0.00392156862745098,
"offset": null,
"interpolation": "bilinear",
"crop_to_aspect_ratio": true
},
"registered_name": "keras_hub>SAMImageConverter"
},
"config_file": "preprocessor.json"
},
"registered_name": "keras_hub>SAMImageSegmenterPreprocessor"
},
"name": "sam_image_segmenter"
},
"registered_name": "keras_hub>SAMImageSegmenter"
}
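
The JSON above is the serialized Keras config for KerasHub's SAMImageSegmenter task model: a ViTDetBackbone image encoder, a SAMPromptEncoder, and a SAMMaskDecoder wrapped in a SAMBackbone, plus a preprocessor whose SAMImageConverter resizes inputs to 1024x1024 and rescales pixels by 1/255. The Python sketch below is illustrative only: it rebuilds the same object graph by hand, with every keyword argument copied from the "config" blocks above and every import taken from the recorded "module" paths. It assumes those keras_hub src modules are importable as written; the usual loading path is instead something like keras_hub.models.SAMImageSegmenter.from_preset() pointed at the preset directory that contains this file.

# Minimal sketch mirroring the config above (argument values copied verbatim).
from keras_hub.src.models.vit_det.vit_det_backbone import ViTDetBackbone
from keras_hub.src.models.sam.sam_prompt_encoder import SAMPromptEncoder
from keras_hub.src.models.sam.sam_mask_decoder import SAMMaskDecoder
from keras_hub.src.models.sam.sam_backbone import SAMBackbone
from keras_hub.src.models.sam.sam_image_converter import SAMImageConverter
from keras_hub.src.models.sam.sam_image_segmenter_preprocessor import (
    SAMImageSegmenterPreprocessor,
)
from keras_hub.src.models.sam.sam_image_segmenter import SAMImageSegmenter

# ViTDet image encoder: 1024x1024x3 inputs, ViT-L sized trunk, windowed
# attention with global attention at layers 5, 11, 17, and 23.
image_encoder = ViTDetBackbone(
    image_shape=(1024, 1024, 3),
    patch_size=16,
    hidden_size=1024,
    num_layers=24,
    intermediate_dim=4096,
    num_heads=16,
    num_output_channels=256,
    use_bias=True,
    use_abs_pos=True,
    use_rel_pos=True,
    window_size=14,
    global_attention_layer_indices=[5, 11, 17, 23],
    layer_norm_epsilon=1e-6,
)

# Prompt encoder operating on the 64x64 image embedding grid.
prompt_encoder = SAMPromptEncoder(
    hidden_size=256,
    image_embedding_size=(64, 64),
    input_image_size=(1024, 1024),
    mask_in_channels=16,
    activation="gelu",
)

# Two-layer mask decoder with a 3-way multimask output and IoU head.
mask_decoder = SAMMaskDecoder(
    hidden_size=256,
    num_layers=2,
    intermediate_dim=2048,
    num_heads=8,
    embedding_dim=256,
    num_multimask_outputs=3,
    iou_head_depth=3,
    iou_head_hidden_dim=256,
    activation="gelu",
)

backbone = SAMBackbone(
    image_encoder=image_encoder,
    prompt_encoder=prompt_encoder,
    mask_decoder=mask_decoder,
)

# Preprocessor: resize to 1024x1024 and rescale pixels by 1/255.
image_converter = SAMImageConverter(
    image_size=(1024, 1024),
    scale=1 / 255.0,
    interpolation="bilinear",
    crop_to_aspect_ratio=True,
)
preprocessor = SAMImageSegmenterPreprocessor(image_converter=image_converter)

model = SAMImageSegmenter(backbone=backbone, preprocessor=preprocessor)

Note that this sketch builds an uninitialized model with the documented architecture; it does not load the pretrained weights that accompany this config in the preset.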