{
"one_external_file": true,
"opset": null,
"optimization": {
"disable_attention": null,
"disable_attention_fusion": false,
"disable_bias_gelu": null,
"disable_bias_gelu_fusion": false,
"disable_bias_skip_layer_norm": null,
"disable_bias_skip_layer_norm_fusion": false,
"disable_embed_layer_norm": true,
"disable_embed_layer_norm_fusion": true,
"disable_gelu": null,
"disable_gelu_fusion": false,
"disable_group_norm_fusion": true,
"disable_layer_norm": null,
"disable_layer_norm_fusion": false,
"disable_packed_kv": true,
"disable_rotary_embeddings": false,
"disable_shape_inference": false,
"disable_skip_layer_norm": null,
"disable_skip_layer_norm_fusion": false,
"enable_gelu_approximation": false,
"enable_gemm_fast_gelu_fusion": false,
"enable_transformers_specific_optimizations": true,
"fp16": true,
"no_attention_mask": false,
"optimization_level": 2,
"optimize_for_gpu": true,
"optimize_with_onnxruntime_only": null,
"use_mask_index": false,
"use_multi_head_attention": false,
"use_raw_attention_mask": false
},
"optimum_version": "1.19.2",
"quantization": {},
"transformers_version": "4.40.2",
"use_external_data_format": true
}