whisper-tiny-hi-model / ipu_config.json
{
"auto_loss_scaling": false,
"device_iterations": 1,
"embedding_serialization_factor": 1,
"enable_half_partials": true,
"executable_cache_dir": "/tmp/exe_cache/3.3.0/whisper",
"execute_encoder_on_cpu_for_generation": false,
"explicit_ir_inference": false,
"gradient_accumulation_steps": 16,
"inference_device_iterations": 1,
"inference_embedding_serialization_factor": 1,
"inference_ipus_per_replica": 2,
"inference_layers_per_ipu": [
4,
4
],
"inference_matmul_proportion": 0.2,
"inference_parallelize_kwargs": {
"on_device_generation_steps": 16,
"use_cache": true,
"use_encoder_output_buffer": true
},
"inference_projection_serialization_factor": 1,
"inference_replication_factor": 1,
"inference_serialized_embedding_splits_per_ipu": null,
"inference_serialized_projection_splits_per_ipu": null,
"ipus_per_replica": 4,
"layers_per_ipu": [
2,
2,
2,
2
],
"matmul_proportion": [
0.2,
0.2,
0.6,
0.6
],
"optimizer_state_offchip": true,
"optimum_version": "1.6.1",
"output_mode": "final",
"parallelize_kwargs": {},
"projection_serialization_factor": 5,
"recompute_checkpoint_every_layer": true,
"replicated_tensor_sharding": false,
"replication_factor": 1,
"seed": 42,
"serialized_embedding_splits_per_ipu": null,
"serialized_projection_splits_per_ipu": null,
"transformers_version": "4.29.2"
}
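A minimal sketch of how a config like this is typically consumed with the optimum-graphcore library. The repo id "TheAIchemist13/whisper-tiny-hi-model" is an assumption inferred from the page title, and the snippet requires the optimum-graphcore package to be installed.

```python
# Sketch only: loading the IPU execution settings shown above with
# optimum-graphcore. The repo id below is assumed from the page title.
from optimum.graphcore import IPUConfig

# Load pipelining, replication, matmul memory proportion, etc. from the
# checkpoint directory or Hub repo containing ipu_config.json.
ipu_config = IPUConfig.from_pretrained("TheAIchemist13/whisper-tiny-hi-model")

# Fields from the JSON are exposed as attributes, e.g. the 4-IPU training
# pipeline with 2 Whisper layers placed on each IPU:
print(ipu_config.ipus_per_replica)  # 4
print(ipu_config.layers_per_ipu)    # [2, 2, 2, 2]
```

The loaded IPUConfig object is typically passed alongside the model, for example to optimum-graphcore's IPUTrainer, which uses these settings to pipeline the Whisper layers across IPUs for training and inference.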