aotrih committed on
Commit 353892c (1 parent: 60420c9)

whisperkittools-a8c3cdeab8da5d76a7b952aa74ffebfbcd44804b generated files: openai_whisper-tiny.en

openai_whisper-tiny.en/AudioEncoder.mlmodelc/analytics/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:be62662cccc25b6eca93e6f407e024f90f72cde23a1cda8b0ca753f084274a6e
+oid sha256:a2f28a8448a805dd27c1760766f421c803c10e795bfd4e03e2a70c3b11ae6f42
 size 243
openai_whisper-tiny.en/AudioEncoder.mlmodelc/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:892364c46ef43333af61711e2ef67e32cc6e7040d35263b805cb5795c8d69233
+oid sha256:e9581199d046733e8ea79832e1c9c56297059650e02d21bed3d735aa587c1100
 size 347
openai_whisper-tiny.en/AudioEncoder.mlmodelc/metadata.json CHANGED
@@ -46,7 +46,7 @@
 },
 "userDefinedMetadata" : {
 "com.github.apple.coremltools.source_dialect" : "TorchScript",
-"com.github.apple.coremltools.source" : "torch==2.2.2",
+"com.github.apple.coremltools.source" : "torch==2.3.0",
 "com.github.apple.coremltools.version" : "7.2"
 },
 "inputSchema" : [
openai_whisper-tiny.en/AudioEncoder.mlmodelc/model.mil CHANGED
@@ -1,5 +1,5 @@
 program(1.0)
-[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "3304.5.2"}, {"coremlc-version", "3304.6.2"}, {"coremltools-component-torch", "2.2.2"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
+[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "5.33.5"}, {"coremlc-version", "1877.40.3"}, {"coremltools-component-torch", "2.3.0"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
 {
 func main<ios16>(tensor<fp16, [1, 80, 1, 3000]> melspectrogram_features) {
 tensor<int32, [2]> var_34 = const()[name = tensor<string, []>("op_34"), val = tensor<int32, [2]>([1, 1])];
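
Note: the model.mil header shows these are MIL programs converted from TorchScript for an iOS 16 target, with fp16 tensors; only the build/toolchain versions changed, not the graph itself. The actual whisperkittools export code is not part of this diff, but a generic coremltools 7.x conversion under those same assumptions looks roughly like the sketch below (the `DummyAudioEncoder` module is a stand-in, not the real encoder):

```python
import numpy as np
import torch
import coremltools as ct

# Stand-in encoder for illustration only; the real module comes from whisperkittools.
class DummyAudioEncoder(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = torch.nn.Conv2d(80, 384, kernel_size=(1, 3), padding=(0, 1))

    def forward(self, melspectrogram_features):
        # (1, 80, 1, 3000) -> (1, 384, 1, 3000); the real encoder does much more.
        return self.conv(melspectrogram_features)

traced = torch.jit.trace(DummyAudioEncoder().eval(), torch.rand(1, 80, 1, 3000))

mlmodel = ct.convert(
    traced,
    inputs=[ct.TensorType(name="melspectrogram_features",
                          shape=(1, 80, 1, 3000), dtype=np.float16)],
    convert_to="mlprogram",                     # emits MIL, as in model.mil above
    compute_precision=ct.precision.FLOAT16,     # fp16 tensors, matching the diff
    minimum_deployment_target=ct.target.iOS16,  # matches func main<ios16>
)
mlmodel.save("AudioEncoder.mlpackage")          # the .mlmodelc here is the compiled form
```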
openai_whisper-tiny.en/AudioEncoder.mlmodelc/weights/weight.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:86a53cab0dcd92efa0d89803401347c8119203aac37f336d5a699c0587d01c48
+oid sha256:3e88ab3ae14798a39b8c89d7b923d0c7b6dd696305c11e0210631a0d7c2d8848
 size 16422784
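
Note: every binary in this commit is stored as a Git LFS pointer, so the diff only shows a new sha256 oid while the byte size stays the same (16422784 bytes for the encoder weights). After fetching the real objects with `git lfs pull`, the oid can be checked against the file on disk; a small sketch, assuming a local clone:

```python
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file so large weight blobs do not need to fit in memory."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

weight = Path("openai_whisper-tiny.en/AudioEncoder.mlmodelc/weights/weight.bin")
print(sha256_of(weight))
# Expected after this commit (from the LFS pointer above):
# 3e88ab3ae14798a39b8c89d7b923d0c7b6dd696305c11e0210631a0d7c2d8848
```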
openai_whisper-tiny.en/MelSpectrogram.mlmodelc/analytics/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2fc2d0799af479af957359c81021ff6a464d3251b3415064f8d2c6403cbea68f
+oid sha256:b5bf602e29aa68e760d77a8920a6bab11c6133b0a9ef5b35ec4ae6b6f69b45d2
 size 243
openai_whisper-tiny.en/MelSpectrogram.mlmodelc/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c977c1199f029235ab96a7dc394e5c5c6d2b606d333f2cab46df750a4df89329
+oid sha256:bf1301d71390cf02593a84695ad086ff603e1998e8325787c7d42a7a605279e3
 size 328
openai_whisper-tiny.en/MelSpectrogram.mlmodelc/metadata.json CHANGED
@@ -50,8 +50,8 @@
 },
 "userDefinedMetadata" : {
 "com.github.apple.coremltools.source_dialect" : "TorchScript",
-"com.github.apple.coremltools.version" : "7.2",
-"com.github.apple.coremltools.source" : "torch==2.2.2"
+"com.github.apple.coremltools.source" : "torch==2.3.0",
+"com.github.apple.coremltools.version" : "7.2"
 },
 "inputSchema" : [
 {
openai_whisper-tiny.en/MelSpectrogram.mlmodelc/model.mil CHANGED
@@ -1,5 +1,5 @@
 program(1.0)
-[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "3304.5.2"}, {"coremlc-version", "3304.6.2"}, {"coremltools-component-torch", "2.2.2"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
+[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "5.33.5"}, {"coremlc-version", "1877.40.3"}, {"coremltools-component-torch", "2.3.0"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
 {
 func main<ios16>(tensor<fp16, [480000]> audio) {
 tensor<int32, [3]> var_10 = const()[name = tensor<string, []>("op_10"), val = tensor<int32, [3]>([1, 1, 480000])];
openai_whisper-tiny.en/MelSpectrogram.mlmodelc/weights/weight.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a3afe232689fe92b1958d124f42f9ccf43e611ce1055e584190d40f11dc5a3d6
+oid sha256:caf24cefbf63a77ddf2d4940b8dd76d61119280d3d85baac0814b775eac97121
 size 354080
openai_whisper-tiny.en/TextDecoder.mlmodelc/analytics/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a3e8ecb0c4fc6f8c91c97b3a8b15fb84715aaa68d64fbe125553224c0c64c743
+oid sha256:9abbd99914ed45a2c3eed96465262d47c0b3cc4d71330f7e5809cb5237ef56ef
 size 243
openai_whisper-tiny.en/TextDecoder.mlmodelc/coremldata.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8bae9187c5fb7e29a7cb0c8aa2c753e82040ba85403c2dbad6dd30f9a6a008d
+oid sha256:6f0bcfc0ac989fa020e05d1381c0d0514cab88342acb4c52bcac5abd626b15ec
 size 633
openai_whisper-tiny.en/TextDecoder.mlmodelc/metadata.json CHANGED
@@ -84,7 +84,7 @@
 },
 "userDefinedMetadata" : {
 "com.github.apple.coremltools.source_dialect" : "TorchScript",
-"com.github.apple.coremltools.source" : "torch==2.2.2",
+"com.github.apple.coremltools.source" : "torch==2.3.0",
 "com.github.apple.coremltools.version" : "7.2"
 },
 "inputSchema" : [
openai_whisper-tiny.en/TextDecoder.mlmodelc/model.mil CHANGED
@@ -1,5 +1,5 @@
 program(1.0)
-[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "3304.5.2"}, {"coremlc-version", "3304.6.2"}, {"coremltools-component-torch", "2.2.2"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
+[buildInfo = dict<tensor<string, []>, tensor<string, []>>({{"coremlc-component-MIL", "5.33.5"}, {"coremlc-version", "1877.40.3"}, {"coremltools-component-torch", "2.3.0"}, {"coremltools-source-dialect", "TorchScript"}, {"coremltools-version", "7.2"}})]
 {
 func main<ios16>(tensor<int32, [1]> cache_length, tensor<fp16, [1, 448]> decoder_key_padding_mask, tensor<fp16, [1, 384, 1, 1500]> encoder_output_embeds, tensor<int32, [1]> input_ids, tensor<fp16, [1, 1536, 1, 448]> key_cache, tensor<fp16, [1, 448]> kv_cache_update_mask, tensor<fp16, [1, 1536, 1, 448]> value_cache) {
 tensor<int32, []> var_24_axis_0 = const()[name = tensor<string, []>("op_24_axis_0"), val = tensor<int32, []>(0)];
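
Note: the decoder's function signature spells out its full input schema: a single token id per step plus pre-allocated key/value caches of shape (1, 1536, 1, 448), the matching masks, and the (1, 384, 1, 1500) encoder output. A minimal sketch of driving one decoding step on macOS with coremltools' compiled-model loader, assuming the .mlmodelc has been pulled locally; all input values below are dummies, and output names would come from the model's outputSchema:

```python
import numpy as np
import coremltools as ct

decoder = ct.models.CompiledMLModel("openai_whisper-tiny.en/TextDecoder.mlmodelc")

# Shapes taken from func main<ios16> in model.mil above; values are placeholders.
inputs = {
    "input_ids": np.array([0], dtype=np.int32),               # dummy token id
    "cache_length": np.array([0], dtype=np.int32),
    "key_cache": np.zeros((1, 1536, 1, 448), dtype=np.float16),
    "value_cache": np.zeros((1, 1536, 1, 448), dtype=np.float16),
    "kv_cache_update_mask": np.zeros((1, 448), dtype=np.float16),
    "decoder_key_padding_mask": np.zeros((1, 448), dtype=np.float16),
    "encoder_output_embeds": np.zeros((1, 384, 1, 1500), dtype=np.float16),
}
outputs = decoder.predict(inputs)
print(list(outputs.keys()))  # names as declared in the model's outputSchema
```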
openai_whisper-tiny.en/TextDecoder.mlmodelc/weights/weight.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:12c57bf53565400bb60c09b13e9a6e31fdaa147585a8e21aeea1b60a96e3400e
+oid sha256:bfc597b783feca52db7541a6b96ebee31d2e39b0da77ca786e0018f229323444
 size 59215664