shuttie committed
Commit 9cac294
1 Parent(s): ecbc657

set proper input names while converting

Files changed (2)
  1. convert.py +10 -3
  2. pytorch_model.onnx +2 -2
convert.py CHANGED
@@ -1,11 +1,18 @@
 from transformers import AutoTokenizer, AutoModel
 import torch
 
+max_seq_length=128
+
 model = AutoModel.from_pretrained("sentence-transformers/all-MiniLM-L12-v2")
 model.eval()
 
-sample = torch.randint(low=0, high=1, size=(1,128))
-input = (sample, sample, sample)
+inputs = {"input_ids": torch.ones(1, max_seq_length, dtype=torch.int64),
+          "attention_mask": torch.ones(1, max_seq_length, dtype=torch.int64),
+          "token_type_ids": torch.ones(1, max_seq_length, dtype=torch.int64)}
+
+symbolic_names = {0: 'batch_size', 1: 'max_seq_len'}
 
-torch.onnx.export(model, input, 'pytorch_model.onnx', export_params=True)
+torch.onnx.export(model, args=tuple(inputs.values()), f='pytorch_model.onnx', export_params=True,
+                  input_names=['input_ids', 'attention_mask', 'token_type_ids'], output_names=['output'],
+                  dynamic_axes={'input_ids': symbolic_names, 'attention_mask': symbolic_names, 'token_type_ids': symbolic_names})
 
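For reference, a minimal sketch (not part of this commit) of running the exported model with onnxruntime; it assumes onnxruntime and transformers are installed, and the feed keys match the input_names set in convert.py above.

import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("sentence-transformers/all-MiniLM-L12-v2")
session = ort.InferenceSession("pytorch_model.onnx", providers=["CPUExecutionProvider"])

# Tokenize a sample sentence; cast to int64 to match the dtype used during export.
encoded = tokenizer("hello world", return_tensors="np")
feed = {name: encoded[name].astype(np.int64)
        for name in ("input_ids", "attention_mask", "token_type_ids")}

# The first result corresponds to the 'output' name set during export:
# token-level embeddings of shape (batch_size, seq_len, hidden_size).
outputs = session.run(None, feed)
print(outputs[0].shape)

Because dynamic_axes marks batch_size and max_seq_len as symbolic, the input does not need to be padded to 128 tokens here.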
pytorch_model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a504d31e57702a74fecf5c2d7df67fa9fcb5635baa2cd335ac74bc2cbdefcbdd
-size 133597269
+oid sha256:6bdde0dabded044bf81d8945dc92bd4c15328a056c893cb57ef7bbc5cae80dac
+size 133694646
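A quick check (not part of this commit) that a locally downloaded pytorch_model.onnx matches the new LFS pointer; the expected oid and size are copied from the diff above, and the local path is an assumption.

import hashlib
import os

expected_oid = "6bdde0dabded044bf81d8945dc92bd4c15328a056c893cb57ef7bbc5cae80dac"
expected_size = 133694646
path = "pytorch_model.onnx"  # assumed path of the downloaded file

# Compare the on-disk size and SHA-256 digest against the LFS pointer fields.
assert os.path.getsize(path) == expected_size, "size mismatch"
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.onnx matches the LFS pointer")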