File size: 433 Bytes
d8760c5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
"""Download the pretrained BERT-large (whole-word-masking) masked-LM
weights from the Hugging Face hub and save a local copy for offline use."""
from transformers import (BertForMaskedLM, BertTokenizer)

# Hub identifier of the checkpoint to fetch; also reused as the local
# output directory name so the two can never drift apart.
modelpath = 'bert-large-uncased-whole-word-masking'
model = BertForMaskedLM.from_pretrained(modelpath)

# Derive the target directory from modelpath instead of repeating the
# literal string (the original hard-coded it twice).
model.save_pretrained('./' + modelpath)




# from transformers import (BertForMaskedLM, BertTokenizer)

# modelpath = 'bert-large-uncased'
# model = BertForMaskedLM.from_pretrained(modelpath)

# model.save_pretrained('./bert-large-uncased')