KoichiYasuoka
committed on
Commit
•
de88226
1
Parent(s):
ebffe8b
usage improved
Browse files
README.md
CHANGED
@@ -26,7 +26,7 @@ from transformers import AutoTokenizer,AutoModelForTokenClassification
|
|
26 |
tokenizer=AutoTokenizer.from_pretrained("KoichiYasuoka/bert-large-japanese-upos")
|
27 |
model=AutoModelForTokenClassification.from_pretrained("KoichiYasuoka/bert-large-japanese-upos")
|
28 |
s="国境の長いトンネルを抜けると雪国であった。"
|
29 |
-
p=[model.config.id2label[q] for q in torch.argmax(model(tokenizer.encode(s,return_tensors="pt"))[
|
30 |
print(list(zip(s,p)))
|
31 |
```
|
32 |
|
|
|
26 |
tokenizer=AutoTokenizer.from_pretrained("KoichiYasuoka/bert-large-japanese-upos")
|
27 |
model=AutoModelForTokenClassification.from_pretrained("KoichiYasuoka/bert-large-japanese-upos")
|
28 |
s="国境の長いトンネルを抜けると雪国であった。"
|
29 |
+
p=[model.config.id2label[q] for q in torch.argmax(model(tokenizer.encode(s,return_tensors="pt"))["logits"],dim=2)[0].tolist()[1:-1]]
|
30 |
print(list(zip(s,p)))
|
31 |
```
|
32 |
|