de-francophones committed
Commit 351c76b · verified · 1 Parent(s): 0cc2e32

873c28caab157cbde081ab660a6efa09eb2d7252e2e599d91751e47ec3591475

This view is limited to 50 files because it contains too many changes. See the raw diff for the complete change set.
Files changed (50)
  1. .gitattributes +6 -0
  2. sysoutputs/grew/02/pertreebank/pl_pud-ud-test.eval.log +17 -0
  3. sysoutputs/grew/02/pertreebank/rest.conllu +0 -0
  4. sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-gold.nen.conllu +3 -0
  5. sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.conllu +3 -0
  6. sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.nen.conllu +3 -0
  7. sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test.eval.log +17 -0
  8. sysoutputs/grew/02/pertreebank/sk_snk-ud-test-gold.nen.conllu +0 -0
  9. sysoutputs/grew/02/pertreebank/sk_snk-ud-test-sys.conllu +0 -0
  10. sysoutputs/grew/02/pertreebank/sk_snk-ud-test-sys.nen.conllu +0 -0
  11. sysoutputs/grew/02/pertreebank/sk_snk-ud-test.eval.log +17 -0
  12. sysoutputs/grew/02/pertreebank/sv_pud-ud-test-gold.nen.conllu +0 -0
  13. sysoutputs/grew/02/pertreebank/sv_pud-ud-test-sys.conllu +0 -0
  14. sysoutputs/grew/02/pertreebank/sv_pud-ud-test-sys.nen.conllu +0 -0
  15. sysoutputs/grew/02/pertreebank/sv_pud-ud-test.eval.log +17 -0
  16. sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-gold.nen.conllu +0 -0
  17. sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-sys.conllu +0 -0
  18. sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-sys.nen.conllu +0 -0
  19. sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test.eval.log +17 -0
  20. sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-gold.nen.conllu +0 -0
  21. sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-sys.conllu +0 -0
  22. sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-sys.nen.conllu +0 -0
  23. sysoutputs/grew/02/pertreebank/ta_ttb-ud-test.eval.log +17 -0
  24. sysoutputs/grew/02/pertreebank/uk_iu-ud-test-gold.nen.conllu +0 -0
  25. sysoutputs/grew/02/pertreebank/uk_iu-ud-test-sys.conllu +0 -0
  26. sysoutputs/grew/02/pertreebank/uk_iu-ud-test-sys.nen.conllu +0 -0
  27. sysoutputs/grew/02/pertreebank/uk_iu-ud-test.eval.log +17 -0
  28. sysoutputs/grew/02/pl.conllu +0 -0
  29. sysoutputs/grew/02/pl.eval.log +17 -0
  30. sysoutputs/grew/02/pl.gold.nen.conllu +0 -0
  31. sysoutputs/grew/02/pl.nen.conllu +0 -0
  32. sysoutputs/grew/02/ru.conllu +3 -0
  33. sysoutputs/grew/02/ru.eval.log +17 -0
  34. sysoutputs/grew/02/ru.gold.nen.conllu +3 -0
  35. sysoutputs/grew/02/ru.nen.conllu +3 -0
  36. sysoutputs/grew/02/sk.conllu +0 -0
  37. sysoutputs/grew/02/sk.eval.log +17 -0
  38. sysoutputs/grew/02/sk.gold.nen.conllu +0 -0
  39. sysoutputs/grew/02/sk.nen.conllu +0 -0
  40. sysoutputs/grew/02/sv.conllu +0 -0
  41. sysoutputs/grew/02/sv.eval.log +17 -0
  42. sysoutputs/grew/02/sv.gold.nen.conllu +0 -0
  43. sysoutputs/grew/02/sv.nen.conllu +0 -0
  44. sysoutputs/grew/02/ta.conllu +0 -0
  45. sysoutputs/grew/02/ta.eval.log +17 -0
  46. sysoutputs/grew/02/ta.gold.nen.conllu +0 -0
  47. sysoutputs/grew/02/ta.nen.conllu +0 -0
  48. sysoutputs/grew/02/uk.conllu +0 -0
  49. sysoutputs/grew/02/uk.eval.log +17 -0
  50. sysoutputs/grew/02/uk.gold.nen.conllu +0 -0
.gitattributes CHANGED
@@ -102,3 +102,9 @@ sysoutputs/grew/02/cs.nen.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/grew/02/pertreebank/cs_pdt-ud-test-gold.nen.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/grew/02/pertreebank/cs_pdt-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/grew/02/pertreebank/cs_pdt-ud-test-sys.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-gold.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/ru.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/ru.gold.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/grew/02/ru.nen.conllu filter=lfs diff=lfs merge=lfs -text
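The attribute lines above route the newly added .conllu outputs through Git LFS (filter=lfs diff=lfs merge=lfs -text), so the repository itself stores only small pointer files. As an illustrative sketch (not part of this commit), the paths tracked for LFS in a checkout could be listed like this; the script and the assumption that patterns are literal paths (no glob handling) are mine:

```python
# Minimal sketch: list paths marked with filter=lfs in a .gitattributes file.
# Assumes entries are literal paths, as in the lines above (no glob handling).
from pathlib import Path


def lfs_tracked_paths(gitattributes: str = ".gitattributes") -> list[str]:
    tracked = []
    for line in Path(gitattributes).read_text(encoding="utf-8").splitlines():
        parts = line.split()
        # Each entry is "<pattern> <attr> <attr> ...", e.g. "... filter=lfs diff=lfs merge=lfs -text"
        if len(parts) > 1 and "filter=lfs" in parts[1:]:
            tracked.append(parts[0])
    return tracked


if __name__ == "__main__":
    for path in lfs_tracked_paths():
        print(path)
```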
sysoutputs/grew/02/pertreebank/pl_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.71 | 99.87 | 99.79 |
+ Sentences | 95.57 | 97.00 | 96.28 |
+ Words | 99.67 | 99.86 | 99.77 |
+ UPOS | 97.99 | 98.17 | 98.08 | 98.31
+ XPOS | 94.30 | 94.47 | 94.38 | 94.60
+ UFeats | 94.42 | 94.59 | 94.51 | 94.73
+ AllTags | 93.19 | 93.36 | 93.27 | 93.49
+ Lemmas | 97.17 | 97.35 | 97.26 | 97.49
+ UAS | 93.80 | 93.97 | 93.88 | 94.10
+ LAS | 91.47 | 91.64 | 91.55 | 91.77
+ ELAS | 79.48 | 79.53 | 79.51 | 85.59
+ EULAS | 90.36 | 90.42 | 90.39 | 97.30
+ CLAS | 89.45 | 89.51 | 89.48 | 89.70
+ MLAS | 82.48 | 82.54 | 82.51 | 82.71
+ BLEX | 86.56 | 86.62 | 86.59 | 86.80
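The eval.log files added in this commit all follow the layout of the table above: one row per metric with precision, recall, F1, and, where applicable, aligned accuracy. The F1 column is the harmonic mean of the precision and recall columns; the sketch below re-checks that relation for a log in this format (tolerating the leading "+ " diff markers and using a 0.01 rounding tolerance are my assumptions):

```python
# Sketch: verify F1 = 2*P*R / (P+R) for each metric row of an eval.log in the format above.
import sys


def check_f1(path: str) -> None:
    for raw in open(path, encoding="utf-8"):
        line = raw.lstrip("+ ").rstrip()                 # tolerate a leading diff marker
        cells = [c.strip() for c in line.split("|")]
        if len(cells) < 4 or cells[0] in ("", "Metric"):
            continue
        try:
            p, r, f1 = float(cells[1]), float(cells[2]), float(cells[3])
        except ValueError:
            continue                                     # separator or malformed row
        expected = 2 * p * r / (p + r) if (p + r) else 0.0
        status = "ok" if abs(expected - f1) < 0.01 else f"expected {expected:.2f}"
        print(f"{cells[0]:<10} F1={f1:6.2f}  ({status})")


if __name__ == "__main__":
    check_f1(sys.argv[1])  # e.g. sysoutputs/grew/02/pertreebank/pl_pud-ud-test.eval.log
```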
sysoutputs/grew/02/pertreebank/rest.conllu ADDED
File without changes
sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-gold.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fff205aa03254693f66a46edcd65dd43490c4b3dbe7729467c36afe8d70cac2a
+ size 11142792
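Because of the LFS rules above, large .conllu files such as ru_syntagrus-ud-test-gold.nen.conllu appear in this diff only as pointer stubs: a spec version, a sha256 oid, and the payload size in bytes. A small illustrative sketch of how such a pointer could be parsed and checked against a locally fetched copy of the data (the function names and example paths are hypothetical):

```python
# Sketch: parse a Git LFS pointer file (version / oid / size) and verify a local payload against it.
import hashlib


def read_pointer(pointer_path: str) -> dict[str, str]:
    fields = {}
    for line in open(pointer_path, encoding="utf-8"):
        key, _, value = line.strip().partition(" ")
        fields[key] = value                 # expected keys: version, oid ("sha256:..."), size
    return fields


def payload_matches(pointer_path: str, payload_path: str) -> bool:
    fields = read_pointer(pointer_path)
    with open(payload_path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return fields.get("oid") == f"sha256:{digest}"


# Hypothetical usage: the pointer as stored in git vs. the payload fetched via `git lfs pull`.
# payload_matches("ru_syntagrus-ud-test-gold.nen.conllu", "/tmp/ru_syntagrus-ud-test-gold.nen.conllu")
```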
sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c41268428b29363958382091f46555ae0ef2d36b5433dce7d24e02b73fa3f46
+ size 13713672
sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test-sys.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec4fdaad197e20c7edb3249219690604c8fc966874ee9d7ac33483db1199b967
+ size 13713743
sysoutputs/grew/02/pertreebank/ru_syntagrus-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.55 | 99.64 | 99.60 |
+ Sentences | 98.77 | 98.83 | 98.80 |
+ Words | 99.55 | 99.64 | 99.60 |
+ UPOS | 98.82 | 98.90 | 98.86 | 99.26
+ XPOS | 99.55 | 99.64 | 99.60 | 100.00
+ UFeats | 88.93 | 89.01 | 88.97 | 89.33
+ AllTags | 88.72 | 88.80 | 88.76 | 89.12
+ Lemmas | 98.29 | 98.37 | 98.33 | 98.73
+ UAS | 94.18 | 94.26 | 94.22 | 94.60
+ LAS | 92.93 | 93.02 | 92.97 | 93.35
+ ELAS | 90.53 | 90.60 | 90.56 | 96.22
+ EULAS | 91.56 | 91.63 | 91.59 | 97.31
+ CLAS | 91.84 | 91.88 | 91.86 | 92.28
+ MLAS | 76.90 | 76.93 | 76.91 | 77.26
+ BLEX | 90.33 | 90.37 | 90.35 | 90.77
sysoutputs/grew/02/pertreebank/sk_snk-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sk_snk-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sk_snk-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sk_snk-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 100.00 | 100.00 | 100.00 |
+ Sentences | 86.86 | 83.51 | 85.15 |
+ Words | 99.99 | 99.98 | 99.99 |
+ UPOS | 97.68 | 97.67 | 97.67 | 97.68
+ XPOS | 90.15 | 90.14 | 90.14 | 90.15
+ UFeats | 93.43 | 93.42 | 93.42 | 93.43
+ AllTags | 89.20 | 89.19 | 89.19 | 89.20
+ Lemmas | 96.47 | 96.47 | 96.47 | 96.48
+ UAS | 92.27 | 92.27 | 92.27 | 92.28
+ LAS | 90.45 | 90.44 | 90.45 | 90.46
+ ELAS | 86.95 | 86.89 | 86.92 | 93.43
+ EULAS | 88.76 | 88.70 | 88.73 | 95.38
+ CLAS | 89.40 | 89.39 | 89.40 | 89.40
+ MLAS | 80.74 | 80.73 | 80.74 | 80.74
+ BLEX | 85.30 | 85.29 | 85.29 | 85.30
sysoutputs/grew/02/pertreebank/sv_pud-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_pud-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.81 | 98.01 | 98.41 |
+ Sentences | 88.44 | 92.60 | 90.47 |
+ Words | 98.81 | 98.01 | 98.41 |
+ UPOS | 96.40 | 95.61 | 96.00 | 97.56
+ XPOS | 94.13 | 93.36 | 93.74 | 95.26
+ UFeats | 80.02 | 79.37 | 79.70 | 80.99
+ AllTags | 78.47 | 77.83 | 78.15 | 79.41
+ Lemmas | 89.04 | 88.32 | 88.68 | 90.11
+ UAS | 86.69 | 85.99 | 86.34 | 87.74
+ LAS | 83.62 | 82.94 | 83.27 | 84.62
+ ELAS | 78.68 | 78.25 | 78.47 | 84.22
+ EULAS | 82.61 | 82.16 | 82.39 | 88.43
+ CLAS | 82.16 | 82.44 | 82.30 | 83.68
+ MLAS | 56.85 | 57.05 | 56.95 | 57.90
+ BLEX | 72.55 | 72.81 | 72.68 | 73.90
sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/sv_talbanken-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.87 | 99.92 | 99.89 |
+ Sentences | 96.45 | 95.82 | 96.13 |
+ Words | 99.87 | 99.92 | 99.89 |
+ UPOS | 98.38 | 98.43 | 98.41 | 98.51
+ XPOS | 97.24 | 97.29 | 97.26 | 97.37
+ UFeats | 97.31 | 97.36 | 97.33 | 97.44
+ AllTags | 96.43 | 96.49 | 96.46 | 96.56
+ Lemmas | 98.17 | 98.22 | 98.19 | 98.30
+ UAS | 91.96 | 92.01 | 91.99 | 92.08
+ LAS | 89.66 | 89.70 | 89.68 | 89.77
+ ELAS | 84.22 | 84.54 | 84.38 | 89.97
+ EULAS | 88.36 | 88.70 | 88.53 | 94.40
+ CLAS | 87.94 | 87.64 | 87.79 | 87.75
+ MLAS | 83.77 | 83.48 | 83.63 | 83.59
+ BLEX | 85.97 | 85.68 | 85.83 | 85.79
sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/ta_ttb-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/ta_ttb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.88 | 99.44 | 99.16 |
+ Sentences | 96.72 | 98.33 | 97.52 |
+ Words | 94.86 | 93.67 | 94.26 |
+ UPOS | 84.73 | 83.66 | 84.19 | 89.32
+ XPOS | 83.20 | 82.15 | 82.67 | 87.71
+ UFeats | 82.79 | 81.75 | 82.27 | 87.28
+ AllTags | 76.12 | 75.16 | 75.64 | 80.25
+ Lemmas | 89.51 | 88.39 | 88.95 | 94.36
+ UAS | 69.70 | 68.83 | 69.26 | 73.48
+ LAS | 61.46 | 60.68 | 61.07 | 64.79
+ ELAS | 59.55 | 57.77 | 58.65 | 64.25
+ EULAS | 61.09 | 59.27 | 60.17 | 65.92
+ CLAS | 57.79 | 57.99 | 57.89 | 62.20
+ MLAS | 48.74 | 48.91 | 48.82 | 52.45
+ BLEX | 54.19 | 54.37 | 54.28 | 58.32
sysoutputs/grew/02/pertreebank/uk_iu-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/uk_iu-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/uk_iu-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pertreebank/uk_iu-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.84 | 99.85 | 99.85 |
+ Sentences | 95.91 | 97.31 | 96.61 |
+ Words | 99.80 | 99.82 | 99.81 |
+ UPOS | 97.88 | 97.90 | 97.89 | 98.07
+ XPOS | 94.21 | 94.23 | 94.22 | 94.40
+ UFeats | 94.18 | 94.19 | 94.18 | 94.36
+ AllTags | 93.12 | 93.14 | 93.13 | 93.31
+ Lemmas | 97.38 | 97.39 | 97.39 | 97.57
+ UAS | 90.58 | 90.59 | 90.59 | 90.76
+ LAS | 88.24 | 88.25 | 88.24 | 88.41
+ ELAS | 84.12 | 83.68 | 83.90 | 90.77
+ EULAS | 86.09 | 85.64 | 85.87 | 92.90
+ CLAS | 85.97 | 85.64 | 85.80 | 85.81
+ MLAS | 78.91 | 78.60 | 78.76 | 78.76
+ BLEX | 83.35 | 83.03 | 83.19 | 83.20
sysoutputs/grew/02/pl.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pl.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.51 | 99.29 | 99.40 |
+ Sentences | 97.70 | 97.33 | 97.52 |
+ Words | 99.77 | 99.88 | 99.83 |
+ UPOS | 98.44 | 98.55 | 98.50 | 98.67
+ XPOS | 92.99 | 93.09 | 93.04 | 93.20
+ UFeats | 90.75 | 90.85 | 90.80 | 90.96
+ AllTags | 87.66 | 87.75 | 87.70 | 87.85
+ Lemmas | 97.82 | 97.92 | 97.87 | 98.04
+ UAS | 93.40 | 93.50 | 93.45 | 93.61
+ LAS | 90.78 | 90.88 | 90.83 | 90.99
+ ELAS | 78.13 | 78.28 | 78.20 | 83.65
+ EULAS | 89.67 | 89.84 | 89.76 | 96.01
+ CLAS | 89.14 | 89.23 | 89.19 | 89.37
+ MLAS | 77.52 | 77.59 | 77.55 | 77.71
+ BLEX | 86.92 | 87.00 | 86.96 | 87.14
sysoutputs/grew/02/pl.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/pl.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/ru.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c41268428b29363958382091f46555ae0ef2d36b5433dce7d24e02b73fa3f46
+ size 13713672
sysoutputs/grew/02/ru.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.55 | 99.64 | 99.60 |
+ Sentences | 98.77 | 98.83 | 98.80 |
+ Words | 99.55 | 99.64 | 99.60 |
+ UPOS | 98.82 | 98.90 | 98.86 | 99.26
+ XPOS | 99.55 | 99.64 | 99.60 | 100.00
+ UFeats | 88.93 | 89.01 | 88.97 | 89.33
+ AllTags | 88.72 | 88.80 | 88.76 | 89.12
+ Lemmas | 98.29 | 98.37 | 98.33 | 98.73
+ UAS | 94.18 | 94.26 | 94.22 | 94.60
+ LAS | 92.93 | 93.02 | 92.97 | 93.35
+ ELAS | 90.53 | 90.60 | 90.56 | 96.22
+ EULAS | 91.56 | 91.63 | 91.59 | 97.31
+ CLAS | 91.84 | 91.88 | 91.86 | 92.28
+ MLAS | 76.90 | 76.93 | 76.91 | 77.26
+ BLEX | 90.33 | 90.37 | 90.35 | 90.77
sysoutputs/grew/02/ru.gold.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aced9757fb05aeea85d3d976438202bfb86412e9289a7c05fbace6a0fe3e6108
+ size 11142801
sysoutputs/grew/02/ru.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec4fdaad197e20c7edb3249219690604c8fc966874ee9d7ac33483db1199b967
+ size 13713743
sysoutputs/grew/02/sk.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/sk.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 100.00 | 100.00 | 100.00 |
+ Sentences | 86.86 | 83.51 | 85.15 |
+ Words | 99.99 | 99.98 | 99.99 |
+ UPOS | 97.68 | 97.67 | 97.67 | 97.68
+ XPOS | 90.15 | 90.14 | 90.14 | 90.15
+ UFeats | 93.43 | 93.42 | 93.42 | 93.43
+ AllTags | 89.20 | 89.19 | 89.19 | 89.20
+ Lemmas | 96.47 | 96.47 | 96.47 | 96.48
+ UAS | 92.27 | 92.27 | 92.27 | 92.28
+ LAS | 90.45 | 90.44 | 90.45 | 90.46
+ ELAS | 86.95 | 86.89 | 86.92 | 93.43
+ EULAS | 88.76 | 88.70 | 88.73 | 95.38
+ CLAS | 89.40 | 89.39 | 89.40 | 89.40
+ MLAS | 80.74 | 80.73 | 80.74 | 80.74
+ BLEX | 85.30 | 85.29 | 85.29 | 85.30
sysoutputs/grew/02/sk.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/sk.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/sv.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/sv.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.36 | 99.00 | 99.18 |
+ Sentences | 92.74 | 94.37 | 93.54 |
+ Words | 99.36 | 99.00 | 99.18 |
+ UPOS | 97.43 | 97.07 | 97.25 | 98.05
+ XPOS | 95.74 | 95.39 | 95.57 | 96.36
+ UFeats | 88.99 | 88.66 | 88.82 | 89.56
+ AllTags | 87.79 | 87.47 | 87.63 | 88.35
+ Lemmas | 93.77 | 93.43 | 93.60 | 94.38
+ UAS | 89.42 | 89.10 | 89.26 | 90.00
+ LAS | 86.75 | 86.43 | 86.59 | 87.31
+ ELAS | 81.56 | 81.51 | 81.54 | 87.21
+ EULAS | 85.61 | 85.55 | 85.58 | 91.54
+ CLAS | 85.14 | 85.13 | 85.14 | 85.80
+ MLAS | 70.74 | 70.74 | 70.74 | 71.29
+ BLEX | 79.48 | 79.47 | 79.48 | 80.10
sysoutputs/grew/02/sv.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/sv.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/ta.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/ta.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.88 | 99.44 | 99.16 |
+ Sentences | 96.72 | 98.33 | 97.52 |
+ Words | 94.86 | 93.67 | 94.26 |
+ UPOS | 84.73 | 83.66 | 84.19 | 89.32
+ XPOS | 83.20 | 82.15 | 82.67 | 87.71
+ UFeats | 82.79 | 81.75 | 82.27 | 87.28
+ AllTags | 76.12 | 75.16 | 75.64 | 80.25
+ Lemmas | 89.51 | 88.39 | 88.95 | 94.36
+ UAS | 69.70 | 68.83 | 69.26 | 73.48
+ LAS | 61.46 | 60.68 | 61.07 | 64.79
+ ELAS | 59.62 | 57.78 | 58.69 | 64.36
+ EULAS | 61.16 | 59.28 | 60.21 | 66.02
+ CLAS | 57.79 | 57.99 | 57.89 | 62.20
+ MLAS | 48.74 | 48.91 | 48.82 | 52.45
+ BLEX | 54.19 | 54.37 | 54.28 | 58.32
sysoutputs/grew/02/ta.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/ta.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/uk.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/grew/02/uk.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.84 | 99.85 | 99.85 |
+ Sentences | 95.91 | 97.31 | 96.61 |
+ Words | 99.80 | 99.82 | 99.81 |
+ UPOS | 97.88 | 97.90 | 97.89 | 98.07
+ XPOS | 94.21 | 94.23 | 94.22 | 94.40
+ UFeats | 94.18 | 94.19 | 94.18 | 94.36
+ AllTags | 93.12 | 93.14 | 93.13 | 93.31
+ Lemmas | 97.38 | 97.39 | 97.39 | 97.57
+ UAS | 90.58 | 90.59 | 90.59 | 90.76
+ LAS | 88.24 | 88.25 | 88.24 | 88.41
+ ELAS | 84.12 | 83.68 | 83.90 | 90.77
+ EULAS | 86.09 | 85.64 | 85.87 | 92.90
+ CLAS | 85.97 | 85.64 | 85.80 | 85.81
+ MLAS | 78.91 | 78.60 | 78.76 | 78.76
+ BLEX | 83.35 | 83.03 | 83.19 | 83.20
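With the per-language logs above (pl, ru, sk, sv, ta, uk) plus the per-treebank logs under pertreebank/, a one-metric summary can be pulled together mechanically from the directory layout used in this commit. A hedged sketch; picking LAS and the glob pattern are my assumptions:

```python
# Sketch: collect the F1 value of one metric (here LAS) from every eval.log under sysoutputs/grew/02/.
import glob


def metric_f1(log_path: str, metric: str = "LAS") -> float | None:
    for line in open(log_path, encoding="utf-8"):
        cells = [c.strip() for c in line.split("|")]
        if len(cells) >= 4 and cells[0] == metric:
            return float(cells[3])          # column order: metric, precision, recall, F1, ...
    return None


if __name__ == "__main__":
    for log in sorted(glob.glob("sysoutputs/grew/02/**/*.eval.log", recursive=True)):
        print(f"{log}: LAS F1 = {metric_f1(log)}")
```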
sysoutputs/grew/02/uk.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff