de-francophones committed on
Commit
521241a
1 Parent(s): 51d9121

29f15962a946364ba9cb768b9ea51df2153f7987ef088b42214ba25a68dca42c

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +3 -0
  2. sysoutputs/fastparse/v1_2021/fr.gold.nen.conllu +0 -0
  3. sysoutputs/fastparse/v1_2021/fr.nen.conllu +0 -0
  4. sysoutputs/fastparse/v1_2021/it.conllu +0 -0
  5. sysoutputs/fastparse/v1_2021/it.eval.log +17 -0
  6. sysoutputs/fastparse/v1_2021/it.gold.nen.conllu +0 -0
  7. sysoutputs/fastparse/v1_2021/it.nen.conllu +0 -0
  8. sysoutputs/fastparse/v1_2021/lt.conllu +0 -0
  9. sysoutputs/fastparse/v1_2021/lt.eval.log +17 -0
  10. sysoutputs/fastparse/v1_2021/lt.gold.nen.conllu +0 -0
  11. sysoutputs/fastparse/v1_2021/lt.nen.conllu +0 -0
  12. sysoutputs/fastparse/v1_2021/lv.conllu +0 -0
  13. sysoutputs/fastparse/v1_2021/lv.eval.log +17 -0
  14. sysoutputs/fastparse/v1_2021/lv.gold.nen.conllu +0 -0
  15. sysoutputs/fastparse/v1_2021/lv.nen.conllu +0 -0
  16. sysoutputs/fastparse/v1_2021/metadata.txt +9 -0
  17. sysoutputs/fastparse/v1_2021/nl.conllu +0 -0
  18. sysoutputs/fastparse/v1_2021/nl.eval.log +17 -0
  19. sysoutputs/fastparse/v1_2021/nl.gold.nen.conllu +0 -0
  20. sysoutputs/fastparse/v1_2021/nl.nen.conllu +0 -0
  21. sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-gold.nen.conllu +0 -0
  22. sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-sys.conllu +0 -0
  23. sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-sys.nen.conllu +0 -0
  24. sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test.eval.log +17 -0
  25. sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-gold.nen.conllu +0 -0
  26. sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-sys.conllu +0 -0
  27. sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-sys.nen.conllu +0 -0
  28. sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test.eval.log +17 -0
  29. sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-gold.nen.conllu +0 -0
  30. sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-sys.conllu +0 -0
  31. sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-sys.nen.conllu +0 -0
  32. sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test.eval.log +17 -0
  33. sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-gold.nen.conllu +0 -0
  34. sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-sys.conllu +0 -0
  35. sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-sys.nen.conllu +0 -0
  36. sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test.eval.log +17 -0
  37. sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-gold.nen.conllu +3 -0
  38. sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.conllu +3 -0
  39. sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.nen.conllu +3 -0
  40. sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test.eval.log +17 -0
  41. sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-gold.nen.conllu +0 -0
  42. sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-sys.conllu +0 -0
  43. sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-sys.nen.conllu +0 -0
  44. sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test.eval.log +17 -0
  45. sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-gold.nen.conllu +0 -0
  46. sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-sys.conllu +0 -0
  47. sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-sys.nen.conllu +0 -0
  48. sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test.eval.log +17 -0
  49. sysoutputs/fastparse/v1_2021/pertreebank/en_gum-ud-test-gold.nen.conllu +0 -0
  50. sysoutputs/fastparse/v1_2021/pertreebank/en_gum-ud-test-sys.conllu +0 -0
.gitattributes CHANGED
@@ -87,3 +87,6 @@ sysoutputs/dcu_epfl/primary/ru.nen.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/fastparse/v1_2021/cs.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/fastparse/v1_2021/cs.gold.nen.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/fastparse/v1_2021/cs.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-gold.nen.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.nen.conllu filter=lfs diff=lfs merge=lfs -text
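Note: the three entries added here are Git LFS tracking rules, so the new cs_pdt files are stored as LFS pointers rather than plain blobs. As a hedged illustration only (this script is not part of the commit), rules of this form are what `git lfs track` writes into .gitattributes; a minimal Python sketch, assuming `git` and `git-lfs` are installed and the command is run from the repository root:

```python
# Hedged sketch (not part of this commit): add Git LFS tracking rules like the
# three lines above.
import subprocess

patterns = [
    "sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-gold.nen.conllu",
    "sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.conllu",
    "sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.nen.conllu",
]

for pattern in patterns:
    # `git lfs track <pattern>` appends
    # "<pattern> filter=lfs diff=lfs merge=lfs -text" to .gitattributes.
    subprocess.run(["git", "lfs", "track", pattern], check=True)
```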
sysoutputs/fastparse/v1_2021/fr.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/fr.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/it.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/it.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.91 | 99.95 | 99.93 |
+ Sentences | 98.76 | 98.76 | 98.76 |
+ Words | 99.82 | 99.87 | 99.84 |
+ UPOS | 98.49 | 98.54 | 98.52 | 98.67
+ XPOS | 98.42 | 98.46 | 98.44 | 98.60
+ UFeats | 98.21 | 98.25 | 98.23 | 98.39
+ AllTags | 97.64 | 97.69 | 97.66 | 97.82
+ Lemmas | 98.64 | 98.68 | 98.66 | 98.82
+ UAS | 88.14 | 88.18 | 88.16 | 88.30
+ LAS | 84.90 | 84.94 | 84.92 | 85.05
+ ELAS | 75.38 | 81.50 | 78.32 | 84.97
+ EULAS | 79.03 | 85.44 | 82.11 | 89.08
+ CLAS | 77.17 | 77.13 | 77.15 | 77.26
+ MLAS | 74.00 | 73.95 | 73.97 | 74.08
+ BLEX | 75.81 | 75.76 | 75.79 | 75.90
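All of the *.eval.log files added in this commit share this pipe-delimited layout from the UD evaluation tool, where F1 is the harmonic mean of precision and recall (for the ELAS row above, 2 * 75.38 * 81.50 / (75.38 + 81.50) ≈ 78.32). A minimal parsing sketch, assuming the log looks exactly like the table above; `parse_eval_log` is an illustrative name, not a script shipped in this repository:

```python
# Hedged sketch: parse an eval.log table like the one above into
# {metric: {column: value}}. parse_eval_log is an illustrative helper,
# not a script from this repository.
def parse_eval_log(path):
    columns = ["Precision", "Recall", "F1 Score", "AligndAcc"]
    scores = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            if "|" not in line or line.lstrip().startswith("Metric"):
                continue  # skip the header and the separator row
            cells = [c.strip() for c in line.split("|")]
            metric, values = cells[0], cells[1:]
            # Empty cells (e.g. AligndAcc for Tokens/Sentences/Words) are skipped.
            scores[metric] = {c: float(v) for c, v in zip(columns, values) if v}
    return scores

# Example use: parse_eval_log("it.eval.log")["LAS"]["F1 Score"] -> 84.92
# F1 is 2*P*R/(P+R), e.g. 2*75.38*81.50/(75.38+81.50) ≈ 78.32 for ELAS above.
```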
sysoutputs/fastparse/v1_2021/it.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/it.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lt.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lt.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.91 | 99.91 | 99.91 |
+ Sentences | 90.03 | 85.82 | 87.87 |
+ Words | 99.91 | 99.91 | 99.91 |
+ UPOS | 95.97 | 95.97 | 95.97 | 96.06
+ XPOS | 90.37 | 90.37 | 90.37 | 90.45
+ UFeats | 91.07 | 91.07 | 91.07 | 91.15
+ AllTags | 89.41 | 89.41 | 89.41 | 89.49
+ Lemmas | 93.61 | 93.61 | 93.61 | 93.70
+ UAS | 61.39 | 61.39 | 61.39 | 61.44
+ LAS | 53.55 | 53.55 | 53.55 | 53.60
+ ELAS | 41.19 | 58.28 | 48.27 | 64.36
+ EULAS | 44.82 | 63.42 | 52.52 | 70.04
+ CLAS | 47.76 | 47.59 | 47.68 | 47.62
+ MLAS | 41.78 | 41.63 | 41.70 | 41.66
+ BLEX | 44.74 | 44.58 | 44.66 | 44.61
sysoutputs/fastparse/v1_2021/lt.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lt.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lv.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lv.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.40 | 99.26 | 99.33 |
+ Sentences | 98.42 | 99.07 | 98.74 |
+ Words | 99.40 | 99.26 | 99.33 |
+ UPOS | 96.35 | 96.22 | 96.28 | 96.94
+ XPOS | 89.70 | 89.58 | 89.64 | 90.24
+ UFeats | 93.85 | 93.73 | 93.79 | 94.42
+ AllTags | 88.90 | 88.78 | 88.84 | 89.44
+ Lemmas | 95.88 | 95.75 | 95.81 | 96.46
+ UAS | 78.43 | 78.32 | 78.37 | 78.90
+ LAS | 72.08 | 71.98 | 72.03 | 72.51
+ ELAS | 61.46 | 72.26 | 66.43 | 78.54
+ EULAS | 62.56 | 73.56 | 67.62 | 79.95
+ CLAS | 67.76 | 67.86 | 67.81 | 68.58
+ MLAS | 60.42 | 60.52 | 60.47 | 61.16
+ BLEX | 65.21 | 65.31 | 65.26 | 66.00
sysoutputs/fastparse/v1_2021/lv.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/lv.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/metadata.txt ADDED
@@ -0,0 +1,9 @@
+ timestamp=2021-05-11-19-01-06
+ remoteaddr=88.148.91.10
+ team=fastparse
+ submid=v1_2021
+ dataset=test
+ affiliation=Universidade da Coruña
+ name=Mark Anderson
+ email=m.anderson@udc.es
+ filename=fastparse-v1_2021.tgz
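metadata.txt is a flat list of key=value pairs describing the submission. A minimal reading sketch (the helper name `read_metadata` is illustrative, not from this repository):

```python
# Hedged sketch: read a submission metadata.txt of key=value lines into a dict.
def read_metadata(path):
    meta = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line and "=" in line:
                key, _, value = line.partition("=")
                meta[key] = value
    return meta

# Example use: read_metadata("metadata.txt")["team"] -> "fastparse"
```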
sysoutputs/fastparse/v1_2021/nl.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/nl.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.76 | 99.73 | 99.74 |
+ Sentences | 78.77 | 61.79 | 69.26 |
+ Words | 99.76 | 99.73 | 99.74 |
+ UPOS | 96.80 | 96.77 | 96.79 | 97.04
+ XPOS | 95.30 | 95.27 | 95.29 | 95.53
+ UFeats | 96.46 | 96.43 | 96.44 | 96.69
+ AllTags | 94.63 | 94.60 | 94.61 | 94.86
+ Lemmas | 97.07 | 97.04 | 97.06 | 97.31
+ UAS | 79.87 | 79.84 | 79.85 | 80.06
+ LAS | 74.38 | 74.36 | 74.37 | 74.56
+ ELAS | 63.55 | 75.20 | 68.89 | 78.87
+ EULAS | 65.03 | 76.95 | 70.49 | 80.71
+ CLAS | 65.35 | 65.11 | 65.23 | 65.30
+ MLAS | 60.21 | 59.99 | 60.10 | 60.16
+ BLEX | 63.17 | 62.94 | 63.06 | 63.12
sysoutputs/fastparse/v1_2021/nl.gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/nl.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/ar_padt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.98 | 99.98 | 99.98 |
+ Sentences | 76.20 | 88.97 | 82.09 |
+ Words | 95.76 | 93.42 | 94.58 |
+ UPOS | 92.83 | 90.55 | 91.68 | 96.93
+ XPOS | 90.07 | 87.87 | 88.96 | 94.06
+ UFeats | 90.25 | 88.04 | 89.14 | 94.25
+ AllTags | 89.76 | 87.56 | 88.65 | 93.73
+ Lemmas | 91.50 | 89.26 | 90.37 | 95.55
+ UAS | 70.72 | 68.99 | 69.84 | 73.84
+ LAS | 65.69 | 64.08 | 64.88 | 68.60
+ ELAS | 50.12 | 57.95 | 53.75 | 65.88
+ EULAS | 57.00 | 65.91 | 61.13 | 74.93
+ CLAS | 59.10 | 59.09 | 59.10 | 62.62
+ MLAS | 54.19 | 54.17 | 54.18 | 57.41
+ BLEX | 56.34 | 56.32 | 56.33 | 59.68
sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/bg_btb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.94 | 99.88 | 99.91 |
+ Sentences | 94.33 | 94.00 | 94.17 |
+ Words | 99.94 | 99.88 | 99.91 |
+ UPOS | 99.18 | 99.12 | 99.15 | 99.24
+ XPOS | 97.21 | 97.16 | 97.19 | 97.27
+ UFeats | 97.98 | 97.92 | 97.95 | 98.04
+ AllTags | 96.87 | 96.81 | 96.84 | 96.93
+ Lemmas | 98.00 | 97.95 | 97.97 | 98.06
+ UAS | 87.88 | 87.83 | 87.85 | 87.93
+ LAS | 83.41 | 83.36 | 83.39 | 83.46
+ ELAS | 74.86 | 83.03 | 78.73 | 85.51
+ EULAS | 76.68 | 85.06 | 80.65 | 87.59
+ CLAS | 78.15 | 78.01 | 78.08 | 78.07
+ MLAS | 74.69 | 74.56 | 74.62 | 74.61
+ BLEX | 75.97 | 75.84 | 75.91 | 75.90
sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_cac-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.98 | 99.96 | 99.97 |
+ Sentences | 91.67 | 94.59 | 93.10 |
+ Words | 99.97 | 99.95 | 99.96 |
+ UPOS | 98.58 | 98.56 | 98.57 | 98.61
+ XPOS | 96.06 | 96.04 | 96.05 | 96.09
+ UFeats | 96.15 | 96.13 | 96.14 | 96.18
+ AllTags | 95.05 | 95.03 | 95.04 | 95.07
+ Lemmas | 97.88 | 97.86 | 97.87 | 97.91
+ UAS | 83.16 | 83.14 | 83.15 | 83.18
+ LAS | 77.45 | 77.44 | 77.44 | 77.47
+ ELAS | 68.27 | 72.91 | 70.51 | 81.63
+ EULAS | 71.69 | 76.56 | 74.04 | 85.72
+ CLAS | 72.29 | 71.74 | 72.01 | 71.78
+ MLAS | 67.90 | 67.39 | 67.64 | 67.42
+ BLEX | 71.04 | 70.50 | 70.77 | 70.54
sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_fictree-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.97 | 99.99 | 99.98 |
+ Sentences | 93.01 | 86.52 | 89.65 |
+ Words | 99.93 | 99.93 | 99.93 |
+ UPOS | 97.49 | 97.49 | 97.49 | 97.56
+ XPOS | 78.68 | 78.68 | 78.68 | 78.74
+ UFeats | 81.39 | 81.39 | 81.39 | 81.45
+ AllTags | 74.93 | 74.93 | 74.93 | 74.98
+ Lemmas | 98.86 | 98.86 | 98.86 | 98.93
+ UAS | 82.56 | 82.56 | 82.56 | 82.62
+ LAS | 77.10 | 77.10 | 77.10 | 77.15
+ ELAS | 67.98 | 74.13 | 70.92 | 80.75
+ EULAS | 71.13 | 77.56 | 74.20 | 84.48
+ CLAS | 70.96 | 71.11 | 71.03 | 71.14
+ MLAS | 48.00 | 48.10 | 48.05 | 48.12
+ BLEX | 69.86 | 70.01 | 69.93 | 70.04
sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-gold.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bafd3772078fb1d2c197a42c6c2c234e8ab679fbff1d85d9d324f51f87433792
+ size 19782064
sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3730e0fe5b33399bfb807a28974a31b8b85b282d31cb1c0240bb128d88898f6c
+ size 22766879
sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test-sys.nen.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3730e0fe5b33399bfb807a28974a31b8b85b282d31cb1c0240bb128d88898f6c
+ size 22766879
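The three cs_pdt files above are checked in as Git LFS pointer files: each records the pointer spec version, a sha256 object id, and the object size in bytes, while the actual .conllu content lives in LFS storage. A hedged sketch of parsing such a pointer and verifying a locally downloaded copy against it (helper names and the local path are illustrative, not part of this repository):

```python
# Hedged sketch: parse a Git LFS pointer ("version ...", "oid sha256:<hex>",
# "size <bytes>") and verify a downloaded object against it.
import hashlib
import os

def parse_lfs_pointer(path):
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify_object(object_path, pointer):
    # Check the byte size first, then the sha256 digest recorded in the pointer.
    if os.path.getsize(object_path) != int(pointer["size"]):
        return False
    expected = pointer["oid"].split(":", 1)[1]
    digest = hashlib.sha256()
    with open(object_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected

# Example use (hypothetical local copy):
#   ptr = parse_lfs_pointer("cs_pdt-ud-test-sys.conllu")        # pointer as checked in
#   verify_object("downloads/cs_pdt-ud-test-sys.conllu", ptr)   # True if oid/size match
```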
sysoutputs/fastparse/v1_2021/pertreebank/cs_pdt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.96 | 99.90 | 99.93 |
+ Sentences | 94.76 | 91.98 | 93.35 |
+ Words | 99.96 | 99.90 | 99.93 |
+ UPOS | 99.25 | 99.19 | 99.22 | 99.29
+ XPOS | 97.63 | 97.58 | 97.61 | 97.67
+ UFeats | 97.61 | 97.56 | 97.59 | 97.65
+ AllTags | 97.15 | 97.10 | 97.13 | 97.19
+ Lemmas | 99.06 | 99.00 | 99.03 | 99.10
+ UAS | 84.03 | 83.98 | 84.00 | 84.06
+ LAS | 79.64 | 79.59 | 79.61 | 79.67
+ ELAS | 70.03 | 76.89 | 73.30 | 82.78
+ EULAS | 73.17 | 80.34 | 76.59 | 86.49
+ CLAS | 75.21 | 75.23 | 75.22 | 75.30
+ MLAS | 72.18 | 72.20 | 72.19 | 72.27
+ BLEX | 74.45 | 74.46 | 74.45 | 74.54
sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/cs_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.24 | 99.30 | 99.27 |
+ Sentences | 95.27 | 96.60 | 95.93 |
+ Words | 99.24 | 99.30 | 99.27 |
+ UPOS | 96.90 | 96.95 | 96.92 | 97.64
+ XPOS | 94.47 | 94.52 | 94.50 | 95.19
+ UFeats | 94.33 | 94.39 | 94.36 | 95.05
+ AllTags | 92.89 | 92.94 | 92.92 | 93.60
+ Lemmas | 96.75 | 96.80 | 96.77 | 97.48
+ UAS | 82.85 | 82.90 | 82.88 | 83.48
+ LAS | 77.82 | 77.86 | 77.84 | 78.41
+ ELAS | 68.48 | 75.34 | 71.74 | 80.44
+ EULAS | 71.70 | 78.88 | 75.12 | 84.23
+ CLAS | 72.72 | 73.30 | 73.01 | 73.81
+ MLAS | 67.22 | 67.76 | 67.49 | 68.23
+ BLEX | 71.44 | 72.01 | 71.72 | 72.51
sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test-sys.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/en_ewt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 96.64 | 97.88 | 97.26 |
+ Sentences | 89.35 | 84.02 | 86.60 |
+ Words | 98.98 | 98.92 | 98.95 |
+ UPOS | 96.16 | 96.11 | 96.13 | 97.16
+ XPOS | 96.09 | 96.04 | 96.06 | 97.08
+ UFeats | 96.58 | 96.53 | 96.56 | 97.58
+ AllTags | 94.67 | 94.62 | 94.65 | 95.65
+ Lemmas | 97.49 | 97.43 | 97.46 | 98.50
+ UAS | 83.46 | 83.42 | 83.44 | 84.33
+ LAS | 79.48 | 79.43 | 79.46 | 80.30
+ ELAS | 70.86 | 79.31 | 74.85 | 83.48
+ EULAS | 72.10 | 80.71 | 76.16 | 84.95
+ CLAS | 74.85 | 74.75 | 74.80 | 75.63
+ MLAS | 69.97 | 69.87 | 69.92 | 70.69
+ BLEX | 73.53 | 73.43 | 73.48 | 74.29
sysoutputs/fastparse/v1_2021/pertreebank/en_gum-ud-test-gold.nen.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/fastparse/v1_2021/pertreebank/en_gum-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff