de-francophones committed
Commit 6a1383d · verified · Parent: 78bf907

f9563180a63fa0dbc3248a5bd9a5ca9c9de2f1656a3c9b88bbd1926a44077cf8

This view is limited to 50 files because it contains too many changes. See raw diff.

Files changed (50):
  1. .gitattributes +3 -0
  2. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/pl_pud-ud-test.eval.log +17 -0
  3. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test-sys.conllu +3 -0
  4. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test.eval.log +17 -0
  5. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sk_snk-ud-test-sys.conllu +0 -0
  6. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sk_snk-ud-test.eval.log +17 -0
  7. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_pud-ud-test-sys.conllu +0 -0
  8. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_pud-ud-test.eval.log +17 -0
  9. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_talbanken-ud-test-sys.conllu +0 -0
  10. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_talbanken-ud-test.eval.log +17 -0
  11. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ta_ttb-ud-test-sys.conllu +0 -0
  12. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ta_ttb-ud-test.eval.log +17 -0
  13. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/uk_iu-ud-test-sys.conllu +0 -0
  14. sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/uk_iu-ud-test.eval.log +17 -0
  15. sysoutputs/koebsala/udpipe_test_20200425_134739/pl.conllu +0 -0
  16. sysoutputs/koebsala/udpipe_test_20200425_134739/pl.eval.log +17 -0
  17. sysoutputs/koebsala/udpipe_test_20200425_134739/ru.conllu +3 -0
  18. sysoutputs/koebsala/udpipe_test_20200425_134739/ru.eval.log +17 -0
  19. sysoutputs/koebsala/udpipe_test_20200425_134739/sk.conllu +0 -0
  20. sysoutputs/koebsala/udpipe_test_20200425_134739/sk.eval.log +17 -0
  21. sysoutputs/koebsala/udpipe_test_20200425_134739/sv.conllu +0 -0
  22. sysoutputs/koebsala/udpipe_test_20200425_134739/sv.eval.log +17 -0
  23. sysoutputs/koebsala/udpipe_test_20200425_134739/ta.conllu +0 -0
  24. sysoutputs/koebsala/udpipe_test_20200425_134739/ta.eval.log +17 -0
  25. sysoutputs/koebsala/udpipe_test_20200425_134739/uk.conllu +0 -0
  26. sysoutputs/koebsala/udpipe_test_20200425_134739/uk.eval.log +17 -0
  27. sysoutputs/orange_deskin/test_02/ar.conllu +0 -0
  28. sysoutputs/orange_deskin/test_02/ar.eval.log +17 -0
  29. sysoutputs/orange_deskin/test_02/bg.conllu +0 -0
  30. sysoutputs/orange_deskin/test_02/bg.eval.log +17 -0
  31. sysoutputs/orange_deskin/test_02/cs.conllu +3 -0
  32. sysoutputs/orange_deskin/test_02/cs.eval.log +17 -0
  33. sysoutputs/orange_deskin/test_02/en.conllu +0 -0
  34. sysoutputs/orange_deskin/test_02/en.eval.log +17 -0
  35. sysoutputs/orange_deskin/test_02/et.conllu +0 -0
  36. sysoutputs/orange_deskin/test_02/et.eval.log +17 -0
  37. sysoutputs/orange_deskin/test_02/fi.conllu +0 -0
  38. sysoutputs/orange_deskin/test_02/fi.eval.log +17 -0
  39. sysoutputs/orange_deskin/test_02/fr.conllu +0 -0
  40. sysoutputs/orange_deskin/test_02/fr.eval.log +17 -0
  41. sysoutputs/orange_deskin/test_02/it.conllu +0 -0
  42. sysoutputs/orange_deskin/test_02/it.eval.log +17 -0
  43. sysoutputs/orange_deskin/test_02/lt.conllu +0 -0
  44. sysoutputs/orange_deskin/test_02/lt.eval.log +17 -0
  45. sysoutputs/orange_deskin/test_02/lv.conllu +0 -0
  46. sysoutputs/orange_deskin/test_02/lv.eval.log +17 -0
  47. sysoutputs/orange_deskin/test_02/nl.conllu +0 -0
  48. sysoutputs/orange_deskin/test_02/nl.eval.log +17 -0
  49. sysoutputs/orange_deskin/test_02/pertreebank/ar_padt-ud-test-sys.conllu +0 -0
  50. sysoutputs/orange_deskin/test_02/pertreebank/ar_padt-ud-test.eval.log +17 -0
.gitattributes CHANGED
@@ -90,3 +90,6 @@ sysoutputs/fastparse/v2/pertreebank/ru_syntagrus-ud-test-sys.conllu filter=lfs d
  sysoutputs/fastparse/v2/ru.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/koebsala/udpipe_test_20200425_134739/cs.conllu filter=lfs diff=lfs merge=lfs -text
  sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/cs_pdt-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/koebsala/udpipe_test_20200425_134739/ru.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/orange_deskin/test_02/cs.conllu filter=lfs diff=lfs merge=lfs -text
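The three added rules use Git LFS's standard .gitattributes pattern, which routes matching files through the LFS filter instead of storing their contents directly in the repository. As a minimal sketch of what this amounts to (illustrative only; in practice `git lfs track <path>` appends these rules for you):

# Minimal sketch: append a Git LFS tracking rule to .gitattributes,
# mirroring the lines added above. Normally `git lfs track` does this.
def lfs_track(path: str, attributes_file: str = ".gitattributes") -> None:
    with open(attributes_file, "a", encoding="utf-8") as f:
        f.write(f"{path} filter=lfs diff=lfs merge=lfs -text\n")

lfs_track("sysoutputs/orange_deskin/test_02/cs.conllu")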
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/pl_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.71 | 99.87 | 99.79 |
+ Sentences | 95.57 | 97.00 | 96.28 |
+ Words | 99.67 | 99.86 | 99.77 |
+ UPOS | 95.22 | 95.40 | 95.31 | 95.53
+ XPOS | 83.87 | 84.03 | 83.95 | 84.15
+ UFeats | 84.50 | 84.65 | 84.58 | 84.77
+ AllTags | 82.87 | 83.03 | 82.95 | 83.15
+ Lemmas | 94.78 | 94.95 | 94.87 | 95.09
+ UAS | 85.19 | 85.34 | 85.27 | 85.47
+ LAS | 80.80 | 80.95 | 80.87 | 81.06
+ ELAS | 53.99 | 74.41 | 62.57 | 80.07
+ EULAS | 58.29 | 80.34 | 67.56 | 86.46
+ CLAS | 76.93 | 76.82 | 76.88 | 76.98
+ MLAS | 62.45 | 62.36 | 62.41 | 62.49
+ BLEX | 71.91 | 71.80 | 71.86 | 71.95
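These eval.log tables appear to be output from the IWPT 2020 shared task evaluation (ELAS and EULAS are that task's enhanced-dependency metrics). One quick consistency check that holds for every row, assuming F1 is the usual harmonic mean of precision and recall, sketched in Python:

# Minimal sanity check (not the official scorer): the F1 Score column
# should be the harmonic mean of the Precision and Recall columns.
def f1(precision: float, recall: float) -> float:
    return 0.0 if precision + recall == 0 else 2 * precision * recall / (precision + recall)

# LAS row of the pl_pud table above: P=80.80, R=80.95, F1=80.87
assert round(f1(80.80, 80.95), 2) == 80.87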
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test-sys.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb9b76b50f162eb3334e98ee92763ccd9ba00226e6b85c5f8cb557241c986fab
+ size 11454992
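This .conllu entry is stored through LFS, so the diff shows a three-line pointer (spec version, sha256 oid, byte size) rather than the treebank itself. A minimal sketch for reading such a pointer, assuming the standard layout shown above:

# Minimal sketch: parse a Git LFS pointer file into its fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size": int(fields["size"])}

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:eb9b76b50f162eb3334e98ee92763ccd9ba00226e6b85c5f8cb557241c986fab\n"
    "size 11454992\n"
)
print(parse_lfs_pointer(pointer))  # size is 11454992 bytes (about 11 MB)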
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.55 | 99.64 | 99.60 |
+ Sentences | 98.77 | 98.83 | 98.80 |
+ Words | 99.55 | 99.64 | 99.60 |
+ UPOS | 97.73 | 97.82 | 97.78 | 98.17
+ XPOS | 99.55 | 99.64 | 99.60 | 100.00
+ UFeats | 85.30 | 85.38 | 85.34 | 85.68
+ AllTags | 84.93 | 85.00 | 84.97 | 85.31
+ Lemmas | 96.51 | 96.59 | 96.55 | 96.94
+ UAS | 87.59 | 87.66 | 87.62 | 87.98
+ LAS | 84.97 | 85.04 | 85.01 | 85.35
+ ELAS | 54.60 | 77.98 | 64.23 | 82.81
+ EULAS | 56.66 | 80.92 | 66.65 | 85.93
+ CLAS | 82.75 | 82.57 | 82.66 | 82.93
+ MLAS | 66.16 | 66.02 | 66.09 | 66.31
+ BLEX | 79.48 | 79.31 | 79.39 | 79.66
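Every eval.log in this commit shares the same 17-line layout (header, separator, 15 metric rows, with AligndAcc left blank for Tokens, Sentences, and Words), so one small parser covers them all. A hedged sketch (the field names are mine, not the evaluation tooling's):

# Minimal sketch: parse an eval.log table into a dict keyed by metric name.
def parse_eval_log(text: str) -> dict:
    rows = {}
    for line in text.strip().splitlines()[2:]:  # skip header and separator
        cells = [c.strip() for c in line.split("|")]
        rows[cells[0]] = {
            "precision": float(cells[1]),
            "recall": float(cells[2]),
            "f1": float(cells[3]),
            "aligned_acc": float(cells[4]) if cells[4] else None,
        }
    return rows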
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sk_snk-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sk_snk-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 100.00 | 100.00 | 100.00 |
+ Sentences | 85.65 | 84.92 | 85.28 |
+ Words | 100.00 | 100.00 | 100.00 |
+ UPOS | 92.93 | 92.93 | 92.93 | 92.93
+ XPOS | 77.06 | 77.06 | 77.06 | 77.06
+ UFeats | 80.34 | 80.34 | 80.34 | 80.34
+ AllTags | 76.71 | 76.71 | 76.71 | 76.71
+ Lemmas | 86.56 | 86.56 | 86.56 | 86.56
+ UAS | 80.95 | 80.95 | 80.95 | 80.95
+ LAS | 75.97 | 75.97 | 75.97 | 75.97
+ ELAS | 54.71 | 77.33 | 64.08 | 82.44
+ EULAS | 57.41 | 81.15 | 67.25 | 86.51
+ CLAS | 71.94 | 71.80 | 71.87 | 71.80
+ MLAS | 55.71 | 55.60 | 55.66 | 55.60
+ BLEX | 60.31 | 60.20 | 60.25 | 60.20
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.81 | 98.01 | 98.41 |
+ Sentences | 88.44 | 92.60 | 90.47 |
+ Words | 98.81 | 98.01 | 98.41 |
+ UPOS | 91.31 | 90.56 | 90.93 | 92.40
+ XPOS | 88.15 | 87.43 | 87.79 | 89.21
+ UFeats | 74.89 | 74.28 | 74.58 | 75.79
+ AllTags | 73.02 | 72.43 | 72.73 | 73.90
+ Lemmas | 84.53 | 83.84 | 84.18 | 85.54
+ UAS | 76.15 | 75.53 | 75.84 | 77.06
+ LAS | 71.40 | 70.82 | 71.11 | 72.26
+ ELAS | 52.68 | 75.22 | 61.96 | 80.95
+ EULAS | 53.95 | 77.04 | 63.46 | 82.92
+ CLAS | 67.10 | 66.88 | 66.99 | 67.88
+ MLAS | 43.67 | 43.53 | 43.60 | 44.18
+ BLEX | 54.91 | 54.73 | 54.82 | 55.55
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_talbanken-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/sv_talbanken-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.87 | 99.92 | 99.89 |
+ Sentences | 96.45 | 95.82 | 96.13 |
+ Words | 99.87 | 99.92 | 99.89 |
+ UPOS | 95.57 | 95.62 | 95.59 | 95.69
+ XPOS | 93.87 | 93.92 | 93.90 | 94.00
+ UFeats | 94.44 | 94.49 | 94.46 | 94.56
+ AllTags | 92.84 | 92.89 | 92.87 | 92.97
+ Lemmas | 95.36 | 95.41 | 95.38 | 95.48
+ UAS | 82.47 | 82.51 | 82.49 | 82.58
+ LAS | 78.54 | 78.58 | 78.56 | 78.64
+ ELAS | 56.70 | 81.43 | 66.85 | 86.66
+ EULAS | 58.11 | 83.46 | 68.52 | 88.82
+ CLAS | 75.23 | 74.74 | 74.98 | 74.83
+ MLAS | 70.23 | 69.77 | 70.00 | 69.86
+ BLEX | 70.60 | 70.14 | 70.37 | 70.23
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ta_ttb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ta_ttb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.88 | 99.44 | 99.16 |
+ Sentences | 96.72 | 98.33 | 97.52 |
+ Words | 95.11 | 93.92 | 94.51 |
+ UPOS | 81.82 | 80.79 | 81.31 | 86.03
+ XPOS | 76.83 | 75.87 | 76.35 | 80.78
+ UFeats | 80.96 | 79.94 | 80.45 | 85.12
+ AllTags | 76.07 | 75.11 | 75.59 | 79.98
+ Lemmas | 84.67 | 83.61 | 84.14 | 89.03
+ UAS | 59.32 | 58.57 | 58.94 | 62.37
+ LAS | 52.29 | 51.63 | 51.96 | 54.98
+ ELAS | 40.99 | 56.40 | 47.47 | 61.35
+ EULAS | 42.70 | 58.76 | 49.46 | 63.92
+ CLAS | 49.20 | 48.20 | 48.70 | 51.60
+ MLAS | 42.76 | 41.89 | 42.32 | 44.84
+ BLEX | 43.87 | 42.97 | 43.42 | 46.00
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/uk_iu-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/uk_iu-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.84 | 99.85 | 99.85 |
+ Sentences | 95.91 | 97.31 | 96.61 |
+ Words | 99.80 | 99.82 | 99.81 |
+ UPOS | 94.91 | 94.92 | 94.91 | 95.10
+ XPOS | 84.02 | 84.03 | 84.03 | 84.18
+ UFeats | 84.27 | 84.29 | 84.28 | 84.44
+ AllTags | 83.32 | 83.33 | 83.32 | 83.48
+ Lemmas | 93.55 | 93.57 | 93.56 | 93.74
+ UAS | 79.43 | 79.44 | 79.43 | 79.58
+ LAS | 74.82 | 74.84 | 74.83 | 74.97
+ ELAS | 54.87 | 77.26 | 64.17 | 83.81
+ EULAS | 56.09 | 78.98 | 65.60 | 85.68
+ CLAS | 70.02 | 69.58 | 69.80 | 69.72
+ MLAS | 57.77 | 57.40 | 57.58 | 57.52
+ BLEX | 64.39 | 63.98 | 64.18 | 64.11
sysoutputs/koebsala/udpipe_test_20200425_134739/pl.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/pl.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.51 | 99.29 | 99.40 |
+ Sentences | 97.70 | 97.33 | 97.52 |
+ Words | 99.77 | 99.88 | 99.83 |
+ UPOS | 96.38 | 96.48 | 96.43 | 96.60
+ XPOS | 84.82 | 84.91 | 84.87 | 85.01
+ UFeats | 83.57 | 83.66 | 83.62 | 83.76
+ AllTags | 80.31 | 80.40 | 80.35 | 80.49
+ Lemmas | 95.55 | 95.65 | 95.60 | 95.77
+ UAS | 86.42 | 86.51 | 86.46 | 86.61
+ LAS | 82.07 | 82.15 | 82.11 | 82.25
+ ELAS | 52.92 | 72.96 | 61.34 | 77.97
+ EULAS | 58.42 | 80.55 | 67.73 | 86.08
+ CLAS | 78.81 | 78.72 | 78.76 | 78.85
+ MLAS | 62.24 | 62.17 | 62.21 | 62.27
+ BLEX | 74.51 | 74.42 | 74.47 | 74.54
sysoutputs/koebsala/udpipe_test_20200425_134739/ru.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb9b76b50f162eb3334e98ee92763ccd9ba00226e6b85c5f8cb557241c986fab
+ size 11454992
sysoutputs/koebsala/udpipe_test_20200425_134739/ru.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.55 | 99.64 | 99.60 |
+ Sentences | 98.77 | 98.83 | 98.80 |
+ Words | 99.55 | 99.64 | 99.60 |
+ UPOS | 97.73 | 97.82 | 97.78 | 98.17
+ XPOS | 99.55 | 99.64 | 99.60 | 100.00
+ UFeats | 85.30 | 85.38 | 85.34 | 85.68
+ AllTags | 84.93 | 85.00 | 84.97 | 85.31
+ Lemmas | 96.51 | 96.59 | 96.55 | 96.94
+ UAS | 87.59 | 87.66 | 87.62 | 87.98
+ LAS | 84.97 | 85.04 | 85.01 | 85.35
+ ELAS | 54.60 | 77.98 | 64.23 | 82.81
+ EULAS | 56.66 | 80.92 | 66.65 | 85.93
+ CLAS | 82.75 | 82.57 | 82.66 | 82.93
+ MLAS | 66.16 | 66.02 | 66.09 | 66.31
+ BLEX | 79.48 | 79.31 | 79.39 | 79.66
sysoutputs/koebsala/udpipe_test_20200425_134739/sk.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/sk.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 100.00 | 100.00 | 100.00 |
+ Sentences | 85.65 | 84.92 | 85.28 |
+ Words | 100.00 | 100.00 | 100.00 |
+ UPOS | 92.93 | 92.93 | 92.93 | 92.93
+ XPOS | 77.06 | 77.06 | 77.06 | 77.06
+ UFeats | 80.34 | 80.34 | 80.34 | 80.34
+ AllTags | 76.71 | 76.71 | 76.71 | 76.71
+ Lemmas | 86.56 | 86.56 | 86.56 | 86.56
+ UAS | 80.95 | 80.95 | 80.95 | 80.95
+ LAS | 75.97 | 75.97 | 75.97 | 75.97
+ ELAS | 54.71 | 77.33 | 64.08 | 82.44
+ EULAS | 57.41 | 81.15 | 67.25 | 86.51
+ CLAS | 71.94 | 71.80 | 71.87 | 71.80
+ MLAS | 55.71 | 55.60 | 55.66 | 55.60
+ BLEX | 60.31 | 60.20 | 60.25 | 60.20
sysoutputs/koebsala/udpipe_test_20200425_134739/sv.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/sv.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.36 | 99.00 | 99.18 |
+ Sentences | 92.74 | 94.37 | 93.54 |
+ Words | 99.36 | 99.00 | 99.18 |
+ UPOS | 93.52 | 93.17 | 93.34 | 94.12
+ XPOS | 91.12 | 90.79 | 90.95 | 91.71
+ UFeats | 85.03 | 84.72 | 84.87 | 85.57
+ AllTags | 83.30 | 83.00 | 83.15 | 83.84
+ Lemmas | 90.14 | 89.81 | 89.98 | 90.72
+ UAS | 79.43 | 79.14 | 79.28 | 79.94
+ LAS | 75.10 | 74.83 | 74.96 | 75.58
+ ELAS | 54.77 | 78.44 | 64.50 | 83.93
+ EULAS | 56.12 | 80.37 | 66.09 | 85.99
+ CLAS | 71.30 | 70.95 | 71.13 | 71.51
+ MLAS | 57.40 | 57.12 | 57.26 | 57.57
+ BLEX | 63.02 | 62.71 | 62.86 | 63.20
sysoutputs/koebsala/udpipe_test_20200425_134739/ta.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/ta.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.88 | 99.44 | 99.16 |
+ Sentences | 96.72 | 98.33 | 97.52 |
+ Words | 95.11 | 93.92 | 94.51 |
+ UPOS | 81.82 | 80.79 | 81.31 | 86.03
+ XPOS | 76.83 | 75.87 | 76.35 | 80.78
+ UFeats | 80.96 | 79.94 | 80.45 | 85.12
+ AllTags | 76.07 | 75.11 | 75.59 | 79.98
+ Lemmas | 84.67 | 83.61 | 84.14 | 89.03
+ UAS | 59.32 | 58.57 | 58.94 | 62.37
+ LAS | 52.29 | 51.63 | 51.96 | 54.98
+ ELAS | 40.99 | 56.31 | 47.44 | 61.35
+ EULAS | 42.70 | 58.67 | 49.43 | 63.92
+ CLAS | 49.20 | 48.20 | 48.70 | 51.60
+ MLAS | 42.76 | 41.89 | 42.32 | 44.84
+ BLEX | 43.87 | 42.97 | 43.42 | 46.00
sysoutputs/koebsala/udpipe_test_20200425_134739/uk.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/koebsala/udpipe_test_20200425_134739/uk.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.84 | 99.85 | 99.85 |
+ Sentences | 95.91 | 97.31 | 96.61 |
+ Words | 99.80 | 99.82 | 99.81 |
+ UPOS | 94.91 | 94.92 | 94.91 | 95.10
+ XPOS | 84.02 | 84.03 | 84.03 | 84.18
+ UFeats | 84.27 | 84.29 | 84.28 | 84.44
+ AllTags | 83.32 | 83.33 | 83.32 | 83.48
+ Lemmas | 93.55 | 93.57 | 93.56 | 93.74
+ UAS | 79.43 | 79.44 | 79.43 | 79.58
+ LAS | 74.82 | 74.84 | 74.83 | 74.97
+ ELAS | 54.87 | 77.26 | 64.17 | 83.81
+ EULAS | 56.09 | 78.98 | 65.60 | 85.68
+ CLAS | 70.02 | 69.58 | 69.80 | 69.72
+ MLAS | 57.77 | 57.40 | 57.58 | 57.52
+ BLEX | 64.39 | 63.98 | 64.18 | 64.11
sysoutputs/orange_deskin/test_02/ar.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/ar.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.97 | 99.96 | 99.97 |
+ Sentences | 76.34 | 87.79 | 81.67 |
+ Words | 95.76 | 93.41 | 94.57 |
+ UPOS | 93.03 | 90.74 | 91.87 | 97.15
+ XPOS | 90.67 | 88.44 | 89.54 | 94.69
+ UFeats | 90.79 | 88.56 | 89.66 | 94.81
+ AllTags | 90.42 | 88.20 | 89.29 | 94.42
+ Lemmas | 91.72 | 89.47 | 90.58 | 95.79
+ UAS | 80.39 | 78.41 | 79.39 | 83.95
+ LAS | 76.49 | 74.61 | 75.54 | 79.88
+ ELAS | 73.50 | 68.59 | 70.96 | 78.02
+ EULAS | 75.51 | 70.48 | 72.91 | 80.16
+ CLAS | 72.79 | 72.67 | 72.73 | 77.02
+ MLAS | 67.48 | 67.37 | 67.43 | 71.40
+ BLEX | 69.35 | 69.24 | 69.29 | 73.38
sysoutputs/orange_deskin/test_02/bg.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/bg.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.88 | 99.90 | 99.89 |
+ Sentences | 94.44 | 92.83 | 93.63 |
+ Words | 99.88 | 99.90 | 99.89 |
+ UPOS | 99.16 | 99.18 | 99.17 | 99.28
+ XPOS | 97.39 | 97.41 | 97.40 | 97.50
+ UFeats | 98.05 | 98.07 | 98.06 | 98.17
+ AllTags | 96.90 | 96.92 | 96.91 | 97.01
+ Lemmas | 98.29 | 98.31 | 98.30 | 98.41
+ UAS | 94.45 | 94.47 | 94.46 | 94.56
+ LAS | 91.82 | 91.83 | 91.83 | 91.93
+ ELAS | 90.28 | 88.57 | 89.42 | 91.20
+ EULAS | 91.27 | 89.54 | 90.40 | 92.20
+ CLAS | 89.19 | 88.99 | 89.09 | 89.08
+ MLAS | 86.20 | 86.01 | 86.11 | 86.09
+ BLEX | 86.97 | 86.77 | 86.87 | 86.86
sysoutputs/orange_deskin/test_02/cs.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef8a2f01f089a6d50b61a600bcc59322e6a6da2a29b2d4e20019cbd36e88a909
+ size 23315184
sysoutputs/orange_deskin/test_02/cs.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.88 | 99.82 | 99.85 |
+ Sentences | 93.84 | 91.29 | 92.55 |
+ Words | 99.88 | 99.82 | 99.85 |
+ UPOS | 99.02 | 98.95 | 98.98 | 99.13
+ XPOS | 96.98 | 96.91 | 96.95 | 97.09
+ UFeats | 97.07 | 97.01 | 97.04 | 97.19
+ AllTags | 96.27 | 96.21 | 96.24 | 96.39
+ Lemmas | 98.86 | 98.80 | 98.83 | 98.98
+ UAS | 93.96 | 93.90 | 93.93 | 94.08
+ LAS | 92.28 | 92.22 | 92.25 | 92.39
+ ELAS | 89.14 | 84.87 | 86.95 | 91.61
+ EULAS | 90.61 | 86.26 | 88.38 | 93.11
+ CLAS | 91.15 | 91.21 | 91.18 | 91.40
+ MLAS | 87.13 | 87.19 | 87.16 | 87.36
+ BLEX | 90.10 | 90.16 | 90.13 | 90.34
sysoutputs/orange_deskin/test_02/en.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/en.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.24 | 99.20 | 99.22 |
+ Sentences | 84.50 | 78.13 | 81.19 |
+ Words | 99.24 | 99.20 | 99.22 |
+ UPOS | 96.23 | 96.19 | 96.21 | 96.97
+ XPOS | 95.49 | 95.45 | 95.47 | 96.22
+ UFeats | 96.21 | 96.16 | 96.18 | 96.94
+ AllTags | 93.73 | 93.69 | 93.71 | 94.45
+ Lemmas | 96.90 | 96.86 | 96.88 | 97.64
+ UAS | 89.23 | 89.19 | 89.21 | 89.91
+ LAS | 86.95 | 86.92 | 86.94 | 87.62
+ ELAS | 85.71 | 84.72 | 85.21 | 89.16
+ EULAS | 86.52 | 85.52 | 86.02 | 90.01
+ CLAS | 84.52 | 84.22 | 84.37 | 84.99
+ MLAS | 78.92 | 78.63 | 78.78 | 79.36
+ BLEX | 82.07 | 81.77 | 81.92 | 82.53
sysoutputs/orange_deskin/test_02/et.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/et.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.59 | 99.81 | 99.70 |
+ Sentences | 90.74 | 85.45 | 88.02 |
+ Words | 99.59 | 99.81 | 99.70 |
+ UPOS | 97.35 | 97.56 | 97.45 | 97.75
+ XPOS | 97.99 | 98.21 | 98.10 | 98.40
+ UFeats | 96.32 | 96.53 | 96.43 | 96.71
+ AllTags | 95.05 | 95.26 | 95.16 | 95.44
+ Lemmas | 95.07 | 95.28 | 95.18 | 95.46
+ UAS | 88.59 | 88.79 | 88.69 | 88.96
+ LAS | 86.11 | 86.30 | 86.20 | 86.46
+ ELAS | 81.62 | 80.45 | 81.03 | 81.96
+ EULAS | 85.32 | 84.09 | 84.70 | 85.67
+ CLAS | 84.92 | 84.99 | 84.96 | 85.20
+ MLAS | 80.28 | 80.34 | 80.31 | 80.54
+ BLEX | 79.75 | 79.81 | 79.78 | 80.01
sysoutputs/orange_deskin/test_02/fi.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/fi.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.77 | 99.61 | 99.69 |
+ Sentences | 88.31 | 88.73 | 88.52 |
+ Words | 99.76 | 99.59 | 99.68 |
+ UPOS | 98.19 | 98.03 | 98.11 | 98.43
+ XPOS | 56.35 | 56.26 | 56.30 | 56.49
+ UFeats | 96.89 | 96.73 | 96.81 | 97.13
+ AllTags | 54.75 | 54.66 | 54.70 | 54.88
+ Lemmas | 92.19 | 92.04 | 92.12 | 92.42
+ UAS | 92.05 | 91.89 | 91.97 | 92.27
+ LAS | 90.38 | 90.23 | 90.31 | 90.60
+ ELAS | 87.53 | 84.98 | 86.24 | 90.46
+ EULAS | 89.10 | 86.51 | 87.79 | 92.09
+ CLAS | 89.19 | 88.95 | 89.07 | 89.32
+ MLAS | 85.25 | 85.02 | 85.13 | 85.37
+ BLEX | 80.12 | 79.91 | 80.01 | 80.24
sysoutputs/orange_deskin/test_02/fr.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/fr.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.55 | 99.53 | 99.54 |
+ Sentences | 92.42 | 94.13 | 93.27 |
+ Words | 99.00 | 99.26 | 99.13 |
+ UPOS | 95.91 | 96.16 | 96.04 | 96.88
+ XPOS | 99.00 | 99.26 | 99.13 | 100.00
+ UFeats | 91.68 | 91.91 | 91.79 | 92.60
+ AllTags | 90.02 | 90.25 | 90.14 | 90.93
+ Lemmas | 96.41 | 96.66 | 96.53 | 97.38
+ UAS | 90.86 | 91.09 | 90.98 | 91.77
+ LAS | 86.36 | 86.58 | 86.47 | 87.23
+ ELAS | 84.08 | 83.18 | 83.63 | 85.84
+ EULAS | 86.28 | 85.36 | 85.81 | 88.08
+ CLAS | 82.03 | 80.21 | 81.11 | 80.78
+ MLAS | 71.49 | 69.90 | 70.69 | 70.40
+ BLEX | 78.64 | 76.90 | 77.76 | 77.44
sysoutputs/orange_deskin/test_02/it.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/it.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.90 | 99.91 | 99.90 |
+ Sentences | 95.70 | 96.89 | 96.29 |
+ Words | 99.81 | 99.84 | 99.82 |
+ UPOS | 98.45 | 98.47 | 98.46 | 98.63
+ XPOS | 98.33 | 98.36 | 98.34 | 98.52
+ UFeats | 98.14 | 98.17 | 98.15 | 98.33
+ AllTags | 97.56 | 97.59 | 97.58 | 97.75
+ Lemmas | 98.48 | 98.51 | 98.50 | 98.67
+ UAS | 94.72 | 94.75 | 94.74 | 94.90
+ LAS | 93.08 | 93.11 | 93.09 | 93.26
+ ELAS | 91.20 | 90.46 | 90.83 | 94.34
+ EULAS | 92.37 | 91.62 | 91.99 | 95.55
+ CLAS | 89.76 | 89.46 | 89.61 | 89.67
+ MLAS | 86.98 | 86.69 | 86.84 | 86.90
+ BLEX | 87.92 | 87.63 | 87.77 | 87.83
sysoutputs/orange_deskin/test_02/lt.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/lt.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.94 | 99.94 | 99.94 |
+ Sentences | 89.33 | 85.67 | 87.46 |
+ Words | 99.94 | 99.94 | 99.94 |
+ UPOS | 96.57 | 96.57 | 96.57 | 96.63
+ XPOS | 91.05 | 91.05 | 91.05 | 91.11
+ UFeats | 91.67 | 91.67 | 91.67 | 91.73
+ AllTags | 90.13 | 90.13 | 90.13 | 90.18
+ Lemmas | 94.29 | 94.29 | 94.29 | 94.35
+ UAS | 84.91 | 84.91 | 84.91 | 84.96
+ LAS | 81.54 | 81.54 | 81.54 | 81.59
+ ELAS | 78.85 | 73.13 | 75.89 | 80.88
+ EULAS | 80.64 | 74.79 | 77.61 | 82.72
+ CLAS | 79.97 | 79.71 | 79.84 | 79.77
+ MLAS | 70.06 | 69.83 | 69.95 | 69.88
+ BLEX | 74.37 | 74.13 | 74.25 | 74.18
sysoutputs/orange_deskin/test_02/lv.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/lv.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.40 | 99.28 | 99.34 |
+ Sentences | 98.04 | 98.68 | 98.36 |
+ Words | 99.40 | 99.28 | 99.34 |
+ UPOS | 96.75 | 96.63 | 96.69 | 97.33
+ XPOS | 90.30 | 90.19 | 90.25 | 90.85
+ UFeats | 94.58 | 94.47 | 94.52 | 95.15
+ AllTags | 89.69 | 89.58 | 89.64 | 90.23
+ Lemmas | 96.34 | 96.22 | 96.28 | 96.92
+ UAS | 90.74 | 90.63 | 90.69 | 91.30
+ LAS | 88.01 | 87.91 | 87.96 | 88.55
+ ELAS | 83.96 | 80.34 | 82.11 | 87.32
+ EULAS | 86.42 | 82.69 | 84.51 | 89.87
+ CLAS | 85.91 | 85.79 | 85.85 | 86.68
+ MLAS | 78.50 | 78.39 | 78.45 | 79.21
+ BLEX | 82.66 | 82.54 | 82.60 | 83.40
sysoutputs/orange_deskin/test_02/nl.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/nl.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.63 | 99.79 | 99.71 |
+ Sentences | 84.13 | 71.38 | 77.23 |
+ Words | 99.63 | 99.79 | 99.71 |
+ UPOS | 96.67 | 96.82 | 96.75 | 97.03
+ XPOS | 95.45 | 95.59 | 95.52 | 95.80
+ UFeats | 96.60 | 96.75 | 96.67 | 96.95
+ AllTags | 94.80 | 94.94 | 94.87 | 95.14
+ Lemmas | 97.11 | 97.26 | 97.19 | 97.47
+ UAS | 90.50 | 90.64 | 90.57 | 90.83
+ LAS | 87.98 | 88.12 | 88.05 | 88.31
+ ELAS | 85.76 | 84.53 | 85.14 | 88.60
+ EULAS | 87.21 | 85.96 | 86.58 | 90.09
+ CLAS | 83.42 | 83.09 | 83.25 | 83.15
+ MLAS | 78.89 | 78.57 | 78.73 | 78.64
+ BLEX | 80.66 | 80.34 | 80.50 | 80.40
sysoutputs/orange_deskin/test_02/pertreebank/ar_padt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/ar_padt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.97 | 99.96 | 99.97 |
+ Sentences | 76.34 | 87.79 | 81.67 |
+ Words | 95.76 | 93.41 | 94.57 |
+ UPOS | 93.03 | 90.74 | 91.87 | 97.15
+ XPOS | 90.67 | 88.44 | 89.54 | 94.69
+ UFeats | 90.79 | 88.56 | 89.66 | 94.81
+ AllTags | 90.42 | 88.20 | 89.29 | 94.42
+ Lemmas | 91.72 | 89.47 | 90.58 | 95.79
+ UAS | 80.39 | 78.41 | 79.39 | 83.95
+ LAS | 76.49 | 74.61 | 75.54 | 79.88
+ ELAS | 73.50 | 68.60 | 70.96 | 78.00
+ EULAS | 75.52 | 70.48 | 72.91 | 80.14
+ CLAS | 72.79 | 72.67 | 72.73 | 77.02
+ MLAS | 67.48 | 67.37 | 67.43 | 71.40
+ BLEX | 69.35 | 69.24 | 69.29 | 73.38