OPUS-MT Dashboard

Language pair: ces-eng
Models: all models | OPUS-MT | external | contributed | compare
Benchmark: all benchmarks | average score
Evaluation metric: bleu | spbleu | chrf | chrf++ | comet (the table below uses chrf++)
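The metric used below, chrF++, is the character n-gram F-score extended with word unigrams and bigrams. A minimal sketch of how such a score could be computed with sacreBLEU follows; the example sentences and the rescaling to the 0-1 range shown in the table are illustrative assumptions, not something the dashboard prescribes.

```python
# Sketch: computing a chrF++ score with sacreBLEU (assumed inputs).
from sacrebleu.metrics import CHRF

# chrF++ = character n-gram F-score with word 1/2-grams added (word_order=2).
chrf_pp = CHRF(word_order=2)

hypotheses = ["The cat sat on the mat."]           # system output, e.g. from an OPUS-MT model
references = [["The cat is sitting on the mat."]]  # one reference stream, one segment each

score = chrf_pp.corpus_score(hypotheses, references)
print(score)              # metric name and score on sacreBLEU's usual 0-100 scale
print(score.score / 100)  # the dashboard table reports scores on a 0-1 scale
```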
Chart type: barchart | diff
Chart renderer: gd | plotly
Legend: blue = OPUS-MT / Tatoeba-MT models, grey = external models, purple = user-contributed
Option: exclude scores of user-contributed translations
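For the barchart view rendered with Plotly, a grouped bar chart like the sketch below would reproduce the comparison; the three benchmarks and their scores are copied from the table that follows, the colours follow the legend above, and the layout details and output file name are assumptions.

```python
# Sketch: a grouped bar chart of OPUS-MT vs. external chrF++ scores with Plotly.
import plotly.graph_objects as go

benchmarks = ["flores200-devtest", "newstest2021", "ntrex128"]
opus_mt    = [0.64606, 0.58515, 0.62]     # blue = OPUS-MT / Tatoeba-MT
external   = [0.62916, 0.55723, 0.6246]   # grey = external (NLLB)

fig = go.Figure(data=[
    go.Bar(name="OPUS-MT", x=benchmarks, y=opus_mt, marker_color="steelblue"),
    go.Bar(name="external", x=benchmarks, y=external, marker_color="grey"),
])
fig.update_layout(barmode="group", yaxis_title="chrF++", title="ces-eng model scores")
fig.write_html("ces-eng-barchart.html")  # or fig.show() in an interactive session
```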
Model Scores (comparing OPUS-MT and external models)
| ID | Benchmark | OPUS-MT model | chrf++ | External model | chrf++ | Diff | Contributed model | chrf++ | Diff |
|---:|---|---|---:|---|---:|---:|---|---:|---:|
| 0 | flores101-devtest | ces+slk-eng/...2022-03-17 | 0.64605 | facebook/nllb-200-3.3B | 0.63173 | 0.014 | | | |
| 1 | flores200-devtest | zlw-eng/opus...2022-03-17 | 0.64606 | facebook/nllb-200-3.3B | 0.62916 | 0.017 | nllb-200-54.5B | 0.6443 | 0.002 |
| 2 | generaltest2022 | ces+slk-eng/...2022-03-13 | 0.69326 | facebook/nllb-200-3.3B | 0.66218 | 0.031 | Online-W | 0.78764 | -0.094 |
| 3 | multi30k_test_2016_flickr | ces+slk-eng/...2022-03-17 | 0.61126 | facebook/nll...illed-1.3B | 0.60664 | 0.005 | | | |
| 4 | multi30k_test_2018_flickr | ces+slk-eng/...2022-03-17 | 0.60538 | facebook/nll...illed-1.3B | 0.60073 | 0.005 | | | |
| 5 | newssyscomb2009 | sla-eng/opus...2024-08-17 | 0.54698 | facebook/nllb-200-1.3B | 0.53726 | 0.010 | | | |
| 6 | newstest2008 | zlw-eng/opus...2022-03-17 | 0.51994 | facebook/nllb-200-1.3B | 0.51098 | 0.009 | | | |
| 7 | newstest2009 | zlw-eng/opus...2022-03-17 | 0.54214 | facebook/nllb-200-3.3B | 0.52809 | 0.014 | | | |
| 8 | newstest2010 | zlw-eng/opus...2022-03-17 | 0.55649 | facebook/nllb-200-3.3B | 0.5528 | 0.004 | | | |
| 9 | newstest2011 | zlw-eng/opus...2022-03-17 | 0.5533 | facebook/nllb-200-3.3B | 0.53844 | 0.015 | | | |
| 10 | newstest2012 | zlw-eng/opus...2022-03-17 | 0.54636 | facebook/nllb-200-1.3B | 0.53581 | 0.011 | | | |
| 11 | newstest2013 | sla-eng/opus...2024-08-17 | 0.57123 | facebook/nllb-200-3.3B | 0.55938 | 0.012 | | | |
| 12 | newstest2014 | zlw-eng/opus...2022-03-17 | 0.62467 | facebook/nllb-200-3.3B | 0.60609 | 0.019 | | | |
| 13 | newstest2015 | ces+slk-eng/...2022-03-17 | 0.56931 | facebook/nllb-200-3.3B | 0.55689 | 0.012 | | | |
| 14 | newstest2016 | zlw-eng/opus...2022-03-17 | 0.59995 | facebook/nllb-200-3.3B | 0.5851 | 0.015 | | | |
| 15 | newstest2017 | zlw-eng/opus...2022-03-17 | 0.5627 | facebook/nllb-200-3.3B | 0.55619 | 0.007 | | | |
| 16 | newstest2018 | zlw-eng/opus...2022-03-17 | 0.56658 | facebook/nllb-200-3.3B | 0.55732 | 0.009 | nllb-200-54.5B | 0.56817 | -0.002 |
| 17 | newstest2021 | zlw-eng/opus...2022-03-17 | 0.58515 | facebook/nllb-200-1.3B | 0.55723 | 0.028 | Facebook-AI | 0.61703 | -0.032 |
| 18 | ntrex128 | ces+slk-eng/...2022-03-17 | 0.62 | facebook/nllb-200-3.3B | 0.6246 | -0.005 | | | |
| 19 | tatoeba-test-v2020-07-28 | ces+slk-eng/...2022-03-17 | 0.72571 | facebook/nllb-200-3.3B | 0.72193 | 0.004 | | | |
| 20 | tatoeba-test-v2021-03-30 | ces+slk-eng/...2022-03-17 | 0.72107 | facebook/nllb-200-3.3B | 0.7182 | 0.003 | | | |
| 21 | tatoeba-test-v2021-08-07 | ces+slk-eng/...2022-03-17 | 0.71808 | facebook/nllb-200-3.3B | 0.71525 | 0.003 | | | |
| average | | | 0.606 | | 0.595 | 0.011 | | 0.654 | -0.048 |
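The Diff columns are the OPUS-MT score minus the comparison system's score, and the average row is the plain mean over the listed benchmarks. The short sketch below reproduces that arithmetic for a few rows copied from the table; the variable names are only for illustration.

```python
# Sketch: Diff = OPUS-MT chrF++ minus the comparison system's chrF++,
# plus a plain mean over the selected benchmarks.
rows = [
    # (benchmark, opus_mt_chrfpp, external_chrfpp)
    ("flores101-devtest", 0.64605, 0.63173),
    ("generaltest2022",   0.69326, 0.66218),
    ("ntrex128",          0.62,    0.6246),
]

for name, opus, ext in rows:
    print(f"{name:20s} diff = {opus - ext:+.3f}")   # 0.014, 0.031, -0.005

avg_opus = sum(o for _, o, _ in rows) / len(rows)
avg_ext  = sum(e for _, _, e in rows) / len(rows)
print(f"average: OPUS-MT {avg_opus:.3f}  external {avg_ext:.3f}  diff {avg_opus - avg_ext:+.3f}")
```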