Select language: dropdown selectors listing ISO 639-3 language codes (with script tags where needed), from ace_Arab to zul.
Navigation: [swap] [compare scores] [compare models] [map] [release history] [uploads]
OPUS-MT Dashboard
Language pair: eng-fra
Models: [all models] [OPUS-MT] [external] [contributed] [compare]
Benchmark: all benchmarks (selected) [average score]
Evaluation metric: [bleu] [spbleu] [chrf] chrf++ (selected; see the scoring sketch below) [comet]
Chart Type: [barchart] [diff]
Legend: blue = OPUS-MT / Tatoeba-MT models, grey = external models, purple = user-contributed
Render chart with: [gd] [plotly]
Option: exclude scores of user-contributed translations
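The scores in the table below are chrF++ values (character n-gram F-score with word bigrams). A minimal sketch of how such a score can be computed with the sacrebleu library follows; whether the dashboard itself uses sacrebleu is an assumption, and the file names are placeholders. Note that sacrebleu reports chrF++ on a 0-100 scale, while the dashboard appears to use 0-1.

```python
# Sketch: scoring one system output with chrF++ via sacrebleu.
# Assumption: this mirrors how the dashboard's chrF++ numbers are produced;
# the file paths below are placeholders, not files from the dashboard.
from sacrebleu.metrics import CHRF

with open("hyp.fra.txt") as f:          # system translations, one per line
    hypotheses = [line.rstrip("\n") for line in f]
with open("ref.fra.txt") as f:          # reference translations, one per line
    references = [line.rstrip("\n") for line in f]

chrfpp = CHRF(word_order=2)             # word_order=2 turns chrF into chrF++
score = chrfpp.corpus_score(hypotheses, [references])
print(score.score / 100)                # rescale 0-100 to the dashboard's 0-1 range
```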
Model scores (comparison between OPUS-MT and external models). Diff = OPUS-MT chrF++ minus the comparison model's chrF++; each row in the dashboard also links to a side-by-side output comparison. Contributed-model columns are filled only where a user-contributed translation exists. A small sketch for reproducing the Diff and average values follows the table.
ID | Benchmark | OPUS-MT model | chrF++ | External model | chrF++ | Diff | Contributed model | chrF++ | Diff
0 | flores101-devtest | eng-fra/opus...2022-03-09 | 0.70683 | facebook/nllb-200-3.3B | 0.67364 | 0.033
1 | flores200-devtest | eng-fra/opus...2022-03-09 | 0.70683 | facebook/nllb-200-3.3B | 0.67222 | 0.035 | nllb-200-54.5B | 0.69676 | 0.010
2 | multi30k_test_2016_flickr | eng-fra/opus...2022-03-09 | 0.71176 | facebook/nllb-200-3.3B | 0.69723 | 0.015
3 | multi30k_test_2017_flickr | eng-fra/opus...2022-03-09 | 0.71356 | facebook/nll...illed-1.3B | 0.7011 | 0.012
4 | multi30k_test_2017_mscoco | gem-fra+ita+...2024-08-17 | 0.73235 | facebook/nll...illed-1.3B | 0.7061 | 0.026
5 | multi30k_test_2018_flickr | eng-fra/opus...2022-03-09 | 0.64932 | facebook/nll...illed-1.3B | 0.64539 | 0.004
6 | newsdiscusstest2015 | eng-fra/opus...2022-03-09 | 0.6263 | facebook/nll...illed-1.3B | 0.61207 | 0.014
7 | newssyscomb2009 | eng-fra/opus...2022-03-09 | 0.56209 | facebook/nllb-200-3.3B | 0.56756 | -0.005
8 | newstest2008 | gem-fra+ita+...2024-08-17 | 0.53021 | facebook/nllb-200-1.3B | 0.52896 | 0.001
9 | newstest2009 | gmw-deu+eng+...2024-05-30 | 0.5531 | facebook/nllb-200-3.3B | 0.5537 | -0.001
10 | newstest2010 | eng-fra/opus...2022-03-09 | 0.5771 | facebook/nll...illed-1.3B | 0.57585 | 0.001
11 | newstest2011 | eng-fra/opus...2022-03-09 | 0.59055 | facebook/nllb-200-3.3B | 0.59064 | -0.000
12 | newstest2012 | eng-fra/opus...2022-03-09 | 0.57134 | facebook/nllb-200-3.3B | 0.57069 | 0.001
13 | newstest2013 | eng-fra/opus...2022-03-09 | 0.57459 | facebook/nllb-200-3.3B | 0.57461 | -0.000
14 | newstest2014 | eng-fra/opus...2022-03-09 | 0.64497 | facebook/nllb-200-3.3B | 0.63804 | 0.007
15 | ntrex128 | eng-fra/opus...2022-03-09 | 0.6055 | facebook/nll...illed-1.3B | 0.58768 | 0.018
16 | tatoeba-test-v2020-07-28 | eng-fra/opus...2022-03-09 | 0.67298 | facebook/nll...illed-1.3B | 0.65684 | 0.016
17 | tatoeba-test-v2021-03-30 | eng-fra/opus...2022-03-09 | 0.67987 | facebook/nll...illed-1.3B | 0.66262 | 0.017
18 | tatoeba-test-v2021-08-07 | eng-fra/opus...2022-03-09 | 0.68676 | facebook/nllb-200-3.3B | 0.66593 | 0.021
19 | tico19-test | eng-fra/opus...2022-03-09 | 0.6069 | facebook/nllb-200-3.3B | 0.61946 | -0.013
average | | | 0.635 | | 0.625 | 0.010 | | 0.697 | -0.062
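A minimal sketch of how the Diff and average values can be reproduced from the scores above. The numbers are copied from the table; the subset of benchmarks and the variable names are illustrative only.

```python
# Reproduce "Diff" (OPUS-MT minus comparison model) and column averages
# for a small subset of the rows above. Scores are taken from the table;
# the averages printed here cover only this subset, not the full table.
rows = [
    # (benchmark, opus_mt_chrf, external_chrf)
    ("flores101-devtest", 0.70683, 0.67364),
    ("flores200-devtest", 0.70683, 0.67222),
    ("newssyscomb2009",   0.56209, 0.56756),
    ("tico19-test",       0.60690, 0.61946),
]

for name, opus, ext in rows:
    # Diff = OPUS-MT chrF++ minus the comparison model's chrF++
    print(f"{name:22s} diff = {opus - ext:+.3f}")

avg_opus = sum(r[1] for r in rows) / len(rows)
avg_ext = sum(r[2] for r in rows) / len(rows)
print(f"average OPUS-MT = {avg_opus:.3f}, external = {avg_ext:.3f}, "
      f"diff = {avg_opus - avg_ext:+.3f}")
```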