init
- eval/metric.first.answer.paragraph.questions_answers.lmqg_qg_ruquad.default.json +1 -0
- eval/metric.first.answer.paragraph_sentence.answer.lmqg_qg_ruquad.default.json +1 -0
- eval/samples.test.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.txt +0 -0
- eval/samples.test.hyp.paragraph_sentence.answer.lmqg_qg_ruquad.default.txt +0 -0
- eval/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.txt +0 -0
- eval/samples.validation.hyp.paragraph_sentence.answer.lmqg_qg_ruquad.default.txt +0 -0
eval/metric.first.answer.paragraph.questions_answers.lmqg_qg_ruquad.default.json
ADDED
@@ -0,0 +1 @@
+{"test": {"QAAlignedF1Score (BERTScore)": 0.8020638302134975, "QAAlignedRecall (BERTScore)": 0.8448789002153252, "QAAlignedPrecision (BERTScore)": 0.7647882298537283, "QAAlignedF1Score (MoverScore)": 0.5717037134082845, "QAAlignedRecall (MoverScore)": 0.6055083179170568, "QAAlignedPrecision (MoverScore)": 0.5439508094045942}, "validation": {"QAAlignedF1Score (BERTScore)": 0.802513149288662, "QAAlignedRecall (BERTScore)": 0.8457646330175733, "QAAlignedPrecision (BERTScore)": 0.7649000227511452, "QAAlignedF1Score (MoverScore)": 0.5718948814585387, "QAAlignedRecall (MoverScore)": 0.6056694577477068, "QAAlignedPrecision (MoverScore)": 0.5441556367981807}}
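The file is a single JSON object keyed by split ("test", "validation"), each mapping a metric name such as "QAAlignedF1Score (BERTScore)" to its score. A minimal sketch for loading and inspecting it; PATH simply points at the file added above, and the printing loop is illustrative:

    import json

    # Path of the metrics file added in this commit.
    PATH = "eval/metric.first.answer.paragraph.questions_answers.lmqg_qg_ruquad.default.json"

    with open(PATH, encoding="utf-8") as f:
        metrics = json.load(f)

    # One object per split, each mapping metric name -> score.
    for split, scores in metrics.items():
        for name, value in sorted(scores.items()):
            print(f"{split:<12} {name:<40} {value:.4f}")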
eval/metric.first.answer.paragraph_sentence.answer.lmqg_qg_ruquad.default.json
ADDED
@@ -0,0 +1 @@
+{"validation": {"Bleu_1": 0.46936236391910824, "Bleu_2": 0.4191560871393061, "Bleu_3": 0.373845040274865, "Bleu_4": 0.3282131241428463, "METEOR": 0.38881381467757414, "ROUGE_L": 0.5098002674107956, "BERTScore": 0.8672539898275003, "MoverScore": 0.7491983466594802, "AnswerF1Score": 65.20721911028629, "AnswerExactMatch": 44.579030976965846}, "test": {"Bleu_1": 0.45614739471803817, "Bleu_2": 0.4075634793631588, "Bleu_3": 0.36223404995437214, "Bleu_4": 0.3164426277312652, "METEOR": 0.38785711844515625, "ROUGE_L": 0.4973149859250398, "BERTScore": 0.8622405753072242, "MoverScore": 0.746364114280363, "AnswerF1Score": 64.30896561122552, "AnswerExactMatch": 44.44003177124702}}
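Note that AnswerF1Score and AnswerExactMatch are reported on a 0-100 scale, unlike the other metrics. Assuming they follow the usual SQuAD-style definitions (token-overlap F1 and exact match between each hypothesis answer and its reference, averaged over the corpus), a sketch of that computation; text normalization (lowercasing, punctuation stripping) is omitted, and token_f1/answer_scores are illustrative names, not lmqg API:

    from collections import Counter

    def token_f1(hyp: str, ref: str) -> float:
        """Token-level F1 between one predicted and one reference answer."""
        hyp_tokens, ref_tokens = hyp.split(), ref.split()
        common = Counter(hyp_tokens) & Counter(ref_tokens)
        overlap = sum(common.values())
        if overlap == 0:
            return 0.0
        precision = overlap / len(hyp_tokens)
        recall = overlap / len(ref_tokens)
        return 2 * precision * recall / (precision + recall)

    def answer_scores(hyps, refs):
        """Corpus-level AnswerF1Score and AnswerExactMatch on a 0-100 scale."""
        n = len(refs)
        f1 = 100 * sum(token_f1(h, r) for h, r in zip(hyps, refs)) / n
        em = 100 * sum(h == r for h, r in zip(hyps, refs)) / n
        return f1, em

The hypothesis side of such a computation would come from the samples.*.hyp.paragraph_sentence.answer.*.txt files added below, with references taken from lmqg_qg_ruquad.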
eval/samples.test.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.txt
ADDED
The diff for this file is too large to render.
eval/samples.test.hyp.paragraph_sentence.answer.lmqg_qg_ruquad.default.txt
ADDED
The diff for this file is too large to render.
eval/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.txt
ADDED
The diff for this file is too large to render.
eval/samples.validation.hyp.paragraph_sentence.answer.lmqg_qg_ruquad.default.txt
ADDED
The diff for this file is too large to render.