model update
    	
README.md CHANGED
@@ -27,19 +27,19 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4 (
+    - name: BLEU4 (Question & Answer Generation)
       type: bleu4_question_answer_generation
       value: 12.93
-    - name: ROUGE-L (
+    - name: ROUGE-L (Question & Answer Generation)
       type: rouge_l_question_answer_generation
       value: 36.54
-    - name: METEOR (
+    - name: METEOR (Question & Answer Generation)
       type: meteor_question_answer_generation
       value: 30.35
-    - name: BERTScore (
+    - name: BERTScore (Question & Answer Generation)
       type: bertscore_question_answer_generation
       value: 90.55
-    - name: MoverScore (
+    - name: MoverScore (Question & Answer Generation)
       type: moverscore_question_answer_generation
       value: 61.82
     - name: QAAlignedF1Score-BERTScore
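For quick reference, a sketch of what the renamed metrics block in the README front matter looks like after this update. The metric names, types, and values are taken from the diff above; the exact indentation is assumed from the usual model-index layout, and the QAAlignedF1Score-BERTScore entry is omitted because its type and value fall outside this hunk.

```yaml
    # model-index metrics after the rename (indentation assumed; values from the diff)
    metrics:
    - name: BLEU4 (Question & Answer Generation)
      type: bleu4_question_answer_generation
      value: 12.93
    - name: ROUGE-L (Question & Answer Generation)
      type: rouge_l_question_answer_generation
      value: 36.54
    - name: METEOR (Question & Answer Generation)
      type: meteor_question_answer_generation
      value: 30.35
    - name: BERTScore (Question & Answer Generation)
      type: bertscore_question_answer_generation
      value: 90.55
    - name: MoverScore (Question & Answer Generation)
      type: moverscore_question_answer_generation
      value: 61.82
```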

