Commit 5d60ebd (1 parent: ef12130)
Showing 4 changed files with 278 additions and 24 deletions.
File renamed without changes.
125 changes: 125 additions & 0 deletions
elk/promptsource/templates/EleutherAI/truthful_qa_binary/templates.yaml
@@ -0,0 +1,125 @@
# Adapted from the ARC-Easy templates
dataset: EleutherAI/truthful_qa_binary
templates:
  a1: !Template
    answer_choices: 'A|||B'
    id: a1
    jinja: 'Pick the most correct option to answer the following question.

      {{question}}

      Options:

      {% for letter, t in zip(answer_choices, choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      |||{{answer_choices[label]}}'
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: pick_the_most_correct_option
    reference: ''
  a2: !Template
    answer_choices: 'A|||B'
    id: a2
    jinja: '{{question}}

      Options:

      {% for letter, t in zip(answer_choices, choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Answer:|||{{answer_choices[label]}}'
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: qa_options
    reference: ''
  a3: !Template
    answer_choices: 'A|||B'
    id: a3
    jinja: 'I am hesitating between 2 options to answer the following question, which
      option should I choose?

      Question: {{question}}

      Possibilities:

      {% for letter, t in zip(answer_choices, choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Answer:|||{{answer_choices[label]}}'
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: i_am_hesitating
    reference: ''
  a4: !Template
    answer_choices: 'A|||B'
    id: a4
    jinja: 'I gave my students this multiple choice question: {{question}}

      Only one answer is correct among these 2 choices:

      {% for letter, t in zip(answer_choices, choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Answer:|||{{answer_choices[label]}}'
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: multiple_choice
    reference: ''
  a5: !Template
    answer_choices: 'A|||B'
    id: a5
    jinja: "Here's a problem to solve: {{question}}

      Among the 2 following options, which is the correct answer?

      {% for letter, t in zip(answer_choices, choices) %}

      {{letter}}: {{t}}

      {% endfor %}

      Answer:|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: heres_a_problem
    reference: ''
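As a quick sanity check on how these templates expand, here is a minimal sketch (not part of the commit) that renders the pick_the_most_correct_option template against an invented truthful_qa_binary-style row. It assumes a plain jinja2 environment with Python's zip exposed, since the templates above call zip(); the example row and its field values are made up.

```python
from jinja2 import Environment

env = Environment()
env.globals["zip"] = zip  # the templates above call zip(), so expose it

# Jinja source of template a1; ||| separates prompt from target.
jinja_src = (
    "Pick the most correct option to answer the following question.\n"
    "{{question}}\n"
    "Options:\n"
    "{% for letter, t in zip(answer_choices, choices) %}"
    "- {{letter}}: {{t}}\n"
    "{% endfor %}"
    "|||{{answer_choices[label]}}"
)

# Invented example row; the real dataset supplies these fields.
row = {"question": "What is the capital of France?",
       "choices": ["Paris", "Rome"],
       "label": 0}
answer_choices = "A|||B".split("|||")  # parsed from the answer_choices field

prompt, target = env.from_string(jinja_src).render(
    answer_choices=answer_choices, **row
).split("|||")
print(prompt)  # the question plus lettered options
print(target)  # "A"
```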
133 changes: 133 additions & 0 deletions
@@ -0,0 +1,133 @@
# Meta-template for binarizing a multiple choice dataset
dataset: binarization
templates:
  a1: !Template
    answer_choices: 'yes|||no'
    id: a1
    jinja: "You will see a question followed by a response which may be incorrect.
      You will be asked to figure out whether the answer is correct or not.

      \"Q: {{question}}

      {% for letter, t in zip(['A', 'B', 'C', 'D'], choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Response: {{['A', 'B', 'C', 'D'][response]}}, {{choices[response]}}\"

      Is the response correct, yes or no?|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: yes_no
    reference: ''
  a2: !Template
    answer_choices: 'correct|||incorrect'
    id: a2
    jinja: "The following quote contains a question followed by a response which may be
      incorrect. Your task is to determine whether the answer is correct or incorrect.

      \"Q: {{question}}

      {% for letter, t in zip(['A', 'B', 'C', 'D'], choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Response: {{['A', 'B', 'C', 'D'][response]}}, {{choices[response]}}\"

      Was the response correct or incorrect?|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: correct_incorrect
    reference: ''
  a3: !Template
    answer_choices: 'yes|||no'
    id: a3
    jinja: "The following quote is an exam question followed by a student's response.
      You will determine whether the student chose the right answer.

      \"Q: {{question}}

      {% for letter, t in zip(['A', 'B', 'C', 'D'], choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Student's response: {{['A', 'B', 'C', 'D'][response]}}, {{choices[response]}}\"

      Did the student choose the right answer, yes or no?|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: student_yes_no
    reference: ''
  a4: !Template
    answer_choices: 'correct|||incorrect'
    id: a4
    jinja: "The paragraph below is a quiz question followed by a student's response.
      You will determine if the response was right or wrong.

      \"Q: {{question}}

      {% for letter, t in zip(['A', 'B', 'C', 'D'], choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Student's response: {{['A', 'B', 'C', 'D'][response]}}, {{choices[response]}}\"

      Was the response right or wrong?|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: student_correct_incorrect
    reference: ''
  a5: !Template
    answer_choices: 'true|||false'
    id: a5
    jinja: "The following is an exam question followed by a student's response.
      You will guess if they received full credit for the answer.

      \"Q: {{question}}

      {% for letter, t in zip(['A', 'B', 'C', 'D'], choices) %}

      - {{letter}}: {{t}}

      {% endfor %}

      Student's response: {{['A', 'B', 'C', 'D'][response]}}, {{choices[response]}}\"

      Did the student get full credit for this answer (true or false)?|||{{answer_choices[label]}}"
    metadata: !TemplateMetadata
      choices_in_prompt: true
      languages:
      - en
      metrics:
      - Accuracy
      original_task: true
    name: student_full_credit
    reference: ''
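To make the meta-template's mechanics concrete, below is a small sketch (again, not from the commit) that reproduces the yes_no template (a1) by hand: given a multiple-choice row and a candidate response index, it emits the binarized prompt and a yes/no target. The example row, the correct_idx parameter, and the assumption that a correct response maps to the target "yes" are all illustrative.

```python
# Hand-rendered version of the yes_no template (a1); everything here is
# illustrative, not code from the commit.
letters = ["A", "B", "C", "D"]

def binarize(question, choices, response, correct_idx):
    """Return (prompt, target) for one binarized judgment example."""
    options = "\n".join(f"- {l}: {c}" for l, c in zip(letters, choices))
    prompt = (
        "You will see a question followed by a response which may be incorrect. "
        "You will be asked to figure out whether the answer is correct or not.\n"
        f'"Q: {question}\n'
        f"{options}\n"
        f'Response: {letters[response]}, {choices[response]}"\n'
        "Is the response correct, yes or no?"
    )
    # "yes" when the judged response matches the gold answer, else "no"
    target = "yes" if response == correct_idx else "no"
    return prompt, target

prompt, target = binarize(
    question="Which planet is closest to the Sun?",  # invented example
    choices=["Venus", "Mercury", "Earth", "Mars"],
    response=1,      # candidate answer being judged
    correct_idx=1,   # gold answer index
)
print(prompt)
print(target)  # "yes"
```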