diff --git "a/varierr.json" "b/varierr.json" --- "a/varierr.json" +++ "b/varierr.json" @@ -1,500 +1,500 @@ -{"id":"664","context":"Part of the reason for the difference in pieces per possible delivery may be due to the fact that five percent of possible residential deliveries are businesses, and it is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses.","statement":"It is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses, and part of the reason for the difference in pieces per possible delivery, may be due to the fact that five percent of possible residential deliveries are businesses.","entailment":[{"annotator":2,"id":"664-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Statement just changed the order of two hypothesis in the context.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":0,"id":"664-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason for the diffenrence in pieces per possible delivery mentioned in the context is that the difference percentage of businesses deliveries on residential and rural routes. But the reason in the statement only include the percentage of residential deliveries, not the diffenrence of deliveries.","self_corrected":false}],"idk":[1,3],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1265","context":"In this enclosed but airy building, you'll find ladies with large machetes expertly chopping off hunks of kingfish, tuna, or shark for eager buyers.","statement":"You'll find small lepers chopping of chunks of tuna, its the only place they can work.","entailment":[],"neutral":[{"annotator":0,"id":"1265-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether the ladies are small lepers and whether its the only place they can work.","self_corrected":false},{"annotator":1,"id":"1265-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about lepers or where they could work.","self_corrected":false},{"annotator":2,"id":"1265-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Small lepers\" don't have to be \"ladies\"; we don't know whether \"small lepers\" can find other jobs.","self_corrected":false},{"annotator":3,"id":"1265-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Lepers and the only place to work at are not mentioned 
","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1552","context":"The park on the hill of Monte makes a good playground, while the ride down in a wicker toboggan is straight out of an Old World theme park (though surely tame for older kids).","statement":"the park on the Hill of Monte is only for children.","entailment":[],"neutral":[{"annotator":0,"id":"1552-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether the park is only for children.","self_corrected":false},{"annotator":1,"id":"1552-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Only because the park makes a good playground it doesn't necessarily have to be only for kids.","self_corrected":false},{"annotator":2,"id":"1552-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No proof that the park can not be for adults.","self_corrected":false},{"annotator":3,"id":"1552-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not mentioned that it's only for children ","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1552-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The phrase \"makes a good playground\" suggests that the park is not a designated playground and thus is open for all age groups. Also, even if the park would be a designated playground, these can typically be also used by adults (at least the parents).","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"285","context":"I touched my palm to his mutilated cheek, and tried to stem my instinctive revulsion.","statement":"You could see where the bear had scratched across his cheek.","entailment":[],"neutral":[{"annotator":0,"id":"285-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context contains no information about him being scratched on the cheek by a bear.","self_corrected":false},{"annotator":1,"id":"285-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about how his cheek was mutilated, i.e. 
that it was a bear is not entailed.","self_corrected":false},{"annotator":2,"id":"285-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know who\/what makes his cheek mutilated.","self_corrected":false},{"annotator":3,"id":"285-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know if it's a bear that scratched him ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"515","context":"She wears either revealing clothes or professional clothes (or perhaps both).","statement":"She only wears short skirts.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"515-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"She doesn't only wear short skirts. She wears revealing clothes or professional clothes, the former don't have to be just short skirts, the latter are most likely not short skirts.","self_corrected":false},{"annotator":2,"id":"515-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Usually, \"professinal clothes\" are not \"short skirts\".","self_corrected":false},{"annotator":3,"id":"515-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"She could also wear professional clothes ","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1474","context":"that doesn't seem fair does it","statement":"That might possibly be fair.","entailment":[],"neutral":[{"annotator":1,"id":"1474-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can be interpreted as saying that it seems unfair but that this maybe wouldn't hold under closer inspection. 
In that case it doesn't really say something about the actual fairness.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"1474-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context suggests that this may not be fair, while the statement suggests that it may be fair.","self_corrected":false},{"annotator":1,"id":"1474-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can be interpreted as saying that it's really not fair, so it cannot be possibly fair.","self_corrected":false},{"annotator":2,"id":"1474-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context suggests a higher possibility of unfairness in this matter, which the statement does not reflect.","self_corrected":false},{"annotator":3,"id":"1474-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's a rhetorical question. The speaker means it's not fair ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1436","context":"In 1982, Wallace won his last race for governor with a quarter of the black votes cast in the Democratic primary, a fact alluded to in a written epilogue at the end of the film.","statement":"Wallace was reelected as governor.","entailment":[{"annotator":0,"id":"1436-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentioned that Wallance won his last race for gevernor, we don't know if this was his first win or if he won again.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1436-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about whether Wallace was governer before his 1982 win.","self_corrected":false},{"annotator":2,"id":"1436-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Wallace was elected as governor, but we don't know whether he was \"reelected\".","self_corrected":false},{"annotator":3,"id":"1436-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know if he was governor 
before","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1338","context":"farmworkers conducted by the U.S.","statement":"Some farm laborers were sampled.","entailment":[],"neutral":[{"annotator":0,"id":"1338-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context contains no information about whether the conduction sampled the farmworkers.","self_corrected":false},{"annotator":2,"id":"1338-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether it is a census or a sampling survey.","self_corrected":false}],"contradiction":[],"idk":[1,3],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"362","context":"The next year, he built himself a palace, Iolani, which can still be toured in Honolulu.","statement":"Lolani was built in only 1 year.","entailment":[{"annotator":1,"id":"362-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"The next year\" can be interpreted as indicating that the building of Lolani was concluded in the same year.","self_corrected":false},{"annotator":3,"id":"362-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was built \"the next year\". 
","self_corrected":false}],"neutral":[{"annotator":0,"id":"362-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context makes no mention of how long it took to build lolani.","self_corrected":false},{"annotator":1,"id":"362-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"The next year\" can be interpreted as indicating that the building of Lolani was started in the next year.","self_corrected":false},{"annotator":2,"id":"362-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know when did Lolani start to be built.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"753","context":"Indeed, recent economic research suggests that investment in information technology explains most of the acceleration in labor productivity growth-a major component of overall economic growth-since 1995.","statement":"Investment in the financial sector explains most of the acceleration in labor productivity.","entailment":[],"neutral":[{"annotator":1,"id":"753-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Some research only suggests that most of the acceleration comes from investments in information technology. The research can be wrong and it could still be true that most of the acceleration comes from the financial sector.","self_corrected":false},{"annotator":2,"id":"753-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know whether investment in information technology is a subsector of investment in financial sector, or reverse.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"753-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason of the acceleration in labor productivity is the investment in information technology, not in the financial sector.","self_corrected":false},{"annotator":1,"id":"753-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The the \"suggestion\" of the cited research is strong, then it is likely that most of the acceleration comes from investments in information technologies. 
This precludes most of the acceleration coming from investments in the financial sector.","self_corrected":false},{"annotator":2,"id":"753-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Investment in information technology explains most of the acceleration in labor productivity, not investment in financial sector.","self_corrected":false},{"annotator":3,"id":"753-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It should be information technology that is invested in","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"110","context":"These revelations were embarrassing to Clinton's opponents, wrote the Washington Post . The Sun-Times quoted Rahm Emanuel, Stephanopoulos' successor, on the From Day One I always thought this was politically motivated and had politics written all over it; after five years, it is nice to have the truth catch up with the president's political opponents.","statement":"Clinton's supporters were pleased with how the hearings went.","entailment":[],"neutral":[{"annotator":0,"id":"110-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions clinton's opponents, not clinton's supporters.","self_corrected":false},{"annotator":1,"id":"110-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is about a single supporter [probably?] 
of Clinton so we cannot deduce that all or most of the supporters were pleased with the hearing.","self_corrected":false},{"annotator":2,"id":"110-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"These revelations were embarrassing to Clinton's opponents, but Clinton's supporters can also be unsatisfied with process for some reasons.","self_corrected":false},{"annotator":3,"id":"110-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know there are hearings ","self_corrected":false}],"contradiction":[{"annotator":3,"id":"110-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It was embarrassing to them","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1456","context":"The disputes among nobles were not the first concern of ordinary French citizens.","statement":"Ordinary French citizens were not concerned with the disputes among nobles.","entailment":[{"annotator":1,"id":"1456-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context, \"The first concern\" can be read as a pars pro toto which would mean that it was really no concern at all.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1456-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"not the first concern\" doesn't mean not the concern. The statement can be true or false.","self_corrected":false},{"annotator":1,"id":"1456-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It might not be the most important concern to the French citizens, but maybe an important concern after all.","self_corrected":false},{"annotator":2,"id":"1456-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The disputes among nobles could be second concern of ordinary French citizens. ","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1456-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They could be concerned. 
But it is not their first concern ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","contradiction"],"error_labels":["entailment"],"has_ambiguity":true} -{"id":"248","context":"and i and i may have been the only one that did both because the mentality in Dallas was that you couldn't like both you had to like one and hate the other","statement":"I did not follow the mentality in Dallas, of liking only one team.","entailment":[{"annotator":0,"id":"248-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context is stated that the author likes both, but the mentality in Dallas was that you couldn't like both. So the statement is true.","self_corrected":false},{"annotator":1,"id":"248-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states that the mentality in Dallas was to like only one team and \"may have been the only one\" strongly suggests that the author did not adhere to this mentality.","self_corrected":false},{"annotator":2,"id":"248-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I did both instead of only liking one.","self_corrected":false},{"annotator":3,"id":"248-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because I like both","self_corrected":false}],"neutral":[{"annotator":2,"id":"248-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know what does \"did both\" mean. 
It could refer to liking both teams, but also could be others, for example likes both McDonalds and Burger King.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"531","context":"The governing statute provides that a committee consisting of the Comptroller General, the Speaker of the House and President Pro Tempore of the Senate, the Majority and Minority leaders, and the Chairmen and Ranking Minority Members of the Senate Governmental Affairs and House Government Reform Committees recommend an individual to the President for appointment.","statement":"The process is long and will be reformed in the coming years.","entailment":[],"neutral":[{"annotator":0,"id":"531-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context makes no mention about how long the process takes and whether it will be reformed.","self_corrected":false},{"annotator":1,"id":"531-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"While the context could suggest that the process takes long because so many individuals are involved, it does not say anything about reform.","self_corrected":false},{"annotator":2,"id":"531-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Reform\" is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"531-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The time duration of the process is not mentioned ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"209","context":"Even analysts who had argued for loosening the old standards, by which the market was clearly overvalued, now think it has maxed out for a while.","statement":"Some analysts wanted to make the old standards less restrictive for investors.","entailment":[{"annotator":2,"id":"209-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"... analysts ... 
had argued for loosening the old standards\"","self_corrected":false}],"neutral":[{"annotator":0,"id":"209-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that there are analysts who had wanted to make the old standards less restrictive, but doesn't mention for whom should old standards be loosened.","self_corrected":false},{"annotator":1,"id":"209-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context whether the standards should have been loosened specifically for investors.","self_corrected":false},{"annotator":3,"id":"209-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known if the loosening was thought for investors ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"975","context":"uh plastic is just too easy i mean that's the that's the whole problem with it um have","statement":"I find plastic to be too easy to use.","entailment":[{"annotator":0,"id":"975-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context refers to the problem of plastic is that it it too easy. So the statement could be true, because too tasy to use is indeed a problem of plastic. 
","self_corrected":false},{"annotator":1,"id":"975-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states that the speaker finds plastic too easy.","self_corrected":true}],"neutral":[{"annotator":0,"id":"975-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement could be undetermined because in the context it doesn't explicitly state to what is plastic too easy, could be too easy to use or maybe to produce.","self_corrected":false},{"annotator":2,"id":"975-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Plastics can be just too easy to \"use\", to \"produce\", to \"dump\"...","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"645","context":"The political cleansing that did not happen through the impeachment process leaves Clinton with a great and serious burden.","statement":"There was no such instance of political cleansing.","entailment":[{"annotator":1,"id":"645-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context states that a specific instance of political cleansing did not happen and we can assume that the statement refers to that instance.","self_corrected":false},{"annotator":3,"id":"645-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, because the political cleansing did not happen ","self_corrected":true}],"neutral":[{"annotator":2,"id":"645-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\u201cThe political cleansing that did not happen through the impeachment process\u201d, but it could happen anywhere else.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"645-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The political cleansing did exist, as mentioned in the context. 
It just didn't happen throught the impeachment process.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"1592","context":"Larger ski resorts are 90 minutes away.","statement":"The largest resort is actually 100 minutes away.","entailment":[],"neutral":[{"annotator":0,"id":"1592-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the larger ski resorts not the largest resort.","self_corrected":false},{"annotator":1,"id":"1592-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The largest resort can be among the larger resorts 90 minutes away or it could be even further away.","self_corrected":false},{"annotator":2,"id":"1592-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Larger one and the largest one can be different.","self_corrected":false},{"annotator":3,"id":"1592-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the largest ski resort ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"561","context":"The city was founded in the third millennium b.c. on the north shore of the bay, and reached a peak during the tenth century b.c. , when it was one of the most important cities in the Ionian Federation the poet Homer was born in S myrna during this period.","statement":"The city was founded in the third millennium","entailment":[{"annotator":1,"id":"561-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context states that the city was founded in the third millenium b.c. and we can assume that the third millenium mentioned in the statement is also b.c.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":0,"id":"561-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The city was founded in the third millennium b.c., not in the third millennium. 
","self_corrected":false},{"annotator":2,"id":"561-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\u201cthe third millennium\u201d is different from \u201cthe third millennium b.c.\u201d","self_corrected":false},{"annotator":3,"id":"561-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It should be third millennium bc.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1010","context":"Her state is probably to be attributed to the mental shock consequent on recovering her memory.\"","statement":"It is too bad that she never regained her memory.","entailment":[],"neutral":[{"annotator":3,"id":"1010-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It is not known if she will regain the memory in the future","self_corrected":true}],"contradiction":[{"annotator":0,"id":"1010-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, she regained her memory. it is incorrect to say that she never regained her memory.","self_corrected":false},{"annotator":1,"id":"1010-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that something probably happened because she regained her memory. Thus, she has regained her memory at some point.","self_corrected":false},{"annotator":2,"id":"1010-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"She must regain her memory first to get any consequence on that.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"10","context":"Isn't a woman's body her most personal property?","statement":"Women's bodies belong to themselves, they should decide what to do with it. ","entailment":[{"annotator":0,"id":"10-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that women's bodies are her their personal property. Personal property of women does belong to themselves. So the statement is true. 
","self_corrected":false},{"annotator":1,"id":"10-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can be interpreted as a rhetorical question. In that case, it reasonably entails the statement.","self_corrected":false},{"annotator":2,"id":"10-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If a woman's body is her personal property, then the body belongs to her and she has right to dominate it.","self_corrected":false}],"neutral":[{"annotator":1,"id":"10-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can be interpreted as a honest question. In that case, it's not affirmative and thus neutral to the statement.","self_corrected":false},{"annotator":3,"id":"10-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"context and statement do not contradicting each other, or one entail the other","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1457","context":"General Motors, for instance, lost $460 million to strikes in 1997, but investors treated the costs as a kind of extraordinary charge and valued the company as if the losses had never happened.","statement":"GM lost a lot of money in labor disputes but was victorious in the end.","entailment":[{"annotator":1,"id":"1457-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context says that investors held up a high evaluation of GM even after the strikes cost the company a lot of money. 
This can be interpreted as a win for the company.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1457-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The loss of money was treated by investor as a extraordinary charge and as if it had never happend, so this was a trick of treating the loss, but we can't conclude that it was victorious.","self_corrected":false},{"annotator":1,"id":"1457-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is conceivable that GM was valued highly by the investors even after losing a lot of money to the strike and additionally losing the subsequent negotiations.","self_corrected":false},{"annotator":2,"id":"1457-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether GM was \"victorious\" in the end.","self_corrected":false},{"annotator":3,"id":"1457-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known if in the end it was victorious","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1384","context":"Bauerstein had been at Styles on the fatal night, and added: \"He said twice: 'That alters everything.' And I've been thinking. ","statement":"The fact that Styles was at Bauerstein changes everything.","entailment":[],"neutral":[{"annotator":0,"id":"1384-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context doesn't mention what changed everything.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1384-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement indicates that Bauerstein is a person and Styles is a location. 
The statement has it the other way around, so it is a likely contradiction.","self_corrected":true},{"annotator":2,"id":"1384-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is Bauerstein at Styles, not Styles at Bauerstein.","self_corrected":false},{"annotator":3,"id":"1384-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It should be Bauerstein had been at Styles","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"595","context":"The door did not budge.","statement":"The door was stuck, so it did not move. ","entailment":[{"annotator":3,"id":"595-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"budge entails that the door wouldn't move","self_corrected":true}],"neutral":[{"annotator":0,"id":"595-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the state of the door, has no information about what caused the state.","self_corrected":false},{"annotator":1,"id":"595-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There could have been other reasons that the door didn't moove. 
For example, that it was locked.","self_corrected":false},{"annotator":2,"id":"595-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know the reason why the door did not budge, maybe it worked well.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1549","context":"it's like but the time we went to Florida and needed to rent a car you know he believed in it","statement":"We rented a car while we were in Florida.","entailment":[{"annotator":1,"id":"1549-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that they needed to rent a car, so it's likely that they also did rent a car.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1549-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is unclear because it only mentions that they needed to rent a car, doesn't explain whether they did or not.","self_corrected":false},{"annotator":2,"id":"1549-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe back then they are so poor that they \u201cNeeded to rent\u201d, but could not afford it. ","self_corrected":false},{"annotator":3,"id":"1549-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known if they rented the car in the end. It's only known that they needed to rent a car","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"76","context":"yeah although i do worry that how easy this one was might be a bad lesson uh to the to the younger people um you know than there is the other generation","statement":"I do worry that it might be a bad lesson for the kids.","entailment":[{"annotator":0,"id":"76-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context refers to the worry about giving a bad lesson to the younger people, which matches the statement. 
","self_corrected":false},{"annotator":1,"id":"76-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker in the context explicitly says that they worry about it being a bad lesson for younger people which includes kids.","self_corrected":false},{"annotator":2,"id":"76-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"younger people can be kids, adolescents or young adults","self_corrected":false},{"annotator":3,"id":"76-entailment-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"true, if the younger people are considered as kids","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"76-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"younger people are not kids","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"64","context":"no North Carolina State","statement":"North Carolina is a county","entailment":[],"neutral":[{"annotator":0,"id":"64-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only doesn't state that whether North Carolina is a county or not.","self_corrected":false},{"annotator":2,"id":"64-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no North Carolina State, North Carolina could be a city or an area.","self_corrected":false},{"annotator":3,"id":"64-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context and statement are irrelevant","self_corrected":false}],"contradiction":[{"annotator":1,"id":"64-contradiction-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that North Carolina is a state. 
So it's not a county.","self_corrected":true},{"annotator":2,"id":"64-contradiction-2","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It is a state but not North Carolina, so North Carolina is a state instead of a country.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1031","context":"In the short term, U.S. consumers will benefit from cheap imports (as will U.S. multinationals that use parts made in East Asian factories).","statement":"U.S. consumers and factories in East Asia benefit from imports.","entailment":[],"neutral":[{"annotator":0,"id":"1031-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that the U.S. consumers and multinationals willl benefit from cheap imports, has no information about the benifits to factories in East Asia.","self_corrected":false},{"annotator":1,"id":"1031-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything whether the factories in East Asia also benefit from the imports","self_corrected":false},{"annotator":2,"id":"1031-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether \"factories in East Asia benefit from imports\".","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1031-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It should be U.S. cunsumers in U.S.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"494","context":"yeah it's a U S territory and it's just we own it or","statement":"I used to be great at remembering this type of thing, but now I don't.","entailment":[],"neutral":[{"annotator":0,"id":"494-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is irrelevant to what is discussed in the context. 
I think the topics are different.","self_corrected":false},{"annotator":1,"id":"494-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statements seem to be completely unrelated.","self_corrected":false},{"annotator":2,"id":"494-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can not judge \u201cmy memory\u201d based on the context.","self_corrected":false},{"annotator":3,"id":"494-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not relevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"398","context":"Evaluating the intent of the six principles, we observed that they naturally fell into three distinct sets, which we refer to as critical success factors.","statement":"All three distinct sets need to be filled in order to be considered successful.","entailment":[{"annotator":0,"id":"398-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The critical success factors are three distinct sets, which indicate that they should be filled to get success. This is consistent with the statement.","self_corrected":false},{"annotator":3,"id":"398-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The three sets are critical success factors. A success means three sets filled.","self_corrected":false}],"neutral":[{"annotator":2,"id":"398-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know whether all three sets are required to be filled, or maybe only one or two are enough.","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"428","context":"wow who can afford that my God i can't afford to miss a day let alone six","statement":"It's amazing that some people can afford to miss days from work, whereas I can't even afford to miss one.","entailment":[{"annotator":1,"id":"428-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The \"wow\" in the context indicates amazement at the fact that some people can afford to miss six days from work. 
","self_corrected":false}],"neutral":[{"annotator":0,"id":"428-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that the author can't afford to miss one day, doesn't contain any information that others can afford to miss days.","self_corrected":false},{"annotator":2,"id":"428-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":" It is cleat that \"I can't afford to miss a day\", but we don't know whether some people can afford to miss days from work.","self_corrected":false},{"annotator":3,"id":"428-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known if some people can afford it or not.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"608","context":"Flying at a discount should be more dangerous.","statement":"It's totally safe to take advantage of discounted flying.","entailment":[],"neutral":[{"annotator":1,"id":"608-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The stance given in the context that discounted flying should be more dangerous, can mean that it should be even more dangerous. 
Thus, it does not really say something about the safety of discounted flying.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"608-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that it's dangerous to fly at a discount, whereas the statement states that dictount flying is safe.","self_corrected":false},{"annotator":2,"id":"608-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not totally safe, it is more dangerous to choose discounted flying.","self_corrected":false},{"annotator":3,"id":"608-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context says more danguerous flying at discount, whereas statement says total safe.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"973","context":"The logic of analysis in case studies is the same","statement":"The logic for the case studies is the same thing as in the data collection.","entailment":[],"neutral":[{"annotator":0,"id":"973-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The same logic of analysis in case studies doesn't mean the same logic of case studies and data collection. The context doesn't mention the data collection. ","self_corrected":false},{"annotator":1,"id":"973-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not specify where else the logic is the same. No mention about data collection.","self_corrected":false},{"annotator":2,"id":"973-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, the subject is the logic of analysis in different case studies; in statement, the subject is the logic in case studies and the logic in data collection.","self_corrected":false},{"annotator":3,"id":"973-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known if the logic for case studies is same as in data collection from context. 
It could be same as in other fields.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1472","context":"yeah but uh do you have small kids","statement":"It matters not if children are involved.","entailment":[{"annotator":1,"id":"1472-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The speaker in the context asks about small kids, so at least it matters to them whether kids are involved.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1472-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Based on the context, we don't know if it matters to have kids.","self_corrected":true}],"contradiction":[{"annotator":2,"id":"1472-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question indicates that kids matter, otherwise there should be no \"but uh\".","self_corrected":false},{"annotator":3,"id":"1472-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context entails the importance of kids, which contradicts the statement.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment","neutral"],"has_ambiguity":false} -{"id":"879","context":"Marriage is an important institution.","statement":"Marriage is crucial to society.","entailment":[{"annotator":1,"id":"879-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"That something is an important institution can be interpreted as being important for society.","self_corrected":false},{"annotator":3,"id":"879-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Insitution is a part of society. 
Marriage being an import institution entails marriage being important to the society","self_corrected":false}],"neutral":[{"annotator":0,"id":"879-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Based on the context, we only knowthe attributes of marriage as an important institution,not whether it is inportant to society.","self_corrected":true},{"annotator":2,"id":"879-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"marriage could be crucial to other objects, like \"longevity\", \"personal health\", etc.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"836","context":"yeah really no kidding","statement":"It's crazy!","entailment":[{"annotator":0,"id":"836-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"People might say \"no kidding\" when they realize what they're talking about is crazy.","self_corrected":false},{"annotator":3,"id":"836-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Something sounds so crazy that people would think it is a joke. 
\"No kidding\" means it's as crazy as it sounds.","self_corrected":false}],"neutral":[{"annotator":2,"id":"836-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"it can be not crazy but serious.","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1568","context":"The Women's Haven, which provides shelter and outreach to domestic-violence victims, already has a full-time attorney.","statement":"The Haven is a useful resource in the community.","entailment":[{"annotator":1,"id":"1568-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Providing shelter and outreach to domestic-violence victims sounds like being useful for the community.","self_corrected":false},{"annotator":3,"id":"1568-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Haven provides shelter to victims, making it a useful resource in the society","self_corrected":false}],"neutral":[{"annotator":0,"id":"1568-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context contains no information onthe importance of the Haven.","self_corrected":true},{"annotator":2,"id":"1568-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The Women's Haven is not the Haven.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"451","context":"The newspaper publishes just one letter a week from a reader, always with an editorial riposte at the bottom.","statement":"There are many letters submitted each week, but only one is chosen.","entailment":[],"neutral":[{"annotator":0,"id":"451-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The newspaper publishes only one letter a week, either because they receive only one per week or because they receive many but choose only one. 
We don't know which is true.","self_corrected":false},{"annotator":1,"id":"451-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about how many letters are submitted.","self_corrected":false},{"annotator":2,"id":"451-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether there are many letters submitted.","self_corrected":false},{"annotator":3,"id":"451-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known if many letters are submitted, but only known that only one is published.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1595","context":"Meanwhile, a site established for the WorldAid '96 Global Expo and Conference on Emergency Relief, which took place last fall, gives you a firsthand glimpse of the frequently crass world of the relief business (note the long list of commercial exhibitors in attendance).","statement":"WorldAid had a GLobal expo in 2002.","entailment":[],"neutral":[{"annotator":0,"id":"1595-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context mentions that WordAid had Global Expo last fall, we don't know whether last year was 2002 or not.","self_corrected":false},{"annotator":1,"id":"1595-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks only about a Global expo in 1996, not in 2002.","self_corrected":false},{"annotator":2,"id":"1595-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know how often is a Global expo hold.","self_corrected":false},{"annotator":3,"id":"1595-neutral-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Context and statement irrelevant to each other","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"474","context":"Monday's Question (No.","statement":"There was a question on 
Tuesday.","entailment":[],"neutral":[{"annotator":0,"id":"474-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions the question on Monday, we don't know ifthere was a question on Tuesday as well.","self_corrected":false},{"annotator":1,"id":"474-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks only about a question on Monday, not Tuesday.","self_corrected":false},{"annotator":2,"id":"474-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question can on Wednesday, Thursday ...","self_corrected":false},{"annotator":3,"id":"474-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"163","context":"The students' reaction was swift and contentious, as if their feelings had been hurt.","statement":"The students reacted with horror.","entailment":[],"neutral":[{"annotator":0,"id":"163-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what kind of reaction the students had.","self_corrected":false},{"annotator":1,"id":"163-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that the students' feelings might have been hurt but it doesn't specify whether they also experienced horror.","self_corrected":false},{"annotator":2,"id":"163-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The students could reacted with anger or disappointment.","self_corrected":false},{"annotator":3,"id":"163-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known what emotion was reacted with, could also be sadness or anger","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1401","context":"So is the salt, drying in the huge, square pans at Las Salinas in the south.","statement":"Pepper is made wet in Las 
Salinas.","entailment":[],"neutral":[{"annotator":0,"id":"1401-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the salt, not the pepper.","self_corrected":false},{"annotator":1,"id":"1401-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about salt. Pepper is not mentioned at all.","self_corrected":false},{"annotator":2,"id":"1401-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Pepper is not mentioned in context.","self_corrected":false},{"annotator":3,"id":"1401-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about where the pepper is made wet","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1362","context":"Daniel took it upon himself to explain a few things.","statement":"Daniel explained what was happening.","entailment":[{"annotator":0,"id":"1362-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"This statement could be true, because Daniel explained a few things, which could include what was happening.","self_corrected":true},{"annotator":2,"id":"1362-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Daniel explained actively.","self_corrected":false},{"annotator":3,"id":"1362-entailment-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True","self_corrected":false}],"neutral":[{"annotator":0,"id":"1362-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement could be undetermined because the context does not make it clear what things Daniel explained.","self_corrected":false},{"annotator":1,"id":"1362-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not specify what Daniel explained. 
It could also be other things than what was happening.","self_corrected":true},{"annotator":2,"id":"1362-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Daniel felt responsible to explain, but maybe fail to explain in the end.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"291","context":"Classic Castilian restaurant.","statement":"The restaurant is based off a classic Castilian style.","entailment":[{"annotator":0,"id":"291-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"classic castilian restaurant\" means that the style of the restaurant is classic Castilian, so the statement is true.","self_corrected":false},{"annotator":1,"id":"291-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both context and statement clearly mention that the restaurant is classic Castilian.","self_corrected":false},{"annotator":2,"id":"291-entailment-3","judgments":[{"annotator":0,"makes_sense":true}],"label_correction":false,"reason":"Usually it is true.","self_corrected":true},{"annotator":3,"id":"291-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is a castilian restaurant, so it has castilian style.","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"9","context":"But I'll take up my stand somewhere near, and when he comes out of the building I'll drop a handkerchief or something, and off you go!\"","statement":"I want you to follow him, so watch for the signal that I give.","entailment":[],"neutral":[{"annotator":0,"id":"9-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker wants the other one watch for the signal, i.e., the handkerchief dropping, so this part is correct, but we don't know if the intention is for the other person to follow the person he\/she is observing.","self_corrected":false},{"annotator":1,"id":"9-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear from the context, whether the speaker really wants the person they're talking to to follow someone. 
It could also be that they want the person to leave.","self_corrected":false},{"annotator":2,"id":"9-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe \u201coff you go\u201d means something else, like detonating the rubbish bin etc.","self_corrected":false},{"annotator":3,"id":"9-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known if \"I\" want \"you\" to follow him from the context","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"688","context":"It is not a surprise, either, that Al Pacino chews the scenery in Devil's Advocate . And the idea that if the devil showed up on Earth he'd be running a New York corporate-law firm is also, to say the least, pre-chewed.","statement":"Nobody expects that the devil would take the form of a lawyer.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"688-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"pre-chewed\" implies that the idea of the devil running a law firm has been explored, so \"nobody expects\" in the statement is incorrect.","self_corrected":false},{"annotator":1,"id":"688-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that the idea of the devil being a lawyer is \"pre-chewed\" meaning that other movies already used this idea. 
So at least some people would expect the devil to take this form.","self_corrected":false},{"annotator":2,"id":"688-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\u201cpre-chewed\u201d means not fresh, so many people should have expected devil would take the form of a lawyer.","self_corrected":false},{"annotator":3,"id":"688-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"From the context we know that it is not a surprise, meaning people had expected that.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"578","context":"He jumped up, planting one hand on the charging horse, and came at the brute with the axe.","statement":"He swung at the brute with his sword.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"578-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He came at the brute with the axe, not the sword.","self_corrected":false},{"annotator":1,"id":"578-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement says that the person has one hand on the horse, so the other hand has to be holding the axe. 
Thus, he does not have any hand free to use a sword.","self_corrected":false},{"annotator":2,"id":"578-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not with \"sword\", but with \"axe\".","self_corrected":false},{"annotator":3,"id":"578-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, with his axe","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"406","context":"I can FEEL him.\"","statement":"I can sense his presence.","entailment":[{"annotator":0,"id":"406-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I think \"feel him\" is another way of saying \"sense his presence\".","self_corrected":false},{"annotator":1,"id":"406-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If a person can feel someone, then they most likely also sense their presence.","self_corrected":false},{"annotator":2,"id":"406-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Feel\" is synonym of sense. ","self_corrected":false},{"annotator":3,"id":"406-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"true, I can feel him, meaning I can sense his presence either physical or spiritually","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"999","context":"of course you could annex Cuba but they wouldn't like that a bit","statement":"Cubans would go up in arms if we tried to annex Cuba.","entailment":[{"annotator":0,"id":"999-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement could be true, because if Cubans wouldn't like that, it could be possible that they would go up in arms.","self_corrected":true}],"neutral":[{"annotator":1,"id":"999-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks only about the Cubans disliking an annexation of Cuba. 
That they would use armed resistance is not clear.","self_corrected":false},{"annotator":2,"id":"999-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know Cubans reaction if Cuba is annexed.","self_corrected":false},{"annotator":3,"id":"999-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"From context, it is only known that the Cubans would not be happy about this, but not known if they will arm themselves","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"480","context":"Another thing those early French and Dutch settlers agreed upon was that their island should be free of levies on any imported goods.","statement":"The French settlers did not mind income taxes at all.","entailment":[],"neutral":[{"annotator":1,"id":"480-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about levies on imported goods, it is not clear why this should impact income taxes.","self_corrected":false},{"annotator":2,"id":"480-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The French can take tax issues seriously, but still made the decision to reach the agreement.","self_corrected":false},{"annotator":3,"id":"480-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned about the income taxes","self_corrected":false}],"contradiction":[{"annotator":0,"id":"480-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is incorrect, because the French settlers did mind taxing on imported goods.","self_corrected":true},{"annotator":3,"id":"480-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The French only did not mind taxed on imported goods on their islands.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"540","context":"The air is warm.","statement":"The arid air permeates the surrounding 
land.","entailment":[],"neutral":[{"annotator":0,"id":"540-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Warm air is not necessarily associated with arid air.","self_corrected":false},{"annotator":1,"id":"540-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Air can be arid and cold at the same time.","self_corrected":false},{"annotator":2,"id":"540-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The air can be warm but mosit.","self_corrected":false},{"annotator":3,"id":"540-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelavant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1165","context":"It's thought he used the same architect who worked on the Taj Mahal.","statement":"In reality, he did not use the Taj Mahal's architect.","entailment":[],"neutral":[{"annotator":1,"id":"1165-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"People can be wrong about him using the same architect.","self_corrected":false},{"annotator":2,"id":"1165-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know in reality whether he used the Taj Mahal's architect.","self_corrected":false},{"annotator":3,"id":"1165-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reality is not mentioned in the context","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1165-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The context suggests that he might used the same architect, while the statement states that he did not.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"582","context":"News berates computer users for picking obvious, easily cracked passwords and chastises system administrators for ignoring basic security precautions.","statement":"Users and system administrators both do not prioritize 
security.","entailment":[{"annotator":2,"id":"582-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Users pick obvious, easily cracked passwords; administrators ignore basic security precautions.","self_corrected":false},{"annotator":3,"id":"582-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"true, users pick easy passwords and administrators irgnore basic security precautions, showing the low priority of security for them","self_corrected":false}],"neutral":[{"annotator":0,"id":"582-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context only mentions users' behaviour, we don't know if the system administrators prioritize security.","self_corrected":true},{"annotator":1,"id":"582-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The news can be wrong about the prevalence of this problem.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"734","context":"Companies that were foreign had to accept Indian financial participation and management.","statement":"Foreign companies had to take Indian money in order to operate their businesses.","entailment":[{"annotator":0,"id":"734-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Running businesses are the purpose of opening a company. So based on the context, companies have to satisfiy the prerequisites for accepting indian financial intervention in order to run businesses.","self_corrected":false},{"annotator":1,"id":"734-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"That the companies had to accept \"Indian financial participation\" means that they had to take Indian money.","self_corrected":false},{"annotator":3,"id":"734-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, if indian financial participation means investment from india","self_corrected":false}],"neutral":[{"annotator":2,"id":"734-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Financial participation and management can have other forms, e.g. 
audit and supervision ","self_corrected":false},{"annotator":3,"id":"734-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"financial participation could mean other possibilites other than investing money","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1379","context":"These days, newspaper writers are no longer allowed the kind of license he took.","statement":"Newspaper writers need to be more factual and careful these days.","entailment":[],"neutral":[{"annotator":0,"id":"1379-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify what kind of license he took. So we don't know whether it is associated with factual and carefull, as suggested in the statement.","self_corrected":false},{"annotator":1,"id":"1379-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear which freedoms the writer in the context took. It might not be about factuality and carefulness. ","self_corrected":false},{"annotator":2,"id":"1379-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"the kind of license\" can mean a facual and careful style, then the opposite should be not rigorous.","self_corrected":false},{"annotator":3,"id":"1379-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"2","context":"Sorry but that's how it is.","statement":"This is how things are and there are no apologies about it.","entailment":[{"annotator":1,"id":"2-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can be interpreted as being unapologetic.","self_corrected":false}],"neutral":[{"annotator":0,"id":"2-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know the \"sorry\" in the context is meant to be an apology or just to comfort someone, so the need for an apology is 
unknown.","self_corrected":false},{"annotator":1,"id":"2-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context can also be interpreted as being matter of factly but not unapologetic.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"2-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, it reads \"sorry\", and usually it means apology.","self_corrected":false},{"annotator":3,"id":"2-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are apologies in the context","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"861","context":"The cane plantations, increasingly in the hands of American tycoons, found a ready market in the US.","statement":"The US market was ready for the cane plantations, according to the economists.","entailment":[],"neutral":[{"annotator":0,"id":"861-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that there is a ready market in the US, but it's not clear who indicates this.","self_corrected":false},{"annotator":1,"id":"861-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about whether economists believed that the market was ready.","self_corrected":false},{"annotator":2,"id":"861-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know the resource of this ovservation that \"The cane plantations ... 
found a ready market in the US.\"","self_corrected":false},{"annotator":3,"id":"861-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned about the economists","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1559","context":"As legal scholar Randall Kennedy wrote in his book Race, Crime, and the Law , Even if race is only one of several factors behind a decision, tolerating it at all means tolerating it as potentially the decisive factor.","statement":"Race is one of several factors in some judicial decisions","entailment":[],"neutral":[{"annotator":0,"id":"1559-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only means decisions, not judicial decisions.","self_corrected":false},{"annotator":1,"id":"1559-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The conditional in the context does not assert whether race is one of several reasons or the only one in all judicial decisions.","self_corrected":false},{"annotator":2,"id":"1559-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether in fact race is one of several factors in some judicial decisions.","self_corrected":false},{"annotator":3,"id":"1559-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned in what process is the race a factor of","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"754","context":"After the recovery of Jerusalem in 1099, it took four hundred years of sieges and battles, treaties, betrayals, and yet more battles, before Christian kings and warlords succeeded in subduing the Moors.","statement":"The Moors were able to subdue the Christian kings after just a decade of war.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"754-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context refers to the success of the Christian kings and warlords, but the statement refers in the opposite 
direction","self_corrected":false},{"annotator":1,"id":"754-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Christian kings subdued the Moors, not the other way around.","self_corrected":false},{"annotator":2,"id":"754-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was Christian kings who subdued the Moors, not inverse.","self_corrected":false},{"annotator":3,"id":"754-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Moors were then one subdued; It took the Chrisitans four hundred years.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"165","context":"'Would you like some tea?'","statement":"DO you want a cup of tea?","entailment":[{"annotator":0,"id":"165-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context and statement have the same meaning, both refer to the question about the willingness to drink tea.","self_corrected":false},{"annotator":1,"id":"165-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In both context and statement, the speaker asks whether another person wants tea.","self_corrected":false},{"annotator":2,"id":"165-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the answer for \"Would you like some tea\" is yes, then the answer for \"Do you want a cup of tea\" should also be yes.","self_corrected":false},{"annotator":3,"id":"165-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1588","context":"In Mumbai, both Juhu and Chowpatty beaches are, for instance, definitely a bad idea, and though the Marina beaches in Chennai are cleaner, there may be sharks.","statement":"The beaches are very dirty in Mumbai.","entailment":[{"annotator":2,"id":"1588-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Mumbai's beaches are dirtier than 
beaches in Chennai, and described as \"definitely a bad idea.\" ","self_corrected":false},{"annotator":3,"id":"1588-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Marian beaches are compared to Juhu and Chowpatty cleaner, meaning the beaches in Mumbai beaches are dirty.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1588-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There could be other beaches than Juhu and Chowpatty in Mumbai which could be cleaner.","self_corrected":false},{"annotator":2,"id":"1588-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Mumbai's beaches are dirtier than beaches in Chennai, but it is not promised that they are very dirty objectively.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1588-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that two beaches in Mumbai are cleaner than others, but the statement suggests that all beaches in Mumbai are dirty.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"525","context":"Moreover, Las Vegas has recently started to show signs of maturity in its cultural status as well.","statement":"The culture of Las Vegas has a lot of room for improvement.","entailment":[],"neutral":[{"annotator":0,"id":"525-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that the culture of Las Vegas is improving, but the is no information about the room for improvement.","self_corrected":false},{"annotator":1,"id":"525-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"That there were signs of maturity indicates improvement. 
However, it does not say anything about whether things can improve further.","self_corrected":false},{"annotator":2,"id":"525-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether the culture has a lot of room to improve.","self_corrected":false},{"annotator":3,"id":"525-neutral-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is talking about the cultural status, whereas the statement the culture","self_corrected":false}],"contradiction":[{"annotator":2,"id":"525-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The culture started to show signs of maturity, then they must have reached some standard and doesnot have much to improve.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1132","context":"Think of it this When consumer confidence declines, it is as if, for some reason, the typical member of the co-op had become less willing to go out, more anxious to accumulate coupons for a rainy day.","statement":"Coupon collecting is no longer allowed in most US stores.","entailment":[],"neutral":[{"annotator":0,"id":"1132-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that some people tend to collect coupon, which implies that coupon collection is actually allowed. However, it doesn't include location information, so we don't know if it refers to the United States.","self_corrected":false},{"annotator":1,"id":"1132-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about whether coupon collection is legal or not in most US stores.","self_corrected":true},{"annotator":2,"id":"1132-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether the context is based on the U.S. society.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1132-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not to collect coupon is a choice of the consumers. 
It is not forbidden.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1328","context":"yes they would they just wouldn't be able to own the kind of automobiles that they think they deserve to own or the kind of homes that we think we deserve to own we might have to you know just be able to i think if we a generation went without debt then the next generation like if if our our generation my husband and i we're twenty eight if we lived our lives and didn't become you know indebted like you know our generation before us that um the budget would balance and that we became accustomed to living with what we could afford which we wouldn't be destitute i mean we wouldn't be living on the street by any means but just compared to how spoiled we are we would be in our own minds but i feel like the generation after us would oh man it it would be so good it would be so much better it wouldn't be perfect but then they could learn to live with what what they could afford to save to buy and if you want a nicer car than that well you save a little longer","statement":"I am glad our generation has no debt.","entailment":[],"neutral":[{"annotator":1,"id":"1328-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker in the context would be glad if their generation had no debt, but does not assert this.","self_corrected":false},{"annotator":2,"id":"1328-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know whether \"our generation\" has no debt.","self_corrected":false},{"annotator":3,"id":"1328-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We are indebted and therefore living on the street","self_corrected":true}],"contradiction":[{"annotator":0,"id":"1328-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context implies that our generation does have debt, so the statement is wrong.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"942","context":"The great attraction of the church is the splendid exterior, which is crowned by golden onion-shaped cupolas.","statement":"The outside of the church isn't much to look at, but the inside is intricately decorated.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"942-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that the exteroir 
of the church is great acctraction, but the statement implies the opposite.","self_corrected":false},{"annotator":1,"id":"942-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states that the exterior of the church is splendid.","self_corrected":false},{"annotator":2,"id":"942-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The outside of the church is attactive.","self_corrected":false},{"annotator":3,"id":"942-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Is is the exterior that is a great attraction.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1170","context":"Mack Lee, Body Servant of General Robert E. Lee Through the Civil War , published in 1918.","statement":"The book was first drafted in early 1915.","entailment":[],"neutral":[{"annotator":0,"id":"1170-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mention the year the book published, no when it was drafted.","self_corrected":false},{"annotator":1,"id":"1170-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions only the publication date, not the date of the first draft.","self_corrected":false},{"annotator":2,"id":"1170-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We just know the publishment of the book, but don't know about the draft.","self_corrected":false},{"annotator":3,"id":"1170-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the first draft only the first publication","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"394","context":"She had the pathetic aggression of a wife or mother--to Bunt there was no difference.","statement":"Bunt was raised motherless in an orphanage.","entailment":[],"neutral":[{"annotator":0,"id":"394-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is unclear where Bunt was raised based 
on the context.","self_corrected":false},{"annotator":1,"id":"394-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not mention anything about Bunt having or not having a mother or where they were raised.","self_corrected":false},{"annotator":2,"id":"394-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know where and how Hunt grew up.","self_corrected":false},{"annotator":3,"id":"394-neutral-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know what the relation \"she\" is to Bunt. She could also be his wife","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"719","context":"The volumes are available again but won't be returned to the stacks until the damp library itself gets renovated.","statement":"The volumes will be available to the public after renovation.","entailment":[],"neutral":[{"annotator":1,"id":"719-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether the library is a public or a private library.","self_corrected":false},{"annotator":2,"id":"719-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know whether after renovation the volumes will be available to the public or not after renovation..","self_corrected":false}],"contradiction":[{"annotator":0,"id":"719-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context mentions that the library needs to be renovated, not the volumes.","self_corrected":false},{"annotator":2,"id":"719-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The volumes are available before the renovation.","self_corrected":false},{"annotator":3,"id":"719-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is already availabe before the renovation of stacks are done.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"588","context":"You have to walk 
through it).","statement":"Walking is the best way to get through it.","entailment":[{"annotator":1,"id":"588-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"Statement is a clear paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"588-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"The best way\" is not mentioned in the context. It is unclear if walking is the best way.","self_corrected":false},{"annotator":2,"id":"588-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know what is the best way to get through it, maybe driving is better than walking.","self_corrected":false},{"annotator":3,"id":"588-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Walking is not meant as best way but the only way","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"272","context":"The management of the cafe has established the rules for the use of their facility.","statement":"The management of the cafe is strict about how they manage it.","entailment":[],"neutral":[{"annotator":0,"id":"272-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We only know that there is management of the cafe, but the context doesn't mention how strict it is.","self_corrected":false},{"annotator":1,"id":"272-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement talks about the management being strict about their management, whereas the context talks about rules for the use of the cafe.","self_corrected":false},{"annotator":2,"id":"272-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether the rules are strict or not.","self_corrected":false},{"annotator":3,"id":"272-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Established rules do not mean strict management if they are not followed. 
Or the rules could be not strict rules","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"681","context":"You've got the keys still, haven't you, Poirot? I asked, as we reached the door of the locked room. ","statement":"I had the keys in my pocket.","entailment":[],"neutral":[{"annotator":1,"id":"681-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear from the premise where the keys are.","self_corrected":false},{"annotator":2,"id":"681-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We can't make sure whether I have the key or not.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"681-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the speaker had the keys, then she\/he won't ask whether the other one had keys.","self_corrected":false},{"annotator":2,"id":"681-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If I have the keys, usually I will open the door, instead of asking others.","self_corrected":false},{"annotator":3,"id":"681-contradiction-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I\" think Poirot has the keys","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1319","context":"i cried when the horse got killed and when the wolf got killed","statement":"Animal killings make me want to cry.","entailment":[{"annotator":0,"id":"1319-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both horse and wolf are animals, the speaker cried when they got killed. 
So the statement is correct.","self_corrected":true},{"annotator":1,"id":"1319-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If killings of two different animals make the narrator cry, then they probably generally care about animal killings.","self_corrected":true},{"annotator":2,"id":"1319-entailment-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Some animal like horse and wolf killings make me cry.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1319-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be that the narrator had a personal relationship to the killed animals and does not care about animal killings in general.","self_corrected":false},{"annotator":2,"id":"1319-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether all kinds of animal killings will make me want to cry, maybe I don't want to cry for a rat killing.","self_corrected":false},{"annotator":3,"id":"1319-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Overgeneralization. I could be sad maybe only because I know the horse and wolf","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"749","context":"what does um is Robby Robin Williams does he have a funny part in the movie or is","statement":"How much went into making the movie?","entailment":[],"neutral":[{"annotator":0,"id":"749-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The content of the statement is independent of the context. 
The statement is just a question.","self_corrected":false},{"annotator":2,"id":"749-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context there is nothing about the cost.","self_corrected":false},{"annotator":3,"id":"749-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Not sure of \"how much\" of what went into the movie","self_corrected":true}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"204","context":"The remaining parts of the north, although enticing, are difficult to explore.","statement":"Inexperienced explorers should take care to avoid dangerous areas of the north.","entailment":[{"annotator":0,"id":"204-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The exploration of north parts is difficult based on the context, so it's true that inexperienced explorers should avoid those parts.","self_corrected":false},{"annotator":1,"id":"204-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Inexperienced explorers probably should be careful when exploring difficult to explore parts.","self_corrected":false},{"annotator":2,"id":"204-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Some parts of the north are diffcult to explore, so inexperienced explorers should take care.","self_corrected":false},{"annotator":3,"id":"204-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The remaining parts of the north are generally difficult to explore, meaning especially for the Inexperienced.","self_corrected":false}],"neutral":[{"annotator":2,"id":"204-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Inexperienced explorers should take care, but maybe they can also explore their with some guidance and help.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"174","context":"no i i i don't i it completely beyond me i went to my under graduate uh education","statement":"I can't remember, I did my undergraduate 
education.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"174-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that the speaker did undergraduate education, the statement is false if the speaker of both sentences is the same person.","self_corrected":false},{"annotator":2,"id":"174-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, it read \"I went to my under graduate uh education\", so \"I\" do remember it.","self_corrected":false}],"idk":[1,3],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"714","context":"How did you get it?\" A chair was overturned. ","statement":"\"How did you get your hands on this object?\"","entailment":[{"annotator":2,"id":"714-entailment-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\u201cget your hands on this object\u201d can be understood as get something","self_corrected":false},{"annotator":3,"id":"714-entailment-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[{"annotator":0,"id":"714-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"I am asking my self whether the question belongs to a kind of hypothesis\/statement. 
I can't make a conclusion based on the two questions in the provided context and statement.","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"418","context":"Auditors from another country engaged to conduct audits in their country should meet the professional qualifications to practice under that country's laws and regulations or other acceptable standards, such as those issued by the International Organization of Supreme Audit Institutions.","statement":"All auditors report to a globally managed governing body.","entailment":[],"neutral":[{"annotator":1,"id":"418-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about to whom the auditors report.","self_corrected":false},{"annotator":2,"id":"418-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether auditors dealing with domestic companies need to report to a globally managed governing body.","self_corrected":false},{"annotator":3,"id":"418-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context, it is not mentioned that they report to the globally managed governing body","self_corrected":false}],"contradiction":[{"annotator":0,"id":"418-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context implies that there are several possibilities for the standards that the auditors are expected meet, so the standard mentioned in the statement is one of the standards, not a mandatory one.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"469","context":"The idea that Clinton's approval represents something new and immoral in the country is historically shortsighted.","statement":"It's accurate to conclude that Clinton's approvals signify the start of a new form of immorality in the country.","entailment":[],"neutral":[{"annotator":3,"id":"469-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"the approval only signified that the somehting immoral is historically shortsighted but does not introduce it.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"469-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The contexts suggests that the conclusion about Clinton's approval is shortsighted, while the statement refers 
to the accuration of this conclusion, so it is incorrect.","self_corrected":false},{"annotator":1,"id":"469-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If an idea is historically shortsighted it is not accurate.","self_corrected":false},{"annotator":2,"id":"469-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"To the same idea, the context remarked it as \"historically shortsighted\", but the statement took it as \"accurate\".","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1349","context":"I had rejected it as absurd, nevertheless it persisted. ","statement":"I rejected it as absurd but it persisted out of protest.","entailment":[{"annotator":3,"id":"1349-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"paraphrases","self_corrected":true}],"neutral":[{"annotator":0,"id":"1349-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is unclear if it persisted out of protest or of other reasons.","self_corrected":false},{"annotator":1,"id":"1349-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about the reasons for the persistence.","self_corrected":false},{"annotator":2,"id":"1349-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know the reason for persisting, maybe my rejection was overmitted.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"732","context":"He dismounted and Ca'daan saw he was smaller than the rest.","statement":"He was shorter than the others.","entailment":[{"annotator":0,"id":"732-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I think the two conclusion \"smaller after dismounting\" in the context and \" shorter\" in the statement both refer to the height of him So the statement is true. 
","self_corrected":false},{"annotator":1,"id":"732-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of a part of the context.","self_corrected":false},{"annotator":2,"id":"732-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"smaller than the rest\" means \"shorter than the others\"","self_corrected":false},{"annotator":3,"id":"732-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Ca'daan saw he was smaller thant he rest","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"730","context":"And you are wrong in condemning it.","statement":"Everybody does it; it's normal.","entailment":[],"neutral":[{"annotator":0,"id":"730-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context refers to the wrongfulness of the condemnation,it is unclear what the normal situation is to which the statement refers.","self_corrected":false},{"annotator":1,"id":"730-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"You cannot infer that something is normal because it is wrong to condemn it.","self_corrected":false},{"annotator":2,"id":"730-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We can not say a thing is correct, juest because everybody does it.","self_corrected":false},{"annotator":3,"id":"730-neutral-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The reason of wrongfulness in condemning is not known","self_corrected":true}],"contradiction":[{"annotator":2,"id":"730-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"In reality, if everybody does a thing, the thing will become a costum, and will not be condemned.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1493","context":"San'doro didn't make it sound hypothetical, thought Jon.","statement":"San'doro's words were hollow, and Jon knew the truth of that 
immediately.","entailment":[],"neutral":[{"annotator":1,"id":"1493-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Jon only thought about how San'doro made it sound. This doesn't tell us anything about whether he believed the words.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1493-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"didn't sound hypothetical\" implies that San'doro's words were actually not hollow, so the statement is false.","self_corrected":false},{"annotator":2,"id":"1493-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Jon did not regard San'doro's words as hollow, instead, he regarded it as not hypothetical.","self_corrected":false},{"annotator":3,"id":"1493-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"san'doro's words sound factual.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"921","context":"On the west side of the square is Old King's House (built in 1762), which was the official residence of the British governor; it was here that the proclamation of emancipation was issued in 1838.","statement":"The Old King's House had an incident where the King was murdered inside of it.","entailment":[],"neutral":[{"annotator":0,"id":"921-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no information in the context about the murder inside of the Old King's House, so the statement could be true or false.","self_corrected":false},{"annotator":1,"id":"921-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't say anything about whether someone was killed in Old King's House.","self_corrected":false},{"annotator":2,"id":"921-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether a King was murdered inside of the Old King's House.","self_corrected":false},{"annotator":3,"id":"921-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the 
murder","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"391","context":"Several of its beaches are officially designated for nudism (known locally as naturisme) the most popular being Pointe Tarare and a functionary who is a Chevalier de la L??gion d'Honneur has been appointed to supervise all aspects of sunning in the buff.","statement":"They do not mind having nude people.","entailment":[{"annotator":0,"id":"391-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The beaches mentioned in the context accept people sunning naked, so we can conclude that they don't mind having nude people.","self_corrected":false},{"annotator":1,"id":"391-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Beaches officially designated for nudism do not mind having nude people.","self_corrected":false},{"annotator":2,"id":"391-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Some beaches are offically designated for nudism, which means nude people are allowed to be there.","self_corrected":false}],"neutral":[{"annotator":3,"id":"391-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Overgeneralization: they do not mind only in several of its beaches","self_corrected":false}],"contradiction":[{"annotator":2,"id":"391-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The nude people are supervised. 
If they don't mind, there should be no supervisors especially for such issues.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":3.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1274","context":"The village is Sainte-Marie, named by the explorer when he landed on 4 November 1493, attracted by the waterfalls and river he could see flowing down the green inland mountains.","statement":"The village is not named after the settling explorer.","entailment":[{"annotator":2,"id":"1274-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is named by the settling explorer, but named after.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1274-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"There is no information in the context about how the village is named, so the statement could be true or false.","self_corrected":false},{"annotator":1,"id":"1274-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The village was named by the explorer, it is not clear whether he named it after himself.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1274-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It is named after him","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"680","context":"Expectations that the ANC would oversee land reform--returning land seized during apartheid's forced migrations--and wealth redistribution have not been met.","statement":"The ANC would not be in charge of land reform.","entailment":[{"annotator":0,"id":"680-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The expectations have not been met, which means the the ANC didn't voersee land reform, so the statement is true.","self_corrected":false},{"annotator":1,"id":"680-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the ANC does oversee the land reform then it isn't in charge of it.","self_corrected":true},{"annotator":2,"id":"680-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The 
expectations have not been met.","self_corrected":false},{"annotator":3,"id":"680-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the expections that ANC oversees this is not met","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"470","context":"I found her leaning against the bannisters, deadly pale. ","statement":"She couldn't stand on her own so she leaned against the bannisters until I found her.","entailment":[],"neutral":[{"annotator":0,"id":"470-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason why she leaned against the bannisters in unclear, it may or may not have been because she couldn't stand on her own.","self_corrected":false},{"annotator":1,"id":"470-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"She could have leaned on the bannisters for other reasons than not being able to stand on her own.","self_corrected":false},{"annotator":2,"id":"470-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe she could stand on her own, but she would not.","self_corrected":false},{"annotator":3,"id":"470-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info: no info about if she standed there till I found her, or if she really couln't stand on her own","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"195","context":"The activities included in the Unified Agenda are, in general, those expected to have a regulatory action within the next 12 months, although agencies may include activities with an even longer time frame.","statement":"Some actions were implemented for being shorter than 12 months.","entailment":[{"annotator":0,"id":"195-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context mentions that some activities with an longer time frame than 12 months will be included, so it could be true that there are some activities are shorter that 12 months.","self_corrected":false},{"annotator":1,"id":"195-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context 
states that the actions should generally see action within 12 months, so at least some are implemented for being shorter than 12 months.","self_corrected":false}],"neutral":[{"annotator":2,"id":"195-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Some activities are maybe longer than 12 months.","self_corrected":false},{"annotator":3,"id":"195-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Miss interpretation: some actions were to be implemented in less than 12 months","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"782","context":"yeah yeah i i went i went off to school wanting to either be a high school algebra teacher or high school French teacher because my two favorite people in the in high school were my algebra teacher and French teacher and uh and i was going to do that until the end of our sophomore year when we wanted uh we came time to sign up for majors and i had taken chemistry for the first time that year and surprised myself i did well in it","statement":"You are required to sign up for a major freshman year.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"782-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker in the context mentions that he\/she sign up for major until the end of the sophomore year, not freshman year, so the statement is false, people don't need to sign up in their freshman year.","self_corrected":false},{"annotator":1,"id":"782-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that the time for signing up for majors is sophomore year, not freshman year.","self_corrected":false},{"annotator":2,"id":"782-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"At the end of our sophomore year, you are required to sign up for majors.","self_corrected":false},{"annotator":3,"id":"782-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, it happens in the sophomore year","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"267","context":"kind of kind of nothing i won't have anything to do with","statement":"I don't want anything to do with it, no doubts about 
it.","entailment":[{"annotator":1,"id":"267-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is a paraphrase of the statement.","self_corrected":true}],"neutral":[{"annotator":0,"id":"267-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker won't have anything to do with it could because she\/he don't want or can't.","self_corrected":false},{"annotator":2,"id":"267-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether \"I\" want anything to do with it, we just know it is none of \"my\" business.","self_corrected":false},{"annotator":3,"id":"267-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info: Whether I want or not is not known, it's only mentioned that I will NOT have anything to do with it","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"185","context":"The questions may need to be tailored to","statement":"There are some questions that may or may not need to be tailored to.","entailment":[{"annotator":0,"id":"185-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If some questions may need to be tailored to, then it is true that others may not need to.","self_corrected":false},{"annotator":1,"id":"185-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is a paraphrase of the statement.","self_corrected":false},{"annotator":2,"id":"185-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The both show the uncertainty, whether some questions need to be tailored to.","self_corrected":false},{"annotator":3,"id":"185-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"534","context":"life in prison then he's available for parole if it's if it's life and a day then he's not eligible for parole so what you know let's quit BSing with the 
system","statement":"The system is corrupt because he won't be able to get parole if it's life and a day.","entailment":[{"annotator":0,"id":"534-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Complaints were made about the system, as implied both in the context and statement. ","self_corrected":true}],"neutral":[{"annotator":1,"id":"534-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker thinks the system is very bad but doesn't say anything about corrupt.","self_corrected":false},{"annotator":2,"id":"534-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The system can be corrupt, but also can be ridiculous.","self_corrected":false},{"annotator":3,"id":"534-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info: there is not comment about whether or not the rules of this system are counted as corrupted.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"134","context":"A martini should be gin and vermouth and a twist.","statement":"A martini must be composed by gin and vermouth.","entailment":[{"annotator":1,"id":"134-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states that a martini should contain gin and vermouth.","self_corrected":false},{"annotator":2,"id":"134-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Gin and vermouth are necessary for a martini.","self_corrected":false},{"annotator":3,"id":"134-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"true. 
The ingredients of martini are gin and vermouth","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":0,"id":"134-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement doesn't mention twist, but it is mentioned in the context.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"252","context":"and uh really they're about it they've got a guy named Herb Williams that that i guess sort of was supposed to take the place of uh Tarpley but he uh he just doesn't have the offensive skills","statement":"Tarpley is a better offensive player that Herb Williams.","entailment":[{"annotator":0,"id":"252-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that Williams doesn't have the offensive skills, even though he will replace Tarpley. So we can conclude from the context that Tarpley's offensive skills is better than Williams'.","self_corrected":true},{"annotator":1,"id":"252-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Herb Williams couldn't replace Tarpley because of his lack of offensive skills. Thus, it is reasonable to assume that Tarpley is the better offensive player.","self_corrected":false},{"annotator":2,"id":"252-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Herb Williams doesn't have the offensive skills and falls to take the place of Tarpley. 
So Tarpley should be a better offensive player.","self_corrected":false},{"annotator":3,"id":"252-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Herb williams does not have the offensive skills like Tarpley do","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"996","context":"that's true i didn't think about that","statement":"You've changed my mind with a new perspective.","entailment":[{"annotator":1,"id":"996-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The context is a paraphrase of the statement.","self_corrected":true}],"neutral":[{"annotator":0,"id":"996-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The only thing mention in the context is the new perspective, we can't conclude whether the speaker changed mind.","self_corrected":false},{"annotator":2,"id":"996-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I\" can also keep my mind, although you offer a new perspective.","self_corrected":false},{"annotator":3,"id":"996-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No Info: no info about whether or not I have changed my mind","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1562","context":"do you really romance","statement":"Do you really have an affair?","entailment":[{"annotator":0,"id":"1562-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both are questions and are asking about the same thing, which is about having an affair.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1562-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"You can have other types of romance than an affair.","self_corrected":false},{"annotator":2,"id":"1562-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A romance can happen between two unmarried single 
person","self_corrected":false},{"annotator":3,"id":"1562-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"romance does not directly relate to affair.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"155","context":"Until all members of our society are afforded that access, this promise of our government will continue to be unfulfilled.","statement":"The government is flawed and unfulfilled.","entailment":[],"neutral":[{"annotator":2,"id":"155-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We don't know whether all members of the society are afforded that access.","self_corrected":false},{"annotator":3,"id":"155-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The promise of the government is unfulfilled. Can not conclude that the government is therefore flawed and unfulfilled.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"155-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The government will not always be unfulfilled. 
If all menbers are afforded that access, the government could fulfill the promise mentioned in the context.","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"331","context":"However, co-requesters cannot approve additional co-requesters or restrict the timing of the release of the product after it is issued.","statement":"They will restrict timing of the release of the product.","entailment":[],"neutral":[{"annotator":1,"id":"331-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Whether they can restrict the timing of the product at all is not clear beacuse we don't know whether it was already issued.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"331-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"They can't restrict the release time, as mentioned in the context.","self_corrected":true},{"annotator":2,"id":"331-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Co-requesters cannot restrict the timing of the release of the product.","self_corrected":false},{"annotator":3,"id":"331-contradiction-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"They can NOT restrict the timing of the release","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"696","context":"The long-sought, the mysterious, the elusive Jane Finn! 
","statement":"Jane Finn is as beautiful as she is mysterious.","entailment":[],"neutral":[{"annotator":1,"id":"696-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether Jane Finn is beautiful.","self_corrected":false},{"annotator":2,"id":"696-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether Jane Finn is beautiful or not.","self_corrected":false},{"annotator":3,"id":"696-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No Info: No Info about the beauty of Jane Finn","self_corrected":false}],"contradiction":[{"annotator":0,"id":"696-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The comment about Jane Finn stated both in the context and statement is mysterious. There is no mention of \"beautiful\" in the context.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"703","context":"the net cost of operations.","statement":"That's how it expensive it runs.","entailment":[{"annotator":0,"id":"703-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context and statement are both talking about the running cost.","self_corrected":false},{"annotator":2,"id":"703-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The cost can be described as \"how expensive\".","self_corrected":false}],"neutral":[{"annotator":3,"id":"703-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"irrelevant","self_corrected":true}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"799","context":"you know we keep a couple hundred dollars um if that much charged on those which isn't too bad it's just your normal","statement":"We have money on there, which isn't great","entailment":[{"annotator":2,"id":"799-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"Spending money on that is not great, because it is 
normal.","self_corrected":true}],"neutral":[{"annotator":0,"id":"799-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The \"not too bad\" comment in the context is about charging much, while the statement is about having money on there.","self_corrected":false}],"contradiction":[],"idk":[1,3],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"789","context":"The remaining parts of the north, although enticing, are difficult to explore.","statement":"The rest of the north presents a steep challenge.","entailment":[{"annotator":0,"id":"789-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both context and statement are saying that the rest of the north is hard to explore.","self_corrected":false},{"annotator":1,"id":"789-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the remaining parts of the north are difficult to explore, then they also present a steep challenge.","self_corrected":false},{"annotator":2,"id":"789-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Difficult to explore\" means \"a steep challenge\".","self_corrected":false},{"annotator":3,"id":"789-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"difficult to explore\" entails a chanllenge","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"886","context":"well Jerry do you have a favorite team","statement":"Jerry, do you follow any sports?","entailment":[{"annotator":0,"id":"886-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both context and statement are questions about sports. 
If someone has a favorite team, then he\/she must follow this sport.","self_corrected":false},{"annotator":2,"id":"886-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If Jerry has a favorite team, he\/she should follow this sport.","self_corrected":false}],"neutral":[{"annotator":1,"id":"886-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Jerry can follow sports without having a favorite team.","self_corrected":true},{"annotator":2,"id":"886-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Jerry can be a fake fan, for example he\/she support a local team, but even don't know the rule of the sport.","self_corrected":false},{"annotator":3,"id":"886-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"388","context":"Earlier this week, the Pakistani paper Dawn ran an editorial about reports that Pakistani poppy growers are planning to recultivate opium on a bigger scale because they haven't received promised compensation for switching to other crops.","statement":"It is illegal to grow opium in Pakistan.","entailment":[],"neutral":[{"annotator":0,"id":"388-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether growing opium in Pakistan is illegal or not.","self_corrected":false},{"annotator":1,"id":"388-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't say anything about the legality of the cultivation of opium.","self_corrected":false},{"annotator":2,"id":"388-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It didn't mention whether it is legal to plant opium in Pakistan.","self_corrected":false},{"annotator":3,"id":"388-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The legal status of growing opium is not mentioned in the 
context","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1013","context":"In keeping with other early Buddhist tenets, there is no figurative representation of Buddha here, However, there is a large gilded statue from a later period inside, and behind the temple are the spreading branches and trunks of the sacred Bodhi Tree, which is said to have grown from a sapling of the first one that stood here 2,500 years ago.","statement":"There is no statue of Buddha located there.","entailment":[{"annotator":0,"id":"1013-entailment-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No figurative representation of Buddha indicates that there is no satues of Buddha.","self_corrected":true},{"annotator":1,"id":"1013-entailment-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If there is no figurative representation of Buddha then there can be no statue of Buddha.","self_corrected":false}],"neutral":[{"annotator":2,"id":"1013-neutral-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether the \"large gilded statue\" is a statue of Buddha or something else.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1013-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"However\" means although the early Buddhist tenet forbade the figurative representation of Buddha, later it was somehow disobeyed or changed, and there is a large gilded statue here.","self_corrected":false},{"annotator":3,"id":"1013-contradiction-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is a large gilded statue from a later period","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"883","context":"right right they left a woman and a child or the cat the sheep yeah","statement":"They were merciful in this regard, only taking the men as slaves.","entailment":[{"annotator":0,"id":"883-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement could be true because only the men are not mentioned in the context. 
They may have been merciful if they only took the men but left the women and children.","self_corrected":true}],"neutral":[{"annotator":0,"id":"883-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention men.","self_corrected":false},{"annotator":1,"id":"883-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They left a woman or a child. They could still have taken other women or children.","self_corrected":false},{"annotator":2,"id":"883-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"What did they take is not sure, maybe they didn't take anybody, just took some fortune away.","self_corrected":false},{"annotator":3,"id":"883-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about taking the men as slaves","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1185","context":"The rustic Bras-David picnic area, for example, is set alongside a burbling stream.","statement":"The stream is always burbling.","entailment":[{"annotator":0,"id":"1185-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both context and statement mention the burbling stream.","self_corrected":true},{"annotator":2,"id":"1185-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Burbling\" is how the stream is described in the context.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1185-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The stream could also be burbling only sometimes or most of the time.","self_corrected":false},{"annotator":2,"id":"1185-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It can't be promised that it is \"always\" burbling, maybe sometimes the rainfall will influence the volume of the stream.","self_corrected":false},{"annotator":3,"id":"1185-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The area is set alongside a burbling stream. 
No info about if all stream is burbling","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1144","context":"The third row of Exhibit 17 shows the Krewski, et al. ","statement":"Exhibit 17 has many rows.","entailment":[{"annotator":0,"id":"1144-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that Exhibit 17 has at least three rows. So there are indeed many rows.","self_corrected":false},{"annotator":2,"id":"1144-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"As there is \"the third row\", we can suppose that there are more than one row.","self_corrected":false},{"annotator":3,"id":"1144-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is the third row of Exhibit 17, meaning it has at least first and second row","self_corrected":false}],"neutral":[{"annotator":1,"id":"1144-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We only know that it has at least three rows, which is not many.","self_corrected":false},{"annotator":2,"id":"1144-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Many\" can be quite a large number, but it is not refered in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"860","context":"i think we have too thank you very much you too bye-bye","statement":"I don't think we can thank you enough for your help.","entailment":[{"annotator":2,"id":"860-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both sentences express strong gratitude.","self_corrected":false}],"neutral":[{"annotator":0,"id":"860-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention why they are thanking you, it may or may not because of the help.","self_corrected":false},{"annotator":2,"id":"860-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Maybe \"I\" think oral thanks is 
enough.","self_corrected":false},{"annotator":3,"id":"860-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"exaggeration","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"433","context":"have that well and it doesn't seem like very many people uh are really i mean there's a lot of people that are on death row but there's not very many people that actually um do get killed","statement":"Most people on death row end up living out their lives awaiting execution.","entailment":[{"annotator":0,"id":"433-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not very many people on death row get killed indicates that there are many people waiting on the death row, but they will not be executed.","self_corrected":true},{"annotator":3,"id":"433-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"there are many people on death row, but few of them actually get killed","self_corrected":false}],"neutral":[{"annotator":1,"id":"433-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The people in death row that are not killed could also be pardoned. 
So they would not await execution.","self_corrected":false},{"annotator":2,"id":"433-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Some people's execution can be cancelled and they will suffer from life imprisonment and die in prison.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1090","context":"we were lucky in that in one respect in that after she had her stroke she wasn't really you know really much aware of what was going on","statement":"She had a very serious stroke.","entailment":[{"annotator":1,"id":"1090-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The stroke left her unaware of her surroundings, so it has to have been serious.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1090-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the seriousness of the stroke.","self_corrected":false},{"annotator":2,"id":"1090-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It can be a very serious stroke, but also can be a mild stroke.","self_corrected":false},{"annotator":3,"id":"1090-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the seriousness of her stroke; She could a serious or not so serious stroke","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"7","context":"He hadn't seen even pictures of such things since the few silent movies run in some of the little art theaters.","statement":"He had recently seen pictures depicting those things.","entailment":[],"neutral":[{"annotator":1,"id":"7-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He could have visited the little art theaters recently or not. 
It is not clear.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"7-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is clear from the context that he hadn't seen pictures of such things, while the statement suggests that he had recently seen them.","self_corrected":false},{"annotator":2,"id":"7-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, \"he hadn't seen pictures of such things\" since the silent movies run in some art theaters.","self_corrected":false},{"annotator":3,"id":"7-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He hadn't seen pictures of those things","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"296","context":"it depends a lot of uh a lot of things were thought that uh as you know the farmers thought okay we got chemicals we're putting chemicals on the field well the ground will naturally filter out the","statement":"The farming chemicals are filtered by the ground.","entailment":[{"annotator":3,"id":"296-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The farmers think the groud will naturally filter out the chemicals","self_corrected":true}],"neutral":[{"annotator":0,"id":"296-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the thought of the farmers. It is not clear whether this thought is the truth.","self_corrected":false},{"annotator":1,"id":"296-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker talks about the farmers thinking that the ground will filter out the chemicals. 
But they don't say that they themselves believe it.","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"633","context":"oh really yeah so he he's uh he's probably going to be going to jail and and the problem with him is he's on a guaranteed salary like for three years so whether he plays or not they've got to pay him ten million dollars so if they","statement":"He is so hardworking and has helped the team achieve so much, I don't see anything wrong with paying him a million dollar salary.","entailment":[],"neutral":[{"annotator":0,"id":"633-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can't infer from the context whether he is hardworking or not.","self_corrected":false},{"annotator":2,"id":"633-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His personality and characters are not mentioned in the context, maybe he is not hardworking.","self_corrected":false},{"annotator":3,"id":"633-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about where he is hardworking or not or about my subject feelings on him got paid a million dollar salary","self_corrected":false}],"contradiction":[{"annotator":1,"id":"633-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker clearly says that they believe it's a problem that the person has a guaranteed million dollar salary.","self_corrected":false},{"annotator":2,"id":"633-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He will probably go to jail, and \"I\" worry about the ten million dollars, but although he can not work in jail, the money still need to be paid.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"963","context":"It was made up to look as much like an old-fashioned steam train as possible.","statement":"It was built in the modern era to look like something built in the past.","entailment":[{"annotator":0,"id":"963-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions the building of an old-fashioned train, the word old-fashioned would only be used in the modern era. So the statement is true. 
","self_corrected":false},{"annotator":1,"id":"963-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was made to look like a steam train which is something from the past.","self_corrected":false},{"annotator":2,"id":"963-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was made up to look as an \"old-fashioned\" steam train, if it is made in the past, it should be described as \"fashion\" instead of \"old-fashioned\". So it was built in the modern era but to look like something old.","self_corrected":false}],"neutral":[{"annotator":3,"id":"963-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about when it was build. It could be built in 20th century to look like something built in 19th century","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"946","context":"Then he sobered.","statement":"He was drunk.","entailment":[{"annotator":0,"id":"946-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"If he was not sober due to alcohol, then the statement is true.","self_corrected":false},{"annotator":1,"id":"946-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"For a person to sober, they have to be drunk before.","self_corrected":false}],"neutral":[{"annotator":0,"id":"946-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He could also be not sober because of the drugs. 
","self_corrected":false},{"annotator":2,"id":"946-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He can be faint because of hunger or desease.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"946-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"sobered means becoming not drunk","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"1249","context":"During his disastrous campaign in Russia, he found time in Moscow to draw up a new statute for the Com??die-Francaise (the national theater), which had been dissolved during the Revolution.","statement":"Russia has been successfully invaded hundreds of times.","entailment":[],"neutral":[{"annotator":0,"id":"1249-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention how many times Russia has been invaded.","self_corrected":false},{"annotator":1,"id":"1249-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no information about how many times Russia was invaded.","self_corrected":false},{"annotator":2,"id":"1249-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Nothing shows how many times has Russia been invaded.","self_corrected":false},{"annotator":3,"id":"1249-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about how many times Russia has been invaded","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"747","context":"News ' cover says the proliferation of small computer devices and the ascendance of Web-based applications are eroding Microsoft's dominance.","statement":"Microsoft is a more profitable company than Apple.","entailment":[],"neutral":[{"annotator":0,"id":"747-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the profits of Microsoft and 
Apple.","self_corrected":false},{"annotator":1,"id":"747-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about Apple.","self_corrected":false},{"annotator":2,"id":"747-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Apple is not mentioned in the context, so we can not compare which company is more profitable.","self_corrected":false},{"annotator":3,"id":"747-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the profit-comparision between Microsoft and Apple","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1139","context":"There's a lot of villas all the way along, but by degrees they seemed to get more and more thinned out, and in the end we got to one that seemed the last of the bunch.","statement":"There were only a few villas the whole way along, until we reached a small village that seemed to be the end.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1139-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly suggests that there are many villas all the way along.","self_corrected":false},{"annotator":1,"id":"1139-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the end they reached a single villa, not a small village.","self_corrected":false},{"annotator":2,"id":"1139-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are a lot of villas the whole way along.","self_corrected":false},{"annotator":3,"id":"1139-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is a lot of villas all the way along.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1415","context":"The park was established in 1935 and was given Corbett's name after India became independent.","statement":"The park changed names due to the 
independence.","entailment":[{"annotator":1,"id":"1415-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The park probably received a name in 1935, so the new name would be a change.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1415-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The park was renamed after the independence, but the reason for the name change is unclear.","self_corrected":false},{"annotator":2,"id":"1415-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The park changed names after the independence, but independence can not be the reason of changing, it can be a coincidence.","self_corrected":false},{"annotator":3,"id":"1415-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether the park had a name already before the independence.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"478","context":"Krugman's column will henceforth be known as The Dismal Science, a phrase too famous to be ownable by anyone, except possibly British essayist Thomas Carlyle (1795-1881), who coined it.","statement":"Krugman writes novels.","entailment":[],"neutral":[{"annotator":1,"id":"478-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's clear that Krugman writes a column, but not clear whether they write novels.","self_corrected":false},{"annotator":2,"id":"478-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Krugman has a column known as \"The Dismal Science\", it can be a novel column, but also can be others like essay column.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"478-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Krugman is a columnist, he doesn't write novels.","self_corrected":false},{"annotator":3,"id":"478-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, in the context is his column, which appears often in 
newspaper","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"700","context":"Even if the entire unified surplus were saved, GDP per capita would fall somewhat short of the U.S. historical average of doubling every 35 years.","statement":"Even if the entire unified surplus were lost, GDP per capita would fall somewhat short of the U.S. historical average of doubling every 35 years.","entailment":[],"neutral":[{"annotator":1,"id":"700-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear what would happen to the GDP if the surplus would be lost instead of saved.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"700-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context state if the surplus were saved","self_corrected":false}],"idk":[0,2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"708","context":"and my and my part-time work you know it's not our the restaurant our favorite restaurant in the town of Salisbury where actually we live you know where my where i'll return to my job or whatever we can normally eat out for um under fourteen dollars","statement":"My first part time job was in a restaurant in Salisbury where you could eat out for under $14.","entailment":[],"neutral":[{"annotator":0,"id":"708-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned in the context whether the speaker's first part time job was in a restaurant in Salisbury.","self_corrected":false},{"annotator":2,"id":"708-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"1. The part-time work in Salisbury can not be the first job; 2. 
The part-time job can not in a restaurant.","self_corrected":false},{"annotator":3,"id":"708-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about if it was my first part time job","self_corrected":false}],"contradiction":[{"annotator":3,"id":"708-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"My part time job is not in a restaurant","self_corrected":true}],"idk":[1],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1507","context":"do you think most states have that or","statement":"I think most states have that.","entailment":[],"neutral":[{"annotator":0,"id":"1507-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is an answer to the question in the context. It may be true or false.","self_corrected":false},{"annotator":1,"id":"1507-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker poses a question and doesn't assert that most states have that.","self_corrected":false},{"annotator":3,"id":"1507-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what I think in the context","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"964","context":"There should be someone here who knew more of what was going on in this world than he did now.","statement":"He knew things, but hoped someone else knew more. 
","entailment":[{"annotator":3,"id":"964-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"someone is hoped to be here to know more of what was going on than he did","self_corrected":true}],"neutral":[{"annotator":0,"id":"964-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only suggests that someone else knew more than he did, it is not clear whether he hoped so.","self_corrected":false},{"annotator":1,"id":"964-neutral-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether he hoped or only epected that there was someone who knew more.","self_corrected":false},{"annotator":2,"id":"964-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He supposed that someone else knew more, but it can not reflect whether he hoped so or not, maybe he hoped that he knew the most in the world.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1135","context":"She admits to Dorcas, 'I don't know what to do; scandal between husband and wife is a dreadful thing.' At 4 o'clock she has been angry, but completely mistress of herself. ","statement":"She had remained in control despite her anger.","entailment":[{"annotator":0,"id":"1135-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It is clear from the context that she got control of herself.","self_corrected":false},{"annotator":1,"id":"1135-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"Mistress of herself\" means that the she was in control of herself.","self_corrected":false},{"annotator":2,"id":"1135-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"She has been angry, but completely mistress of herself. 
\"","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1470","context":"A small page-boy was waiting outside her own door when she returned to it.","statement":"When she came back to her door she found something waiting.","entailment":[{"annotator":1,"id":"1470-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The someone waiting was the small page-boy.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1470-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that someone was waiting, we don't know if something was waiting as well.","self_corrected":false},{"annotator":2,"id":"1470-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe beside the page-boy, there are something else waiting.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1470-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not correct to say \"a small page-boy\" as \"something\".","self_corrected":false},{"annotator":3,"id":"1470-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"it was rather someone waiting","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","contradiction"],"error_labels":["entailment"],"has_ambiguity":true} -{"id":"467","context":"Then, all the time, it was in the spill vase in Mrs. Inglethorp's bedroom, under our very noses? I cried. 
","statement":"You mean we were so near it constantly?","entailment":[{"annotator":1,"id":"467-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Under our very noses\" means that it was very near to us.","self_corrected":false},{"annotator":3,"id":"467-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"under our very noses\" means in our very nearby surrendings","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1175","context":"He's chosen Meg Ryan.","statement":"A possible selection would be Meg Ryan or Jon Doe.","entailment":[{"annotator":0,"id":"1175-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The selection is Meg Ryan. So Meg Ryan or Jon Doe is true.","self_corrected":false},{"annotator":2,"id":"1175-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is possible, because Meg Ryan is one of the two candidates.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1175-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear from which pool he chose Meg Ryan.","self_corrected":false},{"annotator":3,"id":"1175-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the other choice of person","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"666","context":"In manual systems, attestations, verifications, and approvals are usually shown by a signature or initial of an individual on a hard copy document.","statement":"The only things that signatures in manual systems show are attestations, verifications, or approvals.","entailment":[],"neutral":[{"annotator":1,"id":"666-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Signatures could also show other things in addition to the mentioned 
ones.","self_corrected":false},{"annotator":2,"id":"666-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sinatures in manual systems can be used for more purposes except from attestations, verifications, and approvals.","self_corrected":false},{"annotator":3,"id":"666-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info that signatures only shows attestations, verifications and approvals. Signatures could show more than that","self_corrected":false}],"contradiction":[{"annotator":0,"id":"666-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Attestations, verifications and approvals, all three of these things, not just one of these things, are showed by a signature.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"606","context":"This doesn't look good.","statement":"This looks really bad.","entailment":[{"annotator":3,"id":"606-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"not look good\" implies \"bad\"","self_corrected":false}],"neutral":[{"annotator":0,"id":"606-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that it doesn't look good, which may be normal or bad.","self_corrected":false},{"annotator":1,"id":"606-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not good is not equivalent to really bad.","self_corrected":false},{"annotator":2,"id":"606-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"This doesn't look good, but it can look just a little bit bad, or really bad.","self_corrected":false},{"annotator":3,"id":"606-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"possible exaggeration: \"not looking good\" might be not as serious as \"really bad\"","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1589","context":"The word itself, tapa, is translated as lid and derives from the old custom of offering a bite of food along 
with a drink, the food being served on a saucer sitting on top of the glass like a lid.","statement":"Tapas are large portions and are a very filling meal.","entailment":[],"neutral":[{"annotator":0,"id":"1589-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the protion size of tapas.","self_corrected":true},{"annotator":1,"id":"1589-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The meaning of the word tapa could have radically changed and now signify large portions.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1589-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Tapas only offer \"a bite of food\", so it is not \"very filling\".","self_corrected":false},{"annotator":3,"id":"1589-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Tapas are small portions","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"145","context":"The standard technology assumptions of scenario A were used by EIA in the development of the AEO2001 reference case projections.","statement":"EIA used the standard technology assumptions to eliminate the AEO2001 reference case projections.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"145-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about developing AEO2001 reference case projections by using the assumptions, while the statement talks about elimination.","self_corrected":false},{"annotator":1,"id":"145-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The assumptions were used to develop the projections, not eliminate them.","self_corrected":false},{"annotator":2,"id":"145-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"EIA used the standard technology assumptions to develop the AEO2001 reference case projections, not to \"eliminate\" them.","self_corrected":false},{"annotator":3,"id":"145-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was used during the development of the 
AEO2021","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1532","context":"One wag, J., wrote in to ask, Is there a difference between pests and airlines?","statement":"J. thinks there is no difference between pests and airlines.","entailment":[{"annotator":0,"id":"1532-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question from J. could be a rhetorical question to which the speaker already has a standard answer, which is that there is no difference.","self_corrected":false},{"annotator":1,"id":"1532-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Literally taken the question is so absurd that it is most likely a rhetorical question implying that there really is no difference.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1532-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question from J. could be a simple question which needs to be answered.","self_corrected":false},{"annotator":3,"id":"1532-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context J only asked a questions. 
It is unknown about his opinion","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1220","context":"We did not study the reasons for these deviations specifically, but they likely result from the context in which federal CIOs operate.","statement":"The Context in which federal CIOs operate is no different from other CIOs.","entailment":[],"neutral":[{"annotator":0,"id":"1220-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context refers only to a hypothetical, we don't know whether it is the truth that the context in which federal CIOs operate is different.","self_corrected":false},{"annotator":1,"id":"1220-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Other CIOs are not mentioned in the context.","self_corrected":false},{"annotator":2,"id":"1220-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Federal CIOs can be semilar with other CIOs, also can be different from others.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1220-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The contexts of different CIOs potentially lead to these deviations","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1019","context":"it it like strange that it you're right in the middle of the mountains and it's so brown and dry but boy you just didn't feel","statement":"you are in the right part of the mountains.","entailment":[],"neutral":[{"annotator":0,"id":"1019-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether someone is in the right part or not.","self_corrected":false},{"annotator":1,"id":"1019-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether right in the middle of the mountains is also the right part of the mountains.","self_corrected":false},{"annotator":3,"id":"1019-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Not clear 
context","self_corrected":true}],"contradiction":[{"annotator":2,"id":"1019-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"right\" in context means exactly, not the side of the mounstains, and your location is in the middle of the mountains.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"641","context":"California is high","statement":"California is hyped up!","entailment":[{"annotator":2,"id":"641-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"high\" is one state of \"hyped up\". ","self_corrected":false},{"annotator":3,"id":"641-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the \"high\" means in the context of spirit, that Californa is a hyped city","self_corrected":false}],"neutral":[{"annotator":1,"id":"641-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"High is not the same as hyped up but also does not exclude the possibility.","self_corrected":false},{"annotator":3,"id":"641-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the \"high\" in the context mean that Californa is geographically high. 
Then it does not entail the statement","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"527","context":"The Chinese calendar was used to calculate the year of Japan's foundation by counting back the 1,260 years of the Chinese cosmological cycle.","statement":"The calculation of Japan's year of foundation was very exact.","entailment":[{"annotator":3,"id":"527-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"yes, because it was counted back 1,260 years of the Chinese cosmological cycle","self_corrected":false}],"neutral":[{"annotator":0,"id":"527-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests how the Japan' year was calculated, but doesn't mention whether this calculation was exact or not.","self_corrected":false},{"annotator":1,"id":"527-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether this calculation is exact.","self_corrected":false},{"annotator":2,"id":"527-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context it just states how Japan's year of foundation is calculated, but it can not prove the method is exact or not.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1277","context":"The tree-lined avenue extends less than three blocks to the sea.","statement":"The sea isn't even three blocks away.","entailment":[{"annotator":0,"id":"1277-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and the statement talk about the distance to the sea is lee than three blocks.","self_corrected":false},{"annotator":1,"id":"1277-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the avenue reaches the sea after less then three blocks, it cannot be further away than three blocks.","self_corrected":false},{"annotator":2,"id":"1277-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The avenue is less than three blocks to the 
sea.","self_corrected":false},{"annotator":3,"id":"1277-entailment-4","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the statement means that the sea is less than three blocks away","self_corrected":true}],"neutral":[{"annotator":2,"id":"1277-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not given where is the location of the narrator.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1277-contradiction-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"If the statement means that the sea is more than three blocks away","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"450","context":"Also, other sorbent-based approaches in development may prove in time to be preferable to ACI, making the use of ACI only a conservative assumption.","statement":"Hydrogen-based approaches in development may be preferable to ACl.","entailment":[],"neutral":[{"annotator":1,"id":"450-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions sorbent-based approaches, not hydrogen-based ones. 
But this doesn't rule out hydrogen-based approaches.","self_corrected":false},{"annotator":2,"id":"450-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context hydrogen-based approaches are not discussed.","self_corrected":false},{"annotator":3,"id":"450-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context mentions sorbent-based approaches, whereas in the statement it is hydrogen-based approaches, which is not mentioned in the context","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"685","context":"I was to watch for an advertisement in the Times.","statement":"I looked for an ad in my mailbox.","entailment":[],"neutral":[{"annotator":0,"id":"685-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker indeed looked for an ad, but the context doesn't mention where the speaker looked for.","self_corrected":true},{"annotator":1,"id":"685-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context talks about an obligation, but it is not clear whether the speaker then acts accordingly.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"685-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I searched \"the Times\" not \"my mailbox\" for an ad.","self_corrected":false},{"annotator":3,"id":"685-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Times is a newpaper","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"164","context":"Text Box 2.1: Gross Domestic Product and Gross National Product 48Text Box 4.1: How do the NIPA and federal unified budget concepts of","statement":"This text displays how GDP and GNP is calculated.","entailment":[],"neutral":[{"annotator":1,"id":"164-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what the text 
displays.","self_corrected":false},{"annotator":2,"id":"164-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context it only offer a theme of GDP and GNP, but it can be about every aspect of the concepts, like the growth or the depression, and the calculation methods.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"164-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't show the calculation of GDP and DNP.","self_corrected":false},{"annotator":3,"id":"164-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No it doesn't. It only shows the what GDP and GNP stand for","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1042","context":"Don't take it to heart, lad, he said kindly.","statement":"He was trying to console the lad.","entailment":[{"annotator":0,"id":"1042-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is indeed about the consolation.","self_corrected":false},{"annotator":1,"id":"1042-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Don't take it to heart\" is an attempt of consolation.","self_corrected":false},{"annotator":2,"id":"1042-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The words and the attitude of him can express his attempt to console the lad.","self_corrected":false},{"annotator":3,"id":"1042-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"don't take it to heart\" means not to overthink something.. 
So the lad should not overthink something that might bother him","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1072","context":"It cannot be outlawed.","statement":"It has to be made illegal.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1072-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It cannot be outlawed means that it is legal.","self_corrected":false},{"annotator":3,"id":"1072-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"cannot be outlawed\" means \"cannot be made illegal\". So it has to stay legal","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1268","context":"It started with The Wild Bunch : We sexualized violence, we made it beautiful.","statement":"Violence is now look at in the positive due to The Wild Bunch.","entailment":[{"annotator":0,"id":"1268-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Making violence beautiful is is a positive way of looking at violence.","self_corrected":false},{"annotator":2,"id":"1268-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"People made violence beautiful is a way to look at it positively.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1268-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether people have a positive opinion about violence, only because The Wild Bunch made it look beautiful.","self_corrected":false},{"annotator":3,"id":"1268-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The current state of how violence is looked at is unknown; We only know it started to be looked at in a positive view","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"501","context":"You see, he said sadly, \"you have no instincts.\"","statement":"He said that I had no 
willpower.","entailment":[],"neutral":[{"annotator":0,"id":"501-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the comments on willpower.","self_corrected":false},{"annotator":3,"id":"501-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Instincts do not totally relate with willpower.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"501-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He said \"no instincts\", not \"no willpower\".","self_corrected":false},{"annotator":2,"id":"501-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Instincts\" are not the same as \"willpower\".","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1036","context":"ooh it's kind of tough to think of some of the others although i do watch some of some of those frivolous things uh like on Thursday nights at nine o'clock when i get home from aerobics i will watch uh Knots Landing","statement":"I only watch frivolous things on Thursday nights.","entailment":[],"neutral":[{"annotator":0,"id":"1036-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that the speaker watch frivolous things on Thursdays, but doesn't mention if he\/she watch them on other days as well.","self_corrected":false},{"annotator":1,"id":"1036-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They could also watch frivolous things at other times.","self_corrected":false},{"annotator":2,"id":"1036-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I do watch frivolous things on Thursday nights, but maybe I also watch at other time.","self_corrected":false},{"annotator":3,"id":"1036-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Thursday is mentioned as example. 
There is no info about what he does on other weekday nights","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1125","context":"Kutchins and Kirk cite a particularly amusing example of such Robert Spitzer, the man in charge of DSM-III , was sitting down with a committee that included his wife, in the process of composing a criteria-set for Masochistic Personality Disorder--a disease that was suggested for, but never made it into, the DSM-III-R (a revised edition).","statement":"DSM-III-R is a book of personality disorders.","entailment":[{"annotator":2,"id":"1125-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Masochistic Personality Disorder was suggested for DSM-III-R, so the latter should be about personality disorders.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1125-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that Masochistic Personality Disorder is not in the book, it doesn't mention the content of the book.","self_corrected":false},{"annotator":1,"id":"1125-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The book could also contain other things than only personality disorders.","self_corrected":false},{"annotator":3,"id":"1125-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We only know that DSM-III is about personality disorders. But DSM-III-R could just be a medical book for all kinds of disorders","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"386","context":"trying to keep grass alive during a summer on a piece of ground that big was expensive","statement":"The watering and fertilizer, can cost a lot to keep grass alive in the summer months.","entailment":[{"annotator":3,"id":"386-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Keeping grass alive on a big ground can be expensive. 
For that you need watering and fertilizer, which can be expensive","self_corrected":false}],"neutral":[{"annotator":0,"id":"386-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the reason for the high cost.","self_corrected":false},{"annotator":1,"id":"386-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what exactly is expensive about keeping the grass alive.","self_corrected":false},{"annotator":2,"id":"386-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It costs a lot to keep grass alive in the summer, but the reason can be watering and fertilizer, or something else like labor and pesticide.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1252","context":"isn't it i can remember i've only been here eight years but i can remember coming to work from i used to live in Wylie and i could see downtown Dallas","statement":"Downtown Dallas was a short drive from where I lived in Wylie.","entailment":[{"annotator":0,"id":"1252-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker can see downtown Dallas from where he\/she lived, so it is true to say that it was a short drive.","self_corrected":false},{"annotator":1,"id":"1252-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the person could see Downtown Dallas from their place in Wylie, it probably was only a short drive.","self_corrected":false},{"annotator":3,"id":"1252-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I\" saw Dallas on my way to work from Wylie. Considering daily commute, Dallas should not be very far away from Wylie","self_corrected":false}],"neutral":[{"annotator":2,"id":"1252-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"I could see downtown Dallas maybe because it was not far away from where I lived in Wylie, maybe because I lived in a high-rise apartment. 
Besides, as there could be a river between me and the downtown, if there are no bridges, the drive will still be long.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"335","context":"The living is not equal to the Ritz, he observed with a sigh.","statement":"The living is nothing compared to the glamour of the Ritz, he said sadly.","entailment":[{"annotator":0,"id":"335-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can infer from the sigh that the speaker did think that living here is not as good as in the Ritz.","self_corrected":false},{"annotator":2,"id":"335-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The sigh and the words proved that he didn't think the living is as good as the Ritz.","self_corrected":false},{"annotator":3,"id":"335-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[{"annotator":1,"id":"335-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether he sighed from disappointment or relief.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1407","context":"Exhibit 3 presents total national emissions of NOx and SO2 from all sectors, including power.","statement":"In Exhibit 3 there are the total regional emissions od NOx and SO2 from all sectors.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1407-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Exhibit 3 shows the total national emissions, not regional emissions.","self_corrected":false},{"annotator":1,"id":"1407-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The emissions are national not regional.","self_corrected":false},{"annotator":2,"id":"1407-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Exhibit 3 is about \"national\" not \"regional\" 
emissions.","self_corrected":false},{"annotator":3,"id":"1407-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It should be national emissions","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"699","context":"Ca'daan heard the Kal grunt and felt the horse lift.","statement":"The Kal heard Ca'daan grunt.","entailment":[{"annotator":3,"id":"699-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"true, statement is a part of the context","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"699-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The grunt was from Kal, not Ca'daan.","self_corrected":false},{"annotator":1,"id":"699-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Ca'daan grunted not the Kal.","self_corrected":false},{"annotator":2,"id":"699-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is Ca'daan that heard the Kal, not reverse.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1284","context":"5 are highly correlated during summer months in some areas.","statement":"Six are correlated to winter in certain areas.","entailment":[],"neutral":[{"annotator":0,"id":"1284-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only suggests the correlation during summer months, not winter.","self_corrected":false},{"annotator":1,"id":"1284-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be that six are correlated to winter, the context only speaks about summer.","self_corrected":false},{"annotator":2,"id":"1284-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Winter months are not discussed in the 
context.","self_corrected":false},{"annotator":3,"id":"1284-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about 6","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"355","context":"China's civil war sent distressing echoes to Hong Kong.","statement":"Japan fought a civil war.","entailment":[],"neutral":[{"annotator":0,"id":"355-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention of the Japan civil war in the context.","self_corrected":false},{"annotator":1,"id":"355-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be that Japan also fought a civil war, but it is not clear from the context.","self_corrected":false},{"annotator":2,"id":"355-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Japanese civil war is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"355-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"no info about japan; irrelavant","self_corrected":false}],"contradiction":[{"annotator":3,"id":"355-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"China fought a civil war","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"225","context":"Then Shuman claims that Linux provides no graphical user interface.","statement":"They made accusations about the platform.","entailment":[{"annotator":0,"id":"225-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We can infer from the claim in the context that the made accusations.","self_corrected":false},{"annotator":1,"id":"225-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"That an operating system doesn't contain a graphical user interface can be called an 
accusation.","self_corrected":false}],"neutral":[{"annotator":2,"id":"225-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Linux provides no graphical user interface\" can be a common description or an accusation, the attitude needs more background to prove.","self_corrected":false},{"annotator":3,"id":"225-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the claim being an accusation","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1427","context":"Sphinxes were guardian deitiesinEgyptianmythologyandthis was monumentalprotection,standing73 m (240 ft)longand20 m (66 feet) high.","statement":"Sphinxes were put in the tombs to protect the dead.","entailment":[],"neutral":[{"annotator":0,"id":"1427-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no information in the context about where Sphinxes were placed.","self_corrected":false},{"annotator":1,"id":"1427-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context what exactly the sphinxes protected.","self_corrected":false},{"annotator":2,"id":"1427-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sphinxes' location is not given in the context.","self_corrected":false},{"annotator":3,"id":"1427-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether if Sphinxes were put into the tombs","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1292","context":"The original wax models of the river gods are on display in the Civic Museum.","statement":"They have models made out of clay.","entailment":[],"neutral":[{"annotator":0,"id":"1292-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention about the clay models.","self_corrected":false},{"annotator":1,"id":"1292-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They could 
also have models made out of clay next to those made out of wax.","self_corrected":false},{"annotator":2,"id":"1292-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context it just refered to a wax model, but provided no information about clay model.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1292-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is out of wax","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"95","context":"What changed?","statement":"Nothing changed.","entailment":[],"neutral":[{"annotator":1,"id":"95-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker doesn't assert anything about whether something changed.","self_corrected":false},{"annotator":3,"id":"95-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context is a questions. The statement is answer, but can not be entailed.","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"581","context":"But there's SOMETHING.","statement":"Surely there's something.","entailment":[{"annotator":0,"id":"581-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that there is something.","self_corrected":false},{"annotator":1,"id":"581-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"581-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The uppercase of \"something\" can express \"surely\".","self_corrected":false},{"annotator":3,"id":"581-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is something despite of the 
tone","self_corrected":false}],"neutral":[{"annotator":2,"id":"581-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"Surely\" can not find a correspondant word in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"691","context":"When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' '","statement":"He won't care how many innocent people will suffer.","entailment":[{"annotator":0,"id":"691-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The question in the context could be a rhetorical question to which the speaker already has a standard answer, which is that he won't care.","self_corrected":false},{"annotator":3,"id":"691-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He is preparing for a major striking, which will cause many innocents to suffer","self_corrected":true}],"neutral":[{"annotator":1,"id":"691-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether he cares that innocents will suffer.","self_corrected":false},{"annotator":2,"id":"691-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His attitude towards the suffer of innocents is not given in the context, he maybe cares, maybe not.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"893","context":"but uh that has been the major change that we have noticed in gardening and that's about the extent of what we've done just a little bit on the patio and uh and waiting for the the rain to subside so we can mow we after about a month we finally got to mow this weekend","statement":"We have not done much gardening yet because of the rain.","entailment":[{"annotator":0,"id":"893-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that them have only done a little gardening and needed to wait for the rain to die down, which suggests that the reason was 
raining.","self_corrected":false},{"annotator":1,"id":"893-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speakers says that they did only a little gardening because of the rain.","self_corrected":false},{"annotator":3,"id":"893-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We plan to mow after the rain subside","self_corrected":true}],"neutral":[{"annotator":2,"id":"893-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We have done just a little bit, but the reason can be the rain or something else like temperature.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"438","context":"It is, as you see, highly magnified. ","statement":"It is plain for you to see that it is amplified.","entailment":[{"annotator":0,"id":"438-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that it is magnified. ","self_corrected":false},{"annotator":1,"id":"438-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"438-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"highly magnified\" can be interpreted \"amplified\".","self_corrected":false},{"annotator":3,"id":"438-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It can be seen, and it is magnified","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"780","context":"There are many such at the present time.","statement":"There are over two currently.","entailment":[{"annotator":1,"id":"780-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If there are many then there are certainly over 
two.","self_corrected":false},{"annotator":2,"id":"780-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Many\" ususally is over two.","self_corrected":false},{"annotator":3,"id":"780-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Many means more than two","self_corrected":false}],"neutral":[{"annotator":0,"id":"780-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The word \"many\" in the context indicates more than one, but we don't know if there are more than two.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1025","context":"He threw one of them and shot the other.","statement":"He kept his gun holstered.","entailment":[],"neutral":[{"annotator":1,"id":"1025-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether he had other shooting weapons than his gun.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1025-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement indicates that he didn't use the gun.","self_corrected":false},{"annotator":2,"id":"1025-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He \"shot\" one, so he need to pull his gun out of the holster.","self_corrected":false},{"annotator":3,"id":"1025-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He shot one of them","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"118","context":"For such a governmentwide review, an entrance conference is generally held with applicable central agencies, such as the Office of Management and Budget (OMB) or the Office of Personnel Management.","statement":"An entrance conference is held with specialized agencies.","entailment":[{"annotator":0,"id":"118-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Applicable central agencies mentioned in the context are indeed 
specialized agencies.","self_corrected":false},{"annotator":1,"id":"118-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context with less detail.","self_corrected":false},{"annotator":2,"id":"118-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Office of Management and Budget (OMB) or the Office of Personnel Management are specialized agencies. ","self_corrected":false},{"annotator":3,"id":"118-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the conference is held with applicable central agencies. They can then be considered as specialized","self_corrected":false}],"neutral":[{"annotator":2,"id":"118-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"For a governmentwide review the statement is true, but for a normal entrance conference, it is could be held with specialized agencies or any temporary agencies. ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"169","context":"Splendid!","statement":"The speaker is excited by the situation.","entailment":[{"annotator":0,"id":"169-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The comment \"splendid\" and the exclamation mark indicate that the speaker is excitied.","self_corrected":false},{"annotator":1,"id":"169-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker calls out \"splendid!\" so they are probably excited about the situation.","self_corrected":false},{"annotator":2,"id":"169-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\n\"Splendid\" means very good, and the exclamation mark also conveys the speaker's excitement.","self_corrected":false},{"annotator":3,"id":"169-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Splendid entails excitement","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} 
-{"id":"1149","context":"Answer? said Julius.","statement":"Julius already knew the answer.","entailment":[{"annotator":0,"id":"1149-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Julius is asking someone for answer, he might already know the right answer and wants to know if others do. ","self_corrected":false}],"neutral":[{"annotator":1,"id":"1149-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether Julius is really asking for the answer.","self_corrected":false},{"annotator":3,"id":"1149-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was rather a question from Julius, we do not know if he knows the answer","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1149-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Julius is asking for answer, the question might indicate that he didn't know the answer.","self_corrected":false}],"idk":[2],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1380","context":"Even today, Yanomamo men raid villages, kill men, and abduct women for procreative purposes.","statement":"Yanomamo eats food.","entailment":[],"neutral":[{"annotator":0,"id":"1380-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention if Yanomamo eats food.","self_corrected":false},{"annotator":2,"id":"1380-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Raiding villages, killing men and abducting women have nothing to do with food supply.","self_corrected":false},{"annotator":3,"id":"1380-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"974","context":"John Panzar has characterized street delivery as a bottleneck function because a single firm can deliver to a recipient at a lower total cost than multiple firms delivering to the same customer.","statement":"John Panzar believes in nationalizing all postal delivery services and couriers into a single entity for cost-saving 
purposes.","entailment":[{"annotator":2,"id":"974-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"John Panzer supposes that delivering to one customer by only one firm costs fewer than multiple firms.","self_corrected":false}],"neutral":[{"annotator":0,"id":"974-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the John Panzar's thoughts about street delivery, not the proposal to nationalize delivery services.","self_corrected":false},{"annotator":1,"id":"974-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear what John Panzar's stance on nationalizing postal services is.","self_corrected":false},{"annotator":2,"id":"974-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"John Panzer didn't mention the way how could one customer only received delivery from one firm, it could be nationalization, but also could be others like monopoly.","self_corrected":false},{"annotator":3,"id":"974-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"exaggeration: No info about John Panzar's believe and ambitions","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"716","context":"so do you have do you have the long i guess not not if there's see i was raised in New York but i guess up there you all don't have too long of a growing season do you","statement":"I am looking for a written guide to growing plants in different places in the country.","entailment":[],"neutral":[{"annotator":0,"id":"716-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Althought the speaker asks about the growing season in the context, the reason is not mentioned.","self_corrected":false},{"annotator":1,"id":"716-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the speaker is looking for a guide or simply asking a question.","self_corrected":false},{"annotator":3,"id":"716-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"not clear context; potential 
irrelevance","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1428","context":"The good news, however, can be found in reports like this one.","statement":"The good news is that the puppy's life was able to be saved.","entailment":[],"neutral":[{"annotator":0,"id":"1428-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what is the good news.","self_corrected":false},{"annotator":1,"id":"1428-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear what the good news is.","self_corrected":false},{"annotator":2,"id":"1428-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The topic of good news can be puppy's life or cat's life or parrot's life or anything.","self_corrected":false},{"annotator":3,"id":"1428-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about a puppy in the context","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1536","context":"right oh they've really done uh good job of keeping everybody informed of what's going on sometimes i've wondered if it wasn't almost more than we needed to know","statement":"After sharing all information with everyone, I think I may have shared too much. 
","entailment":[],"neutral":[{"annotator":1,"id":"1536-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker talks about \"they\" sharing the information not about themselves.","self_corrected":false},{"annotator":2,"id":"1536-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not given in the context whether I have shared all information, or do I think I have shared too much.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1536-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not the speaker who shares the information with everybody, it's them.","self_corrected":false},{"annotator":2,"id":"1536-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not I but they who shared all information.","self_corrected":false},{"annotator":3,"id":"1536-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I\" did not share the information, \"they\" did","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1006","context":"The important thing is to realize that it's way past time to move it.","statement":"It has not been moved yet in the past.","entailment":[{"annotator":0,"id":"1006-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that it's no longer the time to move, which indicates that it hasn't been moved yet.","self_corrected":false},{"annotator":3,"id":"1006-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because it is too late to move it now, so in the past it was not moved","self_corrected":false}],"neutral":[{"annotator":1,"id":"1006-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It might have been moved in the past and now is needed to be moved again.","self_corrected":false},{"annotator":2,"id":"1006-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context states it is not time to move it now, but the history of movement is not refered 
to.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1221","context":"We know they will have to come from the south but that gives them a space as wide as the town in which to launch their attack.","statement":"The south is totally protected against an attack.","entailment":[],"neutral":[{"annotator":2,"id":"1221-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"In the context, it is analysed that they will attack from the south, but the protection of the south is not measurable.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1221-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that the south is the right place to launch the attack, which indicates that the south cannot totally defend itself against an attack.","self_corrected":false},{"annotator":1,"id":"1221-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The space in the south makes it not totally protected.","self_corrected":false},{"annotator":3,"id":"1221-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is a space in the south as wide as the town to launch their attack","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1193","context":"eligible individuals and the rules that apply if a state does not substantially enforce the statutory requirements.","statement":"It does not matter whether or not a state enforces the statutory requirements.","entailment":[],"neutral":[{"annotator":2,"id":"1193-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can only make sure, if a state does not enforces the statutory requirements, the rules apply; otherwise, we don't know whether the rules take effect or not.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1193-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It matters whether the state enforces the statuory requirements because then other rules 
apply.","self_corrected":false},{"annotator":3,"id":"1193-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are rules that apply if the state does not enforces the statutory requirements, meaning it does matter that the state do enforces these requirements. Because if not, then there is no need for those rules to exist","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"493","context":"yeah then you don't have you don't have that mess to clean up when you use an oil oil base painting and boy i'll tell you oh","statement":"Typically oil based paints are easy to work with and clean up.","entailment":[{"annotator":3,"id":"493-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false}],"label_correction":false,"reason":"Because in the context, \"you\" didn't use oil based paints, so \"you\" have a mess to clean up","self_corrected":true}],"neutral":[{"annotator":1,"id":"493-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether oil based paints are also easy to work with.","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"395","context":"Why shouldn't he be?","statement":"There is no reason he shouldn't be.","entailment":[{"annotator":1,"id":"395-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":3,"id":"395-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question is open. It is not known if there is reason or not. 
","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1376","context":"The town is also known for its sparkling wine and for the caves where about 70 per?\u00adcent of France's cultivated mushrooms are grown.","statement":"The town has a lot of sparkling wine.","entailment":[{"annotator":2,"id":"1376-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The town is famous for its sparkling wine, so it should have lots of sparkling wine.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1376-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could also be a small amount of sparkling wine for which the place is famous.","self_corrected":false},{"annotator":3,"id":"1376-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is only known about the reputation of the town's wine, but not known about the quantity of the wine","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1060","context":"Asked about abortion the other day on CNN, Republican National Committee Chairman Jim Nicholson also invoked what is apparently the party-line inclusive party.","statement":"The Republican National Committee Chairman gave the party's standard answer on the subject of abortion when he was asked about it on CNN.","entailment":[{"annotator":0,"id":"1060-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I think the party-line mentioned in the context refers to the party's standard answer.","self_corrected":false},{"annotator":1,"id":"1060-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1060-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Party-line\" typically refers to a position or stance that aligns with the official position or policies of a political party, so it can be refered to the \"standard 
answer\".","self_corrected":false},{"annotator":3,"id":"1060-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He toed the party line, meaning he said what is in line with the party's agenda ","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":2,"id":"1060-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"Inclusive party\" suggests a political party that welcomes diverse views and members, so it should not be the party's typical answer.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"234","context":"However, the associated cost is primarily some of the costs of assessing and collecting duties on imported merchandise, such as the salaries of import specialists (who classify merchandise) and the costs of processing paperwork.","statement":"the associated cost is how much people spend relative to this amount","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"234-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The associated cost is not in the general sense of how much people spend, but is specifically defined in the context.","self_corrected":false},{"annotator":2,"id":"234-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, \"the associated cost is primarily some of the costs of assessing and collecting duties on imported merchandise\", it is about the goods, so the description \"relative to this amount\" is different from the definition given before.","self_corrected":false},{"annotator":3,"id":"234-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's the cost of assessing and collecting duties ","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"328","context":"well in a way you can travel light","statement":"You won't need to pack much.","entailment":[{"annotator":0,"id":"328-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Traveling light means exactly no need to pack 
much.","self_corrected":false},{"annotator":1,"id":"328-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If you can travel light, then you don't need to pack much.","self_corrected":false},{"annotator":2,"id":"328-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Travel light\" means exactly not packing much.","self_corrected":false},{"annotator":3,"id":"328-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Traveling light entails traveling with a small package ","self_corrected":false}],"neutral":[{"annotator":1,"id":"328-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"In a way\" suggests that it could be not the usual meaning of travelling light.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1190","context":"although the uh it's uh it we almost one day we painted the house to uh we painted we painted the whole inside and it had all this dark trim we thought uh you know we did the one wall but the other trim i'm trying to think i think i think we left most of it because it gets to be uh they don't do that in the newer houses now we don't the uh mold everything is white in a new house everything is white","statement":"We painted the house over the duration of one day.","entailment":[{"annotator":0,"id":"1190-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that the speaker spent almost a day to paint the house, so the time they spent is indeed over the duration if one day. ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[1,2,3],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"424","context":"The judge gave vent to a faint murmur of disapprobation, and the prisoner in the dock leant forward angrily. 
","statement":"The judge ordered the court to be silent.","entailment":[],"neutral":[{"annotator":1,"id":"424-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about slience.","self_corrected":true},{"annotator":2,"id":"424-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The judge \"gave vent to a faint murmur of disapprobation\" can not be taken as an order for quiet.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"424-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The judge only made a murmur and didn't give a direct order.","self_corrected":false},{"annotator":3,"id":"424-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He let the crowd murmur ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"430","context":"Keep your eyes open for Renaissance details, grand doorways, and views into lovely courtyards.","statement":"All of the doorways and courtyards have been completely remodeled since the Renaissance.","entailment":[],"neutral":[{"annotator":0,"id":"430-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention if the doorways and courtyards were remodeled since the Renaissance.","self_corrected":false},{"annotator":2,"id":"430-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not garanteed that all of the objects mentioned in the context have been completely remodeled, maybe part of them kept the same as before the Renaissance.","self_corrected":false},{"annotator":3,"id":"430-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether they have been remodeled or not. 
They could also be built in the Renaissance time ","self_corrected":false}],"contradiction":[{"annotator":1,"id":"430-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the doorways and courtyards would have been remodeled since the Renaissance, then it wouldn't make sense to look for the Reneaissance details.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1029","context":"This one ended up being surprisingly easy!","statement":"This question was very easy to answer.","entailment":[{"annotator":0,"id":"1029-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both context and statement suggest the easiness of what they talk about.","self_corrected":false},{"annotator":1,"id":"1029-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1029-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is true because the question \"ended up being surprisingly easy\".","self_corrected":false},{"annotator":3,"id":"1029-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A question ended up easy, meaning it's easy to answer ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"67","context":"The percent of total cost for each function included in the model and cost elasticity (with respect to volume) are shown in Table 1.","statement":"Table 1 also shows a picture diagram for each function.","entailment":[{"annotator":3,"id":"67-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"And this diagram shows the cost for each function ","self_corrected":true}],"neutral":[{"annotator":0,"id":"67-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention if Table 1 presents the picture 
diagram.","self_corrected":false},{"annotator":1,"id":"67-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear in which way the values are presented in Table 1.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"67-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, Table 1 shows the \"the percent of total cost for each function\" and \"cost elasticity\", not picture diagram.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"343","context":"In addition, the senior executives at these organizations demonstrated their sustained commitment to financerelated improvement initiatives by using key business\/line managers to drive improvement efforts, attending key meetings, ensuring that the necessary resources are made available, and creating a system of rewards and incentives to recognize those who support improvement initiatives.","statement":"This system of rewards and incentives will hopefully improve company performance.","entailment":[{"annotator":0,"id":"343-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The rewards and incentives will be given to those who support improvement initiatives, so the company is more likely to improve their proferance under these rewards incentives.","self_corrected":false},{"annotator":3,"id":"343-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The measurement taken is thought for finance-related improvenemt.","self_corrected":false}],"neutral":[{"annotator":0,"id":"343-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false}],"label_correction":false,"reason":"The system of rewards and incentives ","self_corrected":true},{"annotator":1,"id":"343-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether the speaker hopes that the rewards and incentives improve company performance.","self_corrected":false},{"annotator":2,"id":"343-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The measures mentioned in the context can not prove their effect \"will hopefully improve company performace\" or 
not.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1104","context":"Per week?","statement":"Every day.","entailment":[],"neutral":[{"annotator":0,"id":"1104-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The context is a question and the statement is an answer. But I don't know whether the statement is exactly the answer to the question in the context. ","self_corrected":false},{"annotator":3,"id":"1104-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"From a question can not be entailed to a answer","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1104-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"per week means once every seven days","self_corrected":true}],"idk":[1,2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"114","context":"She would be almost certainly sent to you under an assumed one.","statement":"The man told the other man that Bill would be sent to him.","entailment":[],"neutral":[{"annotator":0,"id":"114-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention who told whom.","self_corrected":false},{"annotator":2,"id":"114-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Bill is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"114-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The gender of Bill and \"she\" and \"you\" is unknown","self_corrected":false}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1321","context":"Transforming Control of Public Health Programs Raises Concerns (","statement":"Everyone is content with the change of public health programs.","entailment":[],"neutral":[{"annotator":0,"id":"1321-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context doesn't mention whether people are satisfied with the 
programs.","self_corrected":true},{"annotator":3,"id":"1321-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"exaggeration: \"Raising concerns\" doesn't mean that everyone is concerned. Maybe someone is not concerned and the context will still hold","self_corrected":true}],"contradiction":[{"annotator":1,"id":"1321-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The program raises concerns, so not everyone is content with it.","self_corrected":false},{"annotator":2,"id":"1321-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, because the change of control of Public Health Programs \"raises concerns\", there must be someone not content with it.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"807","context":"Extremely limited exceptions to the authority are established in 31 U.S.C.","statement":"They were trying to eliminate all exceptions.","entailment":[],"neutral":[{"annotator":0,"id":"807-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that there are extremely limited exceptions, but not whether someone wanted to eliminate the exceptions.","self_corrected":false},{"annotator":1,"id":"807-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether they tried to eliminate the exceptions.","self_corrected":false},{"annotator":2,"id":"807-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The limitation of exceptions will not definitey lead to eliminating all exceptions.","self_corrected":false},{"annotator":3,"id":"807-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The motive of them is unknown. Maybe they didn't try to eliminate all, but just eliminate the unnecessary ones.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"392","context":"On the northern slopes of this rocky outcropping is the site of the ancient capital of the island, also called Thira, which dates from the third century b.c. 
(when the Aegean was under Ptolemaic rule).","statement":"Is the site of the ancient asteroid impact, also called Thira.","entailment":[],"neutral":[{"annotator":0,"id":"392-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if the site of the ancient capital mentioned in the context is also the site of the ancient asteroid impact.","self_corrected":false},{"annotator":1,"id":"392-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether there was an asteroid impact on Thira.","self_corrected":false},{"annotator":2,"id":"392-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"The ancient asteroid impact\" is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"392-contradiction-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is the ancient capital of the island, not an asteroid impact","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1016","context":"After shuttering the DOE, Clinton could depict himself as a crusader against waste and bureaucracy who succeeded where even Reagan failed.","statement":"Clinton shuttered the DOE to move against waste.","entailment":[{"annotator":1,"id":"1016-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If shuttering the DOE allowed Clinton to appear as a crusader against waste then it probably was done to do something against waste.","self_corrected":false},{"annotator":3,"id":"1016-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He shuttered the DOE, and were depicted as crusader against waste","self_corrected":false}],"neutral":[{"annotator":0,"id":"1016-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The context only suggests that Clinton can describe the reason as being against waste, but it is not clear if this reason is actually the truth.","self_corrected":false},{"annotator":2,"id":"1016-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"Clinton could depict himself as a crusader against waste and bureaucracy\" doesn't mean it is his true intention is \"to move against waste.\" Perhaps he really is just trying to save energy, but maybe he has other 
intentions.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1098","context":"yeah well that's my uh i mean every time i've tried to go you know it's always there's there's always a league bowling","statement":"Every time I try to go bowling there are leagues only and I can't bowl.","entailment":[{"annotator":0,"id":"1098-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both context and statement suggests that the speaker can't go bowling because of the leagues.","self_corrected":false},{"annotator":1,"id":"1098-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1098-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Usually if the place is occupied by a league, then other people can not play there.","self_corrected":false}],"neutral":[{"annotator":3,"id":"1098-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe the place is big enough, so even the there is a league bowling, I can still bowl","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"456","context":"The end is near! Then a shout went up, and Hanson jerked his eyes from the gears to focus on a group of rocs that were landing at the far end of the camp.","statement":"It's all over, Hanson whispered as he stared at the gears. 
","entailment":[],"neutral":[{"annotator":0,"id":"456-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if \"It's all over\" was Hanson's whisper.","self_corrected":false},{"annotator":1,"id":"456-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether or what Hanson whispered","self_corrected":false}],"contradiction":[{"annotator":3,"id":"456-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The end is near, meaning it's soon to be over but not all over yet","self_corrected":true}],"idk":[2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"140","context":"In the 19th century, when Kashmir was the most exotic hill-station of them all, the maharaja forbade the British to buy land there, so they then hit on the brilliant alternative of building luxuriously appointed houseboats moored on the lakes near Srinagar.","statement":"The maharaja allowed the British to build houseboats on the lakes.","entailment":[{"annotator":1,"id":"140-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The British did build houseboats on the lakes, so the maharaja must at least have tolerated it.","self_corrected":false},{"annotator":3,"id":"140-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The British was forbidden to buy land in Kashmir, so they buit houseboats on the lakes near Srinagar. 
Because they built it, so they must have been allowed","self_corrected":false}],"neutral":[{"annotator":0,"id":"140-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions what the maharaja forbade, not what he allowed.","self_corrected":true},{"annotator":2,"id":"140-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The British are allowed to moor houseboats on the lakes, but whether it is allowed for them to \"build\" houseboats is not mentioned.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"771","context":"Under the budget deal, by 2002, national defense will consume about $273 billion a year compared with $267 billion now.","statement":"The United States national defense budget will increase by 6 billion dollars.","entailment":[{"annotator":0,"id":"771-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The budget increases from 267 to 273 billion, an increase of $6 billion.","self_corrected":false},{"annotator":1,"id":"771-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"$273 billion - $267 billion = $6 billion","self_corrected":false},{"annotator":2,"id":"771-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is true, because 273-267 = 6.","self_corrected":false},{"annotator":3,"id":"771-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"right now the budget is 267 Million, in 2002 it will be 273 Million, making the incease by 6 million","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1577","context":"evaluation questions.","statement":"Only statements of the evaluation are available.","entailment":[],"neutral":[{"annotator":2,"id":"1577-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The state of statements of the evaluation is not given in the 
context.","self_corrected":false},{"annotator":3,"id":"1577-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[0,1],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"316","context":"Hearty Sabbath meals.","statement":"Hearty meals will only be offered to Buddhists","entailment":[],"neutral":[],"contradiction":[{"annotator":1,"id":"316-contradiction-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Buddhists typically don't celebrate Sabbath, so the hearty meals will most likely be offered to Jews.","self_corrected":false},{"annotator":2,"id":"316-contradiction-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sabbath is Judaism's day, it doesn't make sense that hearty meals will only be offered to Buddhists, not to jewish people.","self_corrected":false},{"annotator":3,"id":"316-contradiction-3","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is jewish traditional meals","self_corrected":false}],"idk":[0],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"241","context":"but i don't know you know maybe you could do that for a certain period of time but i mean how long does that kind of a thing take you know to to um say to question the person or to get into their head","statement":"It might take a long time to do that because getting inside a person's head takes time.","entailment":[{"annotator":2,"id":"241-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is true, because the possibility exists, that it takes a long time to get into their head.","self_corrected":false},{"annotator":3,"id":"241-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context states it is not known how long it would take, so it might take a long time","self_corrected":false}],"neutral":[{"annotator":0,"id":"241-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement seems to be a continuation of the context, the reason why it meight take a long time is not mentioned in the 
context.","self_corrected":false},{"annotator":1,"id":"241-neutral-2","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether it will take to get into a person's head.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1297","context":"All of our many earnest experiments produced results in line with random chance, they conclude.","statement":"The experiments proved it was a much better predictor.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1297-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The results are in line with random chance, which indicates that it was not a better predictor, it was just random. ","self_corrected":false},{"annotator":1,"id":"1297-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If results were in line with random chance, it was not a good predictor.","self_corrected":false},{"annotator":2,"id":"1297-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, if it was a much better predictor, then the earnest experiments should produce results much better than random chance.","self_corrected":false},{"annotator":3,"id":"1297-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because the results are random, so the predictor is not good","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"825","context":"It seeks genuine direct elections after a period that is sufficient to organize alternative parties and prepare a campaign based on freedom of speech and other civil rights, the right to have free trade unions, the release of more than 200 political prisoners, debt relief, stronger penalties for corruption and pollution, no amnesty for Suharto and his fellow thieves, and a respite for the poor from the hardest edges of economic reform.","statement":"The only thing that can our society is more power to the presidential electors.","entailment":[],"neutral":[{"annotator":0,"id":"825-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't contain any information about the power of the presidential 
electors.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"825-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false}],"label_correction":true,"reason":"There is stronger penalties for corruption, so more restrictions on the presidential electors.","self_corrected":true}],"idk":[1,2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1580","context":"but how do you know the good from the bad","statement":"Why care if it's good or bad?","entailment":[],"neutral":[{"annotator":3,"id":"1580-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Two different questions","self_corrected":false}],"contradiction":[],"idk":[0,1,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1197","context":"'But if White has any designs at all on living, he'll be as far from Little as he can possibly get by now.'","statement":"White should be afraid to come back to Little.","entailment":[{"annotator":0,"id":"1197-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests that White will die if he is with Little, we can infer that he should be afriaid of being with Litte.","self_corrected":false},{"annotator":1,"id":"1197-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It sounds like Little is after White's life.","self_corrected":false},{"annotator":2,"id":"1197-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, White should be far away from Little if he wants to live, so coming back to Little should be terrible, and he should be afraid of that.","self_corrected":false},{"annotator":3,"id":"1197-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If White has any planning on life, then he should not come back to Little. 
It means that Little is not a good place for living, so he should be afraid to come back to this place","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"563","context":"yes well yeah i am um actually actually i think that i at the higher level education i don't think there's so much of a problem there it's pretty much funded well there are small colleges that i'm sure are struggling","statement":"Small colleges usually have trouble with funding and resources.","entailment":[{"annotator":0,"id":"563-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that small colleges have funding issues.","self_corrected":true}],"neutral":[{"annotator":1,"id":"563-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker doesn't say anything about 'usually'. They only assert that there are some small colleges that are struggling.","self_corrected":false},{"annotator":2,"id":"563-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Some small colleges are struggling, but it can be a common phenomenon, or may be quite rare.","self_corrected":false},{"annotator":3,"id":"563-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker mentioned his college is well funded and then said some small colleges are struggling. 
It can not be concluded that all small colleges usually have trouble with funding","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1069","context":"Even if auditors do not follow such other standards and methodologies, they may still serve as a useful source of guidance to auditors in planning their work under GAGAS.","statement":"GAGAS requires strict compliance for auditors to follow.","entailment":[{"annotator":0,"id":"1069-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context suggests that auditors need to plan their work under GAGAS.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1069-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear how strict GAGAS is.","self_corrected":false},{"annotator":2,"id":"1069-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is not about GAGAS's requirements, we only know under GAGAS auditors can use other standards and methodologies as reference.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1069-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"GAGAS can also only serve as guidance to the auditors","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"321","context":"What a brilliantly innocuous metaphor, devised by a master manipulator to obscure his manipulations.","statement":"The metaphor was created by the manipulator to convince people of something.","entailment":[{"annotator":0,"id":"321-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Manipulation does aim to make people believe certain things.","self_corrected":false}],"neutral":[{"annotator":2,"id":"321-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The purpose of the metaphor in the context is described as \"to obscure his manipulations\", whether he wanted to \"convince people of something\" is not given. 
","self_corrected":false}],"contradiction":[{"annotator":1,"id":"321-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The metaphor was created to hide the manipulations, not for manipulating directly.","self_corrected":false},{"annotator":3,"id":"321-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was created to hide his true intention to manipulate","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"466","context":"Changes in technology and its application to electronic commerce and expanding Internet applications will change the specific control activities that may be employed and how they are implemented, but the basic requirements of control will not have changed.","statement":"Technology will make it so we have less control of activities.","entailment":[],"neutral":[{"annotator":1,"id":"466-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It doesn't say anything about whether the possibility for control will change.","self_corrected":false},{"annotator":2,"id":"466-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Although \"the basic requirements of control will not have changed\", we don't know whether technology will bring more control of activies or less control.","self_corrected":false},{"annotator":3,"id":"466-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned how the specific control activites will be changed. 
But it is sure that the basic requirements of controll will not change","self_corrected":false}],"contradiction":[{"annotator":0,"id":"466-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Technology will change the employment and implementierung of some control activities, but the basic requirements, which means the degree of control, will not be changed.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"609","context":"The order was founded by James VII (James II of England) and continues today.","statement":"Kings frequently founded orders that can still be found today.","entailment":[],"neutral":[{"annotator":0,"id":"609-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the order founded by James VII, we know nothing about orders that founded by other kings.","self_corrected":false},{"annotator":1,"id":"609-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about a single order. We cannot infer that such orders are frequent.","self_corrected":false},{"annotator":2,"id":"609-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The order founded by a king can continue until now, but it is not given whether it is frequently founded.","self_corrected":false},{"annotator":3,"id":"609-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The frequeny of founding orderings is not known","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"738","context":"The road along the coastline to the south travels through busy agricultural towns and fishing villages untouched by tourism.","statement":"There are no tourists on the road through the agricultural towns and fishing villages.","entailment":[{"annotator":0,"id":"738-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"untouched\" implies that these places are not visited by tourists.","self_corrected":true},{"annotator":1,"id":"738-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The towns and villages are untouched by 
tourism so probably the route going through them is also relatively free of tourists.","self_corrected":false}],"neutral":[{"annotator":2,"id":"738-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can only know that the fishing villages are not the destination of tourists, but we can't promise there are no tourists on the road through it.","self_corrected":false},{"annotator":3,"id":"738-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the road being touched by the tourists or not. The towns and villages are not touched by the tourists","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"536","context":"well they're so close to an undefeated undefeated season they can taste it and they wanna make history so i don't think they're gonna lack for motivation","statement":"Unless they suffer any losses, they'll remain motivated.","entailment":[{"annotator":1,"id":"536-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They are motivated by being undefeated. This will persist unless they are defeated.","self_corrected":true},{"annotator":3,"id":"536-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They are motivated to get a full win season. 
So if they suffer loss, then they can no longer make history, so their movitation may lack","self_corrected":false}],"neutral":[{"annotator":0,"id":"536-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what would happen if they suffer any losses, they may or may not lose motivation.","self_corrected":true},{"annotator":2,"id":"536-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"After suffering any losses they maybe will lose motivation, but maybe still remain motivated.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"557","context":"The woman rolled and drew two spears before the horse had rolled and broken the rest.","statement":"They were in rotation on the ground grabbing their weapons.","entailment":[{"annotator":0,"id":"557-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The woman and the horse were grabbing the spears. They rolled indicates that they were on the ground.","self_corrected":true},{"annotator":3,"id":"557-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The women and horse both rolled. And the woman grabbed two spears","self_corrected":false}],"neutral":[{"annotator":2,"id":"557-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The context only described a woman, we don't know who are \"they\" in the statement.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"557-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The horse most likely wasn't grabbing a weapon.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1550","context":"However, the other young lady was most kind. 
","statement":"I received a warm welcome from the other young lady who was present.","entailment":[],"neutral":[{"annotator":0,"id":"1550-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what the young lady did to the speaker.","self_corrected":false},{"annotator":1,"id":"1550-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Only because the woman was kind, she did not have to extend a warm welcome.","self_corrected":false},{"annotator":2,"id":"1550-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The other young lay was most kind, but she could do anything to show her kindness, maybe to give warm welcome, maybe others.","self_corrected":false},{"annotator":3,"id":"1550-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The young lady was kind, but we don't know what she did","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"40","context":"The author began with a set of hunches or hypotheses about what can go wrong in agency management, and what would be evidence supporting-or contradicting-these hypotheses.","statement":"The hunches provided by the author weren't realistic as it pertains to agency management.","entailment":[],"neutral":[{"annotator":0,"id":"40-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the hunches provided by the author were realistic or not.","self_corrected":false},{"annotator":1,"id":"40-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear how realistic the hypotheses were.","self_corrected":false},{"annotator":2,"id":"40-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The judgement of the hunches provided by the author is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"40-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The hunches could be realistic, as the author provide potential evidence supporting these 
hypotheses","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1232","context":"okay and and i think we just hang up i don't think we have to do anything else","statement":"We need to wait until they tell us what to do.","entailment":[],"neutral":[{"annotator":0,"id":"1232-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context doesn't talk about why they don't have to do anything.","self_corrected":true}],"contradiction":[{"annotator":1,"id":"1232-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They explicitly say that they have to only hang up and not do anything else.","self_corrected":false},{"annotator":2,"id":"1232-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I don't think we have to do anything else\" means we don't need to nothing, so we don't need to wait.","self_corrected":false},{"annotator":3,"id":"1232-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't need to do anything, meaning also not waiting for them to tell us what to do","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"885","context":"David Cope, a professor of music at the University of California at Santa Cruz, claims to have created a 42 nd Mozart symphony.","statement":"Music Professor David Cope who specializes in Mozart's music claims to have created Mozart's 42nd symphony.","entailment":[{"annotator":1,"id":"885-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"885-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They context doesn't mention the speciality of Professor David Cope.","self_corrected":false},{"annotator":2,"id":"885-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Whether David Cope specialized in Mozart's music is not given in the 
context.","self_corrected":false},{"annotator":3,"id":"885-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the specialization of the Music Professor","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"208","context":"The conspiracy-minded allege that the chains also leverage their influence to persuade the big publishers to produce more blockbusters at the expense of moderate-selling books.","statement":"Big publishers want to produce more high budget films, even if that means badly selling books.","entailment":[{"annotator":1,"id":"208-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"208-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"According to the claim of conspiracy-minded, it's the chains that want to produce more blockbusters, not the big publishers. And we don't know if the claim is true.","self_corrected":false},{"annotator":2,"id":"208-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Producing film is not mentioned in the context, so we don't know the attitude of big publishers to it.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"208-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is at the expense of moderate-selling books, not badly selling books","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","contradiction"],"error_labels":["entailment"],"has_ambiguity":true} -{"id":"125","context":"(A bigger contribution may or may not mean, I really, really support Candidate X.) 
Freedom of association is an even bigger stretch--one that Justice Thomas would laugh out of court if some liberal proposed it.","statement":"A bigger contribution means to support candidate Y.","entailment":[],"neutral":[{"annotator":0,"id":"125-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They context doesn't talk about candidate Y.","self_corrected":false},{"annotator":1,"id":"125-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Candidate Y is not mentioned at all.","self_corrected":false},{"annotator":2,"id":"125-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A bigger contribution may or may not mean to support Candidate X, so the possibility of supporting Y exists, but not hundred percent.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"125-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, a bigger contribution can not prove any preference.","self_corrected":true},{"annotator":3,"id":"125-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is to support candidate X","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1537","context":"Because marginal costs are very low, a newspaper price for preprints might be as low as 5 or 6 cents per piece.","statement":"Many people consider these prices to be unfair to new printers.","entailment":[],"neutral":[{"annotator":0,"id":"1537-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention people's opinions on these prices.","self_corrected":false},{"annotator":1,"id":"1537-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned whether these prices are unfair.","self_corrected":false},{"annotator":2,"id":"1537-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"How do people think about the price for new printers is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"1537-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not known about the poeple's opinion on the 
price","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"309","context":"Took forever.","statement":"Lasted two years","entailment":[],"neutral":[],"contradiction":[{"annotator":2,"id":"309-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Forever is longer than two years.","self_corrected":false},{"annotator":3,"id":"309-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"two years are not forever","self_corrected":false}],"idk":[0,1],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"591","context":"Just like we have hairpins and powder-puffs.\" Tommy handed over a rather shabby green notebook, and Tuppence began writing busily.","statement":"Tommy handed Tuppence an empty shabby green notebook.","entailment":[{"annotator":1,"id":"591-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The statement is a paraphrase of a part of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"591-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know if the notebook is empty.","self_corrected":false},{"annotator":2,"id":"591-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The shabby green notebook can be empty or not.","self_corrected":false},{"annotator":3,"id":"591-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether the notebook is empty","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"119","context":"And far, far away- lying still on the tracks- was the back of the train.","statement":"The train wasn't moving but then it started up.","entailment":[],"neutral":[{"annotator":0,"id":"119-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether the train moved or 
not.","self_corrected":false},{"annotator":2,"id":"119-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The movement of the train is not given in the context.","self_corrected":false},{"annotator":3,"id":"119-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No info about whether the train started up","self_corrected":true}],"contradiction":[{"annotator":1,"id":"119-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The tarin was lying still, so it didn't start up.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"369","context":"It lacked intelligence, introspection, and humor--it was crass, worthy of Cosmopolitan or Star . I do have a sense of humor, but can only appreciate a joke when it starts with a grain of truth.","statement":"The article won a Pulitzer Prize.","entailment":[],"neutral":[{"annotator":0,"id":"369-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the Pulitzer Prize.","self_corrected":false},{"annotator":3,"id":"369-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the article winnning a prize","self_corrected":false}],"contradiction":[{"annotator":1,"id":"369-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Apparantely, the article was very bad, so it most likely did not win a Pulitzer.","self_corrected":false},{"annotator":2,"id":"369-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The article was lacked intelligence, introspection, and humor, and that is not the taste of Pulitzer Prize.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1244","context":"It will be held in the Maryland woods, and the telecast will consist of jittery footage of the contestants' slow descent into madness as they are systematically stalked and disappeared\/disqualified by Bob Barker.","statement":"The show will be set in the woods north of 
Boston.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1244-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The show will be held in the Maryland woods, not the woods north of Boston.","self_corrected":false},{"annotator":1,"id":"1244-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Boston is not near Maryland.","self_corrected":false},{"annotator":2,"id":"1244-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the show \"will be held in the Maryland woods\".","self_corrected":false},{"annotator":3,"id":"1244-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maryland is to the south of Boston","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1471","context":"It has served as a fortress for the Gallo-Romans, the Visigoths, Franks, and medieval French (you can see the layers of their masonry in the ramparts).","statement":"The fortress was built by the medieval French in 1173.","entailment":[],"neutral":[{"annotator":0,"id":"1471-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention when and by whom the fortress was built.","self_corrected":false},{"annotator":2,"id":"1471-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The building year of the fortress is not given in the context. ","self_corrected":false},{"annotator":3,"id":"1471-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about who built the fortress. 
The medieval French only had used it, so did Gallo-Romans.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1471-contradiction-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Gallo-Romans were much earlier than 1173, so the fortress was built earlier.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1313","context":"Tommy realized perfectly that in his own wits lay the only chance of escape, and behind his casual manner he was racking his brains furiously.","statement":"He'd been stuck for hours, starting to feel doubt crawl into his mind.","entailment":[],"neutral":[{"annotator":0,"id":"1313-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention in the context of how long he was stuck.","self_corrected":false},{"annotator":1,"id":"1313-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether Tommy experienced doubt.","self_corrected":false},{"annotator":2,"id":"1313-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know from which situation need Tommy escape , and the doubt didn't show up in the context.","self_corrected":false},{"annotator":3,"id":"1313-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about how long he had been stuck.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1070","context":"He touched it and felt his skin swelling and growing hot.","statement":"His skin was burning.","entailment":[{"annotator":1,"id":"1070-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The skin grew hot, so it was burning.","self_corrected":false},{"annotator":3,"id":"1070-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"his skin was swelling and growning hot. 
They are signs of burning","self_corrected":false}],"neutral":[{"annotator":0,"id":"1070-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason that hes skon swelling and growing hot is not clear. It may or may not be because his skin was burning.","self_corrected":true},{"annotator":2,"id":"1070-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His could be burning or maybe he was taken a bug's bite ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"770","context":"Bush the elder came of age when New England Republicans led the party, and patrician manners were boons to a Republican.","statement":"New England Republicans were weak.","entailment":[],"neutral":[{"annotator":2,"id":"770-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"New England Republicans could be weak or strong.","self_corrected":false},{"annotator":3,"id":"770-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the New England Republicans being weak","self_corrected":false}],"contradiction":[{"annotator":0,"id":"770-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"New England Republicans had patrician manners doesn't mean that they were weak.","self_corrected":false},{"annotator":1,"id":"770-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"New England Republicans cannot have been weak because they led the party.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"117","context":"In the depths of the Cold War, many Americans suspected Communists had infiltrated Washington and were about to subvert our democracy.","statement":"Communists assisted America's government during the Cold War.","entailment":[],"neutral":[{"annotator":0,"id":"117-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions American suspection. 
It may or may not be true.","self_corrected":false},{"annotator":1,"id":"117-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is about what people suspected not about the facts.","self_corrected":false},{"annotator":2,"id":"117-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context it is all about Americans' suspect","self_corrected":false}],"contradiction":[{"annotator":3,"id":"117-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It was suspected that the communists had infiltrated Washington and to subvert the democracy, which is the opposite of assisting America","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1166","context":"For an authentic feel of old Portugal, slip into the cool entrance hall of theimpressive Leal Senado ( Loyal Senate building), a fine example of colonial architecture.","statement":"All that remains of Leal Senado is old ruins.","entailment":[],"neutral":[{"annotator":0,"id":"1166-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The current status of Leal Senado is not mentioned.","self_corrected":false},{"annotator":2,"id":"1166-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The condition of Leal Senado is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1166-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Leal Senado is impressive and has an entrance hall, so it can hardly be only ruins.","self_corrected":false},{"annotator":3,"id":"1166-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is entrance hall of Leal Senado, so it can not be ruins","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"271","context":"no chemicals and plus then you can use it as a fertilizer and not have to worry about spreading those chemicals like on your lawn or your bushes or whatever","statement":"We don't want to use chemicals on our 
lawn","entailment":[{"annotator":0,"id":"271-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't need to worry about using them on the lawn because they are not chemicals, which suggests that we don't want to use chemicals on the lawn.","self_corrected":false},{"annotator":1,"id":"271-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker says that they might worry about spreading the chemicals on the lawn, so they don't want that.","self_corrected":false},{"annotator":3,"id":"271-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"when using chemicals, we need to worry about spreading the chemicals on the lawn","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"107","context":"We also have found that leading organizations strive to ensure that their core processes efficiently and effectively support mission-related outcomes.","statement":"Leading organizations want to be sure their processes are successful.","entailment":[{"annotator":0,"id":"107-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Leading organizations strive to ensure the success of their processes.","self_corrected":false},{"annotator":1,"id":"107-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"107-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because leading organizations strive to ensure their processes.","self_corrected":false},{"annotator":3,"id":"107-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The organizations strive to ensure their process to support the outcomes. 
So the process needs to be successful to provide support for the outcomes","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1539","context":"well that's uh i agree with you there i mean he didn't have the surrounding cast that Montana had there's no doubt about that","statement":"I agree that he didn't have the same support as Montana, but he did well.","entailment":[],"neutral":[{"annotator":0,"id":"1539-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether he did well or not.","self_corrected":false},{"annotator":1,"id":"1539-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the speaker thinks that he did well.","self_corrected":false},{"annotator":2,"id":"1539-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I agree that he didn't have the same support as Montana, but he could did well, or bad.","self_corrected":false},{"annotator":3,"id":"1539-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about where he did well or not","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1078","context":"H-2A agricultural workers are required to maintain a foreign residence which they have no intention of abandoning.","statement":"Permanent foreign residence is required for some types of agricultural work visas.","entailment":[{"annotator":0,"id":"1078-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"H-2A\" is a type of agricultural work visa that mentioned in the statement.","self_corrected":false},{"annotator":1,"id":"1078-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1078-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"H-2A agricultural workers need permanent foreign residence prove that some types of agricultural need permanent foreign 
residence.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"1078-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's those agricultural workers with H-2A visas needs to maintain the residence as part of their work but not a requirement for their application for the visa","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"273","context":"No, Dave Hanson, you were too important to us for that.","statement":"No, Dave Hanson, we couldn't risk your life becaus you are too important to us.","entailment":[],"neutral":[{"annotator":0,"id":"273-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention anything about risking life.","self_corrected":false},{"annotator":1,"id":"273-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the speakre talks about risking Hanson's life.","self_corrected":false},{"annotator":2,"id":"273-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Dave's is too important to us for \"that\", but that can be anything, not definitely about living or death.","self_corrected":false},{"annotator":3,"id":"273-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned if Dave is going to risk his life for that","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1178","context":"The pope, suggesting that Gen.","statement":"Gen is being suggested by the Pope.","entailment":[{"annotator":0,"id":"1178-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that the pope suggested the Gen.","self_corrected":false},{"annotator":1,"id":"1178-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the 
context.","self_corrected":false},{"annotator":2,"id":"1178-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, it is a switch of active voice and passive voice.","self_corrected":false},{"annotator":3,"id":"1178-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"158","context":"but there's no uh inscriptions or or dates or anything else","statement":"There aren't any dates on it?","entailment":[],"neutral":[{"annotator":3,"id":"158-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"question is not a statement","self_corrected":false}],"contradiction":[],"idk":[0,1,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1015","context":"If you have the energy to climb the 387 steps to the top of the south tower, you will be rewarded with a stunning view over the city.","statement":"The south tower has the best view in the city.","entailment":[],"neutral":[{"annotator":0,"id":"1015-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The view of the south tower may or may not be the best.","self_corrected":false},{"annotator":1,"id":"1015-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the stunning view is actually the best in the city.","self_corrected":false},{"annotator":2,"id":"1015-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context did not compare the view of the south tower with other places, so we don't know whether it has the best view.","self_corrected":false},{"annotator":3,"id":"1015-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The south tower has a stunning view but it is not known if it is the best","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"374","context":"I am glad she 
wasn't, said Jon.","statement":"Jon was sad that she wasn't happy.","entailment":[],"neutral":[{"annotator":1,"id":"374-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether Jon was talking about her being happy.","self_corrected":false},{"annotator":2,"id":"374-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Jon's attitude to her happiness is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"374-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Jon was glad, not sad.","self_corrected":false},{"annotator":3,"id":"374-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Jon was glad that she was not happy","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"890","context":"is there still that type of music available","statement":"Is that genre of music still a thing?","entailment":[{"annotator":0,"id":"890-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and statement are about the same question.","self_corrected":false},{"annotator":1,"id":"890-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":3,"id":"890-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"is it a thing\" means \"is it popular\". 
A thing can be available but not popular","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"824","context":"Strange as it may seem to the typical household, capital gains on its existing assets do not contribute to saving as measured in NIPA.","statement":"NIPA considers cat fur when it defines savings.","entailment":[{"annotator":2,"id":"824-entailment-1","judgments":[{"annotator":1,"makes_sense":false}],"label_correction":true,"reason":"Yes, because cat fur based on cats, which are existing assets, so it will not be counted by NIPA.","self_corrected":true}],"neutral":[{"annotator":0,"id":"824-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Cat fur is not mentioned in the context.","self_corrected":false},{"annotator":1,"id":"824-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Cat furs are not mentioned at all.","self_corrected":false},{"annotator":3,"id":"824-neutral-3","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not clear statement","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"871","context":"my goodness it's hard to believe i didn't think there was anybody in the country who hadn't seen that one","statement":"I thought I was the only one in this country who had seen it.","entailment":[{"annotator":3,"id":"871-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"I didn't think there was anybody seen that one. 
So I thought I was the only one in this country who had seen it","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"871-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is false because the speaker thought that everyone had seen that one.","self_corrected":false},{"annotator":2,"id":"871-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I thought everybody in this country had seen it, so I am not the only one.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1158","context":"and i look back on that and i bought shoes i went shopping i did not need that money i did not need it i didn't need it i shouldn't have even qualified to get it i didn't need it and it would have been a little rough i might have eaten some bologna instead of roast beef out of the deli but i did not need it and as i look back now now we're paying that back i told my son if you have to live in the ghetto to go to college do it but don't take out ten thousand dollars in loans don't do it and i don't i hope don't think he'll have to do that but i just so like we might if we didn't have those loans we could have saved in the last five years the money for that and i believe we would have because God's really put it in our heart not to get in debt you know but we have friends at church that do this on a constant basis that are totally debt free and they pay cash for everything they buy","statement":"I am envious of all my debt-free churchgoing friends.","entailment":[],"neutral":[{"annotator":0,"id":"1158-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the speaker's attitude toward the debt-free friends at church.","self_corrected":false},{"annotator":1,"id":"1158-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the speaker is envious.","self_corrected":false},{"annotator":3,"id":"1158-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known if i am envious of my debt-free friends","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"51","context":"If you have any questions about this report, please contact Henry R. Wray, Senior Associate General Counsel, at (202) 512-8581.","statement":"Henry R. 
Wray can be reached at (555) 512-8581.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"51-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The number of Henry R. Wray mentioned in the statement is wrong","self_corrected":false},{"annotator":1,"id":"51-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The phone number starts with (202) not with (555).","self_corrected":false},{"annotator":2,"id":"51-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The number is wrong, it should be (202) 512-8581 not (555) 512-8581.","self_corrected":false},{"annotator":3,"id":"51-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"wrong phone numbers","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1448","context":"No, monsieur.","statement":"The speaker is answering no to a question.","entailment":[{"annotator":0,"id":"1448-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, the speaker did say no.","self_corrected":false},{"annotator":1,"id":"1448-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"This is correct.","self_corrected":false},{"annotator":2,"id":"1448-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the speaker said no.","self_corrected":false},{"annotator":3,"id":"1448-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"monsieur states that the speaker is talking to a man and said no","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"287","context":"The policy succeeded, and I was fortunate to have had the opportunity to make that contribution to my people.","statement":"Because the policy was a success, I was able to make a contribution to my 
people.","entailment":[{"annotator":0,"id":"287-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement mention the sucessful of the policy and the speaker's contribution to the people.","self_corrected":false},{"annotator":1,"id":"287-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"287-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":2,"id":"287-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, the contribution I made to my people is the success of policy.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1486","context":"3) The gap between the productivity of women and the productivity of men.","statement":"The gap of genders.","entailment":[{"annotator":2,"id":"1486-entailment-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the gap between the productivity of genders can prove there is a gap between genders. ","self_corrected":false}],"neutral":[{"annotator":3,"id":"1486-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Statement does not specifiy the gap of WHAT of genders. 
In the context, it is the gap of productivity","self_corrected":false}],"contradiction":[],"idk":[0,1],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"553","context":"While obviously constrained by their bondage, blacks nonetheless forged a culture rich with religious observances, folk tales, family traditions, song, and so on.","statement":"Clearly are constrained by their folk tales and traditions.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"553-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They are constrained by their bondage, not their folk tales and traditions.","self_corrected":false},{"annotator":3,"id":"553-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They forges a rich culture with folk tales and traditions, which are not their constrans","self_corrected":false}],"idk":[1,2],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"487","context":"SSA is also seeking statutory authority for additional tools to recover current overpayments.","statement":"SSA wants the authority to recover overpayments made to insurers.","entailment":[],"neutral":[{"annotator":0,"id":"487-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not mention to whom the overpayment was made.","self_corrected":false},{"annotator":1,"id":"487-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the overpayments were made to insureres or to someone else.","self_corrected":false},{"annotator":2,"id":"487-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The overpayments can be made to insurers or to other shops, or department of government, etc.","self_corrected":false},{"annotator":3,"id":"487-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know to whom was the overpayments made.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"839","context":"Tommy Thompson of Wisconsin and Mayor Rudolph Giuliani of New York, the 
conservative vanguard on the issue, show no inclination to exploit research that says, in effect, Why care about day-care quality?","statement":"Thompson and Giuliani don't want to care about day cares.","entailment":[{"annotator":2,"id":"839-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"\"No inclination to exploit reserach\" can be understood as to study about day cares.","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"839-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Thompson and Giuliani don't want to exploit the research that doesn't care about day cares, which means they did care about day cares.","self_corrected":false},{"annotator":3,"id":"839-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Thompson and Giuliani did not care about the research, that says not to care about the day-care quality. So they might actually care about day cares","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1259","context":"So unlike people who are fortunate enough to be able to afford attorneys and can go to another lawyer, our clients are simply lost in the legal system if they cannot get access to it from us.","statement":"Our clients can barely afford our legal assistance.","entailment":[{"annotator":0,"id":"1259-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and statement suggest that their clients are not able to afford attorneys.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1259-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the clients have to pay for the services at all.","self_corrected":false},{"annotator":2,"id":"1259-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The cost of our legal assistance is not given in the context, maybe it is free.","self_corrected":false},{"annotator":3,"id":"1259-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned if our assistance is charged. 
It could be free.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"652","context":"apparently apparently the appraisers likes it because our taxes sure is high isn't it it really is","statement":"We wished the taxes were lower.","entailment":[],"neutral":[{"annotator":0,"id":"652-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the speaker's wishes about the taxes.","self_corrected":false},{"annotator":1,"id":"652-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the speaker wants the taxes to be lower.","self_corrected":false},{"annotator":2,"id":"652-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Our taxes is high, but our attitude to high taxes is not shown in the context.","self_corrected":false},{"annotator":3,"id":"652-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no info about our wishes","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1219","context":"Of how, when tea was done, and everyone had stood,He reached for my head, put his hands over it,And gently pulled me to his chest, which smelledOf dung smoke and cinnamon and mutton grease.I could hear his wheezy breathing now, like the prophet's Last whispered word repeated by the faithful.Then he prayed for what no one had time to translate--His son interrupted the old man to tell him a groupOf snake charmers sought his blessing, and a blind thief.The saint pushed me away, took one long look,Then straightened my collar and nodded me toward the door.","statement":"When tea was done, he put his hands on me romantically.","entailment":[{"annotator":0,"id":"1219-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"When tea was done he put his hands over the speaker's head. 
According to the context, his moves were romantic.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1219-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether there was romantic intention.","self_corrected":false},{"annotator":2,"id":"1219-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He was a \"saint\", so \"he put his hands on me\" could be romantically, but also could be nothing to do with romance, whereas about religion.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1219-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He is a saint, so it is more a religious practice rather than a romantice move","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","contradiction"],"error_labels":["entailment"],"has_ambiguity":true} -{"id":"723","context":"EPA estimates that 5.6 million acres of lakes, estuaries and wetlands and 43,500 miles of streams, rivers and coasts are impaired by mercury emissions.","statement":"The release of mercury has an impact on rivers, streams and lakes","entailment":[{"annotator":0,"id":"723-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that mercury emissions inpact streams, rivers and lakes.","self_corrected":false},{"annotator":1,"id":"723-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The mercury impairs the rivers, treams and lakes among others.","self_corrected":false},{"annotator":2,"id":"723-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True because mercury emissions impaired lakes, estuaries, wetlands, streams, rivers and coasts.","self_corrected":false},{"annotator":3,"id":"723-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A large land is impaired by mercury emissions, as reported by EPA. 
So the release of mercury has an impact on these natural bodies","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"779","context":"Mykonos has had a head start as far as diving is concerned because it was never banned here (after all, there are no ancient sites to protect).","statement":"Protection of ancient sites is the reason for diving bans in other places.","entailment":[{"annotator":0,"id":"779-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that driving was never banned in Mykonos because there are no ancient sites to protect, which implies that protection of ancient site might be a reation for driving bans. ","self_corrected":false},{"annotator":1,"id":"779-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context states that Mykonos did not need to ban diving because there are no ancient sites to proect. This implies that other places banned diving for that reason.","self_corrected":false},{"annotator":3,"id":"779-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Mykonos is not banned for diving, because there is no ancient site to protect. 
So if there is ancient sites, then it would be a reason for Mykonos to be banned for diving","self_corrected":false}],"neutral":[{"annotator":2,"id":"779-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In other places, protection of ancients sites could be one reasonfor diving bans, but there could be other reasons like the danger for divers' life.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"13","context":"substitute my my yeah my kid'll do uh four or five hours this week for me no problem","statement":"I just can't make the time because of my job.","entailment":[],"neutral":[{"annotator":0,"id":"13-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason the speaker can't make the time is not mentioned in the context.","self_corrected":false},{"annotator":1,"id":"13-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the speaker needs a substitute because of their job.","self_corrected":false},{"annotator":3,"id":"13-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"No clear context","self_corrected":true}],"contradiction":[{"annotator":2,"id":"13-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"No problem\" means I can handle the time.","self_corrected":false}],"idk":[2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"607","context":" Most menu prices include taxes and a service charge, but it's customary to leave a tip if you were served satisfactorily.","statement":"Most customers will tip in addition to the tax on the menus.","entailment":[{"annotator":1,"id":"607-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":true},{"annotator":2,"id":"607-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Leaving a tip is \"customary\", so it should be a behaviour that most people 
do.","self_corrected":false}],"neutral":[{"annotator":0,"id":"607-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The customers usually tip additionally if the service was good. I don't know if most customers were served satisfactorily.","self_corrected":false},{"annotator":2,"id":"607-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If people are satisfied with the service, they will usually leave a tip, but if they are not satisfied, maybe they will not leave a tip.","self_corrected":false},{"annotator":3,"id":"607-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The tip is customary, but it is not known how often people tip","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1273","context":"Recently, however, I have settled down and become decidedly less experimental.","statement":"I have lost my experimental nature due to old age.","entailment":[],"neutral":[{"annotator":0,"id":"1273-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason the speaker lost the experimental nature is not mentioned.","self_corrected":false},{"annotator":1,"id":"1273-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the speaker is old.","self_corrected":false},{"annotator":2,"id":"1273-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason for the lost of my experimental nature could be old age, or others like lack of money or poor health condition.","self_corrected":false},{"annotator":3,"id":"1273-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason for becoming less experimental is unknown","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"863","context":"When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' 
'","statement":"He won't do a big strike because of the innocent people.","entailment":[],"neutral":[{"annotator":2,"id":"863-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A big strike could influence many innocents, he could give up because of that consideratin, but also could still continue his pain.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"863-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The person is ready for a major strike. Thus he most likely is willing to do it.","self_corrected":false},{"annotator":3,"id":"863-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"when he is ready for a major strike\" means he is not ready not but preparing for it. ","self_corrected":false}],"idk":[0],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"972","context":"To get a wonderful view of the whole stretch of river, and to stretch your legs in a beautiful parklike setting, climb up to the Ceteau de Marqueyssac and its jardins suspendus (hanging gardens).","statement":"You will enjoy stretching your legs as you climb the Ceteau de Marqueyssac.","entailment":[{"annotator":3,"id":"972-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is put as a suggestion to climb up the Ceteau de Marqueyssac. 
During the climbing one should get wonderful view of the river and be able to strech his legs","self_corrected":false}],"neutral":[{"annotator":1,"id":"972-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the people enjoy stretching their legs.","self_corrected":false},{"annotator":2,"id":"972-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Whether stretching your legs is pleasant or annoying is not discussed in the context.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"972-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context indicate that people can only stretch their legs after climbing up to the Ceteau de Marqueyssac, not during the climb.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"776","context":"The book is a parody of Bartlett's , serving up quotes from Lincoln, Jefferson, and Roger Rosenblatt with equal pomposity.","statement":"Bill Reilly's book has quotes from various presidents ranging from Lincoln to Jefferson.","entailment":[{"annotator":3,"id":"776-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"if assuming it is Bill Reilly's book.","self_corrected":false}],"neutral":[{"annotator":0,"id":"776-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the name Bill Reilly.","self_corrected":false},{"annotator":1,"id":"776-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions only two presidents (Lincoln and Jefferson). 
This does not qualify as \"various\" for me.","self_corrected":false},{"annotator":2,"id":"776-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No information of Bill Reilly's book is given in the context.","self_corrected":false},{"annotator":3,"id":"776-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"if the book is not known to be Bill's","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"454","context":"(As the old saying goes, If you can't figure out who the fool is at the poker table, it's probably you. ","statement":"Dealers say everyone is smart that is playing.","entailment":[],"neutral":[{"annotator":0,"id":"454-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what the dealers say.","self_corrected":false},{"annotator":2,"id":"454-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Dealers are not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"454-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"not relevant","self_corrected":false}],"contradiction":[{"annotator":1,"id":"454-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The saying implies that there is at least one fool at the poker table.","self_corrected":true},{"annotator":2,"id":"454-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says, if people can not find the fool, then themselves are fools, so at least one guy who is playing is not smart.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"528","context":"They have prominent red protuberances and may have been named after the British redcoats.","statement":"They were named after the redcoats because they are the same bright red color on their bodies.","entailment":[],"neutral":[{"annotator":0,"id":"528-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions the 
word \"may\", which suggests that it is just a possibility that they were named after the redcoats.","self_corrected":false},{"annotator":1,"id":"528-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that they \"may have been named\" not that the definitely were named after the redcoats.","self_corrected":false},{"annotator":2,"id":"528-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The color of their body is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"528-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known for sure that they were named after the British redcoats. In the context, they may have been named after that","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1368","context":"NEH-supported exhibitions were distinguished by their elaborate wall panels--educational maps, photomurals, stenciled treatises--which competed with the objects themselves for space and attention.","statement":"The exhibitions seem well-funded due to the elaborate detail of the gallery.","entailment":[{"annotator":0,"id":"1368-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement makes sense, as it is true that the exhibitions does have elaborate wall panels that can cost a lot of money.","self_corrected":false},{"annotator":1,"id":"1368-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Elaborate wall panels are costly, so most likely the exhibitions are well-funded.","self_corrected":false},{"annotator":3,"id":"1368-entailment-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because the wall panels etc. 
are competing with the objects themselves, which cost money and thoughts","self_corrected":false}],"neutral":[{"annotator":2,"id":"1368-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The financial support of the gallery is not mentioned in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1107","context":"Specifically, by defining mission improvement objectives, senior executives determine whether their organization needs a CIO who is a networking\/marketing specialist, business change agent, operations specialist, policy\/oversight manager, or any combination thereof.","statement":"A CIO must be an operations specialist.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1107-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, a CIO could be any combination of the personnel mentioned in the context.","self_corrected":false},{"annotator":1,"id":"1107-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A CIO can also have only a single of the other named specializations. 
This is indicated by \"or any combination thereof\".","self_corrected":false},{"annotator":2,"id":"1107-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, a CIO can be a networking\/marketing specialist, too.","self_corrected":false},{"annotator":3,"id":"1107-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"it can also be networking\/marketing specialist or business change agent and so on","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"743","context":"In this case, shareholders can pay twice for the sins of others.","statement":"shareholders can pay once for the sins of others.","entailment":[{"annotator":0,"id":"743-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Shareholders can pay twice, that includes once.","self_corrected":false},{"annotator":2,"id":"743-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because paying once is included by paying twice.","self_corrected":false}],"neutral":[{"annotator":2,"id":"743-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We only know shareholders can pay twice for the sins, but whether can pay once or three times or more is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"743-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states \"twice\" not \"once\".","self_corrected":true},{"annotator":3,"id":"743-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"they can pay twice","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"481","context":"But they also don't seem to mind when the tranquillity of a Zen temple rock garden is shattered by recorded announcements blaring from loudspeakers parroting the information already contained in the leaflets provided at the ticket office; when heavy-metal pop music loudly emanates from the radio of the middle-aged owner of a corner grocery store; and when parks, gardens, and hallowed temples are ringed by garish 
souvenir shops whose shelves display both the tastefully understated and the hideously kitsch.","statement":"A Zen temple rock garden is a a place for lots of people to gather and celebrate.","entailment":[],"neutral":[{"annotator":0,"id":"481-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what people do in the Zen temple rock garden.","self_corrected":false},{"annotator":2,"id":"481-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context, nothing is about gathering and celebrating.","self_corrected":false},{"annotator":3,"id":"481-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known that many people come to the garden to celebrate","self_corrected":false}],"contradiction":[{"annotator":1,"id":"481-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the tranquility of a Zen garden can be distirbued, this implies that the traniquility is the usual state. This probably precludes large celebrations.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"649","context":"According to a 1995 Financial Executives Research Foundation report,5 transaction processing and other routine accounting activities, such as accounts payable, payroll, and external reporting, consume about 69 percent of costs within finance.","statement":"The financial world would be ok it there wasn't any 5 percent processing.","entailment":[],"neutral":[{"annotator":0,"id":"649-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention how the financial would be ok.","self_corrected":false},{"annotator":1,"id":"649-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what 5 percent processing is or what it's impact on the financial world would be.","self_corrected":false},{"annotator":2,"id":"649-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Eliminating 5 percent processing may make the costs with finance lower, but we don't know whether there are some bad influence about that, like workers become less 
active.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"649-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"5% transaction processing and other routine accounting activities count up about 69% of costs within finance. So the 5% plays a big roll in the financial world","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"737","context":"Castlerigg near Keswick is the best example.","statement":"A good example would be Castlerigg near Keswick, in Scotland.","entailment":[{"annotator":0,"id":"737-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, the example is the best, which certainly suggests that it is a good one.","self_corrected":false},{"annotator":1,"id":"737-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If it is the best, then certainly it is also good.","self_corrected":true}],"neutral":[{"annotator":2,"id":"737-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The location of Keswick is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"737-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Keswick is in England","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"347","context":"The large scale production of entertainment films is a phenomenon well worth seeing several times.","statement":"The production of entertainment films is elaborate and large scaled.","entailment":[{"annotator":3,"id":"347-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The production is worth seeing several times, so it is elaborate. 
And the production is large scaled","self_corrected":true}],"neutral":[{"annotator":0,"id":"347-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if the production of entertainment films is elabortate.","self_corrected":false},{"annotator":1,"id":"347-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether it is elaborate.","self_corrected":true},{"annotator":2,"id":"347-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is elaborate and large scaled production of entertainment films, but there could be small-scaled production too.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"229","context":"Because marginal costs are very low, a newspaper price for preprints might be as low as 5 or 6 cents per piece.","statement":"Newspaper preprints can cost as much as $5.","entailment":[{"annotator":3,"id":"229-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"5 dollars for a pieace of newspaper","self_corrected":true}],"neutral":[{"annotator":0,"id":"229-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions how low the price may be, not how high it may be.","self_corrected":false},{"annotator":2,"id":"229-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The maximum cost of newspaper preprints is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"229-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says 5 or 6 cents, not $5.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"1323","context":"You are sure that you did not in any way disclose your identity?\" Tommy shook his head.","statement":"I wish you hadn't revealed your identity, that was a 
mistake.","entailment":[],"neutral":[{"annotator":1,"id":"1323-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Tommy did not reveal his identity.","self_corrected":false},{"annotator":2,"id":"1323-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"My preference of the answer of the question, whether Tommy disclosed his identity is not given in the context.","self_corrected":false},{"annotator":3,"id":"1323-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"can not infer about Tommy's wish about whether the other person should disclose his identity or not","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"593","context":"I'm not interested in tactics, Al.","statement":"Al is very interested in tactics.","entailment":[],"neutral":[{"annotator":0,"id":"593-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only suggests that the speaker's attutude toward tactics, not AI's.","self_corrected":false},{"annotator":1,"id":"593-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether Al is interested in tactics.","self_corrected":false},{"annotator":2,"id":"593-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker is not interested in tactics, but Al's interest is not given in the context.","self_corrected":false},{"annotator":3,"id":"593-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[{"annotator":3,"id":"593-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"if the sentence in the context is said by AI, then AI is not interested in tactics","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"900","context":"If the collecting entity transfers the nonexchange revenue to the General Fund or another entity, the amount is accounted for as a custodial activity by the collecting entity.","statement":"Nonexchange revenue to the General 
Mills.","entailment":[],"neutral":[{"annotator":1,"id":"900-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"General Mills are not mentioned at all.","self_corrected":false},{"annotator":2,"id":"900-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The General Mills is not mentioned in the context.","self_corrected":false}],"contradiction":[],"idk":[0,3],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"303","context":"For fiscal year 1996, Congress determined that the Commission should recover $126,400,000 in costs, an amount 8.6 percent higher than required in fiscal year 1995.","statement":"Congress determined that Commission should recover over $126 in costs.","entailment":[{"annotator":0,"id":"303-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"$126,400,000 is indeed more than $126 mentioned in the statement.","self_corrected":false},{"annotator":1,"id":"303-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"$ 126400000 is larger than $126, so technically it's over $126.","self_corrected":false},{"annotator":2,"id":"303-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"$126400000 is more than $126, so it is correct to say over $126.","self_corrected":false},{"annotator":3,"id":"303-entailment-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"commission should recover over 126,400,000 in costs","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":1,"id":"303-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Commission should recover $126,400,000 not over $126","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1445","context":"OMB issued the guidance in Memorandum M0010, dated April 25, 2000.","statement":"Memorandum M0010 was issued by INS.","entailment":[],"neutral":[{"annotator":1,"id":"1445-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It's not clear who issued the 
memorandum.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"1445-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Memorandum M0010 was issued by OMB, not by INS.","self_corrected":false},{"annotator":2,"id":"1445-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Memorandum M0010 was issued by OMB.","self_corrected":false},{"annotator":3,"id":"1445-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is issued by OMB","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"262","context":"Credibility is a vital factor, and Jim Lehrer does, indeed, have it.","statement":"Everyone would believe whatever Jim Lehrer said.","entailment":[{"annotator":0,"id":"262-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Jim Lehrer has great credibility, which means evertone would believe him. ","self_corrected":true}],"neutral":[{"annotator":1,"id":"262-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"That a person is credible does not mean that everyone will believe them.","self_corrected":false},{"annotator":2,"id":"262-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Jim Lehrer has credibility, but it cannot be promised that no one would disbelieve him whatever he said.","self_corrected":false},{"annotator":3,"id":"262-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"overexaggeration. Jim Lehrer has credibility. But iit is exaggerated to say everyone would believe him","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1458","context":"right oh they've really done uh good job of keeping everybody informed of what's going on sometimes i've wondered if it wasn't almost more than we needed to know","statement":"I think I have shared too much information with everyone, so next year I will share less. 
","entailment":[],"neutral":[{"annotator":0,"id":"1458-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what the speaker will do next year.","self_corrected":false},{"annotator":1,"id":"1458-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"They\" shared information, not \"I\".","self_corrected":false},{"annotator":2,"id":"1458-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"What I want to know is whether \"they\" shared too much information, not I, and what I will next year is not given in the context. ","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1458-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I\" didn't share the information, \"they\" did","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1202","context":"Each caters to a specific crowd, so hunt around until you find the one right for you.","statement":"There are marketers who have argued that there needs to be more effort to broaden appeal.","entailment":[],"neutral":[{"annotator":0,"id":"1202-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Based on the context, we don't know anything about the marketer's argument.","self_corrected":false},{"annotator":1,"id":"1202-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Marketers or their opinion are not mentioned at all.","self_corrected":false},{"annotator":2,"id":"1202-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In context, there is no comparison between the effort has been made and the need to be made in the future.","self_corrected":false},{"annotator":3,"id":"1202-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1497","context":"Both initial and supplemental proposed rule publications invited comments on the information collection 
requirements imposed by the rule.","statement":"There's no point in following politics or voting because your vote won't actually make a difference.","entailment":[],"neutral":[{"annotator":0,"id":"1497-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether votes would make a difference or not.","self_corrected":false},{"annotator":1,"id":"1497-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There's no mention of politics or voting in the context.","self_corrected":false},{"annotator":2,"id":"1497-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context has nothing to do with the effect of following politics and voting.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1497-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The comments are invited on the requirements. So one's opinoin might make a difference","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1320","context":"She buried his remains to spare her mother the gruesome sight.","statement":"The remains would have caused grief to her mother.","entailment":[{"annotator":3,"id":"1320-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"She buried the remains to spare her mother the grief. 
So if her mother saw the remains, she would grieve","self_corrected":true}],"neutral":[{"annotator":0,"id":"1320-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention in the context of how her mother might feel about the remains.","self_corrected":false},{"annotator":1,"id":"1320-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could also be that the remains would have triggered another strongly negative reaction like disgust (and not grief).","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"864","context":"Write, write, and write.","statement":"You should keep practicing writing.","entailment":[{"annotator":2,"id":"864-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the context is an imperative sentence and repeats \"write\" three times, which could be seen as an order to keep writing.","self_corrected":false},{"annotator":3,"id":"864-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the repetition of verbs implies repeatedly doing that action. 
So it means keeps writing","self_corrected":false}],"neutral":[{"annotator":1,"id":"864-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether they should write for the sake of practicing.","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"477","context":"But those that are manufactured for sale in in Europe and so forth are quite the other way around","statement":"Products are made with differently designed machines in Europe.","entailment":[],"neutral":[{"annotator":0,"id":"477-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention in the context of what machines are used to produce products made in Europe.","self_corrected":false},{"annotator":1,"id":"477-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear how the products made for Europe differ from the others.","self_corrected":false},{"annotator":2,"id":"477-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Products could be made with differently designed machines in Europe, or with the same designed machines.","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"57","context":"and so i have really enjoyed that but but there are i do have friends that watch programs like they want to see a particular program and they are either home watching it or definitely recording it they have some programs that they won't miss","statement":"What programs do your friends like to watch?","entailment":[],"neutral":[{"annotator":3,"id":"57-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"statement is a question","self_corrected":false}],"contradiction":[],"idk":[0,1,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"434","context":"But it just might be because he's afraid he'll lose his No.","statement":"He's definitely afraid of losing he's No.","entailment":[{"annotator":0,"id":"434-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the 
context and the statement mention that he is afraid he'll lose his No.","self_corrected":true}],"neutral":[{"annotator":1,"id":"434-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context says \"might\" and not \"definitely\".","self_corrected":true}],"contradiction":[{"annotator":2,"id":"434-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, it \"just might be\", but not definitely.","self_corrected":false},{"annotator":3,"id":"434-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is possible that he is afraid of losing his No., but not definite","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment","neutral"],"has_ambiguity":false} -{"id":"298","context":"BUDGETARY RESOURCES - The forms of authority given to an agency allowing it to incur obligations.","statement":"Administrations generally feel that some agencies should have more budgetary resources than others.","entailment":[],"neutral":[{"annotator":0,"id":"298-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The context only talks about what budget resources are.","self_corrected":false},{"annotator":1,"id":"298-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention of giving different resources to different agencies.","self_corrected":false},{"annotator":2,"id":"298-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no comparison of budgetary resources between agencies.","self_corrected":false},{"annotator":3,"id":"298-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context is a definition. 
Statement is the opinion of the administrations.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"215","context":"well we bought this with credit too well we found it with a clearance uh down in Memphis i guess and uh","statement":"We bought non-sale items in Memphis on credit.","entailment":[],"neutral":[{"annotator":3,"id":"215-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not known if the items are non-sales or not","self_corrected":true}],"contradiction":[{"annotator":0,"id":"215-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker found the item with a clearance, which suggests that it is on sale.","self_corrected":false},{"annotator":1,"id":"215-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was a clearance, so the items were on sale.","self_corrected":false},{"annotator":2,"id":"215-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, we bought it with a clearance down.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"872","context":"He threw one of them and shot the other.","statement":"He shot his gun.","entailment":[{"annotator":3,"id":"872-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He shot on of them. 
So he must have shot his gun","self_corrected":false}],"neutral":[{"annotator":0,"id":"872-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He shot, but it is not clear what he shot with, it could have been a gun or an arrow.","self_corrected":false},{"annotator":1,"id":"872-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether he shot with a gun or with some other weapon.","self_corrected":false},{"annotator":2,"id":"872-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He shot, but the object could be his gun or something else like an arrow.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"682","context":"Buffet and a\u00a0 la carte available.","statement":"It has table service.","entailment":[{"annotator":0,"id":"682-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"That a la carte is available suggests that it has table service.","self_corrected":false},{"annotator":1,"id":"682-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If it has a la carte, then it probably also has table service.","self_corrected":false},{"annotator":2,"id":"682-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A la carte includes table service, and a la carte is available.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"682-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It is a buffet, so there are no people serving the tables","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1502","context":"No, I exclaimed, astonished. ","statement":"I said no to him several time, utterly surprised by the change of events. ","entailment":[{"annotator":0,"id":"1502-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and the statement suggest that the speaker said no and was surprised. 
","self_corrected":true}],"neutral":[{"annotator":1,"id":"1502-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether they said \"no\" several times or only once.","self_corrected":false},{"annotator":2,"id":"1502-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason of my astonishment is not given in the context.","self_corrected":false},{"annotator":3,"id":"1502-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"it is not known how many times I said no","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"752","context":"He's a bad lot.","statement":"He's a dishonest person","entailment":[],"neutral":[{"annotator":0,"id":"752-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He may or may not be bad because he is dishonest.","self_corrected":false},{"annotator":1,"id":"752-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He might be bad in other ways.","self_corrected":false},{"annotator":2,"id":"752-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Bad people can be both honest and dihonest.","self_corrected":false},{"annotator":3,"id":"752-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He could be honest but bad in other quality","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"643","context":"A button on the Chatterbox page will make this easy, so please do join in.","statement":"They wanted to make the site very user friendly.","entailment":[{"annotator":0,"id":"643-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context did show that they want to make it easy, which is a factor in 
user-friendliness.","self_corrected":false},{"annotator":2,"id":"643-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Yes, they want to add a botton which will make the operation easy.","self_corrected":false},{"annotator":3,"id":"643-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"only clicking a button would join the user in. So the user does not need to click a lot buttons. Therefore, it is user-friendly","self_corrected":true}],"neutral":[{"annotator":1,"id":"643-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It's not clear from one text of the webpage that they really tried to make it user friendly.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1092","context":"So it wasn't Missenhardt's singing--marvelous though that was--that made Osmin's rantings so thrilling.","statement":"Osmin was always calm and collected.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1092-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions Osmin's rantings, which implies that he was not always calm.","self_corrected":false},{"annotator":1,"id":"1092-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Osmin ranted, so they probably were not always calm and collected.","self_corrected":false},{"annotator":2,"id":"1092-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, because Osmin rants and quite thrilling.","self_corrected":false},{"annotator":3,"id":"1092-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Osmin is ranting. 
So he is not calm","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"600","context":"Deborah Pryce said Ohio Legal Services in Columbus will receive a $200,000 federal grant toward an online legal self-help center.","statement":"A $200,000 federal grant will be received by Ohio Legal Services, said Deborah Pryce, who could finally say it to the public.","entailment":[{"annotator":1,"id":"600-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context, with the addition of \"who could finally say it to the public\" which is only a minor aspect.","self_corrected":false}],"neutral":[{"annotator":0,"id":"600-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether Deborah Pryce said it to public or not.","self_corrected":false},{"annotator":2,"id":"600-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Finally\" means at the beginning, Deborah Pryce is not allowed to announce this information, but the true situation is not given in the context.","self_corrected":false},{"annotator":3,"id":"600-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether he could say it to the public or not","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"859","context":"Since the rules were issued as interim rules and not as general notices of proposed rulemaking, they are not subject to the Unfunded Mandates Reform Act of 1995.","statement":"The rules were not issued as interim rules but rather general notices of proposed rulemaking.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"859-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly states that the rules were issued as interim rules. 
","self_corrected":false},{"annotator":1,"id":"859-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's clearly stated that the rules were issued as interim rules.","self_corrected":false},{"annotator":2,"id":"859-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the rules were issued as interim rules.","self_corrected":false},{"annotator":3,"id":"859-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The rules were issued as interim rules","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"986","context":"so you um-hum so you think it comes down to education or or something like that","statement":"IT all boils down to how much education you have.","entailment":[{"annotator":3,"id":"986-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Education is the the reason that explains something","self_corrected":false}],"neutral":[{"annotator":0,"id":"986-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about \"your\" thought, this thought may or may not be true.","self_corrected":false},{"annotator":1,"id":"986-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context is a question, not a statement.","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1394","context":"The m??tro (subway) is the fastest way to move around the city, but the buses, both in the capital and the other big towns, are best for taking in the sights.","statement":"Taking the subway is a good way to experience big city life.","entailment":[{"annotator":0,"id":"1394-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Subway is the fastest way to move around the city, so it is somehow a good way to experience big city life.","self_corrected":false},{"annotator":1,"id":"1394-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If \"experience big city 
life\" means having an authentic experience of people living in the city, then the metro is a good way to do that.","self_corrected":false}],"neutral":[{"annotator":2,"id":"1394-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Taking bus is a good way to experience big city life, taking subway could be good or not good.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1394-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If \"experience big city life\" means sight seeing, then the metro is not a good way to do this.","self_corrected":false},{"annotator":3,"id":"1394-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Taking buses would allow one to take in the sights in the city","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1386","context":"maybe adult literacy maybe you know composition writing maybe you know uh volunteering you know on a tutor line or though the even through the elementary schools for help with homework or the other part of me says is God i've had enough kids do i really","statement":"maybe I could volunteer to help coach sports since I've helped all my children be successful in sports","entailment":[],"neutral":[{"annotator":0,"id":"1386-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context makes no mention about coaching sports mentioned in the statement, so I don't know if the statement is true.","self_corrected":false},{"annotator":1,"id":"1386-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker is not talking about sports but about writing.","self_corrected":false},{"annotator":2,"id":"1386-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Helping couch sports is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1386-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"I might volunteer to help with composition writing.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"787","context":"By seeding 
packs with a few high-value cards, the manufacturer is encouraging kids to buy Pokemon cards like lottery tickets.","statement":"Each Pokemon card pack is filled with every rare card a kid could want.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"787-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are only a few rare cards in the Pokemon card packs.","self_corrected":false},{"annotator":1,"id":"787-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Few high value cards\" means that one pack does not contain \"every rare card\".","self_corrected":false},{"annotator":2,"id":"787-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, because lottery tickets are seldom to be valuable, and rare card can only appear rare in common card pack.","self_corrected":false},{"annotator":3,"id":"787-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the packs are only filled with a few high-value cards.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"923","context":"Expenses included in calculating net cost for education and training programs that are intended to increase or maintain national economic productive capacity shall be reported as investments in human capital as required supplementary stewardship information accompanying the financial statements of the Federal Government and its component units.","statement":"Net cost for college programs can be calculated as a way to increase productivity.","entailment":[{"annotator":0,"id":"923-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Colledge programs mentioned in the statement are included in education and training programs mentioned in the context, which are intended to increase or maintain productivity.","self_corrected":true},{"annotator":1,"id":"923-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"shall be reported as investiments in human capital\" means that they can be included in the calculation as investiments in productivity.","self_corrected":false},{"annotator":2,"id":"923-entailment-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Yes, college programs is kind of education that can increase 
productivity.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"923-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is those education and training programs that are intended to increase the producitvity. Not the NEt cost","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"461","context":"He watched the river flow.","statement":"The river roared by.","entailment":[],"neutral":[{"annotator":2,"id":"461-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The condition of the river is not described in the context.","self_corrected":false},{"annotator":3,"id":"461-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear how the river flows","self_corrected":false}],"contradiction":[{"annotator":0,"id":"461-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context suggests that the river move steadily, while roaring means the opposite.","self_corrected":false},{"annotator":1,"id":"461-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The river flowed, it didn't roar.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"618","context":"Today it is possible to buy cheap papyrus printed with gaudy Egyptian scenes in almost every souvenir shop in the country, but some of the most authentic are sold at The Pharaonic Village in Cairo where the papyrus is grown, processed, and hand-painted on site.","statement":"The Pharaonic Village in Cairo is the only place where one can buy authentic papyrus.","entailment":[],"neutral":[{"annotator":2,"id":"618-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"People can buy authentic papyrus in the Pharaonic Village in Cairo, but there can be other places also sell it.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"618-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Pharaonic Village in Cairo is the place to buy some of the most authentic papyrus, not the only place to buy authentic 
papyrus.","self_corrected":false},{"annotator":1,"id":"618-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"some of the most authentic\" implies that there are other places that also sell authentic papyrus.","self_corrected":false},{"annotator":3,"id":"618-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"You can buy it everywhere in the country","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"420","context":"After being diagnosed with cancer, Carrey's Kaufman decides to do a show at Carnegie Hall.","statement":"Carrey's Kaufman was diagnosed with cancer before deciding to do a show at Carnegie Hall.","entailment":[{"annotator":0,"id":"420-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement describe the same thing about Carrey's Kaufman.","self_corrected":false},{"annotator":2,"id":"420-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, Carrey's Kaufman was diagnosed with cancer first, and then decided to do a show.","self_corrected":false},{"annotator":3,"id":"420-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Kaufmann decided to do a show at Carnegie Hall after he is diagnosed with cancer","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":1,"id":"420-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"They decided to do the show after the diagnosis, not before.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1467","context":"Took forever.","statement":"Lasted too long","entailment":[{"annotator":0,"id":"1467-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Forvever\" is indeed too long.","self_corrected":false},{"annotator":1,"id":"1467-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the 
context.","self_corrected":false},{"annotator":3,"id":"1467-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"took forever\" is an expression to say that it is taking too long","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"375","context":"Clearly, GAO needs assistance to meet its looming human capital challenges.","statement":"GAO will soon be suffering from a shortage of qualified personnel.","entailment":[{"annotator":1,"id":"375-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"human capital challenges\" most likely refers to a lack of qualified personnel.","self_corrected":true}],"neutral":[{"annotator":0,"id":"375-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Shortage of qualified personnel may or may not included in the GAO's human capital challenges.","self_corrected":false},{"annotator":2,"id":"375-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Human capital challenges can be a shortage of qualifies personnel, but also can be others, like too expensive labor price.","self_corrected":false},{"annotator":3,"id":"375-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if it is a shortage. They might have hired too many personnel","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1155","context":"It is not a surprise, either, that Al Pacino chews the scenery in Devil's Advocate . 
And the idea that if the devil showed up on Earth he'd be running a New York corporate-law firm is also, to say the least, pre-chewed.","statement":"The fact that the devil would work in law is extremely cliche.","entailment":[{"annotator":0,"id":"1155-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The word \"pre-chewed\" in the context indicates that it is cliche.","self_corrected":false},{"annotator":1,"id":"1155-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"chewing the scenery\" means artificial acting, so \"pre-chewed\" likely means cliche","self_corrected":false}],"neutral":[{"annotator":2,"id":"1155-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The devil would work in law, which is an idea, not the fact, and the fact of devil's job is not given in the context.","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"280","context":"The door opened and Severn stepped out.","statement":"They were waiting for someone to open the door for them.","entailment":[],"neutral":[{"annotator":0,"id":"280-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what \"they\" were doing.","self_corrected":false},{"annotator":1,"id":"280-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether they waited or whether they opened the door themselves.","self_corrected":false},{"annotator":2,"id":"280-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Servern stepped out when the door opened, maybe he waited, maybe not.","self_corrected":false},{"annotator":3,"id":"280-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if someone opened the door for Severn or he opened it himself","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"370","context":"Charles Geveden has introduced legislation that will increase the Access to Justice supplement on court filing fees.","statement":"Charles Geveden initiated a law that will essentially lower court filing 
fees.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"370-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Increasing supplement is not a essential way to lower the fees.","self_corrected":false},{"annotator":2,"id":"370-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Charles Geveden intiated a law wikk increase the court filling fees.","self_corrected":false},{"annotator":3,"id":"370-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The law he initiated increased the access to justice supplement on court filling fees, meaning the court would need to pay more.","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"880","context":"Standard screens may not perform as well in these patient subgroups that may represent a considerable part of the ED population.","statement":"The subgroups may not perform well in standard screens.","entailment":[{"annotator":0,"id":"880-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement talk about the possible poor performance of the subgroups in standard screens.","self_corrected":false},{"annotator":3,"id":"880-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"standard screens may not perform well in subgroups \" and \" subgroups may not perform well in standard screens\" has the same meaning","self_corrected":false}],"neutral":[{"annotator":1,"id":"880-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The screens do not perform well, not the subgroups.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"880-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, it is standard screens may not perform well in the subgroups.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"202","context":"Who are these sons of eggs?","statement":"I wish they were daughters of 
eggs.","entailment":[],"neutral":[{"annotator":1,"id":"202-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what the speaker wishes.","self_corrected":false},{"annotator":3,"id":"202-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context is asking about the sons of eggs. Statement is a wish about the daughters of eggs","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1499","context":"uh somewhat they're not my favorite team i am uh somewhat familiar with them","statement":"They are the best team in the league, by they are not my favorite.","entailment":[],"neutral":[{"annotator":0,"id":"1499-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The best team in the league is not mentioned by the context.","self_corrected":false},{"annotator":1,"id":"1499-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether they are the best team in the league.","self_corrected":false},{"annotator":2,"id":"1499-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe they are the best, maybe they are not.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1499-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"I am only somewhat familiar with the team. So I may not know if they are the best team in the league","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1101","context":"They won't be killing off George Clooney's character at ER like they did to Jimmy Smits at NYPD . Instead, Dr. 
Doug Ross is being forced out over the next two episodes because the maverick heartthrob gives an unauthorized painkiller to a terminally ill boy (Thursday, 10 p.m.).","statement":"George Clooney will not be getting fired from his TV show.","entailment":[{"annotator":0,"id":"1101-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"George Clooney's character will not be killed, which suggests that George Clooney will keep his job.","self_corrected":false},{"annotator":3,"id":"1101-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"George Clooney's character at ER will be kept. So he will not be fired","self_corrected":false}],"neutral":[{"annotator":2,"id":"1101-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is about George Clooney's character in the context, not the actor himself.","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1101-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"Forced out\" means that he likely still will be fired.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"961","context":"The Varanasi Hindu University has an Art Museum with a superb collection of 16th-century Mughal miniatures, considered superior to the national collection in Delhi.","statement":"The Varanasi Hindu University has an art museum on its campus which may be superior objectively to the national collection in Delhi.","entailment":[{"annotator":3,"id":"961-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because the university's museum has a superb collection of 16th-century Mughai miniatures, superior to the national one. 
Therefore, it could be considered superior in this collection compared to the national collection","self_corrected":false}],"neutral":[{"annotator":0,"id":"961-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The location of the Varanasi Hindu University's museum is not mentioned.","self_corrected":false},{"annotator":1,"id":"961-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what \"objectively superior\" means in this context and whether this really is the case.","self_corrected":false},{"annotator":2,"id":"961-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The location of the art museum of the Varanasi Hindu University is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"961-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Varanasi Hindu University's museum is considered superior to the national collection in Delhi, which may not be objective.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"1326","context":"In a six-year study, scientists fed dogs and other animals irradiated chicken and found no evidence of increased cancer or other toxic effects.","statement":"Scientists gave animals irradiated chicken and they all lived as long as the rest of them.","entailment":[{"annotator":1,"id":"1326-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If there were no toxic effects, they should have lived as long as the others.","self_corrected":false},{"annotator":3,"id":"1326-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"no evidence of increased cancer or other toxic effects was found. 
So they should live no different as other chicken","self_corrected":false}],"neutral":[{"annotator":0,"id":"1326-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"They might lived longer than the rest of them.","self_corrected":false},{"annotator":2,"id":"1326-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No evidence of increased cancer or other toxic effects can make the test animals live as long as others, but the test animals may have other accidents which shorten their lifespan.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"506","context":"Whether a government postal service can engage in these kinds of negotiations deserves serious study.","statement":"There is serious study needed to check.","entailment":[{"annotator":0,"id":"506-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that it needs serious study.","self_corrected":false},{"annotator":1,"id":"506-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is loosely a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"506-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, there is serious study needed to check whether a government postal service can engage in these kinds of negotiations.","self_corrected":false},{"annotator":3,"id":"506-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context states that the postal service deserves serious study. So there is serious study needed to be looked at","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"325","context":"will never be doused (Brit Hume, Fox News Sunday ; Tony Blankley, Late Edition ; Robert Novak, Capital Gang ; Tucker Carlson, The McLaughlin Group ). 
The middle way is best expressed by Howard Kurtz (NBC's Meet the Press )--he scolds Brill for undisclosed campaign contributions and for overstretching his legal case against Kenneth Starr but applauds him for casting light on the media.","statement":"They wanted the public to know where the funds came from.","entailment":[{"annotator":0,"id":"325-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Praising him for casting light on the media shows that they want the public to know the truth.","self_corrected":false},{"annotator":1,"id":"325-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They scolded for undisclosed campaign contributions, so they want the public to know where the money came from.","self_corrected":false},{"annotator":3,"id":"325-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because they scolds Brill for undisclosed campain contributions. So they would want Brill to let public know where the fund comes from","self_corrected":false}],"neutral":[{"annotator":2,"id":"325-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The funds are not mentioned in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"853","context":"Likewise, at their production decision reviews, these programs did not capture manufacturing and product reliability knowledge consistent with best practices.","statement":"Their production decision reviews located an anomaly in the data.","entailment":[{"annotator":2,"id":"853-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because they find these programs did not capturing manufacturing and product reliability knowledge consistent with best practice.","self_corrected":true}],"neutral":[{"annotator":0,"id":"853-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not mention the anomalies in the data.","self_corrected":false},{"annotator":1,"id":"853-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"An anomaly in the data is not mentioned.","self_corrected":false},{"annotator":3,"id":"853-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is the inconsistence in the 
manufacutring and product reliability knowledge. Not an anomaly","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"509","context":"He'd gone a long way on what he'd found in one elementary book.","statement":"He learned a lot from that elementary book.","entailment":[{"annotator":0,"id":"509-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both sentences suggest that he learned a lot from the book.","self_corrected":false},{"annotator":1,"id":"509-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"509-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He spent a lot of time on what he found in the elementary book, so it must be very useful.","self_corrected":false},{"annotator":3,"id":"509-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"had gone a long way\" means he had made a lot progress with what he found in one elemantary book","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1134","context":"The Gaiety Theatre in South King Street is worth visiting for its ornate d??cor.","statement":"The Trump Tower is a terrible place to visit for ornate decor.","entailment":[],"neutral":[{"annotator":0,"id":"1134-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Trump Tower is not mentioned by the context.","self_corrected":false},{"annotator":2,"id":"1134-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Trump Tower is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"1134-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"two different buildings","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1134-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"If it is worth visiting, it is not a terrible place to 
visit.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1124","context":"appropriate agency representatives, help resolve","statement":"the right agency workers, help fix my security system","entailment":[],"neutral":[{"annotator":1,"id":"1124-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what should be resolved.","self_corrected":false},{"annotator":2,"id":"1124-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the right agency workers may help fix the security system, may help resolve other problems","self_corrected":false},{"annotator":3,"id":"1124-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what the agency workers help resolve","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1317","context":"If ancient writings give only a romanticized view, they do offer a more precise picture of Indo-Aryan society.","statement":"Ancient writings show an accurate picture of Indo-Anryan society.","entailment":[],"neutral":[{"annotator":0,"id":"1317-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"More precise picture doesn't necessarily mean accurate picture.","self_corrected":false},{"annotator":1,"id":"1317-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear that the picture is truly accurate, it is only more precise than some other writings.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1317-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, ancient writings give only a romanticized view.","self_corrected":false},{"annotator":3,"id":"1317-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not an accurate picture offered, but only a \"more precise\" picture. 
It could still be not clear","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1228","context":"If necessary to meeting the restrictions imposed in the preceding sentence, the Administrator shall reduce, pro rata, the basic Phase II allowance allocations for each unit subject to the requirements of section 414.","statement":"Section 414 helps balance allowance allocations for units.","entailment":[{"annotator":1,"id":"1228-entailment-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If \"subject to the requirements of section 414\" refers to \"reduce\", then Section 414 is involved in helping to balance the allowance. ","self_corrected":true}],"neutral":[{"annotator":1,"id":"1228-neutral-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"If \"subject to the requirements of section 414\" refers to \"unit\", then it is not clear whether Section 414 is involved in helping to balance the allowance. ","self_corrected":false},{"annotator":2,"id":"1228-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Section 414 require to reduce the allowance pro rata, so it can be balanced, or not balanced.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1228-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The requirements of section 414relate to the reduction of the basic Phase II allowance allocations, not the balance of allowance allocations.","self_corrected":true}],"idk":[3],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"1333","context":"The chart to which Reich refers was actually presented during Saxton's opening statement, hours before Reich testified, and did not look as Reich claims it did.","statement":"Reich refers to a chart that he misunderstood.","entailment":[{"annotator":0,"id":"1333-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The fact that Reich was wrong about what he said about the charts he referred to shows that he misunderstood.","self_corrected":true}],"neutral":[{"annotator":1,"id":"1333-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether he misrepresented the chart because he misunderstood it. 
Maybe he did that on purpose.","self_corrected":false},{"annotator":2,"id":"1333-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Reich refers to a chart wrongly, but maybe it is because he misunderstood it, maybe because he remembered incorrectly.","self_corrected":false},{"annotator":3,"id":"1333-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It might be misunderstanding or Reich could also just remember it wrong","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"71","context":"well uh normally i like to to go out fishing in a boat and uh rather than like bank fishing and just like you try and catch anything that's swimming because i've had such problems of trying to catch any type of fish that uh i just really enjoy doing the boat type fishing","statement":"I fish in the boat and try catching any fish because I have trouble catching certain types.","entailment":[{"annotator":1,"id":"71-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"71-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I have problems of cathing any type of fish, not just certain types","self_corrected":false}],"idk":[0,2],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"245","context":"True devotees talk shop at even more specialized groups, such as one on Northeastern weather (ne.weather), whose recent conversation topics included the great blizzard of 1978 and the freak snowstorm of May 1977.","statement":"Ne.weather is a general discussion group, not only about weather. 
","entailment":[{"annotator":1,"id":"245-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"ne.weather is a specialized discussion group focussing on weather","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"245-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, ne.weather is a specialized group, not a general discussion group.","self_corrected":false},{"annotator":2,"id":"245-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, ne.weather is a specialized group.","self_corrected":false},{"annotator":3,"id":"245-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is specialized in weather topics","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"654","context":"we were talking . Try to behave","statement":"We are having an argument, come at me if you dare!","entailment":[],"neutral":[{"annotator":1,"id":"654-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"These are different statements.","self_corrected":false},{"annotator":2,"id":"654-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Talking can be peaceful or a arguement.","self_corrected":false},{"annotator":3,"id":"654-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context means rather that the other person should behave and be quiet. 
It is not aggressive like in the statement","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"579","context":"Don't forget to take a change of clothing and a towel.","statement":"Remember to replace your towel and clothing.","entailment":[{"annotator":0,"id":"579-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Taking a change of clothing and a towel means taking an extra set of them, which implies the need to replace them with the extra set of clothes and towel.","self_corrected":false},{"annotator":1,"id":"579-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"579-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, a change of clothing and a towel is for replacement.","self_corrected":false},{"annotator":3,"id":"579-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"584","context":"Once or twice, but they seem more show than battle, said Adrin.","statement":"Adrin said they liked to perform more than they did fight.","entailment":[{"annotator":0,"id":"584-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The statement is true because they seem more show than battle.","self_corrected":false},{"annotator":1,"id":"584-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":2,"id":"584-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Their preference is not shown in the context.","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"795","context":"and not only that it it opens you to phone solicitations","statement":"It also opens 
the door to move marketing calls.","entailment":[{"annotator":0,"id":"795-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Phone solicitations and move marketing calls have similar meanings.","self_corrected":false},{"annotator":2,"id":"795-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, phone solicitation is marketing call.","self_corrected":false}],"neutral":[{"annotator":1,"id":"795-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It opens you to phoen solitications. It's not clear whether it also opens you to move marketing calls (whatever that is)","self_corrected":false},{"annotator":3,"id":"795-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"irrelevant","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1382","context":"On the northwestern Alpine frontier, a new state had appeared on the scene, destined to lead the movement to a united Italy.","statement":"The alpine frontier was separated from Italy by glaciers.","entailment":[],"neutral":[{"annotator":0,"id":"1382-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that Italy is on the northweatern Alpine frontier, Glaciers are not mentioned.","self_corrected":false},{"annotator":1,"id":"1382-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are no glaciers mentioned.","self_corrected":false},{"annotator":2,"id":"1382-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Glaciers are not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"1382-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what separating them","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"905","context":"'I don't know what happened, exactly.' 
I said.","statement":"You aren't making sense.","entailment":[],"neutral":[{"annotator":0,"id":"905-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what \"you\" said.","self_corrected":false},{"annotator":1,"id":"905-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"These are different statements.","self_corrected":false},{"annotator":2,"id":"905-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I don't know what happened could because your words are nonsense, but also could because of others, like that things are too complicated.","self_corrected":false},{"annotator":3,"id":"905-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"567","context":"It can entail prospective and retrospective designs and it permits synthesis of many individual case studies undertaken at different times and in different sites.","statement":"It can entail prospective and retrospective designs for system redesigns.","entailment":[],"neutral":[{"annotator":0,"id":"567-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention system redesigns.","self_corrected":false},{"annotator":1,"id":"567-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the designs are for system redesigns.","self_corrected":false},{"annotator":2,"id":"567-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It permits synthesis of many individual case studies, maybe also contributes to system redesigns, but maybe not.","self_corrected":false},{"annotator":3,"id":"567-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what the design is for","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1180","context":"you know maybe it just wasn't possible at all in the first place you know like the no new taxes thing you know that's uh with 
the economy going the way it is and everything that was nearly ridiculous thing to","statement":"it's possible to have no new taxes with the way the economy is right now.","entailment":[{"annotator":1,"id":"1180-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false}],"label_correction":true,"reason":"The speaker says that the \"new taxes thing\" was not possible with \"the economy going the way it is\"","self_corrected":true}],"neutral":[{"annotator":2,"id":"1180-neutral-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context is talking about the economy in the past, not right now.","self_corrected":false}],"contradiction":[],"idk":[0,3],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"854","context":"You will remember my saying that it was wise to beware of people who were not telling you the truth.\"","statement":"There might be dishonest people around here.","entailment":[{"annotator":0,"id":"854-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions to beware of dishonest people, which implies that there might be dishonest people here.","self_corrected":false},{"annotator":1,"id":"854-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker warns someone of dishonest people, so likely there are some around.","self_corrected":false},{"annotator":3,"id":"854-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"one should be aware of people who aren't telling the truth. 
So there are dishonest people around","self_corrected":false}],"neutral":[{"annotator":2,"id":"854-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Dishonest people maybe are around here, maybe are somewhere else.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1555","context":"um-hum with the ice yeah","statement":"With the sunshine and heat wave yes.","entailment":[],"neutral":[{"annotator":1,"id":"1555-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"These are different statements.","self_corrected":false},{"annotator":3,"id":"1555-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1216","context":"You wonder whether he could win a general election coming out of the right lane of the Democratic Party.","statement":"He will not run in a general election while he is a conservative Democrat.","entailment":[],"neutral":[{"annotator":1,"id":"1216-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Someone only asks themselves if he could win the general election. That does not say anything about its truth.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1216-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He is runing in a general election since \"you\" already wonder whether he could win it.","self_corrected":false},{"annotator":2,"id":"1216-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, he run in a general election while he is under the right lane of the Democratic Party.","self_corrected":false},{"annotator":3,"id":"1216-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is wondered if he could win a general election. 
So it is possible that he will run in a general election","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"521","context":"We have done that spectacularly.","statement":"Spectacular results was the only way to describe the impact of our past work.","entailment":[{"annotator":3,"id":"521-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Our work has been spectacular. So the result of the work must be spectacular","self_corrected":false}],"neutral":[{"annotator":0,"id":"521-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention otherr describtion than spectacular, so we don't if it is the only way.","self_corrected":false},{"annotator":1,"id":"521-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear what they have done spectacularly.","self_corrected":false},{"annotator":2,"id":"521-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Spectacular results was the one way to describe the impact of our past work, but there could be other ways, like historical.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"990","context":"well what station plays uh that type of music","statement":"What TV station has documentaries about space travel?","entailment":[],"neutral":[{"annotator":1,"id":"990-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"These are different questions.","self_corrected":false},{"annotator":3,"id":"990-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"419","context":"We always knew it was an outside chance.","statement":"We were never assured of it happening in time and we knew this full 
well.","entailment":[{"annotator":1,"id":"419-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"419-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, outside chance means a small probability, so we never assured of it happening.","self_corrected":false},{"annotator":3,"id":"419-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"We have always need it is a very small possibility. So we always knew it will most likely not happen","self_corrected":true}],"neutral":[{"annotator":0,"id":"419-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context what was an outside chance.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1538","context":"Perhaps a further password would be required, or, at any rate, some proof of identity.","statement":"Identity should be a minimum requirement.","entailment":[{"annotator":0,"id":"1538-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Identity is at any rate required, so it is a minimum requirement.","self_corrected":false},{"annotator":1,"id":"1538-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1538-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, at any rate the proof of identity is needed.","self_corrected":false},{"annotator":3,"id":"1538-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Whatever is needed, some proof of identity is needed. So it makes the identity a minimum requirement","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"910","context":"It's come back? 
cried Julius excitedly.","statement":"They were excited to hear it will come back.","entailment":[{"annotator":2,"id":"910-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, Julius cried excitedly when heard it back.","self_corrected":false},{"annotator":3,"id":"910-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Julius cried excitedly. So he is excited about it coming back","self_corrected":false}],"neutral":[{"annotator":0,"id":"910-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions Julius, it is not clear who \"they\" are.","self_corrected":false},{"annotator":1,"id":"910-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether \"they\" or only Julius was excited.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"360","context":"if it had rained any more in the last two weeks instead of planting Saint Augustine grass in the front yard i think i would have plowed everything under and had a rice field","statement":"It has rained enough to flood everything here and make rice pattys.","entailment":[],"neutral":[{"annotator":1,"id":"360-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what \"make rice patty\"s means, but it shouldn't be entailed by having a rice field.","self_corrected":true},{"annotator":3,"id":"360-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"it hasn't rained enough. 
If it had rained enough, I would have had a rice field","self_corrected":true}],"contradiction":[{"annotator":0,"id":"360-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context indicates that it hasn't reained enough.","self_corrected":false},{"annotator":2,"id":"360-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, it has not rained enough in the last two weeks.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"901","context":"This was used for ceremonial purposes, allowing statues of the gods to be carried to the river for journeys to the west bank, or to the Luxor sanctuary.","statement":"Statues were moved to Luxor for funerals and other ceremonies.","entailment":[{"annotator":3,"id":"901-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"ceremonial purposes were mentioned. Funeral is also a kind of ceremonial purpose. So the statues could also be used for that","self_corrected":false}],"neutral":[{"annotator":0,"id":"901-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Statues were moved to Luxor or to the west bank.","self_corrected":false},{"annotator":1,"id":"901-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether the statues were moved for funerals or only for other ceremonies.","self_corrected":false},{"annotator":2,"id":"901-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe statues were moved to Luxor, or to the west bank.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1097","context":"(Imagine the difference between smoking a cigarette and injecting pure nicotine directly into a vein.)","statement":"Smoking a cigarette is a lot like injecting pure nicotine.","entailment":[],"neutral":[{"annotator":0,"id":"1097-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't suggest the difference between smoking a cigarette and injecting pure 
nicotine.","self_corrected":false},{"annotator":1,"id":"1097-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the difference is large or small according to the context.","self_corrected":false},{"annotator":2,"id":"1097-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe there are lots of similarities between smoking a cigarette and injecting pure nocotine, but maybe they are very different.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1097-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the context emphasizes the difference between smoking a cigarette and injecting pure nocotine.","self_corrected":false},{"annotator":3,"id":"1097-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There should be a difference","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1540","context":"well do you know you have a ten limit a ten minute time limit well that's okay and then they come on and tell you and they tell you got five seconds to say good-bye","statement":"You get a ten minute time limit, but sometimes you'll be told to end early.","entailment":[{"annotator":1,"id":"1540-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":3,"id":"1540-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if then come early or on time to tell one to end","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1540-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggests clearly that \"you\" have a ten minute time limit. 
The requirement for an early end is not valid unless explicitly stated in the context.","self_corrected":false},{"annotator":2,"id":"1540-contradiction-2","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, you will not be told to end early, but will be asked to end it quickly.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1200","context":"Lincoln glared.","statement":"The man was angry.","entailment":[{"annotator":1,"id":"1200-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If he glared, then he probably was angry.","self_corrected":false},{"annotator":3,"id":"1200-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Glaring is a state out of anger","self_corrected":false}],"neutral":[{"annotator":0,"id":"1200-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Lincoln glared might out of anger or surprise.","self_corrected":false},{"annotator":2,"id":"1200-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The man glared maybe because of anger, maybe because of others like terror.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"233","context":"These alone could have valuable uses.","statement":"They may be valuable.","entailment":[{"annotator":0,"id":"233-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"According to the context, they could be valuable.","self_corrected":false},{"annotator":1,"id":"233-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"233-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, \"could have valuable uses\" implies the probability of being 
valuable.","self_corrected":false},{"annotator":3,"id":"233-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Valuable uses infer being valuable","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1086","context":"Generally, FGD systems tend to be constructed closer to the ground compared to SCR technology retrofits.","statement":"FGD systems tend to replicate SCR systems.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1086-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly suggest the difference between FG and SCR systems, which shows that FGD is not a replication of SCR systems.","self_corrected":false},{"annotator":1,"id":"1086-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They probably do not replicate SCR systems, because they are closer to the ground.","self_corrected":false},{"annotator":2,"id":"1086-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, FGD systems are closer to the ground.","self_corrected":false},{"annotator":3,"id":"1086-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"FGD systems tend to be closer to the ground; whereas SCR system not","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"447","context":"This confluence of a bad tax, a $1 billion reserve, a botched opposition campaign, and voters willing to call a bluff resulted in the I-695 victory.","statement":"The I-695 failed in its campaign to help the people.","entailment":[],"neutral":[{"annotator":1,"id":"447-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether the I-695 helped people.","self_corrected":false},{"annotator":3,"id":"447-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A series of negative factors contributed to I-695 success, but it can not be concluded that I-695 failed to help the 
people","self_corrected":false}],"contradiction":[{"annotator":0,"id":"447-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The campaign is not aimed to help the people, as described in the context. And the I-695 didn't fail.","self_corrected":false},{"annotator":2,"id":"447-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the I-695 succeeded.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"729","context":"Despite a recent renovation, the Meadows Mall is the least appealing of the three suburban malls.","statement":"The Meadows Mall is not appealing.","entailment":[{"annotator":2,"id":"729-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, the Meadows Mall is the least appealing of the three suburban malls.","self_corrected":false}],"neutral":[{"annotator":0,"id":"729-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Meadows Mall is the least appealing of the three malls, which doesn't mean it is not appealing at all, it is just not more appealing than other two.","self_corrected":false},{"annotator":1,"id":"729-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is only less appealing than the other two malls. It's not clear whether they all are appealing.","self_corrected":false},{"annotator":2,"id":"729-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Meadows Mall is the least appealing of the three suburban malls, but maybe compared to other competitors, it is still appealing.","self_corrected":false},{"annotator":3,"id":"729-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"THe Meadows Mall is the least appealing out of three malls. 
But it could be appealing, just not as appealling as the other two","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":4.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"674","context":"Everybody has this quote from NBA commissioner David You cannot strike your boss and still hold your job--unless you play in the NBA.","statement":"NBA commissioner said he hates NBA players.","entailment":[],"neutral":[{"annotator":0,"id":"674-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether NBA commissioner said he hates NBA players.","self_corrected":false},{"annotator":1,"id":"674-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not seem to be about whether the commissioner likes NBA players.","self_corrected":false},{"annotator":2,"id":"674-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"NBA commissioner David said NBA players could strike their boss and still hold their job, as \"the boss\", he maybe hates NBA players, maybe not.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"674-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His quote ony shows that the boss does not have total power over his players in NBA. It does not convey his personal feelings over the players","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"706","context":"approaches to achieving missions vary considerably between agencies.","statement":"Approaches to achieving missions might change a lot.","entailment":[{"annotator":0,"id":"706-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement show that there are different approaches to achieving missions.","self_corrected":false},{"annotator":1,"id":"706-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If they vary between agencies, they might change a lot, e.g. 
if you move from one agencie to another.","self_corrected":false},{"annotator":2,"id":"706-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, \"vary considerably\" implies \"change a lot.\"","self_corrected":false},{"annotator":3,"id":"706-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A considerable change could be big","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1430","context":"Kom Ombo is an unusual temple in that it is dedicated to two gods.","statement":"Rarely visited, Kom Ombo is a strange temple devoted to two gods.","entailment":[],"neutral":[{"annotator":0,"id":"1430-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if Kom Ombo is rarely visited.","self_corrected":false},{"annotator":1,"id":"1430-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether Kom Ombo is rarely visited.","self_corrected":false},{"annotator":2,"id":"1430-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know how often the Kom Ombo temple is visited.","self_corrected":false},{"annotator":3,"id":"1430-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about how many people visiting the Kom Ombo","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1262","context":"If they have overestimated how far the CPI is off, Boskin and his commission may institutionalize an underestimated CPI--guaranteeing a yearly, stealth tax increase.","statement":"If they've overestimated how far the CPI is off, it will have horrific consequences. 
","entailment":[],"neutral":[{"annotator":1,"id":"1262-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The tax increase will come if CPI is underestimated and it's not clear whether this is horrific.","self_corrected":false},{"annotator":3,"id":"1262-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not sure if a yearly, stealth tax increase counts as a horrific consequence","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"25","context":"The tomb guardian will unlock the gate to the tunnel and give you a candle to explore the small circular catacomb, but for what little you can see, it is hardly worth the effort.","statement":"The tomb garden can give you a thorough tour of the catacombs.","entailment":[],"neutral":[{"annotator":1,"id":"25-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context speaks about the \"guardian\" not the \"garden\".","self_corrected":false},{"annotator":2,"id":"25-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"A thorough tour is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"25-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The tomb guardian will open the gate for you and give you a candle, which implies that he will not give you a tour of the catacombs.","self_corrected":false},{"annotator":3,"id":"25-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, he only gives a candle to explore the catacomb. 
And you can only see a little, which is not worth the effort","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1056","context":"Hong Kong has long been China's handiest window on the West, and the city is unrivaled in its commercial know-how and managerial expertise.","statement":"Hong Kong is a great place to find commercial know-how if you are hiring someone new.","entailment":[{"annotator":0,"id":"1056-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context mentions that Hong Kong is a great place in its commercial know-how.","self_corrected":false},{"annotator":1,"id":"1056-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"1056-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the city is experience in commercial know-how. So it is a good idea to find people in that area in Hong Kong","self_corrected":false}],"neutral":[{"annotator":2,"id":"1056-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Hiring someone new is not mentioned in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1236","context":"Although the accounting and reporting model needs to be updated, in my view, the current attest and assurance model is also out of date.","statement":"The accounting model needs to be updated in addition to the acquisition model.","entailment":[],"neutral":[{"annotator":0,"id":"1236-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the acquisition model.","self_corrected":false},{"annotator":1,"id":"1236-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the acquisition model has to be updated.","self_corrected":false},{"annotator":2,"id":"1236-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The acquisition model is not mentioned in the 
context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1236-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is the attest and assurance model needs to be updated","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"136","context":"After shuttering the DOE, Clinton could depict himself as a crusader against waste and bureaucracy who succeeded where even Reagan failed.","statement":"Reagan had tried to shutter the DOE but was unable to.","entailment":[{"annotator":0,"id":"136-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Clinton successfully shuttered the DOE while Reagan failed, as mentioned in the context. So the statement is true.","self_corrected":false},{"annotator":1,"id":"136-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Reagan failed at shuttering the DOE, so he tried to do it.","self_corrected":false},{"annotator":2,"id":"136-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, Reagan failed to shutter the DOE.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"136-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It was Clinton who successfully shuttering DOE","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1441","context":"Tell me, how did those scribbled words on the envelope help you to discover that a will was made yesterday afternoon?\" Poirot smiled. 
","statement":"How did you work out from that text that there was a new will?","entailment":[{"annotator":1,"id":"1441-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"1441-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, you discoverd there was a new will form those scribbled words on the envelope.","self_corrected":false},{"annotator":3,"id":"1441-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context is a question about how \"you\" figure out there is a will made yesterday from only a text of scribbled words. So we can know the will is still new from yesterday","self_corrected":false}],"neutral":[{"annotator":0,"id":"1441-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context only mentions a will, it is not clear whether the will is a new one or the only one.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"48","context":"Well, we will come in and interview the brave Dorcas.\" Dorcas was standing in the boudoir, her hands folded in front of her, and her grey hair rose in stiff waves under her white cap. 
","statement":"Dorcas is well known for her bravery.","entailment":[{"annotator":2,"id":"48-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, \"brave Dorcas\" reveals her bravery.","self_corrected":false}],"neutral":[{"annotator":0,"id":"48-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear what is Dorcas well known for.","self_corrected":false},{"annotator":1,"id":"48-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether Dorcas was known for her bravery or whether only the speaker thought she was brave.","self_corrected":false},{"annotator":3,"id":"48-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether Dorcas is well known for her bravery or not","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"401","context":"The entire economy received a massive jump-start with the outbreak of the Korean War, with Japan ironically becoming the chief local supplier for an army it had battled so furiously just a few years earlier.","statement":"Korea and Japan were not at war.","entailment":[],"neutral":[{"annotator":2,"id":"401-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Japan was the chief local supplier for an army in the Korean war, but we don't know whether Japanese army also involved in the war.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"401-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Japan had battled with Korea furiously.","self_corrected":false},{"annotator":3,"id":"401-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They were a war between the two countries. 
And Japan even was the chief local supplier for Korean after the war","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1131","context":"Time 's cover package considers what makes a good school.","statement":"Time's cover package is about how most college students have to deal with insane student loans.","entailment":[],"neutral":[{"annotator":2,"id":"1131-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Insane student loans are not discussed in the context.","self_corrected":false},{"annotator":3,"id":"1131-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1131-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The topic of the Time's cover package mentioned in the statement is completely different from the one mentioned in the context.","self_corrected":false},{"annotator":1,"id":"1131-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The cover package is about good schools, not about student loans.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"731","context":"But the world is not run for the edification of tourists.","statement":"The world does not try and morally subject to tourists.","entailment":[{"annotator":0,"id":"731-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Runing for the edification of tourists includes morally subjecting to tourists.","self_corrected":false},{"annotator":2,"id":"731-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the world exists not for the 
tourists.","self_corrected":false}],"neutral":[{"annotator":3,"id":"731-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"irrelevant","self_corrected":true}],"contradiction":[],"idk":[1],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"449","context":"The centralization dear to Richelieu and Louis XIV was becoming a reality.","statement":"Louis XIV cared a lot about centralization of his country and people.","entailment":[{"annotator":0,"id":"449-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement mention that Louis XIV valued the centralization.","self_corrected":false},{"annotator":1,"id":"449-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":2,"id":"449-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the centraliza dear to Louis XIV implies he cares a lot about centralization.","self_corrected":false}],"neutral":[{"annotator":3,"id":"449-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It was not mentioned whether Louis XIV cared about his people or not","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1204","context":"what does um is Robby Robin Williams does he have a funny part in the movie or is","statement":"Is Robin Williams in the movie?","entailment":[],"neutral":[{"annotator":1,"id":"1204-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker seems to know that Robin Williams is in the movie, but not which part he has.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1204-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He is in the movie, the context asked about if he had a funny part in the 
movie","self_corrected":false}],"idk":[0,2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"311","context":"But the door was locked?\" These exclamations burst from us disjointedly. ","statement":"We chaotically exclaimed as we all jumped up in a frenzy, \"But the door wasn't unlocked?\"","entailment":[{"annotator":1,"id":"311-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"Was not unlocked?\" entails \"Was locked?\"","self_corrected":false}],"neutral":[{"annotator":0,"id":"311-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn' mention that they jumped up.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"311-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The door was locked","self_corrected":true}],"idk":[2],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"1129","context":"Tuppence rose.","statement":"Tuppence stood up.","entailment":[{"annotator":0,"id":"1129-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that Tuppence stood up.","self_corrected":false},{"annotator":1,"id":"1129-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Rose\" entails \"stood up\".","self_corrected":false},{"annotator":2,"id":"1129-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A human \"rose\" means \"stood up\".","self_corrected":false},{"annotator":3,"id":"1129-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the word\"rise\" means gets up\/ stands up","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"123","context":"Enlarging the village was not desirable and most knew that Severn only desired wealth and a seat on the council of elders.","statement":"Severn was happy being 
poor.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"123-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Severn desired wealth, so he was not happy beding poor.","self_corrected":false},{"annotator":1,"id":"123-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Severn desired wealth\" so they were not \"happy being poor\".","self_corrected":false},{"annotator":2,"id":"123-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Severn only desired wealth so he should be unhappy being poor.","self_corrected":false},{"annotator":3,"id":"123-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Severn wants wealth","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"585","context":"and the other thing is the cost it's almost prohibitive to bring it to a dealer","statement":"The cost of fixing it makes it hard to bring it to a dealer.","entailment":[{"annotator":0,"id":"585-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and statement talk about the large cost of fixing it.","self_corrected":true}],"neutral":[{"annotator":1,"id":"585-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether \"fixing it\" increases the cost.","self_corrected":false},{"annotator":2,"id":"585-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The cost makes it hard to bring it to a dealer, but it could be the cost of fixing, or the cost of something else, like transport.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"585-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It is the cost of bringing it to the dealer that is very expensive","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"513","context":"He knew how the Simulacra was supposed to develop.","statement":"He didn't know about 
Sims.","entailment":[],"neutral":[{"annotator":1,"id":"513-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sims are not mentioned in the context.","self_corrected":false},{"annotator":2,"id":"513-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sims is not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"513-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He knew about Sims\/Simulacra and how they were supposed to develop","self_corrected":false}],"idk":[0],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"718","context":"i think that the people that are um have um a lower income which you automatically equate with lower education","statement":"I think because you have lower income you are less educated.","entailment":[{"annotator":2,"id":"718-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, I think that lower income is equal to lower education.","self_corrected":false}],"neutral":[{"annotator":1,"id":"718-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether the speaker talks about other people associating lower income with lower education or whether they talk about themselves.","self_corrected":false},{"annotator":3,"id":"718-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"There is no clear causal relation between the poorness and the education level. 
Either one could lead to the other","self_corrected":true}],"contradiction":[{"annotator":0,"id":"718-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context and the statement discuss just the opposite of the causal relationship between income and educational level.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"761","context":"GAO recommends that the Secretary of Defense revise policy and guidance","statement":"GAO recommends that you eat 5 fruit\/veg per day","entailment":[],"neutral":[{"annotator":0,"id":"761-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The recommendation of GAO in the statement is not mentioned in the context.","self_corrected":false},{"annotator":1,"id":"761-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Fruit\/veg are not mentioned in the context.","self_corrected":false},{"annotator":2,"id":"761-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Diet is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"761-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1142","context":"In this respect, bringing Steve Jobs back to save Apple is like bringing Gen.","statement":"Steve Jobs came back to Apple.","entailment":[{"annotator":2,"id":"1142-entailment-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because it describe Steve Jobs' back as \"bringing Gen\", so he came back to Apple and saved it like a General.","self_corrected":false},{"annotator":3,"id":"1142-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Steve Jobs was brought back to save Apple. 
So he came back to Apple","self_corrected":false}],"neutral":[{"annotator":0,"id":"1142-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context doesn't suggest if Steve Jobs came back to Apple.","self_corrected":false},{"annotator":1,"id":"1142-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker talks only about what it would be like if Steve Jobs returned to Apple. He doesn't assert that he really came back.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"684","context":"Ca'daan closed the door behind them and retied the not.","statement":"Ca'daan closed the door as they entered, and bound it shut with rope.","entailment":[{"annotator":1,"id":"684-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The \"[k]not\" was probably made with rope.","self_corrected":false},{"annotator":3,"id":"684-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"paraphrases","self_corrected":true}],"neutral":[{"annotator":0,"id":"684-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention if Ca'daan bound the door shut with rope","self_corrected":false}],"contradiction":[{"annotator":2,"id":"684-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Ca'daan closed the door after they entered, not as they entered.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"42","context":"Then he is very sure.","statement":"He is very sure of himself.","entailment":[{"annotator":3,"id":"42-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"paraphrases","self_corrected":true}],"neutral":[{"annotator":0,"id":"42-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what he's very sure 
of.","self_corrected":false},{"annotator":1,"id":"42-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's unclear whether he is sure of himself or of something else.","self_corrected":false},{"annotator":2,"id":"42-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He could be very sure of himself, or be sure of any other things.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"112","context":"you can get a hard copy of it and that's about it","statement":"An email won't cut it.","entailment":[{"annotator":3,"id":"112-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Only one hard copy of it is allowed. So an E-mail does not meet the requirement and won't do the trick","self_corrected":false}],"neutral":[{"annotator":0,"id":"112-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions the hard copy.","self_corrected":true},{"annotator":1,"id":"112-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"E-mail is not mentioned in the context.","self_corrected":false},{"annotator":2,"id":"112-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Email is not mentioned in the context.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"368","context":"The Honorable Bill Archer, Chairman The Honorable Charles B. 
Rangel Ranking Minority Member Committee on Ways and Means House of Representatives","statement":"Bill Archer has never held government office in his entire life.","entailment":[],"neutral":[],"contradiction":[{"annotator":1,"id":"368-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Bill Archer was chairman of the house of representatives.","self_corrected":false},{"annotator":2,"id":"368-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"No, Bill Archer at lease held government office as Chairman.","self_corrected":false}],"idk":[0,3],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"278","context":"Lawyers in their first three years of practice or who are inactive pay $90, and retired lawyers pay nothing.","statement":"Lawyers pay $90 to be included in the directory.","entailment":[],"neutral":[{"annotator":1,"id":"278-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear for what lawyers pay $90.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"278-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not all lawyers are required to pay $90.","self_corrected":false},{"annotator":2,"id":"278-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, retired lawyers pay nothing to be included in the directory.","self_corrected":false},{"annotator":3,"id":"278-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"retired laywers do not pay anything. 
And it is not clear what the money is for.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1030","context":"It vibrated under his hand.","statement":"It hummed quietly in his hand.","entailment":[{"annotator":1,"id":"1030-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"It vibrated\" probably entails \"it hummed\".","self_corrected":false},{"annotator":2,"id":"1030-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, because if it \"vibrated\", it usually \"hummed quietly\".","self_corrected":false}],"neutral":[{"annotator":0,"id":"1030-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if the humming is quiet.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1030-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's vibrating not humming ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"896","context":"yeah i can believe that","statement":"I agree with what you said.","entailment":[{"annotator":0,"id":"896-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that the speaker believe what was said.","self_corrected":false},{"annotator":3,"id":"896-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"To Believe what has happened does not mean to agree with what has happened ","self_corrected":false}],"neutral":[{"annotator":1,"id":"896-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Belief does not entail agreement.","self_corrected":false},{"annotator":2,"id":"896-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I believe something, maybe it is what you said, maybe it is anything 
else.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"815","context":"In the first instance, IRS would have no record of time before the person could get through to an agent and of discouraged callers.","statement":"There is no recording of the time for callers.","entailment":[],"neutral":[{"annotator":1,"id":"815-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The \"would\" implies that this is about a hypothetical situation, not about a factual one.","self_corrected":false},{"annotator":2,"id":"815-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no recording of the time for discouraged callers, but other callers could be recorded, or not.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"815-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"There is recording of the time after the person get through to an agent.","self_corrected":false},{"annotator":3,"id":"815-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"There will be a record after the person get through to an agent ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"724","context":"This having come to his stepmother's ears, she taxed him with it on the afternoon before her death, and a quarrel ensued, part of which was overheard. 
","statement":"A love affair sparked just moments before her death.","entailment":[],"neutral":[{"annotator":0,"id":"724-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if the context talks about A love affair.","self_corrected":false},{"annotator":1,"id":"724-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There's no mention of a love affair.","self_corrected":false},{"annotator":2,"id":"724-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"A love affair is not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"724-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not known that it's about a love affair ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1066","context":"Hersheimmer \"WELL,\" said Tuppence, recovering herself, \"it really seems as though it were meant to be.\" Carter nodded.","statement":"See, luck is real!","entailment":[{"annotator":0,"id":"1066-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The speaker seems to believe in luck, as she said that it seems as it were meant to be.","self_corrected":false}],"neutral":[{"annotator":1,"id":"1066-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Luck is not mentioned.","self_corrected":false},{"annotator":3,"id":"1066-neutral-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant ","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"629","context":"(And yes, he has said a few things that can, with some effort, be construed as support for supply-side economics.)","statement":"It would take some work to construe the things as support for supply-side economics.","entailment":[{"annotator":0,"id":"629-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"With some effort\" has the same meaning as \"take some 
work\".","self_corrected":false},{"annotator":1,"id":"629-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"with some effort\" implies \"it would take some work\".","self_corrected":false},{"annotator":2,"id":"629-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, to construe the things as support for supply-side economics need some effort.","self_corrected":false},{"annotator":3,"id":"629-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"What he said would need some effort, meaning work, to be construed as support","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1437","context":"This is one of the reasons we're growing too weak to fight the Satheri. \"What's wrong with a ceremony of worship, if you must worship your eggshell?\" Dave asked.","statement":"Eggshell worship is the reason we're growing too weak to fight the Satheri, yet Dave asked about it.","entailment":[],"neutral":[{"annotator":0,"id":"1437-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify the reasons why they are growing too weak to fight the Satheri.","self_corrected":false},{"annotator":1,"id":"1437-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether \"Eggshell worship\" is really put as a reason.","self_corrected":false},{"annotator":2,"id":"1437-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reasons that we're growing too weak to fight the Satheri is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1437-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Dave meant if\"you\" can worship a eggshell then people can also have a ceremony of worship ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"181","context":"Is there adequate information for judging generalizability?","statement":"Every output has some kind of 
resource.","entailment":[],"neutral":[{"annotator":1,"id":"181-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Output or resources are not mentioned in the context.","self_corrected":false},{"annotator":3,"id":"181-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant ","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1361","context":"The central features of the Results Act-strategic planning, performance measurement, and public reporting and accountability-can serve as powerful tools to help change the basic culture of government.","statement":"The Results Act has strategic planning as a central feature for public organizations.","entailment":[{"annotator":1,"id":"1361-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"strategic planning\" is mentioned as one of the central features.","self_corrected":false},{"annotator":3,"id":"1361-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Strategic planning is one of its central features ","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"1361-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The central features of the Results Act are tools to help government, not public organizations.","self_corrected":false},{"annotator":2,"id":"1361-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Results Act has strategic planning as a central feature, but the purpose is to help change the basic culture of government.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":2.0,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"894","context":"3 It should be noted that the toxicity (LC50) of a sample observed in a range-finding test may be significantly different from the toxicity observed in the follow-up chronic definitive test (1) the definitive test is longer; and (2) the test may be performed with a sample collected at a different time, and possibly differing significantly in the level of toxicity.","statement":"The toxicity of a sample in the range-finding test will be exactly the same as the toxicity in the follow-up 
test.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"894-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The toxicity of a sample in a range-finding test may be different from the toxicity in the follow-up test, whereas the statement says the exact opposite.","self_corrected":false},{"annotator":1,"id":"894-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says it \"may be significantly different\".","self_corrected":false},{"annotator":2,"id":"894-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the toxicity of a sample in the range-finding test may be significantly different from the toxicity in the follow-up test.","self_corrected":false},{"annotator":3,"id":"894-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They are not the same due to the test time and samples collected at a different time ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1018","context":"In 1984, Clinton picked up rock groupie Connie Hamzy when she was sunbathing in a bikini by a hotel pool.","statement":"Clinton kept her friends and relationships private in the 80s.","entailment":[],"neutral":[{"annotator":0,"id":"1018-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention if Clinton kept her friends and relationships private.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1018-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false}],"label_correction":false,"reason":"No, Clition is male not female.","self_corrected":true},{"annotator":3,"id":"1018-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He picked a one of his friends in a public hotel pool. 
So it was not private kept ","self_corrected":false}],"idk":[1],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1251","context":"oh wow no i just started about well five years ago i think","statement":"It had started five years ago.","entailment":[{"annotator":0,"id":"1251-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that the speaker started five years ago.","self_corrected":false},{"annotator":2,"id":"1251-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, I think I started five years ago.","self_corrected":false},{"annotator":3,"id":"1251-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"I started something five years ago, so this thing or it was started five years ago ","self_corrected":false}],"neutral":[{"annotator":1,"id":"1251-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"I started\" not \"it started\"","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1396","context":"oh that's not really important the the other stuff is just you know window dressing because we we've never ordered anything fact the the van that we've got we bought uh from an estate it was an estate trade uh it was almost brand new the the gentlemen who owned it had died","statement":"We were very lucky to get the van given how new it was.","entailment":[{"annotator":0,"id":"1396-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is true that it's a lucky thing to get a new van.","self_corrected":false},{"annotator":3,"id":"1396-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The van was almost brand new because the gentleman who owned it died. 
So it's almost not used ","self_corrected":false}],"neutral":[{"annotator":1,"id":"1396-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Whether they were lucky would depend on the price which is not mentioned.","self_corrected":false},{"annotator":2,"id":"1396-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The van was almost brand new, but it could be lucky to have or not at all.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"415","context":"The NYT , in its front-page coverage, says the plane was flying far lower than the rules for training missions allow.","statement":"The NYT reported that training missions did allow for planes to fly that low.","entailment":[{"annotator":3,"id":"415-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The plane flew lower than the rules allowed, so the rules do not allow to fly that low ","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"415-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was flying lower than the rules allow, which suggests that it was not allowed by rules for training missions.","self_corrected":false},{"annotator":1,"id":"415-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The \"plane was flying far lower than the rules [...] 
allow\" implies that it was allowed to fly this low.","self_corrected":true},{"annotator":2,"id":"415-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, NYT reported that training missions did not allow for planes to fly that low.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"605","context":"1 Now that each unit is fully staffed, the LSC Office of Program Performance and its state planning team contain over 260 years of experience in LSC-funded programs.","statement":"The LSC has over 260 years of experience with their lawyers.","entailment":[],"neutral":[{"annotator":0,"id":"605-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The experience of LSC with their lawyers is not mentioned.","self_corrected":false},{"annotator":1,"id":"605-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The experience is for the \"state planning team\" and not for \"lawyers\".","self_corrected":false},{"annotator":2,"id":"605-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The LSC has over over 260 years of experience in LSC-funded programs, but it could be with their lawyers, or with other staffs, like interns.","self_corrected":false},{"annotator":3,"id":"605-neutral-4","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"No info about the info but about LSC-funded program, which we do not know info about ","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1513","context":"You did, didn't you?\"","statement":"You didn't mean to do that, did you?","entailment":[],"neutral":[{"annotator":1,"id":"1513-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"mean to do that\" is not mentioned in the context.","self_corrected":false},{"annotator":2,"id":"1513-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The deliberation is not given in the 
context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1513-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Context is about whether an action has been done by someone, but the statement is about the intention to do it ","self_corrected":true}],"idk":[0],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"354","context":"A clean, wholesome-looking woman opened it.","statement":"The woman was trying to be desecrate.","entailment":[],"neutral":[{"annotator":0,"id":"354-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention anything about desecration.","self_corrected":false},{"annotator":1,"id":"354-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what the woman was trying to be.","self_corrected":false},{"annotator":2,"id":"354-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The attempt of the woman is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"354-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, the woman is clean and wholesome-looking, not desecrate.","self_corrected":false},{"annotator":3,"id":"354-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context is a compliment, statement is a negative comment ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"926","context":"How effectively DOD manages these funds will determine whether it receives a good return on its investment.","statement":"The DOD is certain to have a bad return on these funds.","entailment":[],"neutral":[{"annotator":2,"id":"926-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The return on its investment can be bad or good.","self_corrected":false},{"annotator":3,"id":"926-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known yet about the result, it depends on DOD's management 
","self_corrected":false}],"contradiction":[{"annotator":0,"id":"926-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The return is not certainly bad. The return is determined by how DOD manages these funds.","self_corrected":false},{"annotator":1,"id":"926-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"How effectively [...] will determine\" implies that there is at least a chance to have a good return.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"755","context":"Challenges to Restore Public Confidence in","statement":"Public confidence can be difficult to reestablish.","entailment":[{"annotator":0,"id":"755-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement suggest that it is possible that Public confidence is difficult to restore. This is true since the context only mentions that there are challenges, not how large the challenges are.","self_corrected":false},{"annotator":1,"id":"755-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If there are \"challenges to restore public confidence\" then it can be \"difficult to reestablish\".","self_corrected":false},{"annotator":3,"id":"755-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Challenge means to be a difficulty ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1373","context":"if the United States had used full conventional power.","statement":"The United States is unable to maximize their potential.","entailment":[],"neutral":[{"annotator":1,"id":"1373-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maximizing potential is not mentioned.","self_corrected":false},{"annotator":3,"id":"1373-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It it unknown if US is able to maximize the potential or not. 
Maybe US is able to, just will not ","self_corrected":false}],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"307","context":"What Ellison is doing here, as Hemingway did, is equating the process of becoming an artist with that of becoming a man.","statement":"Ellison and Hemingway took different ways to compare becoming a man.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"307-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Ellison and Hemingway took the same way, not different ways.","self_corrected":false},{"annotator":1,"id":"307-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They both compared the process to becoming an artist.","self_corrected":false},{"annotator":2,"id":"307-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, both Ellison and Hemingway equated the process of becoming an artist with that of becoming a man.","self_corrected":false},{"annotator":3,"id":"307-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They both equete becoming an artist with becoming an man. It means for them, when one becomes an artist, they also becomes a man. The way is the same","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"1138","context":"As Ben Yagoda writes in the New York Times Book Review , somewhere along the way, Kidder must have decided not to write a book about Tommy O'Connor.","statement":"A book was not written about Tommy O'Connor.","entailment":[{"annotator":2,"id":"1138-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, Kidder decided not to write a book about Tommy O'Connor.","self_corrected":false},{"annotator":3,"id":"1138-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Kidder dicided not to write a book about O'Conner. 
So this book about him is not written","self_corrected":false}],"neutral":[{"annotator":0,"id":"1138-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Kidder didn't write a book about Tommy O'Connor doesn't mean others haven't.","self_corrected":false},{"annotator":1,"id":"1138-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Other people might have written a book about Tommy O'Connor.","self_corrected":false},{"annotator":2,"id":"1138-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maybe Kidder later wrote a book not about Tommy O'Connor, maybe he even did not write a book, so no book was written at all.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"446","context":"My unborn children will never appear on the Today show.","statement":"No direct descendent of mine will ever be a guest of the Today show.","entailment":[{"annotator":3,"id":"446-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"my unborn children mean the children that are born by me. So they are direct descendent of mine. So my unborn children not being on the show means my direct descendent not being on the show","self_corrected":false}],"neutral":[{"annotator":0,"id":"446-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The grandchild is also a direct descendent, we don't know if the speaker's grandchildren will appear on the Today show.","self_corrected":false},{"annotator":1,"id":"446-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"There might be children that have already been born. These would not be \"unborn children\" but also \"direct descendent\".","self_corrected":false},{"annotator":2,"id":"446-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Direct descendent include children and grandchildren. 
In the context, it is only confirmed that my children will not be a guest of the Today show, but my grandchildren could be on it.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"345","context":"He was crying like his mother had just walloped him.","statement":"He was crying like his mother hit him with a spoon.","entailment":[{"annotator":0,"id":"345-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and the statement describe how much he was crying.","self_corrected":false},{"annotator":1,"id":"345-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Regarding the intensity of crying \"wallop\" is probably similar to hitting \"with a spoon\".","self_corrected":false}],"neutral":[{"annotator":2,"id":"345-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His mother could hit him with a spoon, could with other things like stike or slippers.","self_corrected":false},{"annotator":3,"id":"345-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known with what his mother hit him","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"532","context":"Julius nodded gravely.","statement":"Julius loves to ask questions.","entailment":[],"neutral":[{"annotator":0,"id":"532-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if Julius loves to ask questions.","self_corrected":false},{"annotator":1,"id":"532-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Questions are not mentioned.","self_corrected":false},{"annotator":2,"id":"532-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe Julius loves to ask questions, maybe 
not.","self_corrected":false},{"annotator":3,"id":"532-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"62","context":"Lie back, and DON'T THINK.","statement":"Lie back, and do not use your crazy mind.","entailment":[{"annotator":0,"id":"62-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context and the statement talk about stopping thinking.","self_corrected":false},{"annotator":3,"id":"62-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"DON'T THINK\" implies not to overthink and relax in this context. So it is similar to \" not use your crazy mind\", which also means not to overthink","self_corrected":false}],"neutral":[{"annotator":1,"id":"62-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear whether the mind is crazy.","self_corrected":false},{"annotator":2,"id":"62-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Using crazy mind could be counted as thinking or not thinking, but dreaming.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"104","context":"Boca da Corrida Encumeada (moderate; 5 hours): views of Curral das Freiras and the valley of Ribeiro do Poco.","statement":"Boca da Corrida Encumeada is a moderate text that takes 5 hours to complete.","entailment":[{"annotator":0,"id":"104-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":" \"Moderate\" and \"5 hours: are two descriptions about Boca da Corrida Encumeada made in the statement which are mentioned in the context as well.","self_corrected":true},{"annotator":3,"id":"104-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be assumed that (moderate; 5 hours) is a short form of moderate text with 5 hours reading 
time","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":1,"id":"104-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Boca da Corrida Encumeada\" sounds more like a hike than a text.","self_corrected":false},{"annotator":2,"id":"104-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Boca da Corrida Encumeada should be a route, not a text.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":2.0,"neutral":null},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1064","context":"The second half of the book dealt with the use of the true name.","statement":"The first part dealt with the use of false names.","entailment":[],"neutral":[{"annotator":0,"id":"1064-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The first part is not mentioned by the context.","self_corrected":false},{"annotator":1,"id":"1064-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear what the first part of the book is about.","self_corrected":false},{"annotator":2,"id":"1064-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The first part is not given in the context.","self_corrected":false},{"annotator":3,"id":"1064-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the first part of the book","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"282","context":"By coordinating policy development and awareness activities in this manner, she helps ensure that new risks and policies are communicated promptly and that employees are periodically reminded of existing policies through means such as monthly bulletins, an intranet web site, and presentations to new employees.","statement":"She can find new risks with the awareness campaign.","entailment":[],"neutral":[{"annotator":1,"id":"282-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The awareness campaign is about communicating risks, not about finding new 
ones","self_corrected":false},{"annotator":2,"id":"282-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Finding new risks could be a effect of awareness campaign, but it is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"282-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"She can communicate new risks with the awareness campaign insteading of finding new risks.","self_corrected":false},{"annotator":3,"id":"282-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"She does not find new risks but to ensure that the new risks are dealt with correctly","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"246","context":"or just get out and walk uh or even jog a little although i don't do that regularly but Washington's a great place to do that","statement":"\"I regularly go for a walk or a jog at Washington's.\"","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"246-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The speaker doesn't regularly go for a walk or a jog at Washington's.","self_corrected":false},{"annotator":1,"id":"246-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"i don't [walk or jog] regularly\" implies that the speaker also does no do that at Washington's","self_corrected":false},{"annotator":2,"id":"246-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, I don't go for a walk or a jog regularly.","self_corrected":false},{"annotator":3,"id":"246-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context implies that \"i\" do not walk or job regularly","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"471","context":"Catch up on the Indian avant-garde and the bohemian people of Caletta at the Academy of Fine Arts on the southeast corner of the Maidan.","statement":"The Academy of Fine Arts is located in Northern 
Maidan.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"471-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Academy of Fine Arts is located in Southern Maidan instead of Northern Maidan.","self_corrected":false},{"annotator":1,"id":"471-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is located on the \"southeast corner\", so not in \"Northern\".","self_corrected":false},{"annotator":2,"id":"471-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, The Academy of Fine Arts is located in Southeasten Maidan.","self_corrected":false},{"annotator":3,"id":"471-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is located on the southeast corner of the Maidan","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"788","context":"Why shouldn't he be?","statement":"He doesn't actually want to be that way.","entailment":[],"neutral":[{"annotator":0,"id":"788-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His thought is not mentioned in the context.","self_corrected":false},{"annotator":1,"id":"788-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether he wants to be like that.","self_corrected":false},{"annotator":3,"id":"788-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about his intention","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"884","context":"Other functional components of the Postal Service are presumed here not to exhibit significant scale economies, although this has not been demonstrated.","statement":"The Postal Service only operates very large scale economies.","entailment":[],"neutral":[{"annotator":0,"id":"884-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The word \"only\" is not mentioned in the 
context.","self_corrected":false},{"annotator":1,"id":"884-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the Postal Service operates economies at all, or what that should mean.","self_corrected":true},{"annotator":2,"id":"884-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Other functional components of the Postal Service could operate not very large scale economies, but this has not been demonstrated, so it can also be very large.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"884-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They are presumed not to operate significant\/large scale economies","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"315","context":"In May 1967, Gallup found that the number of people who said they intensely disliked RFK--who was also probably more intensely liked than any other practicing politician--was twice as high as the number who intensely disliked Johnson, the architect of the increasingly unpopular war in Vietnam.","statement":"Due to his attitudes on cheesecake, RFK was more disliked than Johnson.","entailment":[],"neutral":[{"annotator":0,"id":"315-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The reason why RFK was more disliked than Johnson is not mentioned in the context.","self_corrected":false},{"annotator":1,"id":"315-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether RFK had an attitude towards cheescake or how that impacted his popularity.","self_corrected":false},{"annotator":2,"id":"315-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"RFK's attitudes on cheesecake is not given in the context.","self_corrected":false},{"annotator":3,"id":"315-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known that the dislikes on RFK is due to his attitudes on cheesecakes","self_corrected":false}],"contradiction":[{"annotator":2,"id":"315-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, RFK was more iked than 
Johnson.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"310","context":"She didn't listen.","statement":"She did not listen to the noise.","entailment":[],"neutral":[{"annotator":0,"id":"310-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't suggest what she didn't listen to.","self_corrected":false},{"annotator":1,"id":"310-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear whether she didn't listen to the noise or to something else.","self_corrected":false},{"annotator":2,"id":"310-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe she did not listen to the noise, maybe she did not listen to vert important messages.","self_corrected":false},{"annotator":3,"id":"310-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is unknown what she did not listen to","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"937","context":"Several of the organizations had professional and administrative staffs that provided analytical capabilities and facilitated their members' participation in the organization's activities.","statement":"Organizations had mandatory bonding exercises for their members.","entailment":[],"neutral":[{"annotator":0,"id":"937-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if the activities are mandatory.","self_corrected":false},{"annotator":1,"id":"937-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"mandatory boding exercises\" are not mentioned.","self_corrected":false},{"annotator":2,"id":"937-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Organizations facilitated their members' participation in the organization's activities, but they maybe had mandatory bonding exercises, maybe 
not.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"937-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The members were facilitated to participate. So it is not mandatory but encouraged","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"365","context":"Indeed, said San'doro.","statement":"They were certain.","entailment":[{"annotator":1,"id":"365-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Indeed\" implies a level of certainty.","self_corrected":false},{"annotator":3,"id":"365-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Indeed implies a positive acknowledging attitude.","self_corrected":false}],"neutral":[{"annotator":0,"id":"365-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions San'doro, it is unclear who \"they\" are.","self_corrected":false},{"annotator":2,"id":"365-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In the context, there is only one person. \"They\" can refer to anyone.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"222","context":"When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' 
'","statement":"If he does a big strike, many people will suffer.","entailment":[{"annotator":0,"id":"222-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question in the context might be a rhetorical question which suggests exactly that many innocents will suffer.","self_corrected":false},{"annotator":1,"id":"222-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"how many innocents do you suppose are going to suffer\" implies that \"many people will suffer\".","self_corrected":false},{"annotator":3,"id":"222-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The question in the context implies that a major strike leads to suffering of many innocent people","self_corrected":false}],"neutral":[{"annotator":0,"id":"222-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the question in the context is just a ordinary question, then we don't know if many people will suffer.","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"4","context":"Mortifyingly enough, it is all the difficulty, the laziness, the pathetic formlessness in youth, the round peg in the square hole, the whatever do you want?","statement":"Many youth are lazy.","entailment":[{"annotator":1,"id":"4-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"the laziness [...] 
in youth\" implies that \"many youth are lazy\".","self_corrected":false},{"annotator":3,"id":"4-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The laziness in youth means youth being lazy","self_corrected":true}],"neutral":[{"annotator":0,"id":"4-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not mention whether many young people are lazy.","self_corrected":true}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1566","context":"'You burned down my house.'","statement":"'Even though you tried to burn it down, my house is in perfect state.'","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"1566-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context suggest that the speaker's house was burned down, while the statement states that the house was not burned down.","self_corrected":false},{"annotator":1,"id":"1566-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"After being burned down, the house will not be in perfect state.","self_corrected":false},{"annotator":2,"id":"1566-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, you burned down my house means it already happened.","self_corrected":false},{"annotator":3,"id":"1566-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"context entails the house was burned down","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"75","context":"What about the hole?\" They scanned the cliff-side narrowly.","statement":"They looked from the top of the cliff for the hole.","entailment":[{"annotator":3,"id":"75-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They were scanning the cliff-side, so they were on the top of the cliff looking for the hole","self_corrected":false}],"neutral":[{"annotator":1,"id":"75-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear 
whether they were on top of a cliff.","self_corrected":false},{"annotator":2,"id":"75-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"They could look from at direction to the cliff-side, maybe from the top, maybe from the bottom.","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1383","context":"And she came to you?","statement":"The person asked if the woman came to him.","entailment":[{"annotator":0,"id":"1383-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is the description of the question in the context.","self_corrected":false},{"annotator":1,"id":"1383-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"1383-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"she' implies it was a woman.","self_corrected":false}],"neutral":[{"annotator":2,"id":"1383-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"You\" can refer to a male or a female.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1161","context":"Although claims data provide the most accurate information about health care use, ensuring adequate follow-up for purposes of obtaining information from patient self-report is important because many people do not report alcohol-related events to insurance compa-nies.","statement":"The insurance companies want to reduce medical payments by following-up to ensure patient was sober at the time of incident and intoxication may lead to a claim denial on reimbursement for medical expenses.","entailment":[],"neutral":[{"annotator":0,"id":"1161-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement seems to be a reason of why the insurance companies want to follow-up, that may just be one possibility.","self_corrected":false},{"annotator":1,"id":"1161-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether it is the insurance companies that should do the follow-up or 
why.","self_corrected":false},{"annotator":2,"id":"1161-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Intoxication may lead to a claim denial on reimbursement for medical expense, but it may not.","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"837","context":"An organization's activities, core processes, and resources must be aligned to support its mission and help it achieve its goals.","statement":"An organization is successful if its activities, resources, and goals align.","entailment":[],"neutral":[{"annotator":0,"id":"837-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"What is mentioned in the statement may be a factor in an organization's success, but there might be others.","self_corrected":false},{"annotator":1,"id":"837-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only says that these are required, not that they are sufficient.","self_corrected":false},{"annotator":2,"id":"837-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"An organization's activities, rescources and goals align can help it achieve success, but the alignment can not promise the success.","self_corrected":false},{"annotator":3,"id":"837-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Don't know if this is the only standard to measure if an organization is successful","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"835","context":"If ancient writings give only a romanticized view, they do offer a more precise picture of Indo-Aryan society.","statement":"Ancient writings don't show an accurate picture of Indo-Anryan society.","entailment":[{"annotator":2,"id":"835-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Yes, because ancient writings give only a romanticized view.","self_corrected":false},{"annotator":3,"id":"835-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"An accurate picture of the society is given under the assumption that ancient writings give a romanticized view. 
So in reality, where this assumption does not hold, the picture is not accurate either","self_corrected":false}],"neutral":[{"annotator":1,"id":"835-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The writings are \"more precise\" than something else. That doesn't imply that they are really accurate.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"835-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Ancient writings offer a more accurate picture of Indo-Aryan society.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"850","context":"In the original, Reich is set up by his host and then ambushed by a hostile questioner named John, and when he tries to answer with an eloquent Mr. Smith speech (My fist is clenched.","statement":"Reich's host is out to get him.","entailment":[{"annotator":0,"id":"850-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Reich's host is out to set him up. The word \"get\" in the statement could mean set up.","self_corrected":false},{"annotator":1,"id":"850-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"His host set him up, so he is out to get him.","self_corrected":false},{"annotator":2,"id":"850-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Yes, because Reich is set by his host.","self_corrected":false},{"annotator":3,"id":"850-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He is set up by his host. 
So his host designed a trap for him, meaning his host is out to get him","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":0,"id":"850-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the word \"get\" refers to physical, for example \"catch\", then the statement is false.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":4.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1548","context":"But you will find it all right.\"","statement":"You, I'm sure, will find it more than adequate.","entailment":[{"annotator":0,"id":"1548-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that it is fine.","self_corrected":true},{"annotator":1,"id":"1548-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"find it all right\" implies \"more than adequate\"","self_corrected":true}],"neutral":[{"annotator":2,"id":"1548-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It might be more than adequate, but also can be just adequate.","self_corrected":false},{"annotator":3,"id":"1548-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"possible exaggeration.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"486","context":"uh high humidity","statement":"Warm, sweaty temperatures.","entailment":[{"annotator":1,"id":"486-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"high humidity\" implies \"warm sweaty temperatures\"","self_corrected":false},{"annotator":3,"id":"486-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases to high humidity","self_corrected":false}],"neutral":[{"annotator":2,"id":"486-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It can be warm or 
cold.","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"161","context":"The almost midtown Massabielle quarter (faubourg de Massabielle), is sometimes described as the most picturesque in the city.","statement":"The Massabielle quarter is a very touristy place.","entailment":[],"neutral":[{"annotator":0,"id":"161-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether the Massabielle quarter is very touristy or not","self_corrected":false},{"annotator":1,"id":"161-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"picturesque\" does not necessarily imply \"touristy\". ","self_corrected":false},{"annotator":2,"id":"161-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be a very touristy place because of its great beauty, but it also can be not touristy because of poor transportation, or it is not so famous.","self_corrected":false},{"annotator":3,"id":"161-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"picturesque is not directly related to touristy","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"152","context":"The tip was hooked towards the edge, the same way the tips are hammered for knives used for slaughter.","statement":"They were fragile and could not leave a scratch.","entailment":[],"neutral":[{"annotator":0,"id":"152-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if they were fragile.","self_corrected":true},{"annotator":1,"id":"152-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It's not clear from the context whether \"they\" are fragile or not.","self_corrected":false},{"annotator":3,"id":"152-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"irrelevant","self_corrected":false}],"contradiction":[{"annotator":2,"id":"152-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, because knives used for slaughter 
are usually sharp.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1356","context":"So far, however, the number of mail pieces lost to alternative bill-paying methods is too small to have any material impact on First-Class volume.","statement":"Occasionally mail is lost but not often","entailment":[{"annotator":0,"id":"1356-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The number is too small indicates that mail loss is not often.","self_corrected":false},{"annotator":2,"id":"1356-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because number of lost mail pieces is too small.","self_corrected":false},{"annotator":3,"id":"1356-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"the number of mail lost is too small. So it means it is not often to lose the mails","self_corrected":false}],"neutral":[{"annotator":1,"id":"1356-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The context only talks about the impact of losses because of \"alternative bill-paying methods\". 
Mail could also be lost for other reasons.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"336","context":"The draft treaty was Tommy's bait.","statement":"Tommy took the bait of the treaty.","entailment":[],"neutral":[{"annotator":0,"id":"336-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if Tommy took the bait of the treaty.","self_corrected":false},{"annotator":1,"id":"336-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The treaty acts as bait for Tommy, but it is not clear whether he really took it.","self_corrected":false},{"annotator":3,"id":"336-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known if Tommy took the bait.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"336-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, Tommy is the person who set the bait, but not the one took the bait.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"750","context":"All were prominent nationally known organizations.","statement":"The only identified organizations were well-known.","entailment":[{"annotator":1,"id":"750-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":0,"id":"750-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention identified organizations.","self_corrected":false},{"annotator":2,"id":"750-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The only identified organizations could be well-known or not well-known.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"750-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false}],"label_correction":true,"reason":"All the organizations were 
well-known","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"523","context":"Why bother to sacrifice your lives for dirt farmers and slavers?","statement":"No one cares about the dirt farmers and slaves.","entailment":[],"neutral":[{"annotator":0,"id":"523-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context indicates the speaker's attitude toward the dirt farmers and slaves, not everyone's attitude.","self_corrected":true},{"annotator":1,"id":"523-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Judging by the context, the speaker probably does not care for \"dirt farmers\" and \"slavers\". It does not follow that they think that no one cares for them. Also the statement talks about \"slaves\" which are not mentioned at all.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"523-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The person at whom this question was directed at, cared about the farmers.","self_corrected":false}],"idk":[2],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"526","context":"Jon twisted the man's wrist.","statement":"Jon grabbed the man.","entailment":[{"annotator":0,"id":"526-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is clear that Jon grabbed the man by twisting his wrist.","self_corrected":false},{"annotator":1,"id":"526-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In order to twist the wrist, Jon has to first grab the man.","self_corrected":false},{"annotator":3,"id":"526-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"To twist, one would need to grab","self_corrected":false}],"neutral":[{"annotator":2,"id":"526-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The man could be grabbed by Jon, but also could slip from 
him.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"512","context":"and the same is true of the drug hangover you know if you","statement":"It's just like a drug hangover but worse.","entailment":[],"neutral":[{"annotator":0,"id":"512-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't contain any information about if it is worse.","self_corrected":false},{"annotator":1,"id":"512-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that it's similar to \"drug hangover\" not \"worse\".","self_corrected":false},{"annotator":2,"id":"512-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is similar to the drug hangover, but the extent can be worse or better.","self_corrected":false},{"annotator":3,"id":"512-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about where a drug hangover is worse","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1525","context":"and going to school is also always very prohibitive now unless your parents are wealthy","statement":"Wealthy parents are necessary for school.","entailment":[{"annotator":0,"id":"1525-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement talk about the necessity of wealthy parents.","self_corrected":false},{"annotator":1,"id":"1525-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If \"going to school is [...] prohibitive [...] 
unless your parents are wealthy\" then \"wealthy parents are necessary for school\".","self_corrected":false},{"annotator":3,"id":"1525-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The school are too expensive to go to, making wealthy parents necessary to pay for the tuition","self_corrected":false}],"neutral":[{"annotator":2,"id":"1525-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Wealthy parents can make going to school easier, but maybe without wealthy parents, it is still possible.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"284","context":"In his effort to build nationalism across Turkey in the 1920s, Ataterk instituted a campaign to suppress Kurdish identity that continues today.","statement":"In 1942, Ataterk tried to build nationalism in Turkey.","entailment":[],"neutral":[{"annotator":1,"id":"284-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about the 1920s. It is not clear whether Ataterk still was politically active in the 1940s and whether he continued his campaign.","self_corrected":false},{"annotator":2,"id":"284-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We only can sure that in 1920s, Ataterk tried to build nationalism in Turkey, but in 1942, maybe he tried, maybe not.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"284-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Ataterk tried to build nationalism in Turkey in the 1920s, not in 1942.","self_corrected":false},{"annotator":3,"id":"284-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It should be in 1920s","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"520","context":"AC Green's pretty good","statement":"AC Green is also a solid player.","entailment":[{"annotator":1,"id":"520-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"pretty good\" implies 
\"solid\".","self_corrected":false},{"annotator":3,"id":"520-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"a solid player is a good player","self_corrected":false}],"neutral":[{"annotator":0,"id":"520-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention AC Green's occupation.","self_corrected":false},{"annotator":2,"id":"520-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"AC Green could be a player or do other jobs,","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"120","context":"Candle grease?","statement":"Was it candle grease?","entailment":[{"annotator":0,"id":"120-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement ask if it was candle grease.","self_corrected":false},{"annotator":1,"id":"120-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"120-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"paraphrases","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"292","context":"Part of the reason for the difference in pieces per possible delivery may be due to the fact that five percent of possible residential deliveries are businesses, and it is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses.","statement":"We all know that the reason for a lesser percentage of possible deliveries on rural routes being businesses, is because of the fact that people prefer living in cities rather than rural areas.","entailment":[],"neutral":[{"annotator":0,"id":"292-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the reason why lesser percentage of possible deliveries on rural routes are 
businesses.","self_corrected":false},{"annotator":1,"id":"292-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context whether \"we all know\" that. No group of people that could know something is mentioned in the context.","self_corrected":false},{"annotator":2,"id":"292-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We don't know whether people prefer living in cities or in rural areas.","self_corrected":false},{"annotator":3,"id":"292-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The fact that people prefer living in cities is not known","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"783","context":"'Dave Hanson, to whom nothing was impossible.' Well, we have a nearly impossible task: a task of engineering and building.","statement":"This building job is almost impossible, even for an experienced engineer.","entailment":[{"annotator":0,"id":"783-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We can infer that Dave Hanson is an experienced enginneer since nothing was impossible to him. 
The task is impossible since it is impossible to Dave Hanson, an experienced engineer.","self_corrected":false},{"annotator":1,"id":"783-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the task is \"nearly impossible\" then it is also \"almost impossible\" for an experienced engineer.","self_corrected":false}],"neutral":[{"annotator":3,"id":"783-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It should be the job of engineering and building together","self_corrected":true}],"contradiction":[{"annotator":2,"id":"783-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, because nothing was impossible to Dava Hanson.","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["contradiction","neutral"],"has_ambiguity":false} -{"id":"1231","context":"uh-huh and is it true i mean is it um","statement":"It's true.","entailment":[{"annotator":0,"id":"1231-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and the statement sugguest that it is true.","self_corrected":true},{"annotator":1,"id":"1231-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":3,"id":"1231-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"context is a question","self_corrected":true}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":[],"error_labels":["entailment","neutral"],"has_ambiguity":false} -{"id":"275","context":"36 AC usage nationally for mercury control from power plants should be roughly proportional to the total MWe of coal-fired facilities that are equipped with the technology (this assumes an average capacity factor of 85 percent and other assumptions of Tables 4-4 and 4-5).","statement":"Power plants' mercury control AC usage is higher than total MWe from coal facilities.","entailment":[],"neutral":[{"annotator":0,"id":"275-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that Power plants' mercury control AC usage is proportional to total MWe from coal facilities. 
It is not clear if the usage should be lower or higher than total MWe.","self_corrected":false},{"annotator":1,"id":"275-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is \"proportional\". This could be higher or lower.","self_corrected":false},{"annotator":2,"id":"275-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Power plants' mercury control AC usage is proportional to the total MWe from coal facilities, so it could be more and could be less.","self_corrected":false},{"annotator":3,"id":"275-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not mentioned if the AC usage is higher than total MWe from coal facilities, but only in roughly proportional.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"920","context":"The results of even the most well designed epidemiological studies are characterized by this type of uncertainty, though well-designed studies typically report narrower uncertainty bounds around the best estimate than do studies of lesser quality.","statement":"All studies have the same amount of uncertainty to them.","entailment":[{"annotator":0,"id":"920-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"Well-designed studies and studies of lesser quality have different amount of uncertainty to them.","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":1,"id":"920-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"well-designed studies typically report narrower uncertainty bounds\" means that they have less uncertainty than other types of studies. 
","self_corrected":false},{"annotator":2,"id":"920-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, well-designed studies typically report narrower uncertainty bounds around the best estimate than do studies of lesser quality.","self_corrected":false},{"annotator":3,"id":"920-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"well-designed studies has less amount of uncertainty","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1077","context":"The great breathtaking Italian adventure remains the road.","statement":"The road remains the Italy people want to see.","entailment":[],"neutral":[{"annotator":0,"id":"1077-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention what Italy people want to see.","self_corrected":false},{"annotator":2,"id":"1077-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The road remains the great breathtaking Italian adventure, but Italy people could like adventure, could not.","self_corrected":false}],"contradiction":[],"idk":[1,3],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1479","context":"Earlier this week, the Pakistani paper Dawn ran an editorial about reports that Pakistani poppy growers are planning to recultivate opium on a bigger scale because they haven't received promised compensation for switching to other crops.","statement":"Pakistani poppy growers are mad at the government.","entailment":[{"annotator":0,"id":"1479-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Pakistani poppy growers haven't received compensation, which could be a reason why they might be mad at the government. 
Planning to recultivate opium on a bigger scale could be a sideways indication that they're mad.","self_corrected":false},{"annotator":2,"id":"1479-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Yes, because Pakistani poppy growers haven't received promised compensation.","self_corrected":false},{"annotator":3,"id":"1479-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because the growers haven't received promised compensation from the government, so they can be mad at government because of it","self_corrected":false}],"neutral":[{"annotator":1,"id":"1479-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the poppy growers are really mad. Maybe they did not care much about the promised compensations.","self_corrected":false},{"annotator":2,"id":"1479-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maybe Pakistani poppy are mad at the government, maybe they are not mad.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1503","context":"Then you're ready for the fray, either in the bustling great bazaars such as Delhi's Chandni Chowk or Mumbai's Bhuleshwar, or the more sedate ambience of grander shops and showrooms.","statement":"All of the great bazaars are bustling at all times.","entailment":[],"neutral":[{"annotator":0,"id":"1503-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear if all of the great bazaars are bustling at all times. 
The context only mentions two.","self_corrected":false},{"annotator":1,"id":"1503-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the bazaars are really bustling at all times or only at some times.","self_corrected":false},{"annotator":2,"id":"1503-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe great bazaars are bustling at all times, maybe only at day time or at night.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1503-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"In the context, only some bustling great bazaars were named, but it does not mean all of the great bazaars are bustling","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1150","context":"The importer pays duties that are required by law","statement":"Imported goods have duties","entailment":[{"annotator":0,"id":"1150-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Both the context and the statement show that imported products have duties.","self_corrected":false},{"annotator":1,"id":"1150-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the importer has to pay duties then imported goods have duties on them.","self_corrected":false},{"annotator":2,"id":"1150-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, Imported goods have duties that are required by law.","self_corrected":false}],"neutral":[],"contradiction":[{"annotator":3,"id":"1150-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The importers have duties not the imported goods","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":1.0,"entailment":3.0,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"865","context":"uh but you could fill a whole bunch of uh holes with these things i used to i used to advertise buying wheat pennies um i'd give a dollar a roll which two cents a piece which is basically overpriced","statement":"I made a good dollar while selling 
them.","entailment":[],"neutral":[{"annotator":0,"id":"865-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if the speaker sold them.","self_corrected":false},{"annotator":1,"id":"865-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear how well the selling went.","self_corrected":false},{"annotator":3,"id":"865-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not clear context, but seems to be irrelevant to statement","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1057","context":"He walked out into the street and I followed.","statement":"I followed him down the street.","entailment":[{"annotator":0,"id":"1057-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context and the statement talk about the speaker following him down the street.","self_corrected":false},{"annotator":1,"id":"1057-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"1057-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He walked out on to street and I followed him. 
So naturally, I was also on the street, following him","self_corrected":false}],"neutral":[{"annotator":2,"id":"1057-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maybe I followed him down the street, maybe I followed him up.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"800","context":"I guess history repeats itself, Jane.","statement":"I truly think the prior situation shows history repeats itself.","entailment":[{"annotator":2,"id":"800-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"I truly think history repeats itself.","self_corrected":false},{"annotator":3,"id":"800-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"paraphrases","self_corrected":true}],"neutral":[{"annotator":0,"id":"800-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify what shows that history repeats itself.","self_corrected":false},{"annotator":1,"id":"800-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether the speaker truly believes that history repeats itself. 
They could also use it figuratively.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1345","context":"Their supplies scarce, their harvest meager, and their spirit broken, they abandoned the fort in 1858.","statement":"Their supplies remained very low and hard to maintain.","entailment":[{"annotator":0,"id":"1345-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is true because the context suggests that their supplies were scarce and they abandoned the fort.","self_corrected":false},{"annotator":1,"id":"1345-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The supplies probably remained low because otherwise they might not have abandoned the fort.","self_corrected":false},{"annotator":2,"id":"1345-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, their supplies are scarce and they abandoned the fort.","self_corrected":false},{"annotator":3,"id":"1345-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"scarce means insufficient, so their supplies were low","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"774","context":"Sphinxes were guardian deities in Egyptian mythology and this was monumental protection, standing 73 m (240 ft) long and 20 m (66 feet) high.","statement":"Sphinxes guarded people.","entailment":[{"annotator":0,"id":"774-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly suggests that Sphinxes were guardian deities.","self_corrected":true},{"annotator":2,"id":"774-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, Sphinxes were guardian deities.","self_corrected":false}],"neutral":[{"annotator":1,"id":"774-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context what exactly sphinxes 
guard.","self_corrected":false},{"annotator":2,"id":"774-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"In Egyptian mythology, Sphinxes were guardian deities, but in reality, maybe they guarded people, maybe not. ","self_corrected":false},{"annotator":3,"id":"774-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what Sphinxes guarded, it could be people or a temple.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"976","context":"Next, you enter the vast and splendid Imperial Hall, with three handsome marble fountains, and a canopied throne from which the sultan would enjoy the music and dancing of his concubines.","statement":"The sultan enjoyed drinking from the marble fountains in the Imperial Hall.","entailment":[],"neutral":[{"annotator":0,"id":"976-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't talk about the sultan drinking.","self_corrected":false},{"annotator":1,"id":"976-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether the Sultan drank from the fountains.","self_corrected":false},{"annotator":2,"id":"976-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe the sultan enjoyed drinking from the marble fountains, maybe he didn't like it.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"976-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"He enjoyed music and dancing there ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"720","context":"although the uh it's uh it we almost one day we painted the house to uh we painted we painted the whole inside and it had all this dark trim we thought uh you know we did the one wall but the other trim i'm trying to think i think i think we left most of it because it gets to be uh they don't do that in the newer houses now we don't the uh mold everything is white in a new house everything is white","statement":"It took over a day to paint the 
house","entailment":[],"neutral":[{"annotator":1,"id":"720-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"If it took \"almost a day\" then it took less not more than one day.","self_corrected":true}],"contradiction":[{"annotator":0,"id":"720-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They took almost one day to paint the house, which is less than a day.","self_corrected":false},{"annotator":2,"id":"720-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, it took almost one day to paint the house.","self_corrected":false},{"annotator":3,"id":"720-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It took almost one day, so less than a day","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":3.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["contradiction"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"556","context":"After their savage battles, the warriors recuperated through meditation in the peace of a Zen monastery rock garden.","statement":"The warriors recuperated through mediation learned from monks.","entailment":[],"neutral":[{"annotator":0,"id":"556-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention from whom the warriors learned to meditate.","self_corrected":false},{"annotator":1,"id":"556-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They only meditated in a monastery. 
It is not clear from whom they learned the meditation.","self_corrected":false},{"annotator":2,"id":"556-neutral-3","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe there are monks in the Zen monastery rock garden, maybe there are not.","self_corrected":false},{"annotator":3,"id":"556-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not known if they learned it from monks ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1133","context":"because otherwise it's too it gets if you start them when it's cooler in the spring then it gets too hot in the summer","statement":"You should start them during Spring if you want them to be cool during the summer.","entailment":[],"neutral":[{"annotator":1,"id":"1133-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether \"they\" get too hot or whether it gets too hot for them.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1133-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, it gets too hot in the summer.","self_corrected":false},{"annotator":3,"id":"1133-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"One should start in the spring, cause it's hot summer and it's cool in spring. 
","self_corrected":false}],"idk":[0],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1241","context":"As he stepped across the threshold, Tommy brought the picture down with terrific force on his head.","statement":"Tommy hurt his head bringing the picture down.","entailment":[{"annotator":0,"id":"1241-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is true since the picture that Tommy brought down hit him in the head.","self_corrected":false},{"annotator":1,"id":"1241-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Tommy probably hurt his head because a picture hit his head with \"terrific force\".","self_corrected":false},{"annotator":2,"id":"1241-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, Tommy hurt his head with the picture.","self_corrected":false}],"neutral":[{"annotator":2,"id":"1241-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Tommy should hit another guy not himself, but in the statement, it could be understood as Tommy hurt himself, or another guy.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1241-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"He is not hurt but rather bad strong emotion ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"137","context":"What the judge really wants are the facts -- he wants to make a good decision, he said.","statement":"In the end the judge made a bad decision since he imprisoned someone innocent.","entailment":[],"neutral":[{"annotator":0,"id":"137-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't suggest what kind of decision the judge made.","self_corrected":false},{"annotator":1,"id":"137-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The judge could have made a bad decision even if his intentions were to only rely on 
facts.","self_corrected":false},{"annotator":2,"id":"137-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"We only know that the judge wants to make a good decision, but the desicion could be actually good or not.","self_corrected":false},{"annotator":3,"id":"137-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about what happened in the end ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1584","context":"We still espouse a God-given right of human beings to use the environment for their benefit, says Barrett Duke of the Southern Baptists.","statement":"Human beings are entitled to the environment.","entailment":[{"annotator":0,"id":"1584-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement refer to the right of human beings to use the environment.","self_corrected":false},{"annotator":1,"id":"1584-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"God-given right of human beings to use the environment\" implies that humans \"are entitled to the environment\".","self_corrected":false},{"annotator":3,"id":"1584-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because it's god given right. So human are born to have this right","self_corrected":false}],"neutral":[{"annotator":2,"id":"1584-neutral-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Barrett Duke of the Southern Baptists believes human beings are entitled to the environment, but the facts can be not.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"914","context":"The Palace of Jahangir is built around a square court with arches.","statement":"The Palace of Jahangir houses a wonderful square court, complete with arches.","entailment":[{"annotator":1,"id":"914-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"is built around a square court with arches\" implies \"houses a wonderful square court [...] 
with arches","self_corrected":false},{"annotator":2,"id":"914-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, the Palace of Jahangir is built around a square court with arches.","self_corrected":false},{"annotator":3,"id":"914-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The place is built around the square, so the place houses it ","self_corrected":false}],"neutral":[{"annotator":0,"id":"914-neutral-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context whether the square court is wonderful or not.","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["entailment"],"error_labels":["neutral"],"has_ambiguity":false} -{"id":"1341","context":"His proud reserve--a product of 40 years in the spotlight--is refreshing but does not bode well for his capacity to shepherd big ideas through Congress.","statement":"He is way too loud.","entailment":[{"annotator":0,"id":"1341-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The word \"loud\" in the statement may have two kinds of meanings. 
If it refers to flashy, then it fits the description of him in the context.","self_corrected":false},{"annotator":2,"id":"1341-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, his proud reserve is a product of 40 years in the spotlights, but does not bode well.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1341-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If \"loud\" refers to noisy, the statement may be true or false since the context doesn't mention anything about his voice.","self_corrected":false},{"annotator":1,"id":"1341-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether he is too loud or whether his proudness shows in other ways.","self_corrected":false},{"annotator":3,"id":"1341-neutral-3","judgments":[{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1176","context":"Unless the report is restricted by law or regulation, auditors should ensure that copies be made available for public inspection.","statement":"This report is most likely restricted by law or regulation and should not be ensured.","entailment":[{"annotator":2,"id":"1176-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, being restricted by law or regulation is an exception under which this report's copies should not be made available for public inspection.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1176-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't suggest whether this report is restricted or not.","self_corrected":false},{"annotator":1,"id":"1176-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only states what should happen if the report is not restricted. 
It does not say anything about whether it is restricted.","self_corrected":false},{"annotator":3,"id":"1176-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about a specific report ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1267","context":"yep because it's when it's self propelled it's heavy yeah","statement":"it's heavy when it's self propelled, in case you were wondering","entailment":[{"annotator":0,"id":"1267-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that it is heavy when it is self propelled.","self_corrected":false},{"annotator":1,"id":"1267-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"1267-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Same sentence: it's heavy when it's self propelled ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"346","context":"Christ on a crutch, what does he have to do to lose your support, stab David Geffen with a kitchen knife?","statement":"Your support is unwavering.","entailment":[{"annotator":1,"id":"346-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the only way to lose one person's support is to stab someone then the support is unwavering.","self_corrected":false},{"annotator":2,"id":"346-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because it used exaggeration to prove that you will always support him.","self_corrected":false},{"annotator":3,"id":"346-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"Your\" support is so unwavering that he has to do something very extreme to lose your support 
","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":3.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1596","context":"oh really it wouldn't matter if we plant them when it was starting to get warmer","statement":"It is better to plant when it is colder.","entailment":[],"neutral":[{"annotator":1,"id":"1596-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If it doesn't matter when they plant them when it gets warmer, then it is not better to plant when it is colder.","self_corrected":false},{"annotator":2,"id":"1596-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"1) The question mark of context is missing, the end of the sentence could be a question mark. 2) Maybe it is better to plant when it is complete warm or even hot.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1596-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context clearly suggests that it is fine to plant when it is warmer.","self_corrected":false},{"annotator":3,"id":"1596-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It doesn't matter if it's warmer or colder ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"827","context":"and they just put instructors out there and you you sign up for instruction and they just give you an arm band and if you see an instructor who's not doing anything you just tap him on the shoulder and ask him questions and they'll show you things","statement":"The instructors are marked with armbands, and anytime you want to know anything, you just find one of them. 
","entailment":[{"annotator":1,"id":"827-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"827-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The contexte only mentions that they will give the speaker an arm band, it is not clear if the instructors are marked with armbands.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"827-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, I am marked with armbands and I need instructoion.","self_corrected":false},{"annotator":3,"id":"827-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not anytime, the instructor has to be free first ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","contradiction"],"error_labels":["entailment"],"has_ambiguity":true} -{"id":"943","context":"'I see.'","statement":"It was clear","entailment":[{"annotator":1,"id":"943-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"943-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If I see means literally to see ","self_corrected":false}],"neutral":[{"annotator":2,"id":"943-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maybe it is not clear but I understand it anyway.","self_corrected":false},{"annotator":3,"id":"943-neutral-2","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant if I see means I understand ","self_corrected":false}],"contradiction":[],"idk":[0],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1230","context":"He found himself thinking in circles of worry and pulled himself back to his problem.","statement":"He could not afford to get distracted from his 
problem.","entailment":[],"neutral":[{"annotator":0,"id":"1230-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Although the context mentions that he pulled himself back to his problem, it is not clear if he could afford to get distracted.","self_corrected":false},{"annotator":1,"id":"1230-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"He might also focus on the problem to distract him from his worries. It is not clear whether the problem really was so important.","self_corrected":false},{"annotator":2,"id":"1230-neutral-3","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"His problem may be related to money, also may not.","self_corrected":false},{"annotator":3,"id":"1230-neutral-4","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Context means his way of thinking does not help him solve his problem ","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"806","context":"How effectively DOD manages these funds will determine whether it receives a good return on its investment.","statement":"These funds are for the purchase of five thousand tons of potatoes.","entailment":[],"neutral":[{"annotator":0,"id":"806-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not specify what these funds are used for.","self_corrected":false},{"annotator":1,"id":"806-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The purpose of the funds is not mentioned at all.","self_corrected":false},{"annotator":2,"id":"806-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The purchase of five thousand tons of potatoes is not given in the context.","self_corrected":false},{"annotator":3,"id":"806-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"221","context":"yeah okay yeah those games are fun to watch you you you watch those games","statement":"Those games are a lot of 
fun.","entailment":[{"annotator":0,"id":"221-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is clear from the context and statement that those games are fun.","self_corrected":false},{"annotator":1,"id":"221-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the games are fun to watch then they are fun (at least in that way).","self_corrected":false},{"annotator":2,"id":"221-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because those games are fun to watch.","self_corrected":false},{"annotator":3,"id":"221-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The games are fun to watch so it's a lot of fun ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"648","context":"Total volume grew 13.","statement":"The expected increase was 10.","entailment":[],"neutral":[{"annotator":1,"id":"648-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It's not clear what the expected increase was.","self_corrected":false},{"annotator":2,"id":"648-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The expected increase maybe more or less than 10.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"648-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Accordding to the context, the increase was 13, not 10.","self_corrected":false},{"annotator":3,"id":"648-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It was 13","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1270","context":"Wear a nicely ventilated hat and keep to the shade in the street.","statement":"The buildings are so low that there is no shade in the 
streets.","entailment":[],"neutral":[{"annotator":0,"id":"1270-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not mention if the buildings are low.","self_corrected":false},{"annotator":2,"id":"1270-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe the buildings are not low, but the street is too wide.","self_corrected":false},{"annotator":3,"id":"1270-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Irrelevant ","self_corrected":false}],"contradiction":[{"annotator":1,"id":"1270-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"If the recommendation is to stay in the shade of the buildings then there are probably buildings casting shades. Otherwise the recommendation would not make sense.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1048","context":"But is the Internet so miraculous an advertising vehicle that Gross will be able to siphon off $400 per person from total ad spending of $1,000 per family--or persuade advertisers to spend an additional $400 to reach each of his customers?","statement":"The internet is so great at advertising that is saved Gross money.","entailment":[],"neutral":[{"annotator":1,"id":"1048-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context asks the question of whether the Internet is so great at advertising, whereas the statement asserts it.","self_corrected":false},{"annotator":3,"id":"1048-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Context is a question. 
It can not come to a conclusion as in the statement ","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1048-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"No, Gross saved no money, but siphoned money from other people.","self_corrected":true}],"idk":[0],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1350","context":"The questions may need to be tailored to","statement":"A majority of the questions referenced will need to be tailored to.","entailment":[{"annotator":1,"id":"1350-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1350-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify how many questions need to be tailored.","self_corrected":false},{"annotator":2,"id":"1350-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe all the questions referenced will need to be tailored to.","self_corrected":false},{"annotator":3,"id":"1350-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whether the majority of the questions or all of them ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"1564","context":"Traditionally, certain designs were reserved for royalty, but today elegant geometric or exuberant, stylized floral patterns are available to all.","statement":"Designs once reserved for royalty cost more to buy.","entailment":[],"neutral":[{"annotator":0,"id":"1564-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There is no mention of designs' costs in the context.","self_corrected":false},{"annotator":1,"id":"1564-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It doesn't say whether these designs are more expensive.","self_corrected":false},{"annotator":2,"id":"1564-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The price of the designs once reserved for royalty is not 
mentioned in the context.","self_corrected":false},{"annotator":3,"id":"1564-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about the cost ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"844","context":"The street ends at Taksim Square (Taksim Meydane), the heart of modern Istanbul, lined with luxurious five-star hotels and the glass-fronted Ataturk Cultural Centre (Ataturk Keleter Sarayy), also called the Opera House.","statement":"The street is quite a luxurious one.","entailment":[{"annotator":0,"id":"844-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The description of the street in the context shows that it is indeed a luxurious one.","self_corrected":false},{"annotator":3,"id":"844-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"On the street there are fancy centers and a Luxurious hotel","self_corrected":true}],"neutral":[{"annotator":1,"id":"844-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Taksim Square is luxurious. It is not clear whether the street leading to it is also.","self_corrected":false},{"annotator":2,"id":"844-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The detail of the street itself is not mentioned in the context, we only know the end of the street.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1001","context":"Still, I guess that can be got over.","statement":"There are some things that you need to ignore.","entailment":[{"annotator":3,"id":"1001-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Getting over means to move on and stop caring about this. 
Thus to ignore ","self_corrected":false}],"neutral":[{"annotator":0,"id":"1001-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't say anything about whether there's something to ignore.","self_corrected":false},{"annotator":1,"id":"1001-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Getting over something does not necsessarily mean that you should ignore the issues. You could also work through them.","self_corrected":false},{"annotator":2,"id":"1001-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"There are some things that you can ignore, but it is not necessary.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"496","context":"Managing better requires that agencies have, and rely upon, sound financial and program information.","statement":"Agencies that rely on information based on unsound financial information will have management problems.","entailment":[{"annotator":0,"id":"496-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context talks about the importance of sound financial information, o it is natural to deduce that if financial information is not sound, there will be problems.","self_corrected":false},{"annotator":1,"id":"496-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If sound information is required for better managing then unsound information will lead to management problems.","self_corrected":false}],"neutral":[{"annotator":2,"id":"496-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Sound financial information will help manage the agencies better, but without it doesn't meant to have problems, maybe it just makes the process less efficient.","self_corrected":false},{"annotator":3,"id":"496-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Good Management requires sound financial, but unsound financial does noch necessarily Leads to management problems ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"409","context":"Three 
more days went by in dreary inaction.","statement":"The days passed by slowly.","entailment":[{"annotator":0,"id":"409-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The description\"dreary inaction\" implies that the days went by slowly. ","self_corrected":false},{"annotator":1,"id":"409-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":2,"id":"409-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The days may made people feel slow, maybe feel fast.","self_corrected":false},{"annotator":3,"id":"409-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"Dreary inaction means nothing being down. It does not mean slowly ","self_corrected":true}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"265","context":"The party's broad aims were to support capitalist policies and to continue close ties with Britain and the rest of the Commonwealth.","statement":"The party sought to establish ties with the United States.","entailment":[],"neutral":[{"annotator":0,"id":"265-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention anything about establishing ties with the United States.","self_corrected":false},{"annotator":1,"id":"265-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The United States are not mentioned in the context.","self_corrected":false}],"contradiction":[{"annotator":2,"id":"265-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"No, the party had close ties with the United States already.","self_corrected":true},{"annotator":3,"id":"265-contradiction-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They aim to maintain ties with Britain and its commonwealth ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"1208","context":"As the budgets, functions, and points of service of 
many government programs devolve to state and local government, private entities and nonprofit organizations, and other third parties, it may become harder for GAO to obtain the records it needs to complete audits and evaluations.","statement":"Audits and evaluations are harder because it is more difficult for GAO to get the records.","entailment":[{"annotator":2,"id":"1208-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"True, because it may become harder for GAO to obtain the records it needs to complete audits and evaluations.","self_corrected":false}],"neutral":[{"annotator":0,"id":"1208-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context only mentions that it may be more difficult to obtain records, which is a possibility rather than an absolute thing.","self_corrected":false},{"annotator":1,"id":"1208-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that it \"may become harder\" not that \"it is more difficult\". These are not the same things because the context describes a future state.","self_corrected":false},{"annotator":2,"id":"1208-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It can be more difficult for GAO to get the records, but maybe it actually doesn't become harder for GAO ro obtain the records.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"1208-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"It was because the budget functions etc. 
of government programs further devolves that makes it harder ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"1298","context":"That's why we tried to kill you.","statement":"That's one of the reasons we wanted to kill you.","entailment":[{"annotator":1,"id":"1298-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":2,"id":"1298-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Maybe it is the only reason that we wanted to kill you, maybe that is just one of the reasons.","self_corrected":false}],"contradiction":[{"annotator":0,"id":"1298-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context implies that there is one reason, while the statement suggests that therea are multiple reasons.","self_corrected":false},{"annotator":3,"id":"1298-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"From the context we know it might be the only reason ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_count_round_2":{"contradiction":2.0,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["contradiction","neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"23","context":"The inquiry expanded very quickly, however, from asking what technology failed to an examination of contextual influences, such as","statement":"They moved they inquiries over from technology failing because they thought it may be something else.","entailment":[{"annotator":1,"id":"23-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"\"expanded [...] from asking what technology failed to [...] 
contextual influences\" means that they \"moved inquiries from technology failing because they thought it may be something else\".","self_corrected":false}],"neutral":[{"annotator":0,"id":"23-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention the reason of moving inquiries over from technology failing.","self_corrected":false},{"annotator":2,"id":"23-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe they thought it may be something else, maybe they just can not sure, what is the true reason.","self_corrected":false}],"contradiction":[],"idk":[3],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"638","context":"it's just it's the morals of the people which i mean i guess we everybody's responsible for the society but if i had a child that that did things so bad it's not they don't care about anybody these people they're stealing from they're just the big bad rich guy","statement":"I have no issue with people stealing from others.","entailment":[{"annotator":1,"id":"638-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"they don't care about anybody these people they're stealing from\" shows that the speaker has issues with people stealing.","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":3,"id":"638-contradiction-1","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"I have no problem people stealing from bag rich people ","self_corrected":true}],"idk":[0,2],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":[],"error_labels":["contradiction","entailment"],"has_ambiguity":false} -{"id":"1500","context":"Endorphins were flowing.","statement":"My endorphins were flowing.","entailment":[{"annotator":1,"id":"1500-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"The statement is a paraphrase of the context.","self_corrected":true}],"neutral":[{"annotator":0,"id":"1500-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify whose endorphins were flowing.","self_corrected":false},{"annotator":2,"id":"1500-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It could be my endorphins, but also could be yours or 
anyone's.","self_corrected":false},{"annotator":3,"id":"1500-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about whose endorphins it is ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral"],"error_labels":["entailment"],"has_ambiguity":false} -{"id":"704","context":"Students of human misery can savor its underlying sadness and futility.","statement":"Students of human misery will be delighted to see how sad it truly is.","entailment":[{"annotator":1,"id":"704-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"\"can savor\" implies \"will be delighted\".","self_corrected":false}],"neutral":[{"annotator":0,"id":"704-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if the students will be delighted.","self_corrected":true},{"annotator":2,"id":"704-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Students of human misery can \"savored\" that sadness, so maybe they are delighted to see that, maybe they are tortured by the disasters.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"704-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"Savor means to understand. 
Not to enjoy ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":1.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"612","context":"really oh i thought it was great yeah","statement":"that was a nice experience","entailment":[{"annotator":0,"id":"612-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement say that it was good.","self_corrected":false},{"annotator":1,"id":"612-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"it was great\" means that it was \"a nice experience\".","self_corrected":false},{"annotator":2,"id":"612-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because I thought it was great","self_corrected":false}],"neutral":[{"annotator":2,"id":"612-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Maybe it was a great experience, maybe it was a great present or something else.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"612-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"He thought it was a great experience. But in the context there was an element of surprise. 
So it was not a great experience after all ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":3.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":3.0,"neutral":1.0},"label_set_round_1":["contradiction","neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":["contradiction"],"has_ambiguity":true} -{"id":"356","context":"By coordinating policy development and awareness activities in this manner, she helps ensure that new risks and policies are communicated promptly and that employees are periodically reminded of existing policies through means such as monthly bulletins, an intranet web site, and presentations to new employees.","statement":"There new employees are a risk.","entailment":[],"neutral":[{"annotator":0,"id":"356-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't mention whether new employees are a risk or not.","self_corrected":false},{"annotator":1,"id":"356-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Not the employees are a risk but they should be made aware of the risks.","self_corrected":false},{"annotator":2,"id":"356-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The situation of new employees is not given in the context.","self_corrected":false}],"contradiction":[{"annotator":3,"id":"356-contradiction-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Nothing about the new employee being a risk ","self_corrected":true}],"idk":[],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":3.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral"],"error_labels":["contradiction"],"has_ambiguity":false} -{"id":"1567","context":"HCFA published a Notice of Proposed Rulemaking on March 28, 1997 (62 Fed.","statement":"HCFA tried to keep everyone informed about the rules they were making.","entailment":[{"annotator":0,"id":"1567-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Publishing a Notice is a way HCFA tried to keep everyone informed.","self_corrected":false},{"annotator":3,"id":"1567-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They published the rules. So theoretically everyone can see it. 
","self_corrected":false}],"neutral":[{"annotator":1,"id":"1567-neutral-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"It is not clear whether they tried to keep everyone informed or whether they published the notice only because they had to.","self_corrected":false},{"annotator":2,"id":"1567-neutral-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"HCFS published a Notice, but the notice may have tried to keep everyone informed, maybe just made it quitely.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":2.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"93","context":"Suddenly she started, and her face blanched.","statement":"She moved swiftly, her face pale.","entailment":[{"annotator":1,"id":"93-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false}],"neutral":[{"annotator":0,"id":"93-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context doesn't specify what she started doing, it could be moving, thinking or talking.","self_corrected":false},{"annotator":2,"id":"93-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"She \"started\" can mean she moved swiftly, but also can do other actions, like crying, singing, etc.","self_corrected":false},{"annotator":3,"id":"93-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about her moving swiftly ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":3.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"945","context":"Reports on attestation engagements should state that the engagement was made in accordance with generally accepted government auditing standards.","statement":"Details regarding validation engagements ought to express that the engagement was made as per by and large acknowledged government evaluating guidelines.","entailment":[{"annotator":0,"id":"945-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and statement suggest the same requirement for 
reports.","self_corrected":false},{"annotator":3,"id":"945-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Generally accepted is synonym to large acknowledged ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1266","context":"In other cases, we must rely on survey approaches to estimate WTP, usually through a variant of the contingent valuation approach, which generally involves directly questioning respondents for their WTP in hypothetical market situations.","statement":"Hypothetical market situations are uniform across all respondents.","entailment":[],"neutral":[{"annotator":1,"id":"1266-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true}],"label_correction":false,"reason":"It is not clear whether they use the same market situtations for all people they ask.","self_corrected":false},{"annotator":3,"id":"1266-neutral-2","judgments":[{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about uniformity across the respondents","self_corrected":false}],"contradiction":[{"annotator":2,"id":"1266-contradiction-1","judgments":[{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, if market situations are uniform, then \"a variant of the contingent valuation approach\" is not necessary. 
","self_corrected":false}],"idk":[0],"label_count_round_1":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_count_round_2":{"contradiction":1.0,"entailment":null,"neutral":2.0},"label_set_round_1":["neutral","contradiction"],"label_set_round_2":["neutral","contradiction"],"error_labels":[],"has_ambiguity":true} -{"id":"530","context":"General Motors, for instance, lost $460 million to strikes in 1997, but investors treated the costs as a kind of extraordinary charge and valued the company as if the losses had never happened.","statement":"GM lost a lot almost a million dollars in labor disputes.","entailment":[],"neutral":[],"contradiction":[{"annotator":0,"id":"530-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"GM lost more than a million dollars.","self_corrected":false},{"annotator":1,"id":"530-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"GM lost over 460 million which is much more than one million.","self_corrected":false},{"annotator":2,"id":"530-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No, GM lost $460 million in labor disputes, far more than a million.","self_corrected":false},{"annotator":3,"id":"530-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They lost 460 million ","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":null,"neutral":null},"label_count_round_2":{"contradiction":4.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction"],"label_set_round_2":["contradiction"],"error_labels":[],"has_ambiguity":false} -{"id":"170","context":"was it bad","statement":"Was it not good?","entailment":[{"annotator":1,"id":"170-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the context.","self_corrected":false},{"annotator":3,"id":"170-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Bad and not good are synonyms ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[0,2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"1163","context":"Agencies may perform the analyses required by sections 603 and 604 in conjunction with or as part of any other agenda or analysis required by other law if such other analysis satisfies the provisions of these sections.","statement":"There are many times when the agencies have gotten in 
trouble.","entailment":[],"neutral":[{"annotator":0,"id":"1163-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"It is not clear from the context if th eagencies have gotten in trouble.","self_corrected":false},{"annotator":1,"id":"1163-neutral-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context does not say anything about trouble.","self_corrected":false},{"annotator":2,"id":"1163-neutral-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"The conditions of trouble is not offered in the context.","self_corrected":false},{"annotator":3,"id":"1163-neutral-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"No info about agencies in trouble ","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":null,"neutral":4.0},"label_count_round_2":{"contradiction":null,"entailment":null,"neutral":4.0},"label_set_round_1":["neutral"],"label_set_round_2":["neutral"],"error_labels":[],"has_ambiguity":false} -{"id":"1095","context":"and to have children and just get a day care or someone to take care of it and not really have the bonding process that takes place with babies and stuff you know","statement":"The children should not go to day car.","entailment":[{"annotator":1,"id":"1095-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"\"just get a day care [...] 
and not really have the bonding process\" sound like the speaker is opposed to day care.","self_corrected":false},{"annotator":3,"id":"1095-entailment-2","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Because the bonding process will be missed ","self_corrected":true}],"neutral":[{"annotator":0,"id":"1095-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The context says that it is not good if sending children to day care is the only way to take care of them, but it doesn't mention if children shouldn't go to day care at all.","self_corrected":false}],"contradiction":[],"idk":[2],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":1.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"427","context":"The entire setup has an anti-competitive, anti-entrepreneurial flavor that rewards political lobbying rather than good business practices.","statement":"The setup has lead to increases in political lobbying.","entailment":[{"annotator":1,"id":"427-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"If the setup rewards political lobbying then it will likely lead to increases in lobbying.","self_corrected":false},{"annotator":3,"id":"427-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Because political lobbying is rewarded ","self_corrected":false}],"neutral":[{"annotator":0,"id":"427-neutral-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":false}],"label_correction":false,"reason":"Rewarding political lobbying does not necessarily mean an actual increase in political lobbying.","self_corrected":false}],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_count_round_2":{"contradiction":null,"entailment":2.0,"neutral":1.0},"label_set_round_1":["neutral","entailment"],"label_set_round_2":["neutral","entailment"],"error_labels":[],"has_ambiguity":true} -{"id":"1196","context":"I'm confused.","statement":"Not all of it is very clear to me.","entailment":[{"annotator":0,"id":"1196-entailment-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"Both the context and the statement suggest that the speaker does not understand.","self_corrected":false},{"annotator":1,"id":"1196-entailment-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The statement is a paraphrase of the 
context.","self_corrected":false},{"annotator":2,"id":"1196-entailment-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"True, because \"confused\" means, at lease one thing is not very clear to me.","self_corrected":false},{"annotator":3,"id":"1196-entailment-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"When someone is confused , then not everything is clear to him ","self_corrected":false}],"neutral":[],"contradiction":[],"idk":[],"label_count_round_1":{"contradiction":null,"entailment":4.0,"neutral":null},"label_count_round_2":{"contradiction":null,"entailment":4.0,"neutral":null},"label_set_round_1":["entailment"],"label_set_round_2":["entailment"],"error_labels":[],"has_ambiguity":false} -{"id":"22","context":"They made little effort, despite the Jesuit presence in Asia, to convert local inhabitants to Christianity or to expand their territory into the interior.","statement":"The Jesuit presence in Asia helped to convert local residents to Christianity, allowing them to expand their territory. ","entailment":[{"annotator":0,"id":"22-entailment-1","judgments":[{"annotator":0,"makes_sense":false},{"annotator":1,"makes_sense":false},{"annotator":2,"makes_sense":false},{"annotator":3,"makes_sense":false}],"label_correction":true,"reason":"Both the context and the statement suggest that the speaker does not understand.","self_corrected":true}],"neutral":[],"contradiction":[{"annotator":0,"id":"22-contradiction-1","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Jesuit presence didn't make much effort to convert local residents to Christianity or to expand their territory.","self_corrected":false},{"annotator":1,"id":"22-contradiction-2","judgments":[{"annotator":0,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They did not try to expand their territory.","self_corrected":true},{"annotator":2,"id":"22-contradiction-3","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"The Jesuit did not make effort to convert local residents to Christianity, or to expand their territory. ","self_corrected":false},{"annotator":3,"id":"22-contradiction-4","judgments":[{"annotator":0,"makes_sense":true},{"annotator":1,"makes_sense":true},{"annotator":2,"makes_sense":true},{"annotator":3,"makes_sense":true}],"label_correction":false,"reason":"They made little effort to convert the locals or to expand the their territory. 
So they did not help.","self_corrected":false}],"idk":[],"label_count_round_1":{"contradiction":4.0,"entailment":1.0,"neutral":null},"label_count_round_2":{"contradiction":3.0,"entailment":null,"neutral":null},"label_set_round_1":["contradiction","entailment"],"label_set_round_2":["contradiction"],"error_labels":["entailment"],"has_ambiguity":false} +{"id": "23751e", "context": "Part of the reason for the difference in pieces per possible delivery may be due to the fact that five percent of possible residential deliveries are businesses, and it is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses.", "statement": "It is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses, and part of the reason for the difference in pieces per possible delivery, may be due to the fact that five percent of possible residential deliveries are businesses.", "entailment": [{"annotator": 2, "id": "664-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Statement just changed the order of two hypothesis in the context.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 0, "id": "664-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason for the diffenrence in pieces per possible delivery mentioned in the context is that the difference percentage of businesses deliveries on residential and rural routes. But the reason in the statement only include the percentage of residential deliveries, not the diffenrence of deliveries.", "self_corrected": false}], "idk": [1, 3], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 85, "n": 13, "c": 2}} +{"id": "61429c", "context": "In this enclosed but airy building, you'll find ladies with large machetes expertly chopping off hunks of kingfish, tuna, or shark for eager buyers.", "statement": "You'll find small lepers chopping of chunks of tuna, its the only place they can work.", "entailment": [], "neutral": [{"annotator": 0, "id": "1265-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether the ladies are small lepers and whether its the only place they can work.", "self_corrected": false}, {"annotator": 1, "id": "1265-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about lepers or where they could work.", "self_corrected": false}, {"annotator": 2, "id": "1265-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], 
"label_correction": false, "reason": "\"Small lepers\" don't have to be \"ladies\"; we don't know whether \"small lepers\" can find other jobs.", "self_corrected": false}, {"annotator": 3, "id": "1265-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Lepers and the only place to work at are not mentioned", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 41, "c": 57, "e": 2}} +{"id": "54811c", "context": "The park on the hill of Monte makes a good playground, while the ride down in a wicker toboggan is straight out of an Old World theme park (though surely tame for older kids).", "statement": "the park on the Hill of Monte is only for children.", "entailment": [], "neutral": [{"annotator": 0, "id": "1552-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether the park is only for children.", "self_corrected": false}, {"annotator": 1, "id": "1552-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Only because the park makes a good playground it doesn't necessarily have to be only for kids.", "self_corrected": false}, {"annotator": 2, "id": "1552-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No proof that the park can not be for adults.", "self_corrected": false}, {"annotator": 3, "id": "1552-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not mentioned that it's only for children", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1552-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The phrase \"makes a good playground\" suggests that the park is not a designated playground and thus is open for all age groups. 
Also, even if the park would be a designated playground, these can typically be also used by adults (at least the parents).", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 61, "c": 22, "e": 17}} +{"id": "12601n", "context": "I touched my palm to his mutilated cheek, and tried to stem my instinctive revulsion.", "statement": "You could see where the bear had scratched across his cheek.", "entailment": [], "neutral": [{"annotator": 0, "id": "285-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context contains no information about him being scratched on the cheek by a bear.", "self_corrected": false}, {"annotator": 1, "id": "285-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about how his cheek was mutilated, i.e. that it was a bear is not entailed.", "self_corrected": false}, {"annotator": 2, "id": "285-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know who/what makes his cheek mutilated.", "self_corrected": false}, {"annotator": 3, "id": "285-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know if it's a bear that scratched him", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 80, "c": 6, "e": 14}} +{"id": "38477c", "context": "She wears either revealing clothes or professional clothes (or perhaps both).", "statement": "She only wears short skirts.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "515-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "She doesn't only wear short skirts. 
She wears revealing clothes or professional clothes, the former don't have to be just short skirts, the latter are most likely not short skirts.", "self_corrected": false}, {"annotator": 2, "id": "515-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Usually, \"professinal clothes\" are not \"short skirts\".", "self_corrected": false}, {"annotator": 3, "id": "515-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "She could also wear professional clothes", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 62, "n": 37, "e": 1}} +{"id": "1735n", "context": "that doesn't seem fair does it", "statement": "That might possibly be fair.", "entailment": [], "neutral": [{"annotator": 1, "id": "1474-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can be interpreted as saying that it seems unfair but that this maybe wouldn't hold under closer inspection. In that case it doesn't really say something about the actual fairness.", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "1474-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context suggests that this may not be fair, while the statement suggests that it may be fair.", "self_corrected": false}, {"annotator": 1, "id": "1474-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can be interpreted as saying that it's really not fair, so it cannot be possibly fair.", "self_corrected": false}, {"annotator": 2, "id": "1474-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context suggests a higher possibility of unfairness in this matter, which the statement does not reflect.", "self_corrected": false}, {"annotator": 3, "id": "1474-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's a rhetorical question. 
The speaker means it's not fair", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 48, "n": 31, "e": 21}} +{"id": "7449e", "context": "In 1982, Wallace won his last race for governor with a quarter of the black votes cast in the Democratic primary, a fact alluded to in a written epilogue at the end of the film.", "statement": "Wallace was reelected as governor.", "entailment": [{"annotator": 0, "id": "1436-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentioned that Wallance won his last race for gevernor, we don't know if this was his first win or if he won again.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1436-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about whether Wallace was governer before his 1982 win.", "self_corrected": false}, {"annotator": 2, "id": "1436-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Wallace was elected as governor, but we don't know whether he was \"reelected\".", "self_corrected": false}, {"annotator": 3, "id": "1436-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know if he was governor before", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 28, "e": 68, "c": 4}} +{"id": "24385e", "context": "farmworkers conducted by the U.S.", "statement": "Some farm laborers were sampled.", "entailment": [], "neutral": [{"annotator": 0, "id": "1338-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context contains no information about whether the conduction sampled the farmworkers.", "self_corrected": false}, {"annotator": 2, "id": "1338-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether it is a census or a sampling survey.", "self_corrected": false}], "contradiction": [], "idk": [1, 3], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, 
"entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 67, "e": 28, "c": 5}} +{"id": "49807n", "context": "The next year, he built himself a palace, Iolani, which can still be toured in Honolulu.", "statement": "Lolani was built in only 1 year.", "entailment": [{"annotator": 1, "id": "362-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"The next year\" can be interpreted as indicating that the building of Lolani was concluded in the same year.", "self_corrected": false}, {"annotator": 3, "id": "362-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was built \"the next year\".", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "362-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context makes no mention of how long it took to build lolani.", "self_corrected": false}, {"annotator": 1, "id": "362-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"The next year\" can be interpreted as indicating that the building of Lolani was started in the next year.", "self_corrected": false}, {"annotator": 2, "id": "362-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know when did Lolani start to be built.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 67, "e": 28, "c": 5}} +{"id": "138448c", "context": "Indeed, recent economic research suggests that investment in information technology explains most of the acceleration in labor productivity growth-a major component of overall economic growth-since 1995.", "statement": "Investment in the financial sector explains most of the acceleration in labor productivity.", "entailment": [], "neutral": [{"annotator": 1, "id": "753-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Some research only suggests that most of the acceleration comes from investments in information technology. 
The research can be wrong and it could still be true that most of the acceleration comes from the financial sector.", "self_corrected": false}, {"annotator": 2, "id": "753-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know whether investment in information technology is a subsector of investment in financial sector, or reverse.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "753-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason of the acceleration in labor productivity is the investment in information technology, not in the financial sector.", "self_corrected": false}, {"annotator": 1, "id": "753-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The the \"suggestion\" of the cited research is strong, then it is likely that most of the acceleration comes from investments in information technologies. This precludes most of the acceleration coming from investments in the financial sector.", "self_corrected": false}, {"annotator": 2, "id": "753-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Investment in information technology explains most of the acceleration in labor productivity, not investment in financial sector.", "self_corrected": false}, {"annotator": 3, "id": "753-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It should be information technology that is invested in", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 60, "e": 35, "n": 5}} +{"id": "48454c", "context": "These revelations were embarrassing to Clinton's opponents, wrote the Washington Post . 
The Sun-Times quoted Rahm Emanuel, Stephanopoulos' successor, on the From Day One I always thought this was politically motivated and had politics written all over it; after five years, it is nice to have the truth catch up with the president's political opponents.", "statement": "Clinton's supporters were pleased with how the hearings went.", "entailment": [], "neutral": [{"annotator": 0, "id": "110-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions clinton's opponents, not clinton's supporters.", "self_corrected": false}, {"annotator": 1, "id": "110-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is about a single supporter [probably?] of Clinton so we cannot deduce that all or most of the supporters were pleased with the hearing.", "self_corrected": false}, {"annotator": 2, "id": "110-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "These revelations were embarrassing to Clinton's opponents, but Clinton's supporters can also be unsatisfied with process for some reasons.", "self_corrected": false}, {"annotator": 3, "id": "110-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know there are hearings", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "110-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It was embarrassing to them", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 17, "n": 43, "e": 40}} +{"id": "73260n", "context": "The disputes among nobles were not the first concern of ordinary French citizens.", "statement": "Ordinary French citizens were not concerned with the disputes among nobles.", "entailment": [{"annotator": 1, "id": "1456-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context, \"The first concern\" can be read as a pars pro toto which would mean that it was really no concern at all.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1456-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"not the first concern\" doesn't mean not the concern. 
The statement can be true or false.", "self_corrected": false}, {"annotator": 1, "id": "1456-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It might not be the most important concern to the French citizens, but maybe an important concern after all.", "self_corrected": false}, {"annotator": 2, "id": "1456-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The disputes among nobles could be second concern of ordinary French citizens.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1456-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They could be concerned. But it is not their first concern", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": ["entailment"], "has_ambiguity": true, "chaosnli_labels": {"n": 18, "e": 72, "c": 10}} +{"id": "76219n", "context": "and i and i may have been the only one that did both because the mentality in Dallas was that you couldn't like both you had to like one and hate the other", "statement": "I did not follow the mentality in Dallas, of liking only one team.", "entailment": [{"annotator": 0, "id": "248-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context is stated that the author likes both, but the mentality in Dallas was that you couldn't like both. 
So the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "248-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states that the mentality in Dallas was to like only one team and \"may have been the only one\" strongly suggests that the author did not adhere to this mentality.", "self_corrected": false}, {"annotator": 2, "id": "248-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I did both instead of only liking one.", "self_corrected": false}, {"annotator": 3, "id": "248-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because I like both", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "248-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know what does \"did both\" mean. It could refer to liking both teams, but also could be others, for example likes both McDonalds and Burger King.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 70, "n": 25, "c": 5}} +{"id": "10229n", "context": "The governing statute provides that a committee consisting of the Comptroller General, the Speaker of the House and President Pro Tempore of the Senate, the Majority and Minority leaders, and the Chairmen and Ranking Minority Members of the Senate Governmental Affairs and House Government Reform Committees recommend an individual to the President for appointment.", "statement": "The process is long and will be reformed in the coming years.", "entailment": [], "neutral": [{"annotator": 0, "id": "531-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context makes no mention about how long the process takes and whether it will be reformed.", "self_corrected": false}, {"annotator": 1, "id": "531-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "While the context could suggest that the process takes long because so many individuals are involved, it does not say anything about reform.", "self_corrected": false}, {"annotator": 2, "id": "531-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Reform\" is not mentioned in 
the context.", "self_corrected": false}, {"annotator": 3, "id": "531-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The time duration of the process is not mentioned", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 18, "n": 79, "e": 3}} +{"id": "99791n", "context": "Even analysts who had argued for loosening the old standards, by which the market was clearly overvalued, now think it has maxed out for a while.", "statement": "Some analysts wanted to make the old standards less restrictive for investors.", "entailment": [{"annotator": 2, "id": "209-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"... analysts ... had argued for loosening the old standards\"", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "209-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that there are analysts who had wanted to make the old standards less restrictive, but doesn't mention for whom should old standards be loosened.", "self_corrected": false}, {"annotator": 1, "id": "209-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context whether the standards should have been loosened specifically for investors.", "self_corrected": false}, {"annotator": 3, "id": "209-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known if the loosening was thought for investors", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 56, "n": 31, "c": 13}} +{"id": "13964e", "context": "uh plastic is just too easy i mean that's the that's the whole problem with it um have", "statement": "I find plastic to be too easy to use.", "entailment": [{"annotator": 0, "id": "975-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context refers to the problem of plastic is that it it too easy. 
So the statement could be true, because too tasy to use is indeed a problem of plastic.", "self_corrected": false}, {"annotator": 1, "id": "975-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states that the speaker finds plastic too easy.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "975-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement could be undetermined because in the context it doesn't explicitly state to what is plastic too easy, could be too easy to use or maybe to produce.", "self_corrected": false}, {"annotator": 2, "id": "975-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Plastics can be just too easy to \"use\", to \"produce\", to \"dump\"...", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 72, "n": 28}} +{"id": "66185c", "context": "The political cleansing that did not happen through the impeachment process leaves Clinton with a great and serious burden.", "statement": "There was no such instance of political cleansing.", "entailment": [{"annotator": 1, "id": "645-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context states that a specific instance of political cleansing did not happen and we can assume that the statement refers to that instance.", "self_corrected": false}, {"annotator": 3, "id": "645-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, because the political cleansing did not happen", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "645-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\u201cThe political cleansing that did not happen through the impeachment process\u201d, but it could happen anywhere else.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "645-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The political cleansing did exist, as mentioned in the context. 
It just didn't happen through the impeachment process.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "n": 16, "c": 9}} +{"id": "65066n", "context": "Larger ski resorts are 90 minutes away.", "statement": "The largest resort is actually 100 minutes away.", "entailment": [], "neutral": [{"annotator": 0, "id": "1592-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the larger ski resorts, not the largest resort.", "self_corrected": false}, {"annotator": 1, "id": "1592-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The largest resort can be among the larger resorts 90 minutes away or it could be even further away.", "self_corrected": false}, {"annotator": 2, "id": "1592-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Larger one and the largest one can be different.", "self_corrected": false}, {"annotator": 3, "id": "1592-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the largest ski resort", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 69, "c": 29, "e": 2}} +{"id": "76020e", "context": "The city was founded in the third millennium b.c. on the north shore of the bay, and reached a peak during the tenth century b.c. , when it was one of the most important cities in the Ionian Federation the poet Homer was born in S myrna during this period.", "statement": "The city was founded in the third millennium", "entailment": [{"annotator": 1, "id": "561-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context states that the city was founded in the third millennium b.c. 
and we can assume that the third millennium mentioned in the statement is also b.c.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 0, "id": "561-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The city was founded in the third millennium b.c., not in the third millennium.", "self_corrected": false}, {"annotator": 2, "id": "561-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\u201cthe third millennium\u201d is different from \u201cthe third millennium b.c.\u201d", "self_corrected": false}, {"annotator": 3, "id": "561-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It should be third millennium bc.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 86, "n": 2, "c": 12}} +{"id": "83248c", "context": "Her state is probably to be attributed to the mental shock consequent on recovering her memory.\"", "statement": "It is too bad that she never regained her memory.", "entailment": [], "neutral": [{"annotator": 3, "id": "1010-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It is not known if she will regain the memory in the future", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "1010-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, she regained her memory. It is incorrect to say that she never regained her memory.", "self_corrected": false}, {"annotator": 1, "id": "1010-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that something probably happened because she regained her memory. 
Thus, she has regained her memory at some point.", "self_corrected": false}, {"annotator": 2, "id": "1010-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "She must regain her memory first to get any consequence on that.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 60, "n": 29, "e": 11}} +{"id": "79141n", "context": "Isn't a woman's body her most personal property?", "statement": "Women's bodies belong to themselves, they should decide what to do with it.", "entailment": [{"annotator": 0, "id": "10-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that women's bodies are her their personal property. Personal property of women does belong to themselves. So the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "10-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can be interpreted as a rhetorical question. In that case, it reasonably entails the statement.", "self_corrected": false}, {"annotator": 2, "id": "10-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If a woman's body is her personal property, then the body belongs to her and she has right to dominate it.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "10-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can be interpreted as a honest question. 
In that case, it's not affirmative and thus neutral to the statement.", "self_corrected": false}, {"annotator": 3, "id": "10-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "context and statement do not contradicting each other, or one entail the other", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 24, "e": 76}} +{"id": "97926n", "context": "General Motors, for instance, lost $460 million to strikes in 1997, but investors treated the costs as a kind of extraordinary charge and valued the company as if the losses had never happened.", "statement": "GM lost a lot of money in labor disputes but was victorious in the end.", "entailment": [{"annotator": 1, "id": "1457-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context says that investors held up a high evaluation of GM even after the strikes cost the company a lot of money. This can be interpreted as a win for the company.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1457-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The loss of money was treated by investor as a extraordinary charge and as if it had never happend, so this was a trick of treating the loss, but we can't conclude that it was victorious.", "self_corrected": false}, {"annotator": 1, "id": "1457-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is conceivable that GM was valued highly by the investors even after losing a lot of money to the strike and additionally losing the subsequent negotiations.", "self_corrected": false}, {"annotator": 2, "id": "1457-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether GM was \"victorious\" in the end.", "self_corrected": false}, {"annotator": 3, "id": "1457-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known if in the end it was victorious", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": 
{"n": 30, "e": 67, "c": 3}} +{"id": "45957e", "context": "Bauerstein had been at Styles on the fatal night, and added: \"He said twice: 'That alters everything.' And I've been thinking.", "statement": "The fact that Styles was at Bauerstein changes everything.", "entailment": [], "neutral": [{"annotator": 0, "id": "1384-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context doesn't mention what changed everything.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1384-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context indicates that Bauerstein is a person and Styles is a location. The statement has it the other way around, so it is a likely contradiction.", "self_corrected": true}, {"annotator": 2, "id": "1384-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is Bauerstein at Styles, not Styles at Bauerstein.", "self_corrected": false}, {"annotator": 3, "id": "1384-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It should be Bauerstein had been at Styles", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 32, "e": 43, "c": 25}} +{"id": "24126n", "context": "The door did not budge.", "statement": "The door was stuck, so it did not move.", "entailment": [{"annotator": 3, "id": "595-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "budge entails that the door wouldn't move", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "595-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the state of the door and has no information about what caused the state.", "self_corrected": false}, {"annotator": 1, "id": "595-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There could have been other reasons that the door didn't move. 
For example, that it was locked.", "self_corrected": false}, {"annotator": 2, "id": "595-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know the reason why the door did not budge, maybe it worked well.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 77, "n": 23}} +{"id": "135251n", "context": "it's like but the time we went to Florida and needed to rent a car you know he believed in it", "statement": "We rented a car while we were in Florida.", "entailment": [{"annotator": 1, "id": "1549-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that they needed to rent a car, so it's likely that they also did rent a car.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1549-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is unclear because it only mentions that they needed to rent a car, doesn't explain whether they did or not.", "self_corrected": false}, {"annotator": 2, "id": "1549-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe back then they are so poor that they \u201cNeeded to rent\u201d, but could not afford it.", "self_corrected": false}, {"annotator": 3, "id": "1549-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known if they rented the car in the end. 
It's only known that they needed to rent a car", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 26, "e": 72, "c": 2}} +{"id": "48223n", "context": "yeah although i do worry that how easy this one was might be a bad lesson uh to the to the younger people um you know than there is the other generation", "statement": "I do worry that it might be a bad lesson for the kids.", "entailment": [{"annotator": 0, "id": "76-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context refers to the worry about giving a bad lesson to the younger people, which matches the statement.", "self_corrected": false}, {"annotator": 1, "id": "76-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker in the context explicitly says that they worry about it being a bad lesson for younger people, which includes kids.", "self_corrected": false}, {"annotator": 2, "id": "76-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "younger people can be kids, adolescents or young adults", "self_corrected": false}, {"annotator": 3, "id": "76-entailment-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "true, if the younger people are considered as kids", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "76-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "younger people are not kids", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 83, "n": 13, "c": 4}} +{"id": "73518n", "context": "no North Carolina State", "statement": "North Carolina is a county", "entailment": [], "neutral": [{"annotator": 0, "id": "64-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't state whether North Carolina is a county or not.", "self_corrected": false}, {"annotator": 2, "id": "64-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no North Carolina State; North Carolina could be a city or an area.", "self_corrected": false}, {"annotator": 3, "id": "64-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context and statement are irrelevant", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "64-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that North Carolina is a state. So it's not a county.", "self_corrected": true}, {"annotator": 2, "id": "64-contradiction-2", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It is a state but not North Carolina, so North Carolina is a state instead of a county.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 28, "c": 70, "e": 2}} +{"id": "16996e", "context": "In the short term, U.S. consumers will benefit from cheap imports (as will U.S. multinationals that use parts made in East Asian factories).", "statement": "U.S. consumers and factories in East Asia benefit from imports.", "entailment": [], "neutral": [{"annotator": 0, "id": "1031-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that the U.S. consumers and multinationals will benefit from cheap imports, and has no information about the benefits to factories in East Asia.", "self_corrected": false}, {"annotator": 1, "id": "1031-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about whether the factories in East Asia also benefit from the imports", "self_corrected": false}, {"annotator": 2, "id": "1031-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether \"factories in East Asia benefit from imports\".", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1031-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It should be U.S. 
cunsumers in U.S.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 32, "e": 63, "c": 5}} +{"id": "69815n", "context": "yeah it's a U S territory and it's just we own it or", "statement": "I used to be great at remembering this type of thing, but now I don't.", "entailment": [], "neutral": [{"annotator": 0, "id": "494-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is irrelevant to what is discussed in the context. I think the topics are different.", "self_corrected": false}, {"annotator": 1, "id": "494-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statements seem to be completely unrelated.", "self_corrected": false}, {"annotator": 2, "id": "494-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can not judge \u201cmy memory\u201d based on the context.", "self_corrected": false}, {"annotator": 3, "id": "494-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not relevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 34, "n": 65, "e": 1}} +{"id": "35809n", "context": "Evaluating the intent of the six principles, we observed that they naturally fell into three distinct sets, which we refer to as critical success factors.", "statement": "All three distinct sets need to be filled in order to be considered successful.", "entailment": [{"annotator": 0, "id": "398-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The critical success factors are three distinct sets, which indicate that they should be filled to get success. This is consistent with the statement.", "self_corrected": false}, {"annotator": 3, "id": "398-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The three sets are critical success factors. 
A success means three sets filled.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "398-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know whether all three sets are required to be filled, or maybe only one or two are enough.", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 48, "n": 49, "c": 3}} +{"id": "30282n", "context": "wow who can afford that my God i can't afford to miss a day let alone six", "statement": "It's amazing that some people can afford to miss days from work, whereas I can't even afford to miss one.", "entailment": [{"annotator": 1, "id": "428-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The \"wow\" in the context indicates amazement at the fact that some people can afford to miss six days from work.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "428-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that the author can't afford to miss one day, doesn't contain any information that others can afford to miss days.", "self_corrected": false}, {"annotator": 2, "id": "428-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is cleat that \"I can't afford to miss a day\", but we don't know whether some people can afford to miss days from work.", "self_corrected": false}, {"annotator": 3, "id": "428-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known if some people can afford it or not.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 12}} +{"id": "142430c", "context": "Flying at a discount should be more dangerous.", "statement": "It's totally safe to take advantage of discounted flying.", "entailment": [], "neutral": [{"annotator": 1, "id": "608-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The stance given in the context that discounted flying should be more dangerous, can mean that it should be even more dangerous. 
Thus, it does not really say anything about the safety of discounted flying.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "608-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that it's dangerous to fly at a discount, whereas the statement states that discounted flying is safe.", "self_corrected": false}, {"annotator": 2, "id": "608-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not totally safe; it is more dangerous to choose discounted flying.", "self_corrected": false}, {"annotator": 3, "id": "608-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says flying at a discount is more dangerous, whereas the statement says it is totally safe.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 40, "c": 46, "e": 14}} +{"id": "127290n", "context": "The logic of analysis in case studies is the same", "statement": "The logic for the case studies is the same thing as in the data collection.", "entailment": [], "neutral": [{"annotator": 0, "id": "973-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The same logic of analysis in case studies doesn't mean the same logic of case studies and data collection. The context doesn't mention the data collection.", "self_corrected": false}, {"annotator": 1, "id": "973-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not specify where else the logic is the same. No mention of data collection.", "self_corrected": false}, {"annotator": 2, "id": "973-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, the subject is the logic of analysis in different case studies; in statement, the subject is the logic in case studies and the logic in data collection.", "self_corrected": false}, {"annotator": 3, "id": "973-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known if the logic for case studies is the same as in data collection from context. 
It could be same as in other fields.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 23, "n": 70, "c": 7}} +{"id": "100792c", "context": "yeah but uh do you have small kids", "statement": "It matters not if children are involved.", "entailment": [{"annotator": 1, "id": "1472-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The speaker in the context asks about small kids, so at least it matters to them whether kids are involved.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1472-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Based on the context, we don't know if it matters to have kids.", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "1472-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question indicates that kids matter, otherwise there should be no \"but uh\".", "self_corrected": false}, {"annotator": 3, "id": "1472-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context entails the importance of kids, which contradicts the statement.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment", "neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 38, "n": 59, "e": 3}} +{"id": "75572n", "context": "Marriage is an important institution.", "statement": "Marriage is crucial to society.", "entailment": [{"annotator": 1, "id": "879-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "That something is an important institution can be interpreted as being important for society.", "self_corrected": false}, {"annotator": 3, "id": "879-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Insitution is a part of society. 
Marriage being an important institution entails marriage being important to society", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "879-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Based on the context, we only know the attributes of marriage as an important institution, not whether it is important to society.", "self_corrected": true}, {"annotator": 2, "id": "879-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "marriage could be crucial to other objects, like \"longevity\", \"personal health\", etc.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 49, "n": 43, "c": 8}} +{"id": "22235n", "context": "yeah really no kidding", "statement": "It's crazy!", "entailment": [{"annotator": 0, "id": "836-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "People might say \"no kidding\" when they realize what they're talking about is crazy.", "self_corrected": false}, {"annotator": 3, "id": "836-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Something sounds so crazy that people would think it is a joke. 
\"No kidding\" means it's as crazy as it sounds.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "836-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "it can be not crazy but serious.", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 67, "e": 28, "c": 5}} +{"id": "40486n", "context": "The Women's Haven, which provides shelter and outreach to domestic-violence victims, already has a full-time attorney.", "statement": "The Haven is a useful resource in the community.", "entailment": [{"annotator": 1, "id": "1568-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Providing shelter and outreach to domestic-violence victims sounds like being useful for the community.", "self_corrected": false}, {"annotator": 3, "id": "1568-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Haven provides shelter to victims, making it a useful resource in the society", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1568-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context contains no information onthe importance of the Haven.", "self_corrected": true}, {"annotator": 2, "id": "1568-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The Women's Haven is not the Haven.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 47, "e": 52, "c": 1}} +{"id": "13133n", "context": "The newspaper publishes just one letter a week from a reader, always with an editorial riposte at the bottom.", "statement": "There are many letters submitted each week, but only one is chosen.", "entailment": [], "neutral": [{"annotator": 0, "id": "451-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The newspaper publishes only one letter a week, either because they receive only one per week or because they receive many but choose only one. 
We don't know which is true.", "self_corrected": false}, {"annotator": 1, "id": "451-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about how many letters are submitted.", "self_corrected": false}, {"annotator": 2, "id": "451-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether there are many letters submitted.", "self_corrected": false}, {"annotator": 3, "id": "451-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known if many letters are submitted, but only known that only one is published.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 50, "n": 47, "c": 3}} +{"id": "94674c", "context": "Meanwhile, a site established for the WorldAid '96 Global Expo and Conference on Emergency Relief, which took place last fall, gives you a firsthand glimpse of the frequently crass world of the relief business (note the long list of commercial exhibitors in attendance).", "statement": "WorldAid had a GLobal expo in 2002.", "entailment": [], "neutral": [{"annotator": 0, "id": "1595-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context mentions that WordAid had Global Expo last fall, we don't know whether last year was 2002 or not.", "self_corrected": false}, {"annotator": 1, "id": "1595-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks only about a Global expo in 1996, not in 2002.", "self_corrected": false}, {"annotator": 2, "id": "1595-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know how often is a Global expo hold.", "self_corrected": false}, {"annotator": 3, "id": "1595-neutral-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Context and statement irrelevant to each other", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, 
"chaosnli_labels": {"c": 35, "n": 63, "e": 2}} +{"id": "101525c", "context": "Monday's Question (No.", "statement": "There was a question on Tuesday.", "entailment": [], "neutral": [{"annotator": 0, "id": "474-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions the question on Monday, we don't know ifthere was a question on Tuesday as well.", "self_corrected": false}, {"annotator": 1, "id": "474-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks only about a question on Monday, not Tuesday.", "self_corrected": false}, {"annotator": 2, "id": "474-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question can on Wednesday, Thursday ...", "self_corrected": false}, {"annotator": 3, "id": "474-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 72, "c": 28}} +{"id": "145495c", "context": "The students' reaction was swift and contentious, as if their feelings had been hurt.", "statement": "The students reacted with horror.", "entailment": [], "neutral": [{"annotator": 0, "id": "163-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what kind of reaction the students had.", "self_corrected": false}, {"annotator": 1, "id": "163-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that the students' feelings might have been hurt but it doesn't specify whether they also experienced horror.", "self_corrected": false}, {"annotator": 2, "id": "163-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The students could reacted with anger or disappointment.", "self_corrected": false}, {"annotator": 3, "id": "163-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known what emotion was reacted with, could also be sadness or anger", "self_corrected": false}], "contradiction": [], "idk": [], 
"label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 36, "c": 52, "e": 12}} +{"id": "93357c", "context": "So is the salt, drying in the huge, square pans at Las Salinas in the south.", "statement": "Pepper is made wet in Las Salinas.", "entailment": [], "neutral": [{"annotator": 0, "id": "1401-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the salt, not the pepper.", "self_corrected": false}, {"annotator": 1, "id": "1401-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about salt. Pepper is not mentioned at all.", "self_corrected": false}, {"annotator": 2, "id": "1401-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Pepper is not mentioned in context.", "self_corrected": false}, {"annotator": 3, "id": "1401-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about where the pepper is made wet", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 47, "c": 53}} +{"id": "42388e", "context": "Daniel took it upon himself to explain a few things.", "statement": "Daniel explained what was happening.", "entailment": [{"annotator": 0, "id": "1362-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "This statement could be true, because Daniel explained a few things, which could include what was happening.", "self_corrected": true}, {"annotator": 2, "id": "1362-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Daniel explained actively.", "self_corrected": false}, {"annotator": 3, "id": "1362-entailment-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1362-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement could be undetermined because the 
context does not make it clear what things Daniel explained.", "self_corrected": false}, {"annotator": 1, "id": "1362-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not specify what Daniel explained. It could also be other things than what was happening.", "self_corrected": true}, {"annotator": 2, "id": "1362-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Daniel felt responsible to explain, but maybe fail to explain in the end.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 81, "n": 19}} +{"id": "22587e", "context": "Classic Castilian restaurant.", "statement": "The restaurant is based off a classic Castilian style.", "entailment": [{"annotator": 0, "id": "291-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"classic castilian restaurant\" means that the style of the restaurant is classic Castilian, so the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "291-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both context and statement clearly mention that the restaurant is classic Castilian.", "self_corrected": false}, {"annotator": 2, "id": "291-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}], "label_correction": false, "reason": "Usually it is true.", "self_corrected": true}, {"annotator": 3, "id": "291-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is a castilian restaurant, so it has castilian style.", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 90, "n": 9, "c": 1}} +{"id": "131261n", "context": "But I'll take up my stand somewhere near, and when he comes out of the building I'll drop a handkerchief or something, and off you go!\"", "statement": "I want you to follow him, so watch for the signal that I give.", "entailment": [], "neutral": [{"annotator": 0, "id": "9-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The 
speaker wants the other one watch for the signal, i.e., the handkerchief dropping, so this part is correct, but we don't know if the intention is for the other person to follow the person he/she is observing.", "self_corrected": false}, {"annotator": 1, "id": "9-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear from the context, whether the speaker really wants the person they're talking to to follow someone. It could also be that they want the person to leave.", "self_corrected": false}, {"annotator": 2, "id": "9-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe \u201coff you go\u201d means something else, like detonating the rubbish bin etc.", "self_corrected": false}, {"annotator": 3, "id": "9-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known if \"I\" want \"you\" to follow him from the context", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 55, "c": 5, "n": 40}} +{"id": "51353c", "context": "It is not a surprise, either, that Al Pacino chews the scenery in Devil's Advocate . And the idea that if the devil showed up on Earth he'd be running a New York corporate-law firm is also, to say the least, pre-chewed.", "statement": "Nobody expects that the devil would take the form of a lawyer.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "688-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"pre-chewed\" implies that the idea of the devil running a law firm has been explored, so \"nobody expects\" in the statement is incorrect.", "self_corrected": false}, {"annotator": 1, "id": "688-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that the idea of the devil being a lawyer is \"pre-chewed\" meaning that other movies already used this idea. 
So at least some people would expect the devil to take this form.", "self_corrected": false}, {"annotator": 2, "id": "688-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\u201cpre-chewed\u201d means not fresh, so many people should have expected devil would take the form of a lawyer.", "self_corrected": false}, {"annotator": 3, "id": "688-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "From the context we know that it is not a surprise, meaning people had expected that.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 29, "c": 56, "e": 15}} +{"id": "10547c", "context": "He jumped up, planting one hand on the charging horse, and came at the brute with the axe.", "statement": "He swung at the brute with his sword.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "578-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He came at the brute with the axe, not the sword.", "self_corrected": false}, {"annotator": 1, "id": "578-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement says that the person has one hand on the horse, so the other hand has to be holding the axe. 
Thus, he does not have any hand free to use a sword.", "self_corrected": false}, {"annotator": 2, "id": "578-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not with \"sword\", but with \"axe\".", "self_corrected": false}, {"annotator": 3, "id": "578-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, with his axe", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 91, "e": 3, "n": 6}} +{"id": "136360e", "context": "I can FEEL him.\"", "statement": "I can sense his presence.", "entailment": [{"annotator": 0, "id": "406-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I think \"feel him\" is another way of saying \"sense his presence\".", "self_corrected": false}, {"annotator": 1, "id": "406-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If a person can feel someone, then they most likely also sense their presence.", "self_corrected": false}, {"annotator": 2, "id": "406-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Feel\" is synonym of sense.", "self_corrected": false}, {"annotator": 3, "id": "406-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "true, I can feel him, meaning I can sense his presence either physical or spiritually", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 86, "n": 13, "c": 1}} +{"id": "113193n", "context": "of course you could annex Cuba but they wouldn't like that a bit", "statement": "Cubans would go up in arms if we tried to annex Cuba.", "entailment": [{"annotator": 0, "id": "999-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement could be true, because if Cubans wouldn't like that, it could be possible that they would go up in arms.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": 
"999-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks only about the Cubans disliking an annexation of Cuba. That they would use armed resistance is not clear.", "self_corrected": false}, {"annotator": 2, "id": "999-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know Cubans reaction if Cuba is annexed.", "self_corrected": false}, {"annotator": 3, "id": "999-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "From context, it is only known that the Cubans would not be happy about this, but not known if they will arm themselves", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 31, "e": 69}} +{"id": "120955n", "context": "Another thing those early French and Dutch settlers agreed upon was that their island should be free of levies on any imported goods.", "statement": "The French settlers did not mind income taxes at all.", "entailment": [], "neutral": [{"annotator": 1, "id": "480-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about levies on imported goods, it is not clear why this should impact income taxes.", "self_corrected": false}, {"annotator": 2, "id": "480-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The French can take tax issues seriously, but still made the decision to reach the agreement.", "self_corrected": false}, {"annotator": 3, "id": "480-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned about the income taxes", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "480-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is incorrect, because the French settlers did mind taxing on imported goods.", "self_corrected": true}, {"annotator": 3, "id": "480-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The French only did not mind taxed on imported goods on their islands.", "self_corrected": true}], "idk": [], 
"label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 55, "e": 4, "n": 41}} +{"id": "88188e", "context": "The air is warm.", "statement": "The arid air permeates the surrounding land.", "entailment": [], "neutral": [{"annotator": 0, "id": "540-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Warm air is not necessarily associated with arid air.", "self_corrected": false}, {"annotator": 1, "id": "540-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Air can be arid and cold at the same time.", "self_corrected": false}, {"annotator": 2, "id": "540-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The air can be warm but mosit.", "self_corrected": false}, {"annotator": 3, "id": "540-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelavant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 16, "n": 75, "c": 9}} +{"id": "138966n", "context": "It's thought he used the same architect who worked on the Taj Mahal.", "statement": "In reality, he did not use the Taj Mahal's architect.", "entailment": [], "neutral": [{"annotator": 1, "id": "1165-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "People can be wrong about him using the same architect.", "self_corrected": false}, {"annotator": 2, "id": "1165-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know in reality whether he used the Taj Mahal's architect.", "self_corrected": false}, {"annotator": 3, "id": "1165-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reality is not mentioned in the context", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1165-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The 
context suggests that he might used the same architect, while the statement states that he did not.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 66, "c": 25, "e": 9}} +{"id": "48222n", "context": "News berates computer users for picking obvious, easily cracked passwords and chastises system administrators for ignoring basic security precautions.", "statement": "Users and system administrators both do not prioritize security.", "entailment": [{"annotator": 2, "id": "582-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Users pick obvious, easily cracked passwords; administrators ignore basic security precautions.", "self_corrected": false}, {"annotator": 3, "id": "582-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "true, users pick easy passwords and administrators irgnore basic security precautions, showing the low priority of security for them", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "582-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context only mentions users' behaviour, we don't know if the system administrators prioritize security.", "self_corrected": true}, {"annotator": 1, "id": "582-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The news can be wrong about the prevalence of this problem.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 25, "e": 72, "c": 3}} +{"id": "18428n", "context": "Companies that were foreign had to accept Indian financial participation and management.", "statement": "Foreign companies had to take Indian money in order to operate their businesses.", "entailment": [{"annotator": 0, "id": "734-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Running businesses are the purpose of opening a company. 
So based on the context, companies have to satisfiy the prerequisites for accepting indian financial intervention in order to run businesses.", "self_corrected": false}, {"annotator": 1, "id": "734-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "That the companies had to accept \"Indian financial participation\" means that they had to take Indian money.", "self_corrected": false}, {"annotator": 3, "id": "734-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, if indian financial participation means investment from india", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "734-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Financial participation and management can have other forms, e.g. audit and supervision", "self_corrected": false}, {"annotator": 3, "id": "734-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "financial participation could mean other possibilites other than investing money", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 81, "n": 17, "c": 2}} +{"id": "116059n", "context": "These days, newspaper writers are no longer allowed the kind of license he took.", "statement": "Newspaper writers need to be more factual and careful these days.", "entailment": [], "neutral": [{"annotator": 0, "id": "1379-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify what kind of license he took. So we don't know whether it is associated with factual and carefull, as suggested in the statement.", "self_corrected": false}, {"annotator": 1, "id": "1379-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear which freedoms the writer in the context took. 
It might not be about factuality and carefulness.", "self_corrected": false}, {"annotator": 2, "id": "1379-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"the kind of license\" can mean a facual and careful style, then the opposite should be not rigorous.", "self_corrected": false}, {"annotator": 3, "id": "1379-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 37, "c": 7, "e": 56}} +{"id": "67610c", "context": "Sorry but that's how it is.", "statement": "This is how things are and there are no apologies about it.", "entailment": [{"annotator": 1, "id": "2-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can be interpreted as being unapologetic.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "2-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know the \"sorry\" in the context is meant to be an apology or just to comfort someone, so the need for an apology is unknown.", "self_corrected": false}, {"annotator": 1, "id": "2-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context can also be interpreted as being matter of factly but not unapologetic.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "2-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, it reads \"sorry\", and usually it means apology.", "self_corrected": false}, {"annotator": 3, "id": "2-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are apologies in the context", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 40, "n": 12, "e": 48}} +{"id": "95186n", "context": "The cane plantations, increasingly in the hands of American tycoons, found a ready market in the US.", 
"statement": "The US market was ready for the cane plantations, according to the economists.", "entailment": [], "neutral": [{"annotator": 0, "id": "861-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that there is a ready market in the US, but it's not clear who indicates this.", "self_corrected": false}, {"annotator": 1, "id": "861-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about whether economists believed that the market was ready.", "self_corrected": false}, {"annotator": 2, "id": "861-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know the resource of this ovservation that \"The cane plantations ... found a ready market in the US.\"", "self_corrected": false}, {"annotator": 3, "id": "861-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned about the economists", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 41, "e": 58, "c": 1}} +{"id": "77875e", "context": "As legal scholar Randall Kennedy wrote in his book Race, Crime, and the Law , Even if race is only one of several factors behind a decision, tolerating it at all means tolerating it as potentially the decisive factor.", "statement": "Race is one of several factors in some judicial decisions", "entailment": [], "neutral": [{"annotator": 0, "id": "1559-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only means decisions, not judicial decisions.", "self_corrected": false}, {"annotator": 1, "id": "1559-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The conditional in the context does not assert whether race is one of several reasons or the only one in all judicial decisions.", "self_corrected": false}, {"annotator": 2, "id": "1559-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether in fact race is one of several factors in some judicial decisions.", "self_corrected": false}, {"annotator": 3, "id": "1559-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 
2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned in what process is the race a factor of", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 81, "n": 15, "c": 4}} +{"id": "65879c", "context": "After the recovery of Jerusalem in 1099, it took four hundred years of sieges and battles, treaties, betrayals, and yet more battles, before Christian kings and warlords succeeded in subduing the Moors.", "statement": "The Moors were able to subdue the Christian kings after just a decade of war.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "754-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context refers to the success of the Christian kings and warlords, but the statement refers in the opposite direction", "self_corrected": false}, {"annotator": 1, "id": "754-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Christian kings subdued the Moors, not the other way around.", "self_corrected": false}, {"annotator": 2, "id": "754-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was Christian kings who subdued the Moors, not inverse.", "self_corrected": false}, {"annotator": 3, "id": "754-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Moors were then one subdued; It took the Chrisitans four hundred years.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 89, "n": 8, "e": 3}} +{"id": "86331e", "context": "'Would you like some tea?'", "statement": "DO you want a cup of tea?", "entailment": [{"annotator": 0, "id": "165-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context and statement have the same meaning, both refer to the question about the willingness to drink tea.", "self_corrected": false}, {"annotator": 1, "id": "165-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In both context and statement, the speaker asks whether 
another person wants tea.", "self_corrected": false}, {"annotator": 2, "id": "165-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the answer for \"Would you like some tea\" is yes, then the answer for \"Do you want a cup of tea\" should also be yes.", "self_corrected": false}, {"annotator": 3, "id": "165-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 90, "n": 10}} +{"id": "65130n", "context": "In Mumbai, both Juhu and Chowpatty beaches are, for instance, definitely a bad idea, and though the Marina beaches in Chennai are cleaner, there may be sharks.", "statement": "The beaches are very dirty in Mumbai.", "entailment": [{"annotator": 2, "id": "1588-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Mumbai's beaches are dirtier than beaches in Chennai, and described as \"definitely a bad idea.\"", "self_corrected": false}, {"annotator": 3, "id": "1588-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Marian beaches are compared to Juhu and Chowpatty cleaner, meaning the beaches in Mumbai beaches are dirty.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1588-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There could be other beaches than Juhu and Chowpatty in Mumbai which could be cleaner.", "self_corrected": false}, {"annotator": 2, "id": "1588-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Mumbai's beaches are dirtier than beaches in Chennai, but it is not promised that they are very dirty objectively.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1588-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that two beaches in Mumbai are cleaner than others, but the statement suggests that all beaches in Mumbai are dirty.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, 
"label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 48, "e": 47, "c": 5}} +{"id": "19578n", "context": "Moreover, Las Vegas has recently started to show signs of maturity in its cultural status as well.", "statement": "The culture of Las Vegas has a lot of room for improvement.", "entailment": [], "neutral": [{"annotator": 0, "id": "525-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that the culture of Las Vegas is improving, but the is no information about the room for improvement.", "self_corrected": false}, {"annotator": 1, "id": "525-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "That there were signs of maturity indicates improvement. However, it does not say anything about whether things can improve further.", "self_corrected": false}, {"annotator": 2, "id": "525-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether the culture has a lot of room to improve.", "self_corrected": false}, {"annotator": 3, "id": "525-neutral-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is talking about the cultural status, whereas the statement the culture", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "525-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The culture started to show signs of maturity, then they must have reached some standard and doesnot have much to improve.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 70, "e": 24, "c": 6}} +{"id": "83657c", "context": "Think of it this When consumer confidence declines, it is as if, for some reason, the typical member of the co-op had become less willing to go out, more anxious to accumulate coupons for a rainy day.", "statement": "Coupon collecting is no longer allowed in most US stores.", "entailment": [], "neutral": [{"annotator": 0, "id": "1132-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that some people tend to collect coupon, which implies that coupon collection is actually allowed. 
However, it doesn't include location information, so we don't know if it refers to the United States.", "self_corrected": false}, {"annotator": 1, "id": "1132-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about whether coupon collection is legal or not in most US stores.", "self_corrected": true}, {"annotator": 2, "id": "1132-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether the context is based on the U.S. society.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1132-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not to collect coupon is a choice of the consumers. It is not forbidden.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 54, "e": 5, "c": 41}} +{"id": "102817c", "context": "yes they would they just wouldn't be able to own the kind of automobiles that they think they deserve to own or the kind of homes that we think we deserve to own we might have to you know just be able to i think if we a generation went without debt then the next generation like if if our our generation my husband and i we're twenty eight if we lived our lives and didn't become you know indebted like you know our generation before us that um the budget would balance and that we became accustomed to living with what we could afford which we wouldn't be destitute i mean we wouldn't be living on the street by any means but just compared to how spoiled we are we would be in our own minds but i feel like the generation after us would oh man it it would be so good it would be so much better it wouldn't be perfect but then they could learn to live with what what they could afford to save to buy and if you want a nicer car than that well you save a little longer", "statement": "I am glad our generation has no debt.", "entailment": [], "neutral": [{"annotator": 1, "id": "1328-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker in the context would be glad if their generation had no debt, but does not assert this.", "self_corrected": false}, {"annotator": 2, "id": "1328-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know whether \"our generation\" has no debt.", "self_corrected": false}, {"annotator": 3, "id": "1328-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We are indebted and therefore living on the street", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "1328-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context implies that our generation does have debt, so the statement is wrong.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 43, "e": 23, "n": 34}} +{"id": "124590c", "context": "The great attraction of the church is the splendid exterior, which is crowned by golden onion-shaped cupolas.", "statement": "The outside of the church isn't much to look at, but the inside is intricately decorated.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "942-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that the exteroir of the church is great acctraction, but the statement implies the opposite.", "self_corrected": false}, {"annotator": 1, "id": "942-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states that the exterior of the church is splendid.", "self_corrected": false}, {"annotator": 2, "id": "942-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The outside of the church is attactive.", "self_corrected": false}, {"annotator": 3, "id": "942-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Is is the exterior that is a great attraction.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 87, "e": 4, "n": 9}} +{"id": "96583n", "context": "Mack Lee, Body Servant of General Robert E. 
Lee Through the Civil War , published in 1918.", "statement": "The book was first drafted in early 1915.", "entailment": [], "neutral": [{"annotator": 0, "id": "1170-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the year the book was published, not when it was drafted.", "self_corrected": false}, {"annotator": 1, "id": "1170-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions only the publication date, not the date of the first draft.", "self_corrected": false}, {"annotator": 2, "id": "1170-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We just know the publication of the book, but don't know about the draft.", "self_corrected": false}, {"annotator": 3, "id": "1170-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the first draft only the first publication", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 78, "c": 22}} +{"id": "132525n", "context": "She had the pathetic aggression of a wife or mother--to Bunt there was no difference.", "statement": "Bunt was raised motherless in an orphanage.", "entailment": [], "neutral": [{"annotator": 0, "id": "394-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is unclear where Bunt was raised based on the context.", "self_corrected": false}, {"annotator": 1, "id": "394-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not mention anything about Bunt having or not having a mother or where they were raised.", "self_corrected": false}, {"annotator": 2, "id": "394-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know where and how Bunt grew up.", "self_corrected": false}, {"annotator": 3, "id": "394-neutral-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know what the relation \"she\" is to Bunt. 
She could also be his wife", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 88, "c": 12}} +{"id": "11362e", "context": "The volumes are available again but won't be returned to the stacks until the damp library itself gets renovated.", "statement": "The volumes will be available to the public after renovation.", "entailment": [], "neutral": [{"annotator": 1, "id": "719-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether the library is a public or a private library.", "self_corrected": false}, {"annotator": 2, "id": "719-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know whether after renovation the volumes will be available to the public or not after renovation..", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "719-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context mentions that the library needs to be renovated, not the volumes.", "self_corrected": false}, {"annotator": 2, "id": "719-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The volumes are available before the renovation.", "self_corrected": false}, {"annotator": 3, "id": "719-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is already availabe before the renovation of stacks are done.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 79, "n": 14, "c": 7}} +{"id": "107468n", "context": "You have to walk through it).", "statement": "Walking is the best way to get through it.", "entailment": [{"annotator": 1, "id": "588-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "Statement is a clear paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "588-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "\"The best way\" is not mentioned in the context. It is unclear if walking is the best way.", "self_corrected": false}, {"annotator": 2, "id": "588-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know what is the best way to get through it, maybe driving is better than walking.", "self_corrected": false}, {"annotator": 3, "id": "588-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Walking is not meant as best way but the only way", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 64, "n": 36}} +{"id": "145047e", "context": "The management of the cafe has established the rules for the use of their facility.", "statement": "The management of the cafe is strict about how they manage it.", "entailment": [], "neutral": [{"annotator": 0, "id": "272-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We only know that there is management of the cafe, but the context doesn't mention how strict it is.", "self_corrected": false}, {"annotator": 1, "id": "272-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement talks about the management being strict about their management, whereas the context talks about rules for the use of the cafe.", "self_corrected": false}, {"annotator": 2, "id": "272-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether the rules are strict or not.", "self_corrected": false}, {"annotator": 3, "id": "272-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Established rules do not mean strict management if they are not followed. Or the rules could be not strict rules", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 65, "e": 34, "c": 1}} +{"id": "55888c", "context": "You've got the keys still, haven't you, Poirot? 
I asked, as we reached the door of the locked room.", "statement": "I had the keys in my pocket.", "entailment": [], "neutral": [{"annotator": 1, "id": "681-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear from the premise where the keys are.", "self_corrected": false}, {"annotator": 2, "id": "681-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We can't make sure whether I have the key or not.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "681-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the speaker had the keys, then she/he won't ask whether the other one had keys.", "self_corrected": false}, {"annotator": 2, "id": "681-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If I have the keys, usually I will open the door, instead of asking others.", "self_corrected": false}, {"annotator": 3, "id": "681-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I\" think Poirot has the keys", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 52, "c": 41, "e": 7}} +{"id": "138285n", "context": "i cried when the horse got killed and when the wolf got killed", "statement": "Animal killings make me want to cry.", "entailment": [{"annotator": 0, "id": "1319-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both horse and wolf are animals, the speaker cried when they got killed. 
So the statement is correct.", "self_corrected": true}, {"annotator": 1, "id": "1319-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If killings of two different animals make the narrator cry, then they probably generally care about animal killings.", "self_corrected": true}, {"annotator": 2, "id": "1319-entailment-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Some animal like horse and wolf killings make me cry.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1319-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could be that the narrator had a personal relationship to the killed animals and does not care about animal killings in general.", "self_corrected": false}, {"annotator": 2, "id": "1319-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether all kinds of animal killings will make me want to cry, maybe I don't want to cry for a rat killing.", "self_corrected": false}, {"annotator": 3, "id": "1319-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Overgeneralization. I could be sad maybe only because I know the horse and wolf", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 77, "n": 22, "c": 1}} +{"id": "57454c", "context": "what does um is Robby Robin Williams does he have a funny part in the movie or is", "statement": "How much went into making the movie?", "entailment": [], "neutral": [{"annotator": 0, "id": "749-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The content of the statement is independent of the context. 
The statement is just a question.", "self_corrected": false}, {"annotator": 2, "id": "749-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context there is nothing about the cost.", "self_corrected": false}, {"annotator": 3, "id": "749-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Not sure of \"how much\" of what went into the movie", "self_corrected": true}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 63, "c": 37}} +{"id": "88605n", "context": "The remaining parts of the north, although enticing, are difficult to explore.", "statement": "Inexperienced explorers should take care to avoid dangerous areas of the north.", "entailment": [{"annotator": 0, "id": "204-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The exploration of north parts is difficult based on the context, so it's true that inexperienced explorers should avoid those parts.", "self_corrected": false}, {"annotator": 1, "id": "204-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Inexperienced explorers probably should be careful when exploring difficult to explore parts.", "self_corrected": false}, {"annotator": 2, "id": "204-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Some parts of the north are diffcult to explore, so inexperienced explorers should take care.", "self_corrected": false}, {"annotator": 3, "id": "204-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The remaining parts of the north are generally difficult to explore, meaning especially for the Inexperienced.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "204-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Inexperienced explorers should take care, but maybe they can also explore their with some guidance and help.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, 
"chaosnli_labels": {"n": 41, "e": 56, "c": 3}} +{"id": "72721e", "context": "no i i i don't i it completely beyond me i went to my under graduate uh education", "statement": "I can't remember, I did my undergraduate education.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "174-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that the speaker did undergraduate education, the statement is false if the speaker of both sentences is the same person.", "self_corrected": false}, {"annotator": 2, "id": "174-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, it read \"I went to my under graduate uh education\", so \"I\" do remember it.", "self_corrected": false}], "idk": [1, 3], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 41, "n": 54, "c": 5}} +{"id": "49611e", "context": "How did you get it?\" A chair was overturned.", "statement": "\"How did you get your hands on this object?\"", "entailment": [{"annotator": 2, "id": "714-entailment-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\u201cget your hands on this object\u201d can be understood as get something", "self_corrected": false}, {"annotator": 3, "id": "714-entailment-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "714-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "I am asking my self whether the question belongs to a kind of hypothesis/statement. 
I can't make a conclusion based on the two questions in the provided context and statement.", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 45, "n": 52, "c": 3}} +{"id": "16989c", "context": "Auditors from another country engaged to conduct audits in their country should meet the professional qualifications to practice under that country's laws and regulations or other acceptable standards, such as those issued by the International Organization of Supreme Audit Institutions.", "statement": "All auditors report to a globally managed governing body.", "entailment": [], "neutral": [{"annotator": 1, "id": "418-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about to whom the auditors report.", "self_corrected": false}, {"annotator": 2, "id": "418-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether auditors dealing with domestic companies need to report to a globally managed governing body.", "self_corrected": false}, {"annotator": 3, "id": "418-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context, it is not mentioned that they report to the globally managed governing body", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "418-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context implies that there are several possibilities for the standards that the auditors are expected meet, so the standard mentioned in the statement is one of the standards, not a mandatory one.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 42, "e": 39, "c": 19}} +{"id": "112349c", "context": "The idea that Clinton's approval represents something new and immoral in the country is historically shortsighted.", "statement": "It's accurate to conclude that Clinton's approvals signify the start of a new form of immorality in the country.", "entailment": [], "neutral": [{"annotator": 3, "id": "469-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "the approval only signified that the somehting immoral is historically shortsighted but does not introduce it.", "self_corrected": true}], "contradiction": 
[{"annotator": 0, "id": "469-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The contexts suggests that the conclusion about Clinton's approval is shortsighted, while the statement refers to the accuration of this conclusion, so it is incorrect.", "self_corrected": false}, {"annotator": 1, "id": "469-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If an idea is historically shortsighted it is not accurate.", "self_corrected": false}, {"annotator": 2, "id": "469-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "To the same idea, the context remarked it as \"historically shortsighted\", but the statement took it as \"accurate\".", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 63, "e": 21, "n": 16}} +{"id": "119768n", "context": "I had rejected it as absurd, nevertheless it persisted.", "statement": "I rejected it as absurd but it persisted out of protest.", "entailment": [{"annotator": 3, "id": "1349-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "paraphrases", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1349-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is unclear if it persisted out of protest or of other reasons.", "self_corrected": false}, {"annotator": 1, "id": "1349-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about the reasons for the persistence.", "self_corrected": false}, {"annotator": 2, "id": "1349-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know the reason for persisting, maybe my rejection was overmitted.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 50, "e": 49, "c": 1}} +{"id": "111680e", "context": "He dismounted and Ca'daan saw he was smaller than the rest.", "statement": "He was shorter than the others.", "entailment": [{"annotator": 0, "id": 
"732-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I think the two conclusion \"smaller after dismounting\" in the context and \" shorter\" in the statement both refer to the height of him So the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "732-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of a part of the context.", "self_corrected": false}, {"annotator": 2, "id": "732-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"smaller than the rest\" means \"shorter than the others\"", "self_corrected": false}, {"annotator": 3, "id": "732-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Ca'daan saw he was smaller thant he rest", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 91, "n": 7, "c": 2}} +{"id": "43440n", "context": "And you are wrong in condemning it.", "statement": "Everybody does it; it's normal.", "entailment": [], "neutral": [{"annotator": 0, "id": "730-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context refers to the wrongfulness of the condemnation,it is unclear what the normal situation is to which the statement refers.", "self_corrected": false}, {"annotator": 1, "id": "730-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "You cannot infer that something is normal because it is wrong to condemn it.", "self_corrected": false}, {"annotator": 2, "id": "730-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We can not say a thing is correct, juest because everybody does it.", "self_corrected": false}, {"annotator": 3, "id": "730-neutral-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The reason of wrongfulness in condemning is not known", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "730-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, 
{"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "In reality, if everybody does a thing, the thing will become a costum, and will not be condemned.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 64, "c": 21, "e": 15}} +{"id": "91709c", "context": "San'doro didn't make it sound hypothetical, thought Jon.", "statement": "San'doro's words were hollow, and Jon knew the truth of that immediately.", "entailment": [], "neutral": [{"annotator": 1, "id": "1493-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Jon only thought about how San'doro made it sound. This doesn't tell us anything about whether he believed the words.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1493-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"didn't sound hypothetical\" implies that San'doro's words were actually not hollow, so the statement is false.", "self_corrected": false}, {"annotator": 2, "id": "1493-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Jon did not regard San'doro's words as hollow, instead, he regarded it as not hypothetical.", "self_corrected": false}, {"annotator": 3, "id": "1493-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "san'doro's words sound factual.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 49, "c": 32, "e": 19}} +{"id": "47798n", "context": "On the west side of the square is Old King's House (built in 1762), which was the official residence of the British governor; it was here that the proclamation of emancipation was issued in 1838.", "statement": "The Old King's House had an incident where the King was murdered inside of it.", "entailment": [], "neutral": [{"annotator": 0, "id": "921-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no information in the context about the murder inside of the Old King's House, so the statement could be true or false.", "self_corrected": false}, {"annotator": 1, "id": "921-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, 
{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't say anything about whether someone was killed in Old King's House.", "self_corrected": false}, {"annotator": 2, "id": "921-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether a King was murdered inside of the Old King's House.", "self_corrected": false}, {"annotator": 3, "id": "921-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the murder", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 72, "c": 28}} +{"id": "103364n", "context": "Several of its beaches are officially designated for nudism (known locally as naturisme) the most popular being Pointe Tarare and a functionary who is a Chevalier de la L??gion d'Honneur has been appointed to supervise all aspects of sunning in the buff.", "statement": "They do not mind having nude people.", "entailment": [{"annotator": 0, "id": "391-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The beaches mentioned in the context accept people sunning naked, so we can conclude that they don't mind having nude people.", "self_corrected": false}, {"annotator": 1, "id": "391-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Beaches officially designated for nudism do not mind having nude people.", "self_corrected": false}, {"annotator": 2, "id": "391-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Some beaches are offically designated for nudism, which means nude people are allowed to be there.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "391-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Overgeneralization: they do not mind only in several of its beaches", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "391-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The nude people are supervised. 
If they don't mind, there should be no supervisors especially for such issues.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "n": 24, "c": 1}} +{"id": "49462c", "context": "The village is Sainte-Marie, named by the explorer when he landed on 4 November 1493, attracted by the waterfalls and river he could see flowing down the green inland mountains.", "statement": "The village is not named after the settling explorer.", "entailment": [{"annotator": 2, "id": "1274-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is named by the settling explorer, but not named after him.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1274-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "There is no information in the context about how the village is named, so the statement could be true or false.", "self_corrected": false}, {"annotator": 1, "id": "1274-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The village was named by the explorer, it is not clear whether he named it after himself.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1274-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It is named after him", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 20, "c": 70, "e": 10}} +{"id": "97011c", "context": "Expectations that the ANC would oversee land reform--returning land seized during apartheid's forced migrations--and wealth redistribution have not been met.", "statement": "The ANC would not be in charge of land reform.", "entailment": [{"annotator": 0, "id": "680-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The expectations have not been met, which means the ANC didn't oversee land reform, so the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "680-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the 
ANC does oversee the land reform then it isn't in charge of it.", "self_corrected": true}, {"annotator": 2, "id": "680-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The expectations have not been met.", "self_corrected": false}, {"annotator": 3, "id": "680-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the expections that ANC oversees this is not met", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 31, "n": 31, "e": 38}} +{"id": "56743e", "context": "I found her leaning against the bannisters, deadly pale.", "statement": "She couldn't stand on her own so she leaned against the bannisters until I found her.", "entailment": [], "neutral": [{"annotator": 0, "id": "470-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason why she leaned against the bannisters in unclear, it may or may not have been because she couldn't stand on her own.", "self_corrected": false}, {"annotator": 1, "id": "470-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "She could have leaned on the bannisters for other reasons than not being able to stand on her own.", "self_corrected": false}, {"annotator": 2, "id": "470-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe she could stand on her own, but she would not.", "self_corrected": false}, {"annotator": 3, "id": "470-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info: no info about if she standed there till I found her, or if she really couln't stand on her own", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 54, "e": 46}} +{"id": "48300c", "context": "The activities included in the Unified Agenda are, in general, those expected to have a regulatory action within the next 12 months, although agencies may include activities with an even longer time frame.", "statement": "Some actions were implemented for being shorter than 12 months.", "entailment": [{"annotator": 0, "id": 
"195-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context mentions that some activities with an longer time frame than 12 months will be included, so it could be true that there are some activities are shorter that 12 months.", "self_corrected": false}, {"annotator": 1, "id": "195-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that the actions should generally see action within 12 months, so at least some are implemented for being shorter than 12 months.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "195-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Some activities are maybe longer than 12 months.", "self_corrected": false}, {"annotator": 3, "id": "195-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Miss interpretation: some actions were to be implemented in less than 12 months", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 22, "e": 41, "c": 37}} +{"id": "105769c", "context": "yeah yeah i i went i went off to school wanting to either be a high school algebra teacher or high school French teacher because my two favorite people in the in high school were my algebra teacher and French teacher and uh and i was going to do that until the end of our sophomore year when we wanted uh we came time to sign up for majors and i had taken chemistry for the first time that year and surprised myself i did well in it", "statement": "You are required to sign up for a major freshman year.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "782-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker in the context mentions that he/she sign up for major until the end of the sophomore year, not freshman year, so the statement is false, people don't need to sign up in their freshman year.", "self_corrected": false}, {"annotator": 1, "id": "782-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that the time for signing up for majors is sophomore year, not freshman year.", "self_corrected": false}, {"annotator": 2, "id": "782-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, 
{"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "At the end of our sophomore year, you are required to sign up for majors.", "self_corrected": false}, {"annotator": 3, "id": "782-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, it happens in the sophomore year", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 36, "c": 56, "e": 8}} +{"id": "53866e", "context": "kind of kind of nothing i won't have anything to do with", "statement": "I don't want anything to do with it, no doubts about it.", "entailment": [{"annotator": 1, "id": "267-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is a paraphrase of the statement.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "267-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker won't have anything to do with it could because she/he don't want or can't.", "self_corrected": false}, {"annotator": 2, "id": "267-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether \"I\" want anything to do with it, we just know it is none of \"my\" business.", "self_corrected": false}, {"annotator": 3, "id": "267-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info: Whether I want or not is not known, it's only mentioned that I will NOT have anything to do with it", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 22, "c": 4, "e": 74}} +{"id": "136752e", "context": "The questions may need to be tailored to", "statement": "There are some questions that may or may not need to be tailored to.", "entailment": [{"annotator": 0, "id": "185-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If some questions may need to be tailored to, then it is true that others may not need to.", "self_corrected": false}, {"annotator": 1, "id": "185-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is a paraphrase of the statement.", "self_corrected": false}, {"annotator": 2, "id": "185-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The both show the uncertainty, whether some questions need to be tailored to.", "self_corrected": false}, {"annotator": 3, "id": "185-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 9, "c": 8, "e": 83}} +{"id": "14388e", "context": "life in prison then he's available for parole if it's if it's life and a day then he's not eligible for parole so what you know let's quit BSing with the system", "statement": "The system is corrupt because he won't be able to get parole if it's life and a day.", "entailment": [{"annotator": 0, "id": "534-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Complaints were made about the system, as implied both in the context and statement.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "534-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker thinks the system is very bad but doesn't say anything about corrupt.", "self_corrected": false}, {"annotator": 2, "id": "534-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The system can be corrupt, but also can be ridiculous.", "self_corrected": false}, {"annotator": 3, "id": "534-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info: there is not comment about whether or not the rules of this system are counted as corrupted.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 39, "e": 56, "c": 5}} +{"id": "103559e", "context": "A martini should be gin and vermouth and a twist.", "statement": "A martini must be composed by gin and vermouth.", "entailment": [{"annotator": 1, "id": "134-entailment-1", 
"judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states that a martini should contain gin and vermouth.", "self_corrected": false}, {"annotator": 2, "id": "134-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Gin and vermouth are necessary for a martini.", "self_corrected": false}, {"annotator": 3, "id": "134-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "true. The ingredients of martini are gin and vermouth", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 0, "id": "134-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement doesn't mention twist, but it is mentioned in the context.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 23, "e": 64, "c": 13}} +{"id": "132019n", "context": "and uh really they're about it they've got a guy named Herb Williams that that i guess sort of was supposed to take the place of uh Tarpley but he uh he just doesn't have the offensive skills", "statement": "Tarpley is a better offensive player that Herb Williams.", "entailment": [{"annotator": 0, "id": "252-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that Williams doesn't have the offensive skills, even though he will replace Tarpley. So we can conclude from the context that Tarpley's offensive skills is better than Williams'.", "self_corrected": true}, {"annotator": 1, "id": "252-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Herb Williams couldn't replace Tarpley because of his lack of offensive skills. Thus, it is reasonable to assume that Tarpley is the better offensive player.", "self_corrected": false}, {"annotator": 2, "id": "252-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Herb Williams doesn't have the offensive skills and falls to take the place of Tarpley. 
So Tarpley should be a better offensive player.", "self_corrected": false}, {"annotator": 3, "id": "252-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Herb williams does not have the offensive skills like Tarpley do", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 20, "e": 75, "n": 5}} +{"id": "118999n", "context": "that's true i didn't think about that", "statement": "You've changed my mind with a new perspective.", "entailment": [{"annotator": 1, "id": "996-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The context is a paraphrase of the statement.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "996-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The only thing mention in the context is the new perspective, we can't conclude whether the speaker changed mind.", "self_corrected": false}, {"annotator": 2, "id": "996-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I\" can also keep my mind, although you offer a new perspective.", "self_corrected": false}, {"annotator": 3, "id": "996-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No Info: no info about whether or not I have changed my mind", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 71, "n": 27, "c": 2}} +{"id": "47404e", "context": "do you really romance", "statement": "Do you really have an affair?", "entailment": [{"annotator": 0, "id": "1562-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both are questions and are asking about the same thing, which is about having an affair.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1562-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "You can have other types of romance than an affair.", "self_corrected": false}, {"annotator": 
2, "id": "1562-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A romance can happen between two unmarried single person", "self_corrected": false}, {"annotator": 3, "id": "1562-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "romance does not directly relate to affair.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 68, "c": 11, "e": 21}} +{"id": "30171n", "context": "Until all members of our society are afforded that access, this promise of our government will continue to be unfulfilled.", "statement": "The government is flawed and unfulfilled.", "entailment": [], "neutral": [{"annotator": 2, "id": "155-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We don't know whether all members of the society are afforded that access.", "self_corrected": false}, {"annotator": 3, "id": "155-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The promise of the government is unfulfilled. Can not conclude that the government is therefore flawed and unfulfilled.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "155-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The government will not always be unfulfilled. 
If all members are afforded that access, the government could fulfill the promise mentioned in the context.", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 56, "n": 38, "c": 6}} +{"id": "134514c", "context": "However, co-requesters cannot approve additional co-requesters or restrict the timing of the release of the product after it is issued.", "statement": "They will restrict timing of the release of the product.", "entailment": [], "neutral": [{"annotator": 1, "id": "331-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Whether they can restrict the timing of the product at all is not clear because we don't know whether it was already issued.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "331-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "They can't restrict the release time, as mentioned in the context.", "self_corrected": true}, {"annotator": 2, "id": "331-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Co-requesters cannot restrict the timing of the release of the product.", "self_corrected": false}, {"annotator": 3, "id": "331-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "They can NOT restrict the timing of the release", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 80, "e": 7, "n": 13}} +{"id": "52542n", "context": "The long-sought, the mysterious, the elusive Jane Finn!", "statement": "Jane Finn is as beautiful as she is mysterious.", "entailment": [], "neutral": [{"annotator": 1, "id": "696-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether Jane Finn is beautiful.", "self_corrected": false}, {"annotator": 2, "id": "696-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether Jane Finn is beautiful or not.", "self_corrected": false}, {"annotator": 3, "id": "696-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, 
{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No Info: No Info about the beauty of Jane Finn", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "696-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The comment about Jane Finn stated both in the context and statement is mysterious. There is no mention of \"beautiful\" in the context.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 83, "e": 16, "c": 1}} +{"id": "47408n", "context": "the net cost of operations.", "statement": "That's how it expensive it runs.", "entailment": [{"annotator": 0, "id": "703-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context and statement are both talking about the running cost.", "self_corrected": false}, {"annotator": 2, "id": "703-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The cost can be described as \"how expensive\".", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "703-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "irrelevant", "self_corrected": true}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 49, "e": 47, "c": 4}} +{"id": "135021n", "context": "you know we keep a couple hundred dollars um if that much charged on those which isn't too bad it's just your normal", "statement": "We have money on there, which isn't great", "entailment": [{"annotator": 2, "id": "799-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "Spending money on that is not great, because it is normal.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "799-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The \"not too bad\" comment in the context is about charging much, while the statement is about having money on there.", "self_corrected": false}], "contradiction": [], "idk": [1, 3], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, 
"entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 50, "c": 22, "e": 28}} +{"id": "88605e", "context": "The remaining parts of the north, although enticing, are difficult to explore.", "statement": "The rest of the north presents a steep challenge.", "entailment": [{"annotator": 0, "id": "789-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both context and statement are saying that the rest of the north is hard to explore.", "self_corrected": false}, {"annotator": 1, "id": "789-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the remaining parts of the north are difficult to explore, then they also present a steep challenge.", "self_corrected": false}, {"annotator": 2, "id": "789-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Difficult to explore\" means \"a steep challenge\".", "self_corrected": false}, {"annotator": 3, "id": "789-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"difficult to explore\" entails a chanllenge", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 82, "n": 17, "c": 1}} +{"id": "98710n", "context": "well Jerry do you have a favorite team", "statement": "Jerry, do you follow any sports?", "entailment": [{"annotator": 0, "id": "886-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both context and statement are questions about sports. 
If someone has a favorite team, then he/she must follow this sport.", "self_corrected": false}, {"annotator": 2, "id": "886-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If Jerry has a favorite team, he/she should follow this sport.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "886-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Jerry can follow sports without having a favorite team.", "self_corrected": true}, {"annotator": 2, "id": "886-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Jerry can be a fake fan, for example he/she support a local team, but even don't know the rule of the sport.", "self_corrected": false}, {"annotator": 3, "id": "886-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 46, "n": 51, "c": 3}} +{"id": "30894c", "context": "Earlier this week, the Pakistani paper Dawn ran an editorial about reports that Pakistani poppy growers are planning to recultivate opium on a bigger scale because they haven't received promised compensation for switching to other crops.", "statement": "It is illegal to grow opium in Pakistan.", "entailment": [], "neutral": [{"annotator": 0, "id": "388-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether growing opium in Pakistan is illegal or not.", "self_corrected": false}, {"annotator": 1, "id": "388-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't say anything about the legality of the cultivation of opium.", "self_corrected": false}, {"annotator": 2, "id": "388-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It didn't mention whether it is legal to plant opium in Pakistan.", "self_corrected": false}, {"annotator": 3, "id": "388-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The legal status of growing opium is not 
mentioned in the context", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 67, "c": 29, "e": 4}} +{"id": "81356e", "context": "In keeping with other early Buddhist tenets, there is no figurative representation of Buddha here, However, there is a large gilded statue from a later period inside, and behind the temple are the spreading branches and trunks of the sacred Bodhi Tree, which is said to have grown from a sapling of the first one that stood here 2,500 years ago.", "statement": "There is no statue of Buddha located there.", "entailment": [{"annotator": 0, "id": "1013-entailment-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No figurative representation of Buddha indicates that there is no satues of Buddha.", "self_corrected": true}, {"annotator": 1, "id": "1013-entailment-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If there is no figurative representation of Buddha then there can be no statue of Buddha.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1013-neutral-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether the \"large gilded statue\" is a statue of Buddha or something else.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1013-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"However\" means although the early Buddhist tenet forbade the figurative representation of Buddha, later it was somehow disobeyed or changed, and there is a large gilded statue here.", "self_corrected": false}, {"annotator": 3, "id": "1013-contradiction-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is a large gilded statue from a later period", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 66, "c": 25, "n": 9}} +{"id": "132516n", "context": "right right they left a woman and a child or the cat the sheep yeah", "statement": "They were merciful in this regard, only taking the men as slaves.", "entailment": [{"annotator": 0, "id": "883-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement could be true because only the men are not mentioned in the context. 
They may have been merciful if they only took the men but left the women and children.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "883-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention men.", "self_corrected": false}, {"annotator": 1, "id": "883-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They left a woman or a child. They could still have taken other women or children.", "self_corrected": false}, {"annotator": 2, "id": "883-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "What did they take is not sure, maybe they didn't take anybody, just took some fortune away.", "self_corrected": false}, {"annotator": 3, "id": "883-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about taking the men as slaves", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 7, "n": 80, "c": 13}} +{"id": "34176n", "context": "The rustic Bras-David picnic area, for example, is set alongside a burbling stream.", "statement": "The stream is always burbling.", "entailment": [{"annotator": 0, "id": "1185-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both context and statement mention the burbling stream.", "self_corrected": true}, {"annotator": 2, "id": "1185-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Burbling\" is how the stream is described in the context.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1185-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The stream could also be burbling only sometimes or most of the time.", "self_corrected": false}, {"annotator": 2, "id": "1185-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It can't be promised that it is \"always\" burbling, maybe sometimes the rainfall will influence the volume of the stream.", "self_corrected": false}, {"annotator": 3, "id": "1185-neutral-3", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The area is set alongside a burbling stream. No info about if all stream is burbling", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 54, "e": 44, "c": 2}} +{"id": "77116e", "context": "The third row of Exhibit 17 shows the Krewski, et al.", "statement": "Exhibit 17 has many rows.", "entailment": [{"annotator": 0, "id": "1144-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that Exhibit 17 has at least three rows. So there are indeed many rows.", "self_corrected": false}, {"annotator": 2, "id": "1144-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "As there is \"the third row\", we can suppose that there are more than one row.", "self_corrected": false}, {"annotator": 3, "id": "1144-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is the third row of Exhibit 17, meaning it has at least first and second row", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1144-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We only know that it has at least three rows, which is not many.", "self_corrected": false}, {"annotator": 2, "id": "1144-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Many\" can be quite a large number, but it is not refered in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 43, "n": 57}} +{"id": "76947n", "context": "i think we have too thank you very much you too bye-bye", "statement": "I don't think we can thank you enough for your help.", "entailment": [{"annotator": 2, "id": "860-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both sentences express strong gratitude.", "self_corrected": false}], "neutral": [{"annotator": 0, 
"id": "860-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention why they are thanking you, it may or may not because of the help.", "self_corrected": false}, {"annotator": 2, "id": "860-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Maybe \"I\" think oral thanks is enough.", "self_corrected": false}, {"annotator": 3, "id": "860-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "exaggeration", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 34, "n": 52, "c": 14}} +{"id": "139635n", "context": "have that well and it doesn't seem like very many people uh are really i mean there's a lot of people that are on death row but there's not very many people that actually um do get killed", "statement": "Most people on death row end up living out their lives awaiting execution.", "entailment": [{"annotator": 0, "id": "433-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not very many people on death row get killed indicates that there are many people waiting on the death row, but they will not be executed.", "self_corrected": true}, {"annotator": 3, "id": "433-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "there are many people on death row, but few of them actually get killed", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "433-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The people in death row that are not killed could also be pardoned. 
So they would not await execution.", "self_corrected": false}, {"annotator": 2, "id": "433-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Some people's execution can be cancelled and they will suffer from life imprisonment and die in prison.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 77, "n": 19, "c": 4}} +{"id": "101245n", "context": "we were lucky in that in one respect in that after she had her stroke she wasn't really you know really much aware of what was going on", "statement": "She had a very serious stroke.", "entailment": [{"annotator": 1, "id": "1090-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The stroke left her unaware of her surroundings, so it has to have been serious.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1090-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the seriousness of the stroke.", "self_corrected": false}, {"annotator": 2, "id": "1090-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It can be a very serious stroke, but also can be a mild stroke.", "self_corrected": false}, {"annotator": 3, "id": "1090-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the seriousness of her stroke; She could a serious or not so serious stroke", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "n": 24, "c": 1}} +{"id": "8545c", "context": "He hadn't seen even pictures of such things since the few silent movies run in some of the little art theaters.", "statement": "He had recently seen pictures depicting those things.", "entailment": [], "neutral": [{"annotator": 1, "id": "7-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He could have visited the little art theaters recently or not. 
It is not clear.", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "7-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is clear from the context that he hadn't seen pictures of such things, while the statement suggests that he had recently seen them.", "self_corrected": false}, {"annotator": 2, "id": "7-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, \"he hadn't seen pictures of such things\" since the silent movies run in some art theaters.", "self_corrected": false}, {"annotator": 3, "id": "7-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He hadn't seen pictures of those things", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 73, "n": 16, "e": 11}} +{"id": "8219n", "context": "it depends a lot of uh a lot of things were thought that uh as you know the farmers thought okay we got chemicals we're putting chemicals on the field well the ground will naturally filter out the", "statement": "The farming chemicals are filtered by the ground.", "entailment": [{"annotator": 3, "id": "296-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The farmers think the groud will naturally filter out the chemicals", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "296-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the thought of the farmers. It is not clear whether this thought is the truth.", "self_corrected": false}, {"annotator": 1, "id": "296-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker talks about the farmers thinking that the ground will filter out the chemicals. 
But they don't say that they themselves believe it.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 49, "n": 48, "c": 3}} +{"id": "115247c", "context": "oh really yeah so he he's uh he's probably going to be going to jail and and the problem with him is he's on a guaranteed salary like for three years so whether he plays or not they've got to pay him ten million dollars so if they", "statement": "He is so hardworking and has helped the team achieve so much, I don't see anything wrong with paying him a million dollar salary.", "entailment": [], "neutral": [{"annotator": 0, "id": "633-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can't infer from the context whether he is hardworking or not.", "self_corrected": false}, {"annotator": 2, "id": "633-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His personality and characters are not mentioned in the context, maybe he is not hardworking.", "self_corrected": false}, {"annotator": 3, "id": "633-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about where he is hardworking or not or about my subject feelings on him got paid a million dollar salary", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "633-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker clearly says that they believe it's a problem that the person has a guaranteed million dollar salary.", "self_corrected": false}, {"annotator": 2, "id": "633-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He will probably go to jail, and \"I\" worry about the ten million dollars, but although he can not work in jail, the money still need to be paid.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 42, "c": 54, "e": 4}} +{"id": "99708e", "context": "It was made up to look as much like an old-fashioned steam train as possible.", "statement": "It was built in the modern era to look like something built in the past.", "entailment": [{"annotator": 0, "id": "963-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, 
{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions the building of an old-fashioned train, the word old-fashioned would only be used in the modern era. So the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "963-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was made to look like a steam train which is something from the past.", "self_corrected": false}, {"annotator": 2, "id": "963-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was made up to look as an \"old-fashioned\" steam train, if it is made in the past, it should be described as \"fashion\" instead of \"old-fashioned\". So it was built in the modern era but to look like something old.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "963-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about when it was build. It could be built in 20th century to look like something built in 19th century", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 85, "n": 14, "c": 1}} +{"id": "82415n", "context": "Then he sobered.", "statement": "He was drunk.", "entailment": [{"annotator": 0, "id": "946-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "If he was not sober due to alcohol, then the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "946-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "For a person to sober, they have to be drunk before.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "946-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He could also be not sober because of the drugs.", "self_corrected": false}, {"annotator": 2, "id": "946-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He can be faint because of hunger or desease.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "946-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, 
"makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "sobered means becoming not drunk", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 40, "c": 38, "n": 22}} +{"id": "82700c", "context": "During his disastrous campaign in Russia, he found time in Moscow to draw up a new statute for the Com??die-Francaise (the national theater), which had been dissolved during the Revolution.", "statement": "Russia has been successfully invaded hundreds of times.", "entailment": [], "neutral": [{"annotator": 0, "id": "1249-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention how many times Russia has been invaded.", "self_corrected": false}, {"annotator": 1, "id": "1249-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no information about how many times Russia was invaded.", "self_corrected": false}, {"annotator": 2, "id": "1249-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Nothing shows how many times has Russia been invaded.", "self_corrected": false}, {"annotator": 3, "id": "1249-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about how many times Russia has been invaded", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 76, "c": 23, "e": 1}} +{"id": "1073c", "context": "News ' cover says the proliferation of small computer devices and the ascendance of Web-based applications are eroding Microsoft's dominance.", "statement": "Microsoft is a more profitable company than Apple.", "entailment": [], "neutral": [{"annotator": 0, "id": "747-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the profits of Microsoft and Apple.", "self_corrected": false}, {"annotator": 1, "id": "747-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about Apple.", "self_corrected": false}, {"annotator": 2, "id": 
"747-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Apple is not mentioned in the context, so we can not compare which company is more profitable.", "self_corrected": false}, {"annotator": 3, "id": "747-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the profit-comparision between Microsoft and Apple", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 83, "e": 4, "c": 13}} +{"id": "120149c", "context": "There's a lot of villas all the way along, but by degrees they seemed to get more and more thinned out, and in the end we got to one that seemed the last of the bunch.", "statement": "There were only a few villas the whole way along, until we reached a small village that seemed to be the end.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1139-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly suggests that there are many villas all the way along.", "self_corrected": false}, {"annotator": 1, "id": "1139-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the end they reached a single villa, not a small village.", "self_corrected": false}, {"annotator": 2, "id": "1139-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are a lot of villas the whole way along.", "self_corrected": false}, {"annotator": 3, "id": "1139-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is a lot of villas all the way along.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 22, "c": 69, "n": 9}} +{"id": "124037n", "context": "The park was established in 1935 and was given Corbett's name after India became independent.", "statement": "The park changed names due to the independence.", "entailment": [{"annotator": 1, "id": "1415-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, 
"makes_sense": false}], "label_correction": false, "reason": "The park probably received a name in 1935, so the new name would be a change.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1415-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The park was renamed after the independence, but the reason for the name change is unclear.", "self_corrected": false}, {"annotator": 2, "id": "1415-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The park changed names after the independence, but independence can not be the reason of changing, it can be a coincidence.", "self_corrected": false}, {"annotator": 3, "id": "1415-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether the park had a name already before the independence.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 4, "e": 69, "n": 27}} +{"id": "92062c", "context": "Krugman's column will henceforth be known as The Dismal Science, a phrase too famous to be ownable by anyone, except possibly British essayist Thomas Carlyle (1795-1881), who coined it.", "statement": "Krugman writes novels.", "entailment": [], "neutral": [{"annotator": 1, "id": "478-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's clear that Krugman writes a column, but not clear whether they write novels.", "self_corrected": false}, {"annotator": 2, "id": "478-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Krugman has a column known as \"The Dismal Science\", it can be a novel column, but also can be others like essay column.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "478-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Krugman is a columnist, he doesn't write novels.", "self_corrected": false}, {"annotator": 3, "id": "478-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, in the context is his column, which appears often in newspaper", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, 
"label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 30, "n": 64, "e": 6}} +{"id": "131235c", "context": "Even if the entire unified surplus were saved, GDP per capita would fall somewhat short of the U.S. historical average of doubling every 35 years.", "statement": "Even if the entire unified surplus were lost, GDP per capita would fall somewhat short of the U.S. historical average of doubling every 35 years.", "entailment": [], "neutral": [{"annotator": 1, "id": "700-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear what would happen to the GDP if the surplus would be lost instead of saved.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "700-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context state if the surplus were saved", "self_corrected": false}], "idk": [0, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 51, "n": 17, "e": 32}} +{"id": "46820e", "context": "and my and my part-time work you know it's not our the restaurant our favorite restaurant in the town of Salisbury where actually we live you know where my where i'll return to my job or whatever we can normally eat out for um under fourteen dollars", "statement": "My first part time job was in a restaurant in Salisbury where you could eat out for under $14.", "entailment": [], "neutral": [{"annotator": 0, "id": "708-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned in the context whether the speaker's first part time job was in a restaurant in Salisbury.", "self_corrected": false}, {"annotator": 2, "id": "708-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "1. The part-time work in Salisbury can not be the first job; 2. 
The part-time job can not in a restaurant.", "self_corrected": false}, {"annotator": 3, "id": "708-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about if it was my first part time job", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "708-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "My part time job is not in a restaurant", "self_corrected": true}], "idk": [1], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 34, "n": 55, "c": 11}} +{"id": "77590c", "context": "do you think most states have that or", "statement": "I think most states have that.", "entailment": [], "neutral": [{"annotator": 0, "id": "1507-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is an answer to the question in the context. It may be true or false.", "self_corrected": false}, {"annotator": 1, "id": "1507-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker poses a question and doesn't assert that most states have that.", "self_corrected": false}, {"annotator": 3, "id": "1507-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what I think in the context", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 70, "e": 19, "c": 11}} +{"id": "128542e", "context": "There should be someone here who knew more of what was going on in this world than he did now.", "statement": "He knew things, but hoped someone else knew more.", "entailment": [{"annotator": 3, "id": "964-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "someone is hoped to be here to know more of what was going on than he did", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "964-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only suggests that someone else knew more than he did, it 
is not clear whether he hoped so.", "self_corrected": false}, {"annotator": 1, "id": "964-neutral-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether he hoped or only epected that there was someone who knew more.", "self_corrected": false}, {"annotator": 2, "id": "964-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He supposed that someone else knew more, but it can not reflect whether he hoped so or not, maybe he hoped that he knew the most in the world.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 82, "n": 18}} +{"id": "74768e", "context": "She admits to Dorcas, 'I don't know what to do; scandal between husband and wife is a dreadful thing.' At 4 o'clock she has been angry, but completely mistress of herself.", "statement": "She had remained in control despite her anger.", "entailment": [{"annotator": 0, "id": "1135-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It is clear from the context that she got control of herself.", "self_corrected": false}, {"annotator": 1, "id": "1135-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"Mistress of herself\" means that the she was in control of herself.", "self_corrected": false}, {"annotator": 2, "id": "1135-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"She has been angry, but completely mistress of herself. 
\"", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 57, "n": 32, "c": 11}} +{"id": "122928e", "context": "A small page-boy was waiting outside her own door when she returned to it.", "statement": "When she came back to her door she found something waiting.", "entailment": [{"annotator": 1, "id": "1470-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The someone waiting was the small page-boy.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1470-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that someone was waiting, we don't know if something was waiting as well.", "self_corrected": false}, {"annotator": 2, "id": "1470-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe beside the page-boy, there are something else waiting.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1470-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not correct to say \"a small page-boy\" as \"something\".", "self_corrected": false}, {"annotator": 3, "id": "1470-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "it was rather someone waiting", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": ["entailment"], "has_ambiguity": true, "chaosnli_labels": {"n": 10, "c": 13, "e": 77}} +{"id": "139409e", "context": "Then, all the time, it was in the spill vase in Mrs. Inglethorp's bedroom, under our very noses? 
I cried.", "statement": "You mean we were so near it constantly?", "entailment": [{"annotator": 1, "id": "467-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Under our very noses\" means that it was very near to us.", "self_corrected": false}, {"annotator": 3, "id": "467-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"under our very noses\" means in our very nearby surrendings", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 78, "n": 21, "c": 1}} +{"id": "59208n", "context": "He's chosen Meg Ryan.", "statement": "A possible selection would be Meg Ryan or Jon Doe.", "entailment": [{"annotator": 0, "id": "1175-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The selection is Meg Ryan. So Meg Ryan or Jon Doe is true.", "self_corrected": false}, {"annotator": 2, "id": "1175-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is possible, because Meg Ryan is one of the two candidates.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1175-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear from which pool he chose Meg Ryan.", "self_corrected": false}, {"annotator": 3, "id": "1175-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the other choice of person", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 39, "e": 13, "n": 48}} +{"id": "133597n", "context": "In manual systems, attestations, verifications, and approvals are usually shown by a signature or initial of an individual on a hard copy document.", "statement": "The only things that signatures in manual systems show are attestations, verifications, or approvals.", "entailment": [], "neutral": [{"annotator": 1, "id": "666-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": 
true}], "label_correction": false, "reason": "Signatures could also show other things in addition to the mentioned ones.", "self_corrected": false}, {"annotator": 2, "id": "666-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sinatures in manual systems can be used for more purposes except from attestations, verifications, and approvals.", "self_corrected": false}, {"annotator": 3, "id": "666-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info that signatures only shows attestations, verifications and approvals. Signatures could show more than that", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "666-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Attestations, verifications and approvals, all three of these things, not just one of these things, are showed by a signature.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 52, "e": 32, "c": 16}} +{"id": "80517e", "context": "This doesn't look good.", "statement": "This looks really bad.", "entailment": [{"annotator": 3, "id": "606-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"not look good\" implies \"bad\"", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "606-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that it doesn't look good, which may be normal or bad.", "self_corrected": false}, {"annotator": 1, "id": "606-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not good is not equivalent to really bad.", "self_corrected": false}, {"annotator": 2, "id": "606-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "This doesn't look good, but it can look just a little bit bad, or really bad.", "self_corrected": false}, {"annotator": 3, "id": "606-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "possible exaggeration: \"not looking good\" might be not as serious as \"really bad\"", "self_corrected": false}], "contradiction": [], "idk": 
[], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 32, "e": 65, "c": 3}} +{"id": "93236c", "context": "The word itself, tapa, is translated as lid and derives from the old custom of offering a bite of food along with a drink, the food being served on a saucer sitting on top of the glass like a lid.", "statement": "Tapas are large portions and are a very filling meal.", "entailment": [], "neutral": [{"annotator": 0, "id": "1589-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the protion size of tapas.", "self_corrected": true}, {"annotator": 1, "id": "1589-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The meaning of the word tapa could have radically changed and now signify large portions.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1589-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Tapas only offer \"a bite of food\", so it is not \"very filling\".", "self_corrected": false}, {"annotator": 3, "id": "1589-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Tapas are small portions", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 68, "n": 30, "e": 2}} +{"id": "142643c", "context": "The standard technology assumptions of scenario A were used by EIA in the development of the AEO2001 reference case projections.", "statement": "EIA used the standard technology assumptions to eliminate the AEO2001 reference case projections.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "145-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about developing AEO2001 reference case projections by using the assumptions, while the statement talks about elimination.", "self_corrected": false}, {"annotator": 1, "id": "145-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The assumptions were used to develop the projections, not eliminate them.", "self_corrected": false}, 
{"annotator": 2, "id": "145-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "EIA used the standard technology assumptions to develop the AEO2001 reference case projections, not to \"eliminate\" them.", "self_corrected": false}, {"annotator": 3, "id": "145-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was used during the development of the AEO2021", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 20, "n": 19, "c": 61}} +{"id": "31113e", "context": "One wag, J., wrote in to ask, Is there a difference between pests and airlines?", "statement": "J. thinks there is no difference between pests and airlines.", "entailment": [{"annotator": 0, "id": "1532-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question from J. could be a rhetorical question to which the speaker already has a standard answer, which is that there is no difference.", "self_corrected": false}, {"annotator": 1, "id": "1532-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Literally taken the question is so absurd that it is most likely a rhetorical question implying that there really is no difference.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1532-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question from J. could be a simple question which needs to be answered.", "self_corrected": false}, {"annotator": 3, "id": "1532-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context J only asked a questions. 
It is unknown about his opinion", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 55, "e": 28, "c": 17}} +{"id": "34776c", "context": "We did not study the reasons for these deviations specifically, but they likely result from the context in which federal CIOs operate.", "statement": "The Context in which federal CIOs operate is no different from other CIOs.", "entailment": [], "neutral": [{"annotator": 0, "id": "1220-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context refers only to a hypothetical, we don't know whether it is the truth that the context in which federal CIOs operate is different.", "self_corrected": false}, {"annotator": 1, "id": "1220-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Other CIOs are not mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "1220-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Federal CIOs can be semilar with other CIOs, also can be different from others.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1220-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The contexts of different CIOs potentially lead to these deviations", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 30, "n": 60, "e": 10}} +{"id": "121422c", "context": "it it like strange that it you're right in the middle of the mountains and it's so brown and dry but boy you just didn't feel", "statement": "you are in the right part of the mountains.", "entailment": [], "neutral": [{"annotator": 0, "id": "1019-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether someone is in the right part or not.", "self_corrected": false}, {"annotator": 1, "id": "1019-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether right in the middle of the mountains is also the right part of the mountains.", "self_corrected": 
false}, {"annotator": 3, "id": "1019-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Not clear context", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "1019-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"right\" in context means exactly, not the side of the mounstains, and your location is in the middle of the mountains.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 72, "c": 16, "e": 12}} +{"id": "104805e", "context": "California is high", "statement": "California is hyped up!", "entailment": [{"annotator": 2, "id": "641-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"high\" is one state of \"hyped up\".", "self_corrected": false}, {"annotator": 3, "id": "641-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the \"high\" means in the context of spirit, that Californa is a hyped city", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "641-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "High is not the same as hyped up but also does not exclude the possibility.", "self_corrected": false}, {"annotator": 3, "id": "641-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the \"high\" in the context mean that Californa is geographically high. 
Then it does not entail the statement", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 72, "c": 16, "e": 12}} +{"id": "115821n", "context": "The Chinese calendar was used to calculate the year of Japan's foundation by counting back the 1,260 years of the Chinese cosmological cycle.", "statement": "The calculation of Japan's year of foundation was very exact.", "entailment": [{"annotator": 3, "id": "527-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "yes, because it was counted back 1,260 years of the Chinese cosmological cycle", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "527-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests how the Japan' year was calculated, but doesn't mention whether this calculation was exact or not.", "self_corrected": false}, {"annotator": 1, "id": "527-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether this calculation is exact.", "self_corrected": false}, {"annotator": 2, "id": "527-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context it just states how Japan's year of foundation is calculated, but it can not prove the method is exact or not.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 41, "n": 56, "c": 3}} +{"id": "80630e", "context": "The tree-lined avenue extends less than three blocks to the sea.", "statement": "The sea isn't even three blocks away.", "entailment": [{"annotator": 0, "id": "1277-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement talk about the distance to the sea is lee than three blocks.", "self_corrected": false}, {"annotator": 1, "id": "1277-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the avenue reaches the sea after less then three blocks, it cannot be further away than three blocks.", "self_corrected": false}, {"annotator": 2, 
"id": "1277-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The avenue is less than three blocks to the sea.", "self_corrected": false}, {"annotator": 3, "id": "1277-entailment-4", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the statement means that the sea is less than three blocks away", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "1277-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not given where is the location of the narrator.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1277-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "If the statement means that the sea is more than three blocks away", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 89, "c": 3, "n": 8}} +{"id": "138862c", "context": "Also, other sorbent-based approaches in development may prove in time to be preferable to ACI, making the use of ACI only a conservative assumption.", "statement": "Hydrogen-based approaches in development may be preferable to ACl.", "entailment": [], "neutral": [{"annotator": 1, "id": "450-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions sorbent-based approaches, not hydrogen-based ones. 
But this doesn't rule out hydrogen-based approaches.", "self_corrected": false}, {"annotator": 2, "id": "450-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context hydrogen-based approaches are not discussed.", "self_corrected": false}, {"annotator": 3, "id": "450-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context mentions sorbent-based approaches, whereas in the statement it is hydrogen-based approaches, which is not mentioned in the context", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 66, "e": 18, "c": 16}} +{"id": "105179c", "context": "I was to watch for an advertisement in the Times.", "statement": "I looked for an ad in my mailbox.", "entailment": [], "neutral": [{"annotator": 0, "id": "685-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker indeed looked for an ad, but the context doesn't mention where the speaker looked for.", "self_corrected": true}, {"annotator": 1, "id": "685-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context talks about an obligation, but it is not clear whether the speaker then acts accordingly.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "685-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I searched \"the Times\" not \"my mailbox\" for an ad.", "self_corrected": false}, {"annotator": 3, "id": "685-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Times is a newpaper", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 48, "n": 45, "e": 7}} +{"id": "109876n", "context": "Text Box 2.1: Gross Domestic Product and Gross National Product 48Text Box 4.1: How do the NIPA and federal unified budget concepts of", "statement": "This text displays how GDP and GNP is calculated.", "entailment": [], "neutral": [{"annotator": 1, "id": "164-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, 
{"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what the text displays.", "self_corrected": false}, {"annotator": 2, "id": "164-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context it only offer a theme of GDP and GNP, but it can be about every aspect of the concepts, like the growth or the depression, and the calculation methods.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "164-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't show the calculation of GDP and DNP.", "self_corrected": false}, {"annotator": 3, "id": "164-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No it doesn't. It only shows the what GDP and GNP stand for", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 25, "n": 45, "c": 30}} +{"id": "65353n", "context": "Don't take it to heart, lad, he said kindly.", "statement": "He was trying to console the lad.", "entailment": [{"annotator": 0, "id": "1042-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is indeed about the consolation.", "self_corrected": false}, {"annotator": 1, "id": "1042-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Don't take it to heart\" is an attempt of consolation.", "self_corrected": false}, {"annotator": 2, "id": "1042-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The words and the attitude of him can express his attempt to console the lad.", "self_corrected": false}, {"annotator": 3, "id": "1042-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"don't take it to heart\" means not to overthink something.. 
So the lad should not overthink something that might bother him", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 97, "n": 3}} +{"id": "16494c", "context": "It cannot be outlawed.", "statement": "It has to be made illegal.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1072-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It cannot be outlawed means that it is legal.", "self_corrected": false}, {"annotator": 3, "id": "1072-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"cannot be outlawed\" means \"cannot be made illegal\". So it has to stay legal", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 84, "e": 8, "n": 8}} +{"id": "60732n", "context": "It started with The Wild Bunch : We sexualized violence, we made it beautiful.", "statement": "Violence is now look at in the positive due to The Wild Bunch.", "entailment": [{"annotator": 0, "id": "1268-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Making violence beautiful is is a positive way of looking at violence.", "self_corrected": false}, {"annotator": 2, "id": "1268-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "People made violence beautiful is a way to look at it positively.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1268-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether people have a positive opinion about violence, only because The Wild Bunch made it look beautiful.", "self_corrected": false}, {"annotator": 3, "id": "1268-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The current state of how violence is looked at is unknown; We only know it started to be looked at in a positive view", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], 
"label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 70, "n": 28, "c": 2}} +{"id": "88646c", "context": "You see, he said sadly, \"you have no instincts.\"", "statement": "He said that I had no willpower.", "entailment": [], "neutral": [{"annotator": 0, "id": "501-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the comments on willpower.", "self_corrected": false}, {"annotator": 3, "id": "501-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Instincts do not totally relate with willpower.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "501-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He said \"no instincts\", not \"no willpower\".", "self_corrected": false}, {"annotator": 2, "id": "501-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Instincts\" are not the same as \"willpower\".", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 80, "n": 13, "e": 7}} +{"id": "53074n", "context": "ooh it's kind of tough to think of some of the others although i do watch some of some of those frivolous things uh like on Thursday nights at nine o'clock when i get home from aerobics i will watch uh Knots Landing", "statement": "I only watch frivolous things on Thursday nights.", "entailment": [], "neutral": [{"annotator": 0, "id": "1036-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that the speaker watch frivolous things on Thursdays, but doesn't mention if he/she watch them on other days as well.", "self_corrected": false}, {"annotator": 1, "id": "1036-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They could also watch frivolous things at other times.", "self_corrected": false}, {"annotator": 2, "id": "1036-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I do watch frivolous things on Thursday nights, but maybe I also watch at other time.", "self_corrected": false}, {"annotator": 3, "id": "1036-neutral-4", "judgments": [{"annotator": 0, 
"makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Thursday is mentioned as example. There is no info about what he does on other weekday nights", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 69, "e": 23, "c": 8}} +{"id": "61818n", "context": "Kutchins and Kirk cite a particularly amusing example of such Robert Spitzer, the man in charge of DSM-III , was sitting down with a committee that included his wife, in the process of composing a criteria-set for Masochistic Personality Disorder--a disease that was suggested for, but never made it into, the DSM-III-R (a revised edition).", "statement": "DSM-III-R is a book of personality disorders.", "entailment": [{"annotator": 2, "id": "1125-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Masochistic Personality Disorder was suggested fot DSM-III-R, so the later should be about personality disorders.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1125-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that Masochistic Personality Disorder is not in the book, it doesn't mention the content of the book.", "self_corrected": false}, {"annotator": 1, "id": "1125-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The book could contain also other things than only personality disorders.", "self_corrected": false}, {"annotator": 3, "id": "1125-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We only know that DSM-III is a personality disorders. 
But DSM-III-R could just be a medical book for all kinds of disorders", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 32, "e": 63, "c": 5}} +{"id": "46003n", "context": "trying to keep grass alive during a summer on a piece of ground that big was expensive", "statement": "The watering and fertilizer, can cost a lot to keep grass alive in the summer months.", "entailment": [{"annotator": 3, "id": "386-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Keeping grass alive on a big ground can be expensive. For that you need watering and fertilizer, which can be expensive", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "386-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the reason for the high cost.", "self_corrected": false}, {"annotator": 1, "id": "386-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what exactly is expensive about keeping the grass alive.", "self_corrected": false}, {"annotator": 2, "id": "386-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It costs a lot to keep grass alive in the summer, but the reason can be watering and fertilizer, or something else like labor and pesticide.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 30, "e": 69, "c": 1}} +{"id": "6386n", "context": "isn't it i can remember i've only been here eight years but i can remember coming to work from i used to live in Wylie and i could see downtown Dallas", "statement": "Downtown Dallas was a short drive from where I lived in Wylie.", "entailment": [{"annotator": 0, "id": "1252-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker can see downtown Dallas from where he/she lived, so it is true to say that it was a short drive.", "self_corrected": false}, {"annotator": 1, "id": "1252-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the person 
could see Downtown Dallas from their place in Wylie, it probably was only a short drive.", "self_corrected": false}, {"annotator": 3, "id": "1252-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I\" saw Dallas on my way to work from Wylie. Considering daily commute, Dallas should not be very far away from Wylie", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1252-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "I could see downtown Dallas maybe because it was not far away from where I lived in Wylie, maybe because I lived in a high-rise apartment. Besides, as there could be a river between me and the downtown, if there is no bridges, the drive still will be long.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 58, "n": 41, "c": 1}} +{"id": "98739n", "context": "The living is not equal to the Ritz, he observed with a sigh.", "statement": "The living is nothing compared to the glamour of the Ritz, he said sadly.", "entailment": [{"annotator": 0, "id": "335-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can infer from the sigh that the speaker did think that living here is not as good as in Ritz.", "self_corrected": false}, {"annotator": 2, "id": "335-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The sign and the words proved that he didn't think the living is good as the Ritz.", "self_corrected": false}, {"annotator": 3, "id": "335-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "335-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear whether he sighed from disappointment or relief.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 71, "n": 22, "c": 7}} +{"id": "108624c", "context": "Exhibit 3 presents total national emissions of NOx and SO2 from all sectors, including power.", 
"statement": "In Exhibit 3 there are the total regional emissions od NOx and SO2 from all sectors.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1407-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Exhibit 3 shows the total national emmissions, not regional emissions.", "self_corrected": false}, {"annotator": 1, "id": "1407-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The emissions are national not regional.", "self_corrected": false}, {"annotator": 2, "id": "1407-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Exhibit 3 is about \"national\" not \"regional\" emissions.", "self_corrected": false}, {"annotator": 3, "id": "1407-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It should be national emissions", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 42, "e": 47, "n": 11}} +{"id": "106013c", "context": "Ca'daan heard the Kal grunt and felt the horse lift.", "statement": "The Kal heard Ca'daan grunt.", "entailment": [{"annotator": 3, "id": "699-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "true, statement is a part of the context", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "699-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The grunt was from Kal, not Ca'daan.", "self_corrected": false}, {"annotator": 1, "id": "699-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Ca'daan grunted not the Kal.", "self_corrected": false}, {"annotator": 2, "id": "699-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is Ca'daan that heard the Kal, not reverse.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], 
"label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 75, "n": 16, "e": 9}} +{"id": "37407n", "context": "5 are highly correlated during summer months in some areas.", "statement": "Six are correlated to winter in certain areas.", "entailment": [], "neutral": [{"annotator": 0, "id": "1284-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only suggests the correlation during summer months, not winter.", "self_corrected": false}, {"annotator": 1, "id": "1284-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could be that six are correlated to winter, the context only speaks about summer.", "self_corrected": false}, {"annotator": 2, "id": "1284-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Winter months are not discussed in the context.", "self_corrected": false}, {"annotator": 3, "id": "1284-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about 6", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 63, "c": 37}} +{"id": "70590c", "context": "China's civil war sent distressing echoes to Hong Kong.", "statement": "Japan fought a civil war.", "entailment": [], "neutral": [{"annotator": 0, "id": "355-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention of the Japan civil war in the context.", "self_corrected": false}, {"annotator": 1, "id": "355-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could be that Japan also fought a civil war, but it is not clear from the context.", "self_corrected": false}, {"annotator": 2, "id": "355-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Japanese civil war is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "355-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "no info about japan; irrelavant", "self_corrected": false}], "contradiction": [{"annotator": 3, 
"id": "355-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "China fought a civil war", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 49, "e": 1, "c": 50}} +{"id": "23901e", "context": "Then Shuman claims that Linux provides no graphical user interface.", "statement": "They made accusations about the platform.", "entailment": [{"annotator": 0, "id": "225-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We can infer from the claim in the context that the made accusations.", "self_corrected": false}, {"annotator": 1, "id": "225-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "That an operating system doesn't contain a graphical user interface can be called an accusation.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "225-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Linux provides no graphical user interface\" can be a common description or an accusation, the attitude needs more background to prove.", "self_corrected": false}, {"annotator": 3, "id": "225-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the claim being an accusation", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 8, "n": 22, "e": 70}} +{"id": "23280n", "context": "Sphinxes were guardian deitiesinEgyptianmythologyandthis was monumentalprotection,standing73 m (240 ft)longand20 m (66 feet) high.", "statement": "Sphinxes were put in the tombs to protect the dead.", "entailment": [], "neutral": [{"annotator": 0, "id": "1427-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no information in the context about where Sphinxes were placed.", "self_corrected": false}, {"annotator": 1, "id": "1427-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "It is not clear from the context what exactly the sphinxes protected.", "self_corrected": false}, {"annotator": 2, "id": "1427-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sphinxes' location is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1427-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether if Sphinxes were put into the tombs", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 70, "e": 24, "c": 6}} +{"id": "135247c", "context": "The original wax models of the river gods are on display in the Civic Museum.", "statement": "They have models made out of clay.", "entailment": [], "neutral": [{"annotator": 0, "id": "1292-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention about the clay models.", "self_corrected": false}, {"annotator": 1, "id": "1292-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They could also have models made out of clay next to those made out of wax.", "self_corrected": false}, {"annotator": 2, "id": "1292-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context it just refered to a wax model, but provided no information about clay model.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1292-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is out of wax", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 57, "n": 38, "e": 5}} +{"id": "58357c", "context": "What changed?", "statement": "Nothing changed.", "entailment": [], "neutral": [{"annotator": 1, "id": "95-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker doesn't assert anything about whether something changed.", "self_corrected": false}, {"annotator": 3, "id": 
"95-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context is a questions. The statement is answer, but can not be entailed.", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 76, "c": 20, "e": 4}} +{"id": "19803e", "context": "But there's SOMETHING.", "statement": "Surely there's something.", "entailment": [{"annotator": 0, "id": "581-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that there is something.", "self_corrected": false}, {"annotator": 1, "id": "581-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "581-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The uppercase of \"something\" can express \"surely\".", "self_corrected": false}, {"annotator": 3, "id": "581-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is something despite of the tone", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "581-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"Surely\" can not find a correspondant word in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 90, "n": 8, "c": 2}} +{"id": "144753n", "context": "When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' 
'", "statement": "He won't care how many innocent people will suffer.", "entailment": [{"annotator": 0, "id": "691-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The question in the context could be a rhetorical question to which the speaker already has a standard answer, which is that he won't care.", "self_corrected": false}, {"annotator": 3, "id": "691-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He is preparing for a major striking, which will cause many innocents to suffer", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "691-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether he cares that innocents will suffer.", "self_corrected": false}, {"annotator": 2, "id": "691-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His attitude towards the suffer of innocents is not given in the context, he maybe cares, maybe not.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 49, "n": 45, "c": 6}} +{"id": "15100e", "context": "but uh that has been the major change that we have noticed in gardening and that's about the extent of what we've done just a little bit on the patio and uh and waiting for the the rain to subside so we can mow we after about a month we finally got to mow this weekend", "statement": "We have not done much gardening yet because of the rain.", "entailment": [{"annotator": 0, "id": "893-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that them have only done a little gardening and needed to wait for the rain to die down, which suggests that the reason was raining.", "self_corrected": false}, {"annotator": 1, "id": "893-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speakers says that they did only a little gardening because of the rain.", "self_corrected": false}, {"annotator": 3, "id": "893-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We plan to mow after the rain subside", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "893-neutral-1", "judgments": 
[{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We have done just a little bit, but the reason can be the rain or something else like temperature.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 12}} +{"id": "28507n", "context": "It is, as you see, highly magnified.", "statement": "It is plain for you to see that it is amplified.", "entailment": [{"annotator": 0, "id": "438-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that it is magnified.", "self_corrected": false}, {"annotator": 1, "id": "438-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "438-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"highly magnified\" can be interpreted \"amplified\".", "self_corrected": false}, {"annotator": 3, "id": "438-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It can be seen, and it is magnified", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 76, "c": 10, "n": 14}} +{"id": "123748n", "context": "There are many such at the present time.", "statement": "There are over two currently.", "entailment": [{"annotator": 1, "id": "780-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If there are many then there are certainly over two.", "self_corrected": false}, {"annotator": 2, "id": "780-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Many\" ususally is over two.", "self_corrected": false}, {"annotator": 3, "id": "780-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], 
"label_correction": false, "reason": "Many means more than two", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "780-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The word \"many\" in the context indicates more than one, but we don't know if there are more than two.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 82, "n": 15, "c": 3}} +{"id": "111338c", "context": "He threw one of them and shot the other.", "statement": "He kept his gun holstered.", "entailment": [], "neutral": [{"annotator": 1, "id": "1025-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether he had other shooting weapons than his gun.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1025-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement indicates that he didn't use the gun.", "self_corrected": false}, {"annotator": 2, "id": "1025-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He \"shot\" one, so he need to pull his gun out of the holster.", "self_corrected": false}, {"annotator": 3, "id": "1025-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He shot one of them", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 90, "n": 7, "e": 3}} +{"id": "52171c", "context": "For such a governmentwide review, an entrance conference is generally held with applicable central agencies, such as the Office of Management and Budget (OMB) or the Office of Personnel Management.", "statement": "An entrance conference is held with specialized agencies.", "entailment": [{"annotator": 0, "id": "118-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Applicable central agencies mentioned in the context are indeed specialized agencies.", "self_corrected": false}, {"annotator": 1, "id": "118-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, 
"makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context with less detail.", "self_corrected": false}, {"annotator": 2, "id": "118-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Office of Management and Budget (OMB) or the Office of Personnel Management are specialized agencies.", "self_corrected": false}, {"annotator": 3, "id": "118-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the conference is held with applicable central agencies. They can then be considered as specialized", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "118-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "For a governmentwide review the statement is true, but for a normal entrance conference, it is could be held with specialized agencies or any temporary agencies.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 84, "n": 8, "c": 8}} +{"id": "90548e", "context": "Splendid!", "statement": "The speaker is excited by the situation.", "entailment": [{"annotator": 0, "id": "169-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The comment \"splendid\" and the exclamation mark indicate that the speaker is excitied.", "self_corrected": false}, {"annotator": 1, "id": "169-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker calls out \"splendid!\" so they are probably excited about the situation.", "self_corrected": false}, {"annotator": 2, "id": "169-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Splendid\" means very good, and the exclamation mark also conveys the speaker's excitement.", "self_corrected": false}, {"annotator": 3, "id": "169-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Splendid entails excitement", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, 
"entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 72, "n": 27, "c": 1}} +{"id": "81842c", "context": "Answer? said Julius.", "statement": "Julius already knew the answer.", "entailment": [{"annotator": 0, "id": "1149-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Julius is asking someone for answer, he might already know the right answer and wants to know if others do.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1149-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether Julius is really asking for the answer.", "self_corrected": false}, {"annotator": 3, "id": "1149-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was rather a question from Julius, we do not know if he knows the answer", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1149-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Julius is asking for answer, the question might indicate that he didn't know the answer.", "self_corrected": false}], "idk": [2], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 71, "c": 23, "e": 6}} +{"id": "91601n", "context": "Even today, Yanomamo men raid villages, kill men, and abduct women for procreative purposes.", "statement": "Yanomamo eats food.", "entailment": [], "neutral": [{"annotator": 0, "id": "1380-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention if Yanomamo eats food.", "self_corrected": false}, {"annotator": 2, "id": "1380-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Raiding villages, killing men and abducting women have nothing to do with food supply.", "self_corrected": false}, {"annotator": 3, "id": "1380-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, 
"entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 8, "n": 71, "c": 21}} +{"id": "118415n", "context": "John Panzar has characterized street delivery as a bottleneck function because a single firm can deliver to a recipient at a lower total cost than multiple firms delivering to the same customer.", "statement": "John Panzar believes in nationalizing all postal delivery services and couriers into a single entity for cost-saving purposes.", "entailment": [{"annotator": 2, "id": "974-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "John Panzer supposes that delivering to one customer by only one firm costs fewer than multiple firms.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "974-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the John Panzar's thoughts about street delivery, not the proposal to nationalize delivery services.", "self_corrected": false}, {"annotator": 1, "id": "974-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear what John Panzar's stance on nationalizing postal services is.", "self_corrected": false}, {"annotator": 2, "id": "974-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "John Panzer didn't mention the way how could one customer only received delivery from one firm, it could be nationalization, but also could be others like monopoly.", "self_corrected": false}, {"annotator": 3, "id": "974-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "exaggeration: No info about John Panzar's believe and ambitions", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 42, "c": 5, "e": 53}} +{"id": "113644n", "context": "so do you have do you have the long i guess not not if there's see i was raised in New York but i guess up there you all don't have too long of a growing season do you", "statement": "I am looking for a written guide to growing plants in different places in the country.", "entailment": [], "neutral": [{"annotator": 0, "id": "716-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Althought the speaker asks about the growing season in the 
context, the reason is not mentioned.", "self_corrected": false}, {"annotator": 1, "id": "716-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the speaker is looking for a guide or simply asking a question.", "self_corrected": false}, {"annotator": 3, "id": "716-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "not clear context; potential irrelevance", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 70, "c": 22, "e": 8}} +{"id": "47260n", "context": "The good news, however, can be found in reports like this one.", "statement": "The good news is that the puppy's life was able to be saved.", "entailment": [], "neutral": [{"annotator": 0, "id": "1428-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what is the good news.", "self_corrected": false}, {"annotator": 1, "id": "1428-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear what the good news is.", "self_corrected": false}, {"annotator": 2, "id": "1428-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The topic of good news can be puppy's life or cat's life or parrot's life or anything.", "self_corrected": false}, {"annotator": 3, "id": "1428-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about a puppy in the context", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 86, "c": 11, "e": 3}} +{"id": "129081e", "context": "right oh they've really done uh good job of keeping everybody informed of what's going on sometimes i've wondered if it wasn't almost more than we needed to know", "statement": "After sharing all information with everyone, I think I may have shared too much.", "entailment": [], "neutral": [{"annotator": 1, "id": "1536-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": 
false, "reason": "The speaker talks about \"they\" sharing the information not about themselves.", "self_corrected": false}, {"annotator": 2, "id": "1536-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not given in the context whether I have shared all information, or do I think I have shared too much.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1536-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not the speaker who shares the information with everybody, it's them.", "self_corrected": false}, {"annotator": 2, "id": "1536-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not I but they who shared all information.", "self_corrected": false}, {"annotator": 3, "id": "1536-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I\" did not share the information, \"they\" did", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 63, "n": 20, "c": 17}} +{"id": "18189e", "context": "The important thing is to realize that it's way past time to move it.", "statement": "It has not been moved yet in the past.", "entailment": [{"annotator": 0, "id": "1006-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that it's no longer the time to move, which indicates that it hasn't been moved yet.", "self_corrected": false}, {"annotator": 3, "id": "1006-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because it is too late to move it now, so in the past it was not moved", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1006-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It might have been moved in the past and now is needed to be moved again.", "self_corrected": false}, {"annotator": 2, "id": "1006-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context states it is not time to move it now, but the history of movement is not refered 
to.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 56, "e": 38, "c": 6}} +{"id": "91797c", "context": "We know they will have to come from the south but that gives them a space as wide as the town in which to launch their attack.", "statement": "The south is totally protected against an attack.", "entailment": [], "neutral": [{"annotator": 2, "id": "1221-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "In the context, it is analysed that they will attack from the south, but the protection of the south is not measurable.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1221-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that the south is the right place to launch the attack, which indicates that the south cannot totally defend itself against an attack.", "self_corrected": false}, {"annotator": 1, "id": "1221-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The space in the south makes it not totally protected.", "self_corrected": false}, {"annotator": 3, "id": "1221-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is a space in the south as wide as the town to launch their attack", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 61, "n": 37, "e": 2}} +{"id": "917c", "context": "eligible individuals and the rules that apply if a state does not substantially enforce the statutory requirements.", "statement": "It does not matter whether or not a state enforces the statutory requirements.", "entailment": [], "neutral": [{"annotator": 2, "id": "1193-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can only make sure, if a state does not enforces the statutory requirements, the rules apply; otherwise, we don't know whether the rules take effect or not.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1193-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], 
"label_correction": false, "reason": "It matters whether the state enforces the statuory requirements because then other rules apply.", "self_corrected": false}, {"annotator": 3, "id": "1193-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are rules that apply if the state does not enforces the statutory requirements, meaning it does matter that the state do enforces these requirements. Because if not, then there is no need for those rules to exist", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 46, "e": 11, "c": 43}} +{"id": "89995c", "context": "yeah then you don't have you don't have that mess to clean up when you use an oil oil base painting and boy i'll tell you oh", "statement": "Typically oil based paints are easy to work with and clean up.", "entailment": [{"annotator": 3, "id": "493-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}], "label_correction": false, "reason": "Because in the context, \"you\" didn't use oil based paints, so \"you\" have a mess to clean up", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "493-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether oil based paints are also easy to work with.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 65, "n": 19, "c": 16}} +{"id": "33822e", "context": "Why shouldn't he be?", "statement": "There is no reason he shouldn't be.", "entailment": [{"annotator": 1, "id": "395-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "395-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question is open. 
It is not known if there is reason or not.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 68, "n": 27, "c": 5}} +{"id": "42983e", "context": "The town is also known for its sparkling wine and for the caves where about 70 per?\u00adcent of France's cultivated mushrooms are grown.", "statement": "The town has a lot of sparkling wine.", "entailment": [{"annotator": 2, "id": "1376-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The town is famous for its sparkling wine, so it should have lots of sparkling wine.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1376-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could also be a small amount of sparkling wine for which the place is famous.", "self_corrected": false}, {"annotator": 3, "id": "1376-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is only known about the reputation of the town's wine, but not known about the quantity of the wine", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 72, "n": 28}} +{"id": "6105e", "context": "Asked about abortion the other day on CNN, Republican National Committee Chairman Jim Nicholson also invoked what is apparently the party-line inclusive party.", "statement": "The Republican National Committee Chairman gave the party's standard answer on the subject of abortion when he was asked about it on CNN.", "entailment": [{"annotator": 0, "id": "1060-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I think the party-line mentioned in the context refers to the party's standard answer.", "self_corrected": false}, {"annotator": 1, "id": "1060-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1060-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Party-line\" typically refers to a position or 
stance that aligns with the official position or policies of a political party, so it can be refered to the \"standard answer\".", "self_corrected": false}, {"annotator": 3, "id": "1060-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He toed the party line, meaning he said what is in line with the party's agenda", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 2, "id": "1060-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"Inclusive party\" suggests a political party that welcomes diverse views and members, so it should not be the party's typical answer.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 22, "e": 74, "c": 4}} +{"id": "26143n", "context": "However, the associated cost is primarily some of the costs of assessing and collecting duties on imported merchandise, such as the salaries of import specialists (who classify merchandise) and the costs of processing paperwork.", "statement": "the associated cost is how much people spend relative to this amount", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "234-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The associated cost is not in the general sense of how much people spend, but is specifically defined in the context.", "self_corrected": false}, {"annotator": 2, "id": "234-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, \"the associated cost is primarily some of the costs of assessing and collecting duties on imported merchandise\", it is about the goods, so the description \"relative to this amount\" is different from the definition given before.", "self_corrected": false}, {"annotator": 3, "id": "234-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's the cost of assessing and collecting duties", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 24, "c": 39, "n": 37}} +{"id": "100768n", "context": "well in a way you can travel light", "statement": "You won't need to pack much.", "entailment": [{"annotator": 0, "id": "328-entailment-1", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Traveling light means exactly no need to pack much.", "self_corrected": false}, {"annotator": 1, "id": "328-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If you can travel light, then you don't need to pack much.", "self_corrected": false}, {"annotator": 2, "id": "328-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Travel light\" means exactly not packing much.", "self_corrected": false}, {"annotator": 3, "id": "328-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Traveling light entails traveling with a small package", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "328-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"In a way\" suggests that it could be not the usual meaning of travelling light.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 85, "n": 15}} +{"id": "82510e", "context": "although the uh it's uh it we almost one day we painted the house to uh we painted we painted the whole inside and it had all this dark trim we thought uh you know we did the one wall but the other trim i'm trying to think i think i think we left most of it because it gets to be uh they don't do that in the newer houses now we don't the uh mold everything is white in a new house everything is white", "statement": "We painted the house over the duration of one day.", "entailment": [{"annotator": 0, "id": "1190-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that the speaker spent almost a day to paint the house, so the time they spent is indeed over the duration if one day.", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [1, 2, 3], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 39, "e": 53, "c": 8}} +{"id": "102563n", "context": "The judge gave vent to a faint murmur of disapprobation, and the prisoner in the dock leant forward angrily.", "statement": "The judge ordered the court to be silent.", "entailment": [], "neutral": [{"annotator": 1, "id": "424-neutral-1", 
"judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about slience.", "self_corrected": true}, {"annotator": 2, "id": "424-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The judge \"gave vent to a faint murmur of disapprobation\" can not be taken as an order for quiet.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "424-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The judge only made a murmur and didn't give a direct order.", "self_corrected": false}, {"annotator": 3, "id": "424-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He let the crowd murmur", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 45, "c": 31, "e": 24}} +{"id": "48553c", "context": "Keep your eyes open for Renaissance details, grand doorways, and views into lovely courtyards.", "statement": "All of the doorways and courtyards have been completely remodeled since the Renaissance.", "entailment": [], "neutral": [{"annotator": 0, "id": "430-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention if the doorways and courtyards were remodeled since the Renaissance.", "self_corrected": false}, {"annotator": 2, "id": "430-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not garanteed that all of the objects mentioned in the context have been completely remodeled, maybe part of them kept the same as before the Renaissance.", "self_corrected": false}, {"annotator": 3, "id": "430-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether they have been remodeled or not. 
They could also be built in the Renaissance time", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "430-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the doorways and courtyards would have been remodeled since the Renaissance, then it wouldn't make sense to look for the Reneaissance details.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 53, "c": 44, "e": 3}} +{"id": "54458n", "context": "This one ended up being surprisingly easy!", "statement": "This question was very easy to answer.", "entailment": [{"annotator": 0, "id": "1029-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both context and statement suggest the easiness of what they talk about.", "self_corrected": false}, {"annotator": 1, "id": "1029-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1029-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is true because the question \"ended up being surprisingly easy\".", "self_corrected": false}, {"annotator": 3, "id": "1029-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A question ended up easy, meaning it's easy to answer", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 81, "n": 19}} +{"id": "17576n", "context": "The percent of total cost for each function included in the model and cost elasticity (with respect to volume) are shown in Table 1.", "statement": "Table 1 also shows a picture diagram for each function.", "entailment": [{"annotator": 3, "id": "67-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "And this diagram shows the cost for each function", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "67-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, 
{"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention if Table 1 presents the picture diagram.", "self_corrected": false}, {"annotator": 1, "id": "67-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear in which way the values are presented in Table 1.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "67-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, Table 1 shows the \"the percent of total cost for each function\" and \"cost elasticity\", not picture diagram.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 64, "e": 27, "c": 9}} +{"id": "103431n", "context": "In addition, the senior executives at these organizations demonstrated their sustained commitment to financerelated improvement initiatives by using key business/line managers to drive improvement efforts, attending key meetings, ensuring that the necessary resources are made available, and creating a system of rewards and incentives to recognize those who support improvement initiatives.", "statement": "This system of rewards and incentives will hopefully improve company performance.", "entailment": [{"annotator": 0, "id": "343-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The rewards and incentives will be given to those who support improvement initiatives, so the company is more likely to improve their proferance under these rewards incentives.", "self_corrected": false}, {"annotator": 3, "id": "343-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The measurement taken is thought for finance-related improvenemt.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "343-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}], "label_correction": false, "reason": "The system of rewards and incentives", "self_corrected": true}, {"annotator": 1, "id": "343-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether the speaker hopes that the rewards and incentives improve company performance.", "self_corrected": false}, {"annotator": 2, "id": "343-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The measures mentioned in the 
context can not prove their effect \"will hopefully improve company performace\" or not.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 83, "n": 16, "c": 1}} +{"id": "64123c", "context": "Per week?", "statement": "Every day.", "entailment": [], "neutral": [{"annotator": 0, "id": "1104-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The context is a question and the statement is an answer. But I don't know whether the statement is exactly the answer to the question in the context.", "self_corrected": false}, {"annotator": 3, "id": "1104-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "From a question can not be entailed to a answer", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1104-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "per week means once every seven days", "self_corrected": true}], "idk": [1, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 24, "c": 74, "e": 2}} +{"id": "56163c", "context": "She would be almost certainly sent to you under an assumed one.", "statement": "The man told the other man that Bill would be sent to him.", "entailment": [], "neutral": [{"annotator": 0, "id": "114-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention who told whom.", "self_corrected": false}, {"annotator": 2, "id": "114-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Bill is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "114-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The gender of Bill and \"she\" and \"you\" is unknown", "self_corrected": false}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 60, "n": 38, "e": 2}} +{"id": "11297c", "context": "Transforming Control of Public Health 
Programs Raises Concerns (", "statement": "Everyone is content with the change of public health programs.", "entailment": [], "neutral": [{"annotator": 0, "id": "1321-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context doesn't mention whether people are satisfied with the programs.", "self_corrected": true}, {"annotator": 3, "id": "1321-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "exaggeration: \"Raising concerns\" doesn't mean that everyone is concerned. Maybe someone is not concerned and the context will still hold", "self_corrected": true}], "contradiction": [{"annotator": 1, "id": "1321-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The program raises concerns, so not everyone is content with it.", "self_corrected": false}, {"annotator": 2, "id": "1321-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, because the change of control of Public Health Programs \"raises concerns\", there must be someone not content with it.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 24, "c": 76}} +{"id": "32889n", "context": "Extremely limited exceptions to the authority are established in 31 U.S.C.", "statement": "They were trying to eliminate all exceptions.", "entailment": [], "neutral": [{"annotator": 0, "id": "807-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that there are extremely limited exceptions, but not whether someone wanted to eliminate the exceptions.", "self_corrected": false}, {"annotator": 1, "id": "807-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether they tried to eliminate the exceptions.", "self_corrected": false}, {"annotator": 2, "id": "807-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The limitation of exceptions will not definitey lead to eliminating all exceptions.", "self_corrected": false}, {"annotator": 3, "id": "807-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, 
"makes_sense": false}], "label_correction": false, "reason": "The motive of them is unknown. Maybe they didn't try to eliminate all, but just eliminate the unnecessary ones.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 62, "c": 28, "e": 10}} +{"id": "19c", "context": "On the northern slopes of this rocky outcropping is the site of the ancient capital of the island, also called Thira, which dates from the third century b.c. (when the Aegean was under Ptolemaic rule).", "statement": "Is the site of the ancient asteroid impact, also called Thira.", "entailment": [], "neutral": [{"annotator": 0, "id": "392-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if the site of the ancient capital mentioned in the context is also the site of the ancient asteroid impact.", "self_corrected": false}, {"annotator": 1, "id": "392-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether there was an asteroid impact on Thira.", "self_corrected": false}, {"annotator": 2, "id": "392-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"The ancient asteroid impact\" is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "392-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is the ancient capital of the island, not an asteroid impact", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 59, "e": 8, "c": 33}} +{"id": "32754e", "context": "After shuttering the DOE, Clinton could depict himself as a crusader against waste and bureaucracy who succeeded where even Reagan failed.", "statement": "Clinton shuttered the DOE to move against waste.", "entailment": [{"annotator": 1, "id": "1016-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If shuttering the DOE allowed Clinton to appear as a crusader against waste then it probably was done to do something against waste.", "self_corrected": false}, {"annotator": 3, "id": "1016-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "He shuttered the DOE, and were depicted as crusader against waste", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1016-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The context only suggests that Clinton can describe the reason as being against waste, but it is not clear if this reason is actually the truth.", "self_corrected": false}, {"annotator": 2, "id": "1016-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"Clinton could depict himself as a crusader against waste and bureaucracy\" doesn't mean it is his true intention is \"to move against waste.\" Perhaps he really is just trying to save energy, but maybe he has other intentions.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 18, "e": 80, "c": 2}} +{"id": "12815n", "context": "yeah well that's my uh i mean every time i've tried to go you know it's always there's there's always a league bowling", "statement": "Every time I try to go bowling there are leagues only and I can't bowl.", "entailment": [{"annotator": 0, "id": "1098-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both context and statement suggests that the speaker can't go bowling because of the leagues.", "self_corrected": false}, {"annotator": 1, "id": "1098-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1098-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Usually if the place is occupied by a league, then other people can not play there.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "1098-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe the place is big enough, so even the there is a league bowling, I can still bowl", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 56, "n": 44}} +{"id": "135898c", "context": "The end is near! 
Then a shout went up, and Hanson jerked his eyes from the gears to focus on a group of rocs that were landing at the far end of the camp.", "statement": "It's all over, Hanson whispered as he stared at the gears.", "entailment": [], "neutral": [{"annotator": 0, "id": "456-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if \"It's all over\" was Hanson's whisper.", "self_corrected": false}, {"annotator": 1, "id": "456-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether or what Hanson whispered", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "456-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The end is near, meaning it's soon to be over but not all over yet", "self_corrected": true}], "idk": [2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 44, "n": 44, "e": 12}} +{"id": "82830n", "context": "In the 19th century, when Kashmir was the most exotic hill-station of them all, the maharaja forbade the British to buy land there, so they then hit on the brilliant alternative of building luxuriously appointed houseboats moored on the lakes near Srinagar.", "statement": "The maharaja allowed the British to build houseboats on the lakes.", "entailment": [{"annotator": 1, "id": "140-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The British did build houseboats on the lakes, so the maharaja must at least have tolerated it.", "self_corrected": false}, {"annotator": 3, "id": "140-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The British was forbidden to buy land in Kashmir, so they buit houseboats on the lakes near Srinagar. 
Because they built it, so they must have been allowed", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "140-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions what the maharaja forbade, not what he allowed.", "self_corrected": true}, {"annotator": 2, "id": "140-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The British are allowed to moor houseboats on the lakes, but whether it is allowed for them to \"build\" houseboats is not mentioned.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 60, "n": 20, "c": 20}} +{"id": "74509n", "context": "Under the budget deal, by 2002, national defense will consume about $273 billion a year compared with $267 billion now.", "statement": "The United States national defense budget will increase by 6 billion dollars.", "entailment": [{"annotator": 0, "id": "771-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The budget increases from 267 to 273 billion, an increase of $6 billion.", "self_corrected": false}, {"annotator": 1, "id": "771-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "$273 billion - $267 billion = $6 billion", "self_corrected": false}, {"annotator": 2, "id": "771-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is true, because 273-267 = 6.", "self_corrected": false}, {"annotator": 3, "id": "771-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "right now the budget is 267 Million, in 2002 it will be 273 Million, making the incease by 6 million", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 87, "c": 9, "n": 4}} +{"id": "98944c", "context": "evaluation questions.", "statement": "Only statements of the evaluation are available.", "entailment": [], "neutral": [{"annotator": 2, "id": "1577-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The state of statements of the evaluation is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1577-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [0, 1], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 33, "n": 55, "e": 12}} +{"id": "19208c", "context": "Hearty Sabbath meals.", "statement": "Hearty meals will only be offered to Buddhists", "entailment": [], "neutral": [], "contradiction": [{"annotator": 1, "id": "316-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Buddhists typically don't celebrate Sabbath, so the hearty meals will most likely be offered to Jews.", "self_corrected": false}, {"annotator": 2, "id": "316-contradiction-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sabbath is Judaism's day, it doesn't make sense that hearty meals will only be offered to Buddhists, not to jewish people.", "self_corrected": false}, {"annotator": 3, "id": "316-contradiction-3", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is jewish traditional meals", "self_corrected": false}], "idk": [0], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 52, "n": 47, "e": 1}} +{"id": "117576e", "context": "but i don't know you know maybe you could do that for a certain period of time but i mean how long does that kind of a thing take you know to to um say to question the person or to get into their head", "statement": "It might take a long time to do that because getting inside a person's head takes time.", "entailment": [{"annotator": 2, "id": "241-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is true, because the possibility exists, that it takes a long time to get into their head.", "self_corrected": false}, {"annotator": 3, "id": "241-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context states it is not known how long it would take, so it might take a long time", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "241-neutral-1", "judgments": [{"annotator": 0, "makes_sense": 
true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement seems to be a continuation of the context, the reason why it meight take a long time is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "241-neutral-2", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear whether it will take to get into a person's head.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 69, "n": 28, "c": 3}} +{"id": "50484c", "context": "All of our many earnest experiments produced results in line with random chance, they conclude.", "statement": "The experiments proved it was a much better predictor.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1297-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The results are in line with random chance, which indicates that it was not a better predictor, it was just random.", "self_corrected": false}, {"annotator": 1, "id": "1297-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If results were in line with random chance, it was not a good predictor.", "self_corrected": false}, {"annotator": 2, "id": "1297-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, if it was a much better predictor, then the earnest experiments should produce results much better than random chance.", "self_corrected": false}, {"annotator": 3, "id": "1297-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because the results are random, so the predictor is not good", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 64, "n": 28, "e": 8}} +{"id": "98445c", "context": "It seeks genuine direct elections after a period that is sufficient to organize alternative parties and prepare a campaign based on freedom of speech and other civil rights, the right to have free trade unions, the release of more than 200 political prisoners, debt relief, stronger penalties for corruption and pollution, no amnesty for Suharto and his fellow thieves, and a respite for the poor 
from the hardest edges of economic reform.", "statement": "The only thing that can our society is more power to the presidential electors.", "entailment": [], "neutral": [{"annotator": 0, "id": "825-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't contain any information about the power of the presidential electors.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "825-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}], "label_correction": true, "reason": "There is stronger penalties for corruption, so more restrictions on the presidential electors.", "self_corrected": true}], "idk": [1, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 17, "e": 4, "n": 79}} +{"id": "7856c", "context": "but how do you know the good from the bad", "statement": "Why care if it's good or bad?", "entailment": [], "neutral": [{"annotator": 3, "id": "1580-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Two different questions", "self_corrected": false}], "contradiction": [], "idk": [0, 1, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 64, "c": 28, "e": 8}} +{"id": "8257n", "context": "'But if White has any designs at all on living, he'll be as far from Little as he can possibly get by now.'", "statement": "White should be afraid to come back to Little.", "entailment": [{"annotator": 0, "id": "1197-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests that White will die if he is with Little, we can infer that he should be afriaid of being with Litte.", "self_corrected": false}, {"annotator": 1, "id": "1197-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It sounds like Little is after White's life.", "self_corrected": false}, {"annotator": 2, "id": "1197-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, White should be far away from Little if he wants to live, so coming back to Little should be terrible, and he should be afraid of that.", "self_corrected": false}, {"annotator": 3, "id": "1197-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": 
true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If White has any planning on life, then he should not come back to Little. It means that Little is not a good place for living, so he should be afraid to come back to this place", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 74, "n": 24, "c": 2}} +{"id": "141110n", "context": "yes well yeah i am um actually actually i think that i at the higher level education i don't think there's so much of a problem there it's pretty much funded well there are small colleges that i'm sure are struggling", "statement": "Small colleges usually have trouble with funding and resources.", "entailment": [{"annotator": 0, "id": "563-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that small colleges have funding issues.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "563-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker doesn't say anything about 'usually'. They only assert that there are some small colleges that are struggling.", "self_corrected": false}, {"annotator": 2, "id": "563-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Some small colleges are struggling, but it can be a common phenomenon, or may be quite rare.", "self_corrected": false}, {"annotator": 3, "id": "563-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker mentioned his college is well funded and then said some small colleges are struggling. 
It can not be concluded that all small colleges usually have trouble with funding", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 43, "e": 54, "c": 3}} +{"id": "84055n", "context": "Even if auditors do not follow such other standards and methodologies, they may still serve as a useful source of guidance to auditors in planning their work under GAGAS.", "statement": "GAGAS requires strict compliance for auditors to follow.", "entailment": [{"annotator": 0, "id": "1069-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context suggests that auditors need to plan their work under GAGAS.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1069-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear how strict GAGAS is.", "self_corrected": false}, {"annotator": 2, "id": "1069-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is not about GAGAS's requirements, we only know under GAGAS auditors can use other standards and methodologies as reference.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1069-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "GAGAS can also only serve as guidance to the auditors", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 52, "c": 29, "e": 19}} +{"id": "143789n", "context": "What a brilliantly innocuous metaphor, devised by a master manipulator to obscure his manipulations.", "statement": "The metaphor was created by the manipulator to convince people of something.", "entailment": [{"annotator": 0, "id": "321-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Manipulation does aim to make people believe certain things.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "321-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The purpose of the metaphor in the context is described as \"to obscure his manipulations\", whether he 
wanted to \"convince people of something\" is not given.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "321-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The metaphor was created to hide the manipulations, not for manipulating directly.", "self_corrected": false}, {"annotator": 3, "id": "321-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was created to hide his true intention to manipulate", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 7, "c": 5}} +{"id": "13911n", "context": "Changes in technology and its application to electronic commerce and expanding Internet applications will change the specific control activities that may be employed and how they are implemented, but the basic requirements of control will not have changed.", "statement": "Technology will make it so we have less control of activities.", "entailment": [], "neutral": [{"annotator": 1, "id": "466-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It doesn't say anything about whether the possibility for control will change.", "self_corrected": false}, {"annotator": 2, "id": "466-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Although \"the basic requirements of control will not have changed\", we don't know whether technology will bring more control of activies or less control.", "self_corrected": false}, {"annotator": 3, "id": "466-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned how the specific control activites will be changed. 
But it is sure that the basic requirements of control will not change", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "466-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Technology will change the employment and implementation of some control activities, but the basic requirements, which means the degree of control, will not be changed.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 35, "c": 52, "e": 13}} +{"id": "122062n", "context": "The order was founded by James VII (James II of England) and continues today.", "statement": "Kings frequently founded orders that can still be found today.", "entailment": [], "neutral": [{"annotator": 0, "id": "609-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the order founded by James VII, we know nothing about orders founded by other kings.", "self_corrected": false}, {"annotator": 1, "id": "609-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about a single order. 
We cannot infer that such orders are frequent.", "self_corrected": false}, {"annotator": 2, "id": "609-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The order founded by a king can continue until now, but it is not given whether it is frequently founded.", "self_corrected": false}, {"annotator": 3, "id": "609-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The frequency of founding orders is not known", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 63, "e": 36, "c": 1}} +{"id": "49396e", "context": "The road along the coastline to the south travels through busy agricultural towns and fishing villages untouched by tourism.", "statement": "There are no tourists on the road through the agricultural towns and fishing villages.", "entailment": [{"annotator": 0, "id": "738-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"untouched\" implies that these places are not visited by tourists.", "self_corrected": true}, {"annotator": 1, "id": "738-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The towns and villages are untouched by tourism so probably the route going through them is also relatively free of tourists.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "738-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can only know that the fishing villages are not the destination of tourists, but we can't promise there are no tourists on the road through it.", "self_corrected": false}, {"annotator": 3, "id": "738-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the road being touched by the tourists or not. 
The towns and villages are not touched by the tourists", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 72, "n": 24, "c": 4}} +{"id": "73444n", "context": "well they're so close to an undefeated undefeated season they can taste it and they wanna make history so i don't think they're gonna lack for motivation", "statement": "Unless they suffer any losses, they'll remain motivated.", "entailment": [{"annotator": 1, "id": "536-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They are motivated by being undefeated. This will persist unless they are defeated.", "self_corrected": true}, {"annotator": 3, "id": "536-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They are motivated to get a full win season. So if they suffer a loss, then they can no longer make history, so their motivation may be lacking", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "536-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what would happen if they suffer any losses, they may or may not lose motivation.", "self_corrected": true}, {"annotator": 2, "id": "536-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "After suffering any losses they maybe will lose motivation, but maybe still remain motivated.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 41, "e": 56, "c": 3}} +{"id": "79106e", "context": "The woman rolled and drew two spears before the horse had rolled and broken the rest.", "statement": "They were in rotation on the ground grabbing their weapons.", "entailment": [{"annotator": 0, "id": "557-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The woman and the horse were grabbing the spears. 
That they rolled indicates that they were on the ground.", "self_corrected": true}, {"annotator": 3, "id": "557-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The woman and horse both rolled. And the woman grabbed two spears", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "557-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The context only described a woman, we don't know who \"they\" are in the statement.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "557-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The horse most likely wasn't grabbing a weapon.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 51, "e": 33, "c": 16}} +{"id": "82230e", "context": "However, the other young lady was most kind.", "statement": "I received a warm welcome from the other young lady who was present.", "entailment": [], "neutral": [{"annotator": 0, "id": "1550-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what the young lady did to the speaker.", "self_corrected": false}, {"annotator": 1, "id": "1550-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Only because the woman was kind, she did not have to extend a warm welcome.", "self_corrected": false}, {"annotator": 2, "id": "1550-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The other young lady was most kind, but she could do anything to show her kindness, maybe to give a warm welcome, maybe something else.", "self_corrected": false}, {"annotator": 3, "id": "1550-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The young lady was kind, but we don't know what she did", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 2, "e": 64, "n": 34}} +{"id": "14280n", "context": "The author began with a 
set of hunches or hypotheses about what can go wrong in agency management, and what would be evidence supporting-or contradicting-these hypotheses.", "statement": "The hunches provided by the author weren't realistic as it pertains to agency management.", "entailment": [], "neutral": [{"annotator": 0, "id": "40-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the hunches provided by the author were realistic or not.", "self_corrected": false}, {"annotator": 1, "id": "40-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear how realistic the hypotheses were.", "self_corrected": false}, {"annotator": 2, "id": "40-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The judgement of the hunches provided by the author is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "40-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The hunches could be realistic, as the author provides potential evidence supporting these hypotheses", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 64, "e": 6, "c": 30}} +{"id": "19668c", "context": "okay and and i think we just hang up i don't think we have to do anything else", "statement": "We need to wait until they tell us what to do.", "entailment": [], "neutral": [{"annotator": 0, "id": "1232-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context doesn't talk about why they don't have to do anything.", "self_corrected": true}], "contradiction": [{"annotator": 1, "id": "1232-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They explicitly say that they have to only hang up and not do anything else.", "self_corrected": false}, {"annotator": 2, "id": "1232-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I don't think we have to do anything else\" means we don't need to do anything, so we don't need to wait.", "self_corrected": false}, {"annotator": 3, "id": "1232-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": 
true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't need to do anything, meaning also not waiting for them to tell us what to do", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 38, "c": 51, "e": 11}} +{"id": "12562n", "context": "David Cope, a professor of music at the University of California at Santa Cruz, claims to have created a 42 nd Mozart symphony.", "statement": "Music Professor David Cope who specializes in Mozart's music claims to have created Mozart's 42nd symphony.", "entailment": [{"annotator": 1, "id": "885-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "885-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They context doesn't mention the speciality of Professor David Cope.", "self_corrected": false}, {"annotator": 2, "id": "885-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Whether David Cope specialized in Mozart's music is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "885-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the specialization of the Music Professor", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 30, "e": 65, "c": 5}} +{"id": "111693e", "context": "The conspiracy-minded allege that the chains also leverage their influence to persuade the big publishers to produce more blockbusters at the expense of moderate-selling books.", "statement": "Big publishers want to produce more high budget films, even if that means badly selling books.", "entailment": [{"annotator": 1, "id": "208-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "208-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": 
false}], "label_correction": false, "reason": "According to the claim of conspiracy-minded, it's the chains that want to produce more blockbusters, not the big publishers. And we don't know if the claim is true.", "self_corrected": false}, {"annotator": 2, "id": "208-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Producing film is not mentioned in the context, so we don't know the attitude of big publishers to it.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "208-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is at the expense of moderate-selling books, not badly selling books", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": ["entailment"], "has_ambiguity": true, "chaosnli_labels": {"n": 33, "e": 43, "c": 24}} +{"id": "124839c", "context": "(A bigger contribution may or may not mean, I really, really support Candidate X.) Freedom of association is an even bigger stretch--one that Justice Thomas would laugh out of court if some liberal proposed it.", "statement": "A bigger contribution means to support candidate Y.", "entailment": [], "neutral": [{"annotator": 0, "id": "125-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They context doesn't talk about candidate Y.", "self_corrected": false}, {"annotator": 1, "id": "125-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Candidate Y is not mentioned at all.", "self_corrected": false}, {"annotator": 2, "id": "125-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A bigger contribution may or may not mean to support Candidate X, so the possibility of supporting Y exists, but not hundred percent.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "125-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, a bigger contribution can not prove any preference.", "self_corrected": true}, {"annotator": 3, "id": "125-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is to support candidate X", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 
1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 51, "n": 48, "e": 1}} +{"id": "72870n", "context": "Because marginal costs are very low, a newspaper price for preprints might be as low as 5 or 6 cents per piece.", "statement": "Many people consider these prices to be unfair to new printers.", "entailment": [], "neutral": [{"annotator": 0, "id": "1537-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention people's opinions on these prices.", "self_corrected": false}, {"annotator": 1, "id": "1537-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned whether these prices are unfair.", "self_corrected": false}, {"annotator": 2, "id": "1537-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "How do people think about the price for new printers is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "1537-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not known about the poeple's opinion on the price", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 91, "e": 2, "c": 7}} +{"id": "129601n", "context": "Took forever.", "statement": "Lasted two years", "entailment": [], "neutral": [], "contradiction": [{"annotator": 2, "id": "309-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Forever is longer than two years.", "self_corrected": false}, {"annotator": 3, "id": "309-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "two years are not forever", "self_corrected": false}], "idk": [0, 1], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 47, "n": 53}} +{"id": "26372n", "context": "Just like we have hairpins and powder-puffs.\" Tommy handed over a rather shabby green notebook, and Tuppence began writing busily.", "statement": "Tommy handed Tuppence an empty shabby green notebook.", 
"entailment": [{"annotator": 1, "id": "591-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The statement is a paraphrase of a part of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "591-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know if the notebook is empty.", "self_corrected": false}, {"annotator": 2, "id": "591-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The shabby green notebook can be empty or not.", "self_corrected": false}, {"annotator": 3, "id": "591-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether the notebook is empty", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 44, "n": 54, "c": 2}} +{"id": "74534n", "context": "And far, far away- lying still on the tracks- was the back of the train.", "statement": "The train wasn't moving but then it started up.", "entailment": [], "neutral": [{"annotator": 0, "id": "119-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether the train moved or not.", "self_corrected": false}, {"annotator": 2, "id": "119-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The movement of the train is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "119-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No info about whether the train started up", "self_corrected": true}], "contradiction": [{"annotator": 1, "id": "119-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The tarin was lying still, so it didn't start up.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], 
"has_ambiguity": true, "chaosnli_labels": {"n": 54, "c": 45, "e": 1}} +{"id": "63469c", "context": "It lacked intelligence, introspection, and humor--it was crass, worthy of Cosmopolitan or Star . I do have a sense of humor, but can only appreciate a joke when it starts with a grain of truth.", "statement": "The article won a Pulitzer Prize.", "entailment": [], "neutral": [{"annotator": 0, "id": "369-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the Pulitzer Prize.", "self_corrected": false}, {"annotator": 3, "id": "369-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the article winnning a prize", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "369-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Apparantely, the article was very bad, so it most likely did not win a Pulitzer.", "self_corrected": false}, {"annotator": 2, "id": "369-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The article was lacked intelligence, introspection, and humor, and that is not the taste of Pulitzer Prize.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 67, "c": 31, "e": 2}} +{"id": "141321n", "context": "It will be held in the Maryland woods, and the telecast will consist of jittery footage of the contestants' slow descent into madness as they are systematically stalked and disappeared/disqualified by Bob Barker.", "statement": "The show will be set in the woods north of Boston.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1244-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The show will be held in the Maryland woods, not the woods north of Boston.", "self_corrected": false}, {"annotator": 1, "id": "1244-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Boston is not near Maryland.", "self_corrected": false}, {"annotator": 2, "id": "1244-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, the show \"will be held in the Maryland woods\".", "self_corrected": false}, {"annotator": 
3, "id": "1244-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maryland is to the south of Boston", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 25, "e": 12, "c": 63}} +{"id": "68946c", "context": "It has served as a fortress for the Gallo-Romans, the Visigoths, Franks, and medieval French (you can see the layers of their masonry in the ramparts).", "statement": "The fortress was built by the medieval French in 1173.", "entailment": [], "neutral": [{"annotator": 0, "id": "1471-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention when and by whom the fortress was built.", "self_corrected": false}, {"annotator": 2, "id": "1471-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The building year of the fortress is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1471-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about who built the fortress. 
The medieval French had only used it, as did the Gallo-Romans.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1471-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Gallo-Romans were much earlier than 1173, so the fortress was built earlier.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 20, "n": 79, "e": 1}} +{"id": "41975c", "context": "Tommy realized perfectly that in his own wits lay the only chance of escape, and behind his casual manner he was racking his brains furiously.", "statement": "He'd been stuck for hours, starting to feel doubt crawl into his mind.", "entailment": [], "neutral": [{"annotator": 0, "id": "1313-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention in the context of how long he was stuck.", "self_corrected": false}, {"annotator": 1, "id": "1313-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether Tommy experienced doubt.", "self_corrected": false}, {"annotator": 2, "id": "1313-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know from which situation Tommy needs to escape, and the doubt didn't show up in the context.", "self_corrected": false}, {"annotator": 3, "id": "1313-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about how long he had been stuck.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 70, "c": 16, "e": 14}} +{"id": "100349e", "context": "He touched it and felt his skin swelling and growing hot.", "statement": "His skin was burning.", "entailment": [{"annotator": 1, "id": "1070-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The skin grew hot, so it was burning.", "self_corrected": false}, {"annotator": 3, "id": "1070-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "his skin was 
swelling and growing hot. They are signs of burning", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1070-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason that his skin was swelling and growing hot is not clear. It may or may not be because his skin was burning.", "self_corrected": true}, {"annotator": 2, "id": "1070-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His skin could be burning or maybe he was bitten by a bug", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 64, "n": 35, "c": 1}} +{"id": "107399c", "context": "Bush the elder came of age when New England Republicans led the party, and patrician manners were boons to a Republican.", "statement": "New England Republicans were weak.", "entailment": [], "neutral": [{"annotator": 2, "id": "770-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "New England Republicans could be weak or strong.", "self_corrected": false}, {"annotator": 3, "id": "770-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the New England Republicans being weak", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "770-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "That New England Republicans had patrician manners doesn't mean that they were weak.", "self_corrected": false}, {"annotator": 1, "id": "770-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "New England Republicans cannot have been weak because they led the party.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 30, "n": 68, "e": 2}} +{"id": "131623c", "context": "In the depths of the Cold War, many Americans suspected Communists had infiltrated Washington and were about to subvert our democracy.", "statement": "Communists assisted America's government during the Cold War.", "entailment": [], "neutral": [{"annotator": 0, "id": "117-neutral-1", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions American suspection. It may or may not be true.", "self_corrected": false}, {"annotator": 1, "id": "117-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is about what people suspected not about the facts.", "self_corrected": false}, {"annotator": 2, "id": "117-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context it is all about Americans' suspect", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "117-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It was suspected that the communists had infiltrated Washington and to subvert the democracy, which is the opposite of assisting America", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 57, "n": 40, "e": 3}} +{"id": "58954c", "context": "For an authentic feel of old Portugal, slip into the cool entrance hall of theimpressive Leal Senado ( Loyal Senate building), a fine example of colonial architecture.", "statement": "All that remains of Leal Senado is old ruins.", "entailment": [], "neutral": [{"annotator": 0, "id": "1166-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The current status of Leal Senado is not mentioned.", "self_corrected": false}, {"annotator": 2, "id": "1166-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The condition of Leal Senado is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1166-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Leal Senado is impressive and has an entrance hall, so it can hardly be only ruins.", "self_corrected": false}, {"annotator": 3, "id": "1166-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is entrance hall of Leal Senado, so it can not be ruins", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, 
"neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 43, "c": 54, "e": 3}} +{"id": "74377e", "context": "no chemicals and plus then you can use it as a fertilizer and not have to worry about spreading those chemicals like on your lawn or your bushes or whatever", "statement": "We don't want to use chemicals on our lawn", "entailment": [{"annotator": 0, "id": "271-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't need to worry about using them on the lawn because they are not chemicals, which suggests that we don't want to use chemicals on the lawn.", "self_corrected": false}, {"annotator": 1, "id": "271-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker says that they might worry about spreading the chemicals on the lawn, so they don't want that.", "self_corrected": false}, {"annotator": 3, "id": "271-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "when using chemicals, we need to worry about spreading the chemicals on the lawn", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 29, "e": 69, "c": 2}} +{"id": "130680e", "context": "We also have found that leading organizations strive to ensure that their core processes efficiently and effectively support mission-related outcomes.", "statement": "Leading organizations want to be sure their processes are successful.", "entailment": [{"annotator": 0, "id": "107-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Leading organizations strive to ensure the success of their processes.", "self_corrected": false}, {"annotator": 1, "id": "107-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "107-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because leading organizations strive to ensure their processes.", "self_corrected": false}, {"annotator": 3, "id": "107-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": 
true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The organizations strive to ensure their process to support the outcomes. So the process needs to be successful to provide support for the outcomes", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 91, "n": 9}} +{"id": "49227n", "context": "well that's uh i agree with you there i mean he didn't have the surrounding cast that Montana had there's no doubt about that", "statement": "I agree that he didn't have the same support as Montana, but he did well.", "entailment": [], "neutral": [{"annotator": 0, "id": "1539-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether he did well or not.", "self_corrected": false}, {"annotator": 1, "id": "1539-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the speaker thinks that he did well.", "self_corrected": false}, {"annotator": 2, "id": "1539-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I agree that he didn't have the same support as Montana, but he could did well, or bad.", "self_corrected": false}, {"annotator": 3, "id": "1539-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about where he did well or not", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 40, "n": 57, "c": 3}} +{"id": "124853e", "context": "H-2A agricultural workers are required to maintain a foreign residence which they have no intention of abandoning.", "statement": "Permanent foreign residence is required for some types of agricultural work visas.", "entailment": [{"annotator": 0, "id": "1078-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"H-2A\" is a type of agricultural work visa that mentioned in the statement.", "self_corrected": false}, {"annotator": 1, "id": "1078-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", 
"self_corrected": false}, {"annotator": 2, "id": "1078-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "H-2A agricultural workers need permanent foreign residence prove that some types of agricultural need permanent foreign residence.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "1078-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's those agricultural workers with H-2A visas needs to maintain the residence as part of their work but not a requirement for their application for the visa", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 84, "n": 16}} +{"id": "117892n", "context": "No, Dave Hanson, you were too important to us for that.", "statement": "No, Dave Hanson, we couldn't risk your life becaus you are too important to us.", "entailment": [], "neutral": [{"annotator": 0, "id": "273-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention anything about risking life.", "self_corrected": false}, {"annotator": 1, "id": "273-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the speakre talks about risking Hanson's life.", "self_corrected": false}, {"annotator": 2, "id": "273-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Dave's is too important to us for \"that\", but that can be anything, not definitely about living or death.", "self_corrected": false}, {"annotator": 3, "id": "273-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned if Dave is going to risk his life for that", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 54, "e": 46}} +{"id": "111243n", "context": "The pope, suggesting that Gen.", "statement": "Gen is being suggested by the Pope.", "entailment": [{"annotator": 0, "id": "1178-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that the pope suggested the Gen.", "self_corrected": false}, {"annotator": 1, "id": "1178-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1178-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, it is a switch of active voice and passive voice.", "self_corrected": false}, {"annotator": 3, "id": "1178-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 78, "n": 21, "c": 1}} +{"id": "77654e", "context": "but there's no uh inscriptions or or dates or anything else", "statement": "There aren't any dates on it?", "entailment": [], "neutral": [{"annotator": 3, "id": "158-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "question is not a statement", "self_corrected": false}], "contradiction": [], "idk": [0, 1, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 87, "n": 11, "c": 2}} +{"id": "110061n", "context": "If you have the energy to climb the 387 steps to the top of the south tower, you will be rewarded with a stunning view over the city.", "statement": "The south tower has the best view in the city.", "entailment": [], "neutral": [{"annotator": 0, "id": "1015-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The view of the south tower may or may not be the best.", "self_corrected": false}, {"annotator": 1, "id": "1015-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the stunning view is actually the best in the city.", "self_corrected": false}, {"annotator": 2, "id": "1015-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], 
"label_correction": false, "reason": "The context did not compare the view of the south tower with other places, so we don't know whether it has the best view.", "self_corrected": false}, {"annotator": 3, "id": "1015-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The south tower has a stunning view but it is not known if it is the best", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 73, "e": 26, "c": 1}} +{"id": "29844c", "context": "I am glad she wasn't, said Jon.", "statement": "Jon was sad that she wasn't happy.", "entailment": [], "neutral": [{"annotator": 1, "id": "374-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether Jon was talking about her being happy.", "self_corrected": false}, {"annotator": 2, "id": "374-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Jon's attitude to her happiness is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "374-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Jon was glad, not sad.", "self_corrected": false}, {"annotator": 3, "id": "374-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Jon was glad that she was not happy", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 46, "n": 54}} +{"id": "27335e", "context": "is there still that type of music available", "statement": "Is that genre of music still a thing?", "entailment": [{"annotator": 0, "id": "890-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and statement are about the same question.", "self_corrected": false}, {"annotator": 1, "id": "890-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], 
"neutral": [{"annotator": 3, "id": "890-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"is it a thing\" means \"is it popular\". A thing can be available but not popular", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 15, "e": 84, "c": 1}} +{"id": "87332c", "context": "Strange as it may seem to the typical household, capital gains on its existing assets do not contribute to saving as measured in NIPA.", "statement": "NIPA considers cat fur when it defines savings.", "entailment": [{"annotator": 2, "id": "824-entailment-1", "judgments": [{"annotator": 1, "makes_sense": false}], "label_correction": true, "reason": "Yes, because cat fur based on cats, which are existing assets, so it will not be counted by NIPA.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "824-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Cat fur is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "824-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Cat furs are not mentioned at all.", "self_corrected": false}, {"annotator": 3, "id": "824-neutral-3", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not clear statement", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 53, "n": 44, "e": 3}} +{"id": "53499c", "context": "my goodness it's hard to believe i didn't think there was anybody in the country who hadn't seen that one", "statement": "I thought I was the only one in this country who had seen it.", "entailment": [{"annotator": 3, "id": "871-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "I didn't think there was anybody seen that one. 
So I thought I was the only one in this country who had seen it", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "871-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is false because the speaker thought that everyone had seen that one.", "self_corrected": false}, {"annotator": 2, "id": "871-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I thought everybody in this country had seen it, so I am not the only one.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 23, "c": 68, "n": 9}} +{"id": "65199n", "context": "and i look back on that and i bought shoes i went shopping i did not need that money i did not need it i didn't need it i shouldn't have even qualified to get it i didn't need it and it would have been a little rough i might have eaten some bologna instead of roast beef out of the deli but i did not need it and as i look back now now we're paying that back i told my son if you have to live in the ghetto to go to college do it but don't take out ten thousand dollars in loans don't do it and i don't i hope don't think he'll have to do that but i just so like we might if we didn't have those loans we could have saved in the last five years the money for that and i believe we would have because God's really put it in our heart not to get in debt you know but we have friends at church that do this on a constant basis that are totally debt free and they pay cash for everything they buy", "statement": "I am envious of all my debt-free churchgoing friends.", "entailment": [], "neutral": [{"annotator": 0, "id": "1158-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the speaker's attitude toward the debt-free friends at church.", "self_corrected": false}, {"annotator": 1, "id": "1158-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the speaker is envious.", "self_corrected": false}, {"annotator": 3, "id": "1158-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known if i am envious of my debt-free friends", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], 
"error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 43, "n": 52, "c": 5}} +{"id": "88050c", "context": "If you have any questions about this report, please contact Henry R. Wray, Senior Associate General Counsel, at (202) 512-8581.", "statement": "Henry R. Wray can be reached at (555) 512-8581.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "51-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The number of Henry R. Wray mentioned in the statement is wrong", "self_corrected": false}, {"annotator": 1, "id": "51-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The phone number starts with (202) not with (555).", "self_corrected": false}, {"annotator": 2, "id": "51-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The number is wrong, it should be (202) 512-8581 not (555) 512-8581.", "self_corrected": false}, {"annotator": 3, "id": "51-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "wrong phone numbers", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 21, "c": 76, "n": 3}} +{"id": "34094e", "context": "No, monsieur.", "statement": "The speaker is answering no to a question.", "entailment": [{"annotator": 0, "id": "1448-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, the speaker did say no.", "self_corrected": false}, {"annotator": 1, "id": "1448-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "This is correct.", "self_corrected": false}, {"annotator": 2, "id": "1448-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the speaker said no.", "self_corrected": false}, {"annotator": 3, "id": "1448-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "monsieur states that the speaker is talking to a man and said no", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, 
"entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 11, "e": 88, "c": 1}} +{"id": "72875e", "context": "The policy succeeded, and I was fortunate to have had the opportunity to make that contribution to my people.", "statement": "Because the policy was a success, I was able to make a contribution to my people.", "entailment": [{"annotator": 0, "id": "287-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement mention the sucessful of the policy and the speaker's contribution to the people.", "self_corrected": false}, {"annotator": 1, "id": "287-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "287-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 2, "id": "287-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, the contribution I made to my people is the success of policy.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 11, "c": 1}} +{"id": "19921n", "context": "3) The gap between the productivity of women and the productivity of men.", "statement": "The gap of genders.", "entailment": [{"annotator": 2, "id": "1486-entailment-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the gap between the productivity of genders can prove there is a gap between genders.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "1486-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Statement does not specifiy the gap of WHAT of genders. 
In the context, it is the gap of productivity", "self_corrected": false}], "contradiction": [], "idk": [0, 1], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 83, "n": 15, "c": 2}} +{"id": "23583e", "context": "While obviously constrained by their bondage, blacks nonetheless forged a culture rich with religious observances, folk tales, family traditions, song, and so on.", "statement": "Clearly are constrained by their folk tales and traditions.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "553-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They are constrained by their bondage, not their folk tales and traditions.", "self_corrected": false}, {"annotator": 3, "id": "553-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They forges a rich culture with folk tales and traditions, which are not their constrans", "self_corrected": false}], "idk": [1, 2], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 52, "e": 14, "n": 34}} +{"id": "91106n", "context": "SSA is also seeking statutory authority for additional tools to recover current overpayments.", "statement": "SSA wants the authority to recover overpayments made to insurers.", "entailment": [], "neutral": [{"annotator": 0, "id": "487-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not mention to whom the overpayment was made.", "self_corrected": false}, {"annotator": 1, "id": "487-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the overpayments were made to insureres or to someone else.", "self_corrected": false}, {"annotator": 2, "id": "487-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The overpayments can be made to insurers or to other shops, or department of government, etc.", "self_corrected": false}, {"annotator": 3, "id": "487-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know to whom was the overpayments made.", "self_corrected": false}], "contradiction": [], "idk": [], 
"label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 36, "e": 61, "c": 3}} +{"id": "9557e", "context": "Tommy Thompson of Wisconsin and Mayor Rudolph Giuliani of New York, the conservative vanguard on the issue, show no inclination to exploit research that says, in effect, Why care about day-care quality?", "statement": "Thompson and Giuliani don't want to care about day cares.", "entailment": [{"annotator": 2, "id": "839-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "\"No inclination to exploit reserach\" can be understood as to study about day cares.", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "839-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Thompson and Giuliani don't want to exploit the research that doesn't care about day cares, which means they did care about day cares.", "self_corrected": false}, {"annotator": 3, "id": "839-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Thompson and Giuliani did not care about the research, that says not to care about the day-care quality. 
So they might actually care about day cares", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 38, "e": 46, "c": 16}} +{"id": "15537n", "context": "So unlike people who are fortunate enough to be able to afford attorneys and can go to another lawyer, our clients are simply lost in the legal system if they cannot get access to it from us.", "statement": "Our clients can barely afford our legal assistance.", "entailment": [{"annotator": 0, "id": "1259-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and statement suggest that their clients are not able to afford attorneys.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1259-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the clients have to pay for the services at all.", "self_corrected": false}, {"annotator": 2, "id": "1259-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The cost of our legal assistance is not given in the context, maybe it is free.", "self_corrected": false}, {"annotator": 3, "id": "1259-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned if our assistance is charged. 
It could be free.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 66, "n": 26, "c": 8}} +{"id": "3476n", "context": "apparently apparently the appraisers likes it because our taxes sure is high isn't it it really is", "statement": "We wished the taxes were lower.", "entailment": [], "neutral": [{"annotator": 0, "id": "652-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the speaker's wishes about the taxes.", "self_corrected": false}, {"annotator": 1, "id": "652-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the speaker wants the taxes to be lower.", "self_corrected": false}, {"annotator": 2, "id": "652-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Our taxes is high, but our attitude to high taxes is not shown in the context.", "self_corrected": false}, {"annotator": 3, "id": "652-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no info about our wishes", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 31, "n": 60, "c": 9}} +{"id": "56124n", "context": "Of how, when tea was done, and everyone had stood,He reached for my head, put his hands over it,And gently pulled me to his chest, which smelledOf dung smoke and cinnamon and mutton grease.I could hear his wheezy breathing now, like the prophet's Last whispered word repeated by the faithful.Then he prayed for what no one had time to translate--His son interrupted the old man to tell him a groupOf snake charmers sought his blessing, and a blind thief.The saint pushed me away, took one long look,Then straightened my collar and nodded me toward the door.", "statement": "When tea was done, he put his hands on me romantically.", "entailment": [{"annotator": 0, "id": "1219-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "When tea was done he put his hands over the speaker's head. 
According to the context, his moves were romantic.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1219-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether there was romantic intention.", "self_corrected": false}, {"annotator": 2, "id": "1219-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He was a \"saint\", so \"he put his hands on me\" could be romantically, but also could be nothing to do with romance, whereas about religion.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1219-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He is a saint, so it is more a religious practice rather than a romantice move", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": ["entailment"], "has_ambiguity": true, "chaosnli_labels": {"n": 30, "c": 39, "e": 31}} +{"id": "5193n", "context": "EPA estimates that 5.6 million acres of lakes, estuaries and wetlands and 43,500 miles of streams, rivers and coasts are impaired by mercury emissions.", "statement": "The release of mercury has an impact on rivers, streams and lakes", "entailment": [{"annotator": 0, "id": "723-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that mercury emissions inpact streams, rivers and lakes.", "self_corrected": false}, {"annotator": 1, "id": "723-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The mercury impairs the rivers, treams and lakes among others.", "self_corrected": false}, {"annotator": 2, "id": "723-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True because mercury emissions impaired lakes, estuaries, wetlands, streams, rivers and coasts.", "self_corrected": false}, {"annotator": 3, "id": "723-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A large land is impaired by mercury emissions, as reported by EPA. 
So the release of mercury has an impact on these natural bodies", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 99, "n": 1}} +{"id": "106390n", "context": "Mykonos has had a head start as far as diving is concerned because it was never banned here (after all, there are no ancient sites to protect).", "statement": "Protection of ancient sites is the reason for diving bans in other places.", "entailment": [{"annotator": 0, "id": "779-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that driving was never banned in Mykonos because there are no ancient sites to protect, which implies that protection of ancient site might be a reation for driving bans.", "self_corrected": false}, {"annotator": 1, "id": "779-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context states that Mykonos did not need to ban diving because there are no ancient sites to proect. This implies that other places banned diving for that reason.", "self_corrected": false}, {"annotator": 3, "id": "779-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Mykonos is not banned for diving, because there is no ancient site to protect. 
So if there is ancient sites, then it would be a reason for Mykonos to be banned for diving", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "779-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In other places, protection of ancients sites could be one reasonfor diving bans, but there could be other reasons like the danger for divers' life.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 32, "e": 60, "c": 8}} +{"id": "54327n", "context": "substitute my my yeah my kid'll do uh four or five hours this week for me no problem", "statement": "I just can't make the time because of my job.", "entailment": [], "neutral": [{"annotator": 0, "id": "13-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason the speaker can't make the time is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "13-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the speaker needs a substitute because of their job.", "self_corrected": false}, {"annotator": 3, "id": "13-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "No clear context", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "13-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"No problem\" means I can handle the time.", "self_corrected": false}], "idk": [2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 57, "c": 42, "e": 1}} +{"id": "2870n", "context": "Most menu prices include taxes and a service charge, but it's customary to leave a tip if you were served satisfactorily.", "statement": "Most customers will tip in addition to the tax on the menus.", "entailment": [{"annotator": 1, "id": "607-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}, {"annotator": 2, "id": "607-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], 
"label_correction": false, "reason": "Leaving a tip is \"customary\", so it should be a behaviour that most people do.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "607-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The customers usually tip additionally if the service was good. I don't know if most customers were served satisfactorily.", "self_corrected": false}, {"annotator": 2, "id": "607-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If people are satisfied with the service, they will usually leave a tip, but if they are not satisfied, maybe they will not leave a tip.", "self_corrected": false}, {"annotator": 3, "id": "607-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The tip is customary, but it is not known how often people tip", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 74, "n": 26}} +{"id": "63218n", "context": "Recently, however, I have settled down and become decidedly less experimental.", "statement": "I have lost my experimental nature due to old age.", "entailment": [], "neutral": [{"annotator": 0, "id": "1273-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason the speaker lost the experimental nature is not mentioned.", "self_corrected": false}, {"annotator": 1, "id": "1273-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the speaker is old.", "self_corrected": false}, {"annotator": 2, "id": "1273-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason for the lost of my experimental nature could be old age, or others like lack of money or poor health condition.", "self_corrected": false}, {"annotator": 3, "id": "1273-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason for becoming less experimental is unknown", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], 
"label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 74, "e": 23, "c": 3}} +{"id": "144753c", "context": "When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' '", "statement": "He won't do a big strike because of the innocent people.", "entailment": [], "neutral": [{"annotator": 2, "id": "863-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A big strike could influence many innocents, he could give up because of that consideratin, but also could still continue his pain.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "863-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The person is ready for a major strike. Thus he most likely is willing to do it.", "self_corrected": false}, {"annotator": 3, "id": "863-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"when he is ready for a major strike\" means he is not ready not but preparing for it.", "self_corrected": false}], "idk": [0], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 55, "c": 35, "e": 10}} +{"id": "73191n", "context": "To get a wonderful view of the whole stretch of river, and to stretch your legs in a beautiful parklike setting, climb up to the Ceteau de Marqueyssac and its jardins suspendus (hanging gardens).", "statement": "You will enjoy stretching your legs as you climb the Ceteau de Marqueyssac.", "entailment": [{"annotator": 3, "id": "972-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is put as a suggestion to climb up the Ceteau de Marqueyssac. 
During the climbing one should get wonderful view of the river and be able to strech his legs", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "972-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the people enjoy stretching their legs.", "self_corrected": false}, {"annotator": 2, "id": "972-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Whether stretching your legs is pleasant or annoying is not discussed in the context.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "972-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context indicate that people can only stretch their legs after climbing up to the Ceteau de Marqueyssac, not during the climb.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 34, "e": 61, "c": 5}} +{"id": "62273n", "context": "The book is a parody of Bartlett's , serving up quotes from Lincoln, Jefferson, and Roger Rosenblatt with equal pomposity.", "statement": "Bill Reilly's book has quotes from various presidents ranging from Lincoln to Jefferson.", "entailment": [{"annotator": 3, "id": "776-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "if assuming it is Bill Reilly's book.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "776-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the name Bill Reilly.", "self_corrected": false}, {"annotator": 1, "id": "776-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions only two presidents (Lincoln and Jefferson). 
This does not qualify as \"various\" for me.", "self_corrected": false}, {"annotator": 2, "id": "776-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No information of Bill Reilly's book is given in the context.", "self_corrected": false}, {"annotator": 3, "id": "776-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "if the book is not known to be Bill's", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 46, "n": 46, "c": 8}} +{"id": "58016c", "context": "(As the old saying goes, If you can't figure out who the fool is at the poker table, it's probably you.", "statement": "Dealers say everyone is smart that is playing.", "entailment": [], "neutral": [{"annotator": 0, "id": "454-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what the dealers say.", "self_corrected": false}, {"annotator": 2, "id": "454-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Dealers are not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "454-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "not relevant", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "454-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The saying implies that there is at least one fool at the poker table.", "self_corrected": true}, {"annotator": 2, "id": "454-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says, if people can not find the fool, then themselves are fools, so at least one guy who is playing is not smart.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 37, "c": 59, "e": 4}} +{"id": "45605n", "context": "They have prominent red protuberances and may have been named after the British redcoats.", "statement": "They were named after 
the redcoats because they are the same bright red color on their bodies.", "entailment": [], "neutral": [{"annotator": 0, "id": "528-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions the word \"may\", which suggests that it is just a possibility that they were named after the redcoats.", "self_corrected": false}, {"annotator": 1, "id": "528-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that they \"may have been named\" not that the definitely were named after the redcoats.", "self_corrected": false}, {"annotator": 2, "id": "528-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The color of their body is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "528-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known for sure that they were named after the British redcoats. In the context, they may have been named after that", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 50, "n": 48, "c": 2}} +{"id": "82174e", "context": "NEH-supported exhibitions were distinguished by their elaborate wall panels--educational maps, photomurals, stenciled treatises--which competed with the objects themselves for space and attention.", "statement": "The exhibitions seem well-funded due to the elaborate detail of the gallery.", "entailment": [{"annotator": 0, "id": "1368-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement makes sense, as it is true that the exhibitions does have elaborate wall panels that can cost a lot of money.", "self_corrected": false}, {"annotator": 1, "id": "1368-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Elaborate wall panels are costly, so most likely the exhibitions are well-funded.", "self_corrected": false}, {"annotator": 3, "id": "1368-entailment-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because the wall panels etc. 
are competing with the objects themselves, which cost money and thoughts", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1368-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The financial support of the gallery is not mentioned in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 46, "e": 52, "c": 2}} +{"id": "123703e", "context": "Specifically, by defining mission improvement objectives, senior executives determine whether their organization needs a CIO who is a networking/marketing specialist, business change agent, operations specialist, policy/oversight manager, or any combination thereof.", "statement": "A CIO must be an operations specialist.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1107-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, a CIO could be any combination of the personnel mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "1107-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A CIO can also have only a single of the other named specializations. 
This is indicated by \"or any combination thereof\".", "self_corrected": false}, {"annotator": 2, "id": "1107-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, a CIO can be a networking/marketing specialist, too.", "self_corrected": false}, {"annotator": 3, "id": "1107-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "it can also be networking/marketing specialist or business change agent and so on", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 42, "e": 30, "n": 28}} +{"id": "127410n", "context": "In this case, shareholders can pay twice for the sins of others.", "statement": "shareholders can pay once for the sins of others.", "entailment": [{"annotator": 0, "id": "743-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Shareholders can pay twice, that includes once.", "self_corrected": false}, {"annotator": 2, "id": "743-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because paying once is included by paying twice.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "743-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We only know shareholders can pay twice for the sins, but whether can pay once or three times or more is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "743-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states \"twice\" not \"once\".", "self_corrected": true}, {"annotator": 3, "id": "743-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "they can pay twice", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 12, "c": 77, "n": 11}} +{"id": "55572n", "context": "But they also don't seem to mind when the tranquillity of a 
Zen temple rock garden is shattered by recorded announcements blaring from loudspeakers parroting the information already contained in the leaflets provided at the ticket office; when heavy-metal pop music loudly emanates from the radio of the middle-aged owner of a corner grocery store; and when parks, gardens, and hallowed temples are ringed by garish souvenir shops whose shelves display both the tastefully understated and the hideously kitsch.", "statement": "A Zen temple rock garden is a a place for lots of people to gather and celebrate.", "entailment": [], "neutral": [{"annotator": 0, "id": "481-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what people do in the Zen temple rock garden.", "self_corrected": false}, {"annotator": 2, "id": "481-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context, nothing is about gathering and celebrating.", "self_corrected": false}, {"annotator": 3, "id": "481-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known that many people come to the garden to celebrate", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "481-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the tranquility of a Zen garden can be distirbued, this implies that the traniquility is the usual state. 
This probably precludes large celebrations.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 26, "e": 24, "n": 50}} +{"id": "45774c", "context": "According to a 1995 Financial Executives Research Foundation report,5 transaction processing and other routine accounting activities, such as accounts payable, payroll, and external reporting, consume about 69 percent of costs within finance.", "statement": "The financial world would be ok it there wasn't any 5 percent processing.", "entailment": [], "neutral": [{"annotator": 0, "id": "649-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention how the financial would be ok.", "self_corrected": false}, {"annotator": 1, "id": "649-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what 5 percent processing is or what it's impact on the financial world would be.", "self_corrected": false}, {"annotator": 2, "id": "649-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Eliminating 5 percent processing may make the costs with finance lower, but we don't know whether there are some bad influence about that, like workers become less active.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "649-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "5% transaction processing and other routine accounting activities count up about 69% of costs within finance. 
So the 5% plays a big roll in the financial world", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 63, "c": 31, "e": 6}} +{"id": "130869n", "context": "Castlerigg near Keswick is the best example.", "statement": "A good example would be Castlerigg near Keswick, in Scotland.", "entailment": [{"annotator": 0, "id": "737-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, the example is the best, which certainly suggests that it is a good one.", "self_corrected": false}, {"annotator": 1, "id": "737-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If it is the best, then certainly it is also good.", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "737-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The location of Keswick is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "737-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Keswick is in England", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 77, "n": 21, "c": 2}} +{"id": "93955n", "context": "The large scale production of entertainment films is a phenomenon well worth seeing several times.", "statement": "The production of entertainment films is elaborate and large scaled.", "entailment": [{"annotator": 3, "id": "347-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The production is worth seeing several times, so it is elaborate. 
And the production is large scaled", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "347-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if the production of entertainment films is elabortate.", "self_corrected": false}, {"annotator": 1, "id": "347-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether it is elaborate.", "self_corrected": true}, {"annotator": 2, "id": "347-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is elaborate and large scaled production of entertainment films, but there could be small-scaled production too.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 73, "n": 26, "c": 1}} +{"id": "72870c", "context": "Because marginal costs are very low, a newspaper price for preprints might be as low as 5 or 6 cents per piece.", "statement": "Newspaper preprints can cost as much as $5.", "entailment": [{"annotator": 3, "id": "229-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "5 dollars for a pieace of newspaper", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "229-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions how low the price may be, not how high it may be.", "self_corrected": false}, {"annotator": 2, "id": "229-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The maximum cost of newspaper preprints is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "229-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says 5 or 6 cents, not $5.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 33, "c": 64, "e": 3}} +{"id": "77025c", "context": "You are sure 
that you did not in any way disclose your identity?\" Tommy shook his head.", "statement": "I wish you hadn't revealed your identity, that was a mistake.", "entailment": [], "neutral": [{"annotator": 1, "id": "1323-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Tommy did not reveal his identity.", "self_corrected": false}, {"annotator": 2, "id": "1323-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "My preference of the answer of the question, whether Tommy disclosed his identity is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1323-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "can not infer about Tommy's wish about whether the other person should disclose his identity or not", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 44, "e": 11, "c": 45}} +{"id": "69975n", "context": "I'm not interested in tactics, Al.", "statement": "Al is very interested in tactics.", "entailment": [], "neutral": [{"annotator": 0, "id": "593-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only suggests that the speaker's attutude toward tactics, not AI's.", "self_corrected": false}, {"annotator": 1, "id": "593-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether Al is interested in tactics.", "self_corrected": false}, {"annotator": 2, "id": "593-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker is not interested in tactics, but Al's interest is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "593-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "593-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "if the sentence in the context is said by AI, then AI is not interested in tactics", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": 
{"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 59, "c": 33, "e": 8}} +{"id": "125238c", "context": "If the collecting entity transfers the nonexchange revenue to the General Fund or another entity, the amount is accounted for as a custodial activity by the collecting entity.", "statement": "Nonexchange revenue to the General Mills.", "entailment": [], "neutral": [{"annotator": 1, "id": "900-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "General Mills are not mentioned at all.", "self_corrected": false}, {"annotator": 2, "id": "900-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The General Mills is not mentioned in the context.", "self_corrected": false}], "contradiction": [], "idk": [0, 3], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 12, "c": 39, "n": 49}} +{"id": "27022c", "context": "For fiscal year 1996, Congress determined that the Commission should recover $126,400,000 in costs, an amount 8.6 percent higher than required in fiscal year 1995.", "statement": "Congress determined that Commission should recover over $126 in costs.", "entailment": [{"annotator": 0, "id": "303-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "$126,400,000 is indeed more than $126 mentioned in the statement.", "self_corrected": false}, {"annotator": 1, "id": "303-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "$ 126400000 is larger than $126, so technically it's over $126.", "self_corrected": false}, {"annotator": 2, "id": "303-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "$126400000 is more than $126, so it is correct to say over $126.", "self_corrected": false}, {"annotator": 3, "id": "303-entailment-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "commission should recover over 126,400,000 in costs", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 1, "id": "303-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Commission should recover $126,400,000 
not over $126", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 43, "e": 56, "n": 1}} +{"id": "66689c", "context": "OMB issued the guidance in Memorandum M0010, dated April 25, 2000.", "statement": "Memorandum M0010 was issued by INS.", "entailment": [], "neutral": [{"annotator": 1, "id": "1445-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It's not clear who issued the memorandum.", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "1445-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Memorandum M0010 was issued by OMB, not by INS.", "self_corrected": false}, {"annotator": 2, "id": "1445-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Memorandum M0010 was issued by OMB.", "self_corrected": false}, {"annotator": 3, "id": "1445-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is issued by OMB", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 87, "n": 11, "e": 2}} +{"id": "112547n", "context": "Credibility is a vital factor, and Jim Lehrer does, indeed, have it.", "statement": "Everyone would believe whatever Jim Lehrer said.", "entailment": [{"annotator": 0, "id": "262-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Jim Lehrer has great credibility, which means evertone would believe him.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "262-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "That a person is credible does not mean that everyone will believe them.", "self_corrected": false}, {"annotator": 2, "id": "262-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Jim Lehrer has credibility, but it cannot be promised that no one would disbelieve him whatever he said.", "self_corrected": false}, 
{"annotator": 3, "id": "262-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "overexaggeration. Jim Lehrer has credibility. But iit is exaggerated to say everyone would believe him", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 49, "n": 47, "c": 4}} +{"id": "129081n", "context": "right oh they've really done uh good job of keeping everybody informed of what's going on sometimes i've wondered if it wasn't almost more than we needed to know", "statement": "I think I have shared too much information with everyone, so next year I will share less.", "entailment": [], "neutral": [{"annotator": 0, "id": "1458-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what the speaker will do next year.", "self_corrected": false}, {"annotator": 1, "id": "1458-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"They\" shared information, not \"I\".", "self_corrected": false}, {"annotator": 2, "id": "1458-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "What I want to know is whether \"they\" shared too much information, not I, and what I will next year is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1458-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I\" didn't share the information, \"they\" did", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 67, "c": 19, "e": 14}} +{"id": "45306n", "context": "Each caters to a specific crowd, so hunt around until you find the one right for you.", "statement": "There are marketers who have argued that there needs to be more effort to broaden appeal.", "entailment": [], "neutral": [{"annotator": 0, "id": "1202-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Based on the context, we don't know anything about the marketer's argument.", "self_corrected": false}, {"annotator": 1, "id": "1202-neutral-2", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Marketers or their opinion are not mentioned at all.", "self_corrected": false}, {"annotator": 2, "id": "1202-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In context, there is no comparison between the effort has been made and the need to be made in the future.", "self_corrected": false}, {"annotator": 3, "id": "1202-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 30, "n": 68, "e": 2}} +{"id": "76957c", "context": "Both initial and supplemental proposed rule publications invited comments on the information collection requirements imposed by the rule.", "statement": "There's no point in following politics or voting because your vote won't actually make a difference.", "entailment": [], "neutral": [{"annotator": 0, "id": "1497-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether votes would make a difference or not.", "self_corrected": false}, {"annotator": 1, "id": "1497-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There's no mention of politics or voting in the context.", "self_corrected": false}, {"annotator": 2, "id": "1497-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context has nothing to do with the effect of following politics and voting.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1497-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The comments are invited on the requirements. 
So one's opinoin might make a difference", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 65, "c": 33, "e": 2}} +{"id": "2262n", "context": "She buried his remains to spare her mother the gruesome sight.", "statement": "The remains would have caused grief to her mother.", "entailment": [{"annotator": 3, "id": "1320-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "She buried the remains to spare her mother the grief. So if her mother saw the remain, she would grieve", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1320-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention in the context of how her mother might feel about the remains.", "self_corrected": false}, {"annotator": 1, "id": "1320-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could also be that the remains would have triggered another strongly negative reaction like disgust (and not grief).", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 90, "n": 9, "c": 1}} +{"id": "40710n", "context": "Write, write, and write.", "statement": "You should keep practicing writing.", "entailment": [{"annotator": 2, "id": "864-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the context is a imperative sentence and repeated three times \"write\", which could be seen as a order to keep writing.", "self_corrected": false}, {"annotator": 3, "id": "864-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the repetition of verbs implies the repetition doing that action. 
So it means keeps writing", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "864-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether they should write for the sake of practicing.", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 76, "n": 24}} +{"id": "21957n", "context": "But those that are manufactured for sale in in Europe and so forth are quite the other way around", "statement": "Products are made with differently designed machines in Europe.", "entailment": [], "neutral": [{"annotator": 0, "id": "477-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention in the context of what machines are used to produce products made in Europe.", "self_corrected": false}, {"annotator": 1, "id": "477-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear how the products made for Europe differ from the others.", "self_corrected": false}, {"annotator": 2, "id": "477-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Products could be made with differently designed machines in Europe, or with the same designed machines.", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 24, "n": 70, "c": 6}} +{"id": "14126e", "context": "and so i have really enjoyed that but but there are i do have friends that watch programs like they want to see a particular program and they are either home watching it or definitely recording it they have some programs that they won't miss", "statement": "What programs do your friends like to watch?", "entailment": [], "neutral": [{"annotator": 3, "id": "57-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "statement is a question", "self_corrected": false}], "contradiction": [], "idk": [0, 1, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 68, "c": 20, "e": 12}} +{"id": "79013n", 
"context": "But it just might be because he's afraid he'll lose his No.", "statement": "He's definitely afraid of losing he's No.", "entailment": [{"annotator": 0, "id": "434-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement mention that he is afraid he'll lose his No.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "434-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context says \"might\" and not \"definitely\".", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "434-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, it \"just might be\", but not definitely.", "self_corrected": false}, {"annotator": 3, "id": "434-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is a possible that he is afraid of losing he's No. but not definitely", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment", "neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 59, "c": 17, "e": 24}} +{"id": "38156n", "context": "BUDGETARY RESOURCES - The forms of authority given to an agency allowing it to incur obligations.", "statement": "Administrations generally feel that some agencies should have more budgetary resources than others.", "entailment": [], "neutral": [{"annotator": 0, "id": "298-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The context only talks about what budget resources are.", "self_corrected": false}, {"annotator": 1, "id": "298-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention of giving different resources to different agencies.", "self_corrected": false}, {"annotator": 2, "id": "298-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no comparison of budgetary resources between agencies.", "self_corrected": false}, {"annotator": 3, "id": "298-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context is a defination. 
Statement is the opinion of the administrations.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 79, "e": 14, "c": 7}} +{"id": "25304n", "context": "well we bought this with credit too well we found it with a clearance uh down in Memphis i guess and uh", "statement": "We bought non-sale items in Memphis on credit.", "entailment": [], "neutral": [{"annotator": 3, "id": "215-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not known if the items are non-sales or not", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "215-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker found the item with a clearance, which suggests that it is on sale.", "self_corrected": false}, {"annotator": 1, "id": "215-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was a clearance, so the items were on sale.", "self_corrected": false}, {"annotator": 2, "id": "215-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, we bought it with a clearance down.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 57, "n": 20, "e": 23}} +{"id": "111338e", "context": "He threw one of them and shot the other.", "statement": "He shot his gun.", "entailment": [{"annotator": 3, "id": "872-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He shot on of them. 
So he must have shot his gun", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "872-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He shot, but it is not clear what did he shot, it could have been a gun or an arrow.", "self_corrected": false}, {"annotator": 1, "id": "872-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether he shot with a gun or with some other weapon.", "self_corrected": false}, {"annotator": 2, "id": "872-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He shot, but the objective could be his gun or something else like arrow.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 37, "e": 63}} +{"id": "75259c", "context": "Buffet and a\u00a0 la carte available.", "statement": "It has table service.", "entailment": [{"annotator": 0, "id": "682-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A la carte is available suggests that it has table service.", "self_corrected": false}, {"annotator": 1, "id": "682-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If it has a la carte, then it probably also has table service.", "self_corrected": false}, {"annotator": 2, "id": "682-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A la carte includes table service, and a la carte is available.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "682-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It is a buffet, so there is no people serving the table", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 47, "e": 36, "c": 17}} +{"id": "53211n", "context": "No, I exclaimed, astonished.", "statement": "I said no to him several time, utterly surprised by the change of 
events.", "entailment": [{"annotator": 0, "id": "1502-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement suggest that the speaker said no and was surprised.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1502-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether they said \"no\" several times or only once.", "self_corrected": false}, {"annotator": 2, "id": "1502-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason of my astonishment is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1502-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "it is not known how many times I said no", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 59, "c": 13, "e": 28}} +{"id": "123267e", "context": "He's a bad lot.", "statement": "He's a dishonest person", "entailment": [], "neutral": [{"annotator": 0, "id": "752-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He may or may not be bad because he is dishonest.", "self_corrected": false}, {"annotator": 1, "id": "752-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He might be bad in other ways.", "self_corrected": false}, {"annotator": 2, "id": "752-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Bad people can be both honest and dihonest.", "self_corrected": false}, {"annotator": 3, "id": "752-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He could be honest but bad in other quality", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 57, "e": 42, "c": 1}} 
+{"id": "80808n", "context": "A button on the Chatterbox page will make this easy, so please do join in.", "statement": "They wanted to make the site very user friendly.", "entailment": [{"annotator": 0, "id": "643-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context did show that they want to make it easy, which is a factor in user-friendliness.", "self_corrected": false}, {"annotator": 2, "id": "643-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Yes, they want to add a botton which will make the operation easy.", "self_corrected": false}, {"annotator": 3, "id": "643-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "only clicking a button would join the user in. So the user does not need to click a lot buttons. Therefore, it is user-friendly", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "643-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It's not clear from one text of the webpage that they really tried to make it user friendly.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 68, "n": 32}} +{"id": "72740c", "context": "So it wasn't Missenhardt's singing--marvelous though that was--that made Osmin's rantings so thrilling.", "statement": "Osmin was always calm and collected.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1092-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions Osmin's rantings, which implies that he was not always calm.", "self_corrected": false}, {"annotator": 1, "id": "1092-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Osmin ranted, so they probably were not always calm and collected.", "self_corrected": false}, {"annotator": 2, "id": "1092-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, because Osmin rants and quite thrilling.", "self_corrected": false}, {"annotator": 3, "id": "1092-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Osmin is 
ranting. So he is not calm", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 44, "c": 56}} +{"id": "71251n", "context": "Deborah Pryce said Ohio Legal Services in Columbus will receive a $200,000 federal grant toward an online legal self-help center.", "statement": "A $200,000 federal grant will be received by Ohio Legal Services, said Deborah Pryce, who could finally say it to the public.", "entailment": [{"annotator": 1, "id": "600-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context, with the addition of \"who could finally say it to the public\" which is only a minor aspect.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "600-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether Deborah Pryce said it to public or not.", "self_corrected": false}, {"annotator": 2, "id": "600-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Finally\" means at the beginning, Deborah Pryce is not allowed to announce this information, but the true situation is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "600-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether he could say it to the public or not", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 63, "n": 37}} +{"id": "117680c", "context": "Since the rules were issued as interim rules and not as general notices of proposed rulemaking, they are not subject to the Unfunded Mandates Reform Act of 1995.", "statement": "The rules were not issued as interim rules but rather general notices of proposed rulemaking.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "859-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly states that the rules were issued as interim rules.", "self_corrected": false}, {"annotator": 1, "id": "859-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, 
"makes_sense": true}], "label_correction": false, "reason": "It's clearly stated that the rules were issued as interim rules.", "self_corrected": false}, {"annotator": 2, "id": "859-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, the rules were issued as interim rules.", "self_corrected": false}, {"annotator": 3, "id": "859-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The rules were issued as interim rules", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 76, "e": 18, "n": 6}} +{"id": "80930n", "context": "so you um-hum so you think it comes down to education or or something like that", "statement": "IT all boils down to how much education you have.", "entailment": [{"annotator": 3, "id": "986-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Education is the the reason that explains something", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "986-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about \"your\" thought, this thought may or may not be true.", "self_corrected": false}, {"annotator": 1, "id": "986-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context is a question, not a statement.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 51, "e": 45, "c": 4}} +{"id": "98844n", "context": "The m??tro (subway) is the fastest way to move around the city, but the buses, both in the capital and the other big towns, are best for taking in the sights.", "statement": "Taking the subway is a good way to experience big city life.", "entailment": [{"annotator": 0, "id": "1394-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Subway is the fastest way to move around the city, so it is somehow a good way to experience big city life.", "self_corrected": false}, {"annotator": 1, "id": "1394-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, 
"makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"experience big city life\" means having an authentic experience of people living in the city, then the metro is a good way to do that.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1394-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Taking bus is a good way to experience big city life, taking subway could be good or not good.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1394-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"experience big city life\" means sight seeing, then the metro is not a good way to do this.", "self_corrected": false}, {"annotator": 3, "id": "1394-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Taking buses would allow one to take in the sights in the city", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 42, "n": 38, "e": 20}} +{"id": "127073c", "context": "maybe adult literacy maybe you know composition writing maybe you know uh volunteering you know on a tutor line or though the even through the elementary schools for help with homework or the other part of me says is God i've had enough kids do i really", "statement": "maybe I could volunteer to help coach sports since I've helped all my children be successful in sports", "entailment": [], "neutral": [{"annotator": 0, "id": "1386-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context makes no mention about coaching sports mentioned in the statement, so I don't know if the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "1386-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker is not talking about sports but about writing.", "self_corrected": false}, {"annotator": 2, "id": "1386-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Helping couch sports is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1386-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], 
"label_correction": false, "reason": "I might volunteer to help with composition writing.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 50, "n": 47, "e": 3}} +{"id": "61216c", "context": "By seeding packs with a few high-value cards, the manufacturer is encouraging kids to buy Pokemon cards like lottery tickets.", "statement": "Each Pokemon card pack is filled with every rare card a kid could want.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "787-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are only a few rare cards in the Pokemon card packs.", "self_corrected": false}, {"annotator": 1, "id": "787-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Few high value cards\" means that one pack does not contain \"every rare card\".", "self_corrected": false}, {"annotator": 2, "id": "787-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, because lottery tickets are seldom to be valuable, and rare card can only appear rare in common card pack.", "self_corrected": false}, {"annotator": 3, "id": "787-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the packs are only filled with a few high-value cards.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 20, "c": 63, "e": 17}} +{"id": "102857n", "context": "Expenses included in calculating net cost for education and training programs that are intended to increase or maintain national economic productive capacity shall be reported as investments in human capital as required supplementary stewardship information accompanying the financial statements of the Federal Government and its component units.", "statement": "Net cost for college programs can be calculated as a way to increase productivity.", "entailment": [{"annotator": 0, "id": "923-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Colledge programs mentioned in the statement are included in education and training programs mentioned in the context, which are intended to increase or maintain productivity.", "self_corrected": true}, {"annotator": 1, "id": 
"923-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"shall be reported as investiments in human capital\" means that they can be included in the calculation as investiments in productivity.", "self_corrected": false}, {"annotator": 2, "id": "923-entailment-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Yes, college programs is kind of education that can increase productivity.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "923-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is those education and training programs that are intended to increase the producitvity. Not the NEt cost", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 60, "n": 35, "c": 5}} +{"id": "133243e", "context": "He watched the river flow.", "statement": "The river roared by.", "entailment": [], "neutral": [{"annotator": 2, "id": "461-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The condition of the river is not described in the context.", "self_corrected": false}, {"annotator": 3, "id": "461-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear how the river flows", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "461-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context suggests that the river move steadily, while roaring means the opposite.", "self_corrected": false}, {"annotator": 1, "id": "461-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The river flowed, it didn't roar.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 12, "n": 66, "e": 22}} +{"id": "144408n", "context": "Today it is possible to buy cheap papyrus printed with gaudy Egyptian scenes in almost every souvenir shop in the country, but some of the most 
authentic are sold at The Pharaonic Village in Cairo where the papyrus is grown, processed, and hand-painted on site.", "statement": "The Pharaonic Village in Cairo is the only place where one can buy authentic papyrus.", "entailment": [], "neutral": [{"annotator": 2, "id": "618-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "People can buy authentic papyrus in the Pharaonic Village in Cairo, but there can be other places also sell it.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "618-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Pharaonic Village in Cairo is the place to buy some of the most authentic papyrus, not the only place to buy authentic papyrus.", "self_corrected": false}, {"annotator": 1, "id": "618-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"some of the most authentic\" implies that there are other places that also sell authentic papyrus.", "self_corrected": false}, {"annotator": 3, "id": "618-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "You can buy it everywhere in the country", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 20, "c": 41, "n": 39}} +{"id": "104412e", "context": "After being diagnosed with cancer, Carrey's Kaufman decides to do a show at Carnegie Hall.", "statement": "Carrey's Kaufman was diagnosed with cancer before deciding to do a show at Carnegie Hall.", "entailment": [{"annotator": 0, "id": "420-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement describe the same thing about Carrey's Kaufman.", "self_corrected": false}, {"annotator": 2, "id": "420-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, Carrey's Kaufman was diagnosed with cancer first, and then decided to do a show.", "self_corrected": false}, {"annotator": 3, "id": "420-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Kaufmann decided to do a show at Carnegie Hall after he is diagnosed with cancer", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 1, "id": 
"420-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "They decided to do the show after the diagnosis, not before.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 88, "c": 12}} +{"id": "129601e", "context": "Took forever.", "statement": "Lasted too long", "entailment": [{"annotator": 0, "id": "1467-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Forvever\" is indeed too long.", "self_corrected": false}, {"annotator": 1, "id": "1467-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "1467-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "took forever\" is an expression to say that it is taking too long", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 90, "n": 10}} +{"id": "62238e", "context": "Clearly, GAO needs assistance to meet its looming human capital challenges.", "statement": "GAO will soon be suffering from a shortage of qualified personnel.", "entailment": [{"annotator": 1, "id": "375-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"human capital challenges\" most likely refers to a lack of qualified personnel.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "375-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Shortage of qualified personnel may or may not included in the GAO's human capital challenges.", "self_corrected": false}, {"annotator": 2, "id": "375-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Human capital challenges can be a shortage of qualifies personnel, but also can be others, like too expensive labor price.", "self_corrected": false}, {"annotator": 3, "id": "375-neutral-3", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if it is a shortage. They might have hired too many personnel", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 61, "n": 38, "c": 1}} +{"id": "51353e", "context": "It is not a surprise, either, that Al Pacino chews the scenery in Devil's Advocate . And the idea that if the devil showed up on Earth he'd be running a New York corporate-law firm is also, to say the least, pre-chewed.", "statement": "The fact that the devil would work in law is extremely cliche.", "entailment": [{"annotator": 0, "id": "1155-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The word \"pre-chewed\" in the context indicates that it is cliche.", "self_corrected": false}, {"annotator": 1, "id": "1155-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"chewing the scenery\" means artificial acting, so \"pre-chewed\" likely means cliche", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1155-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The devil would work in law, which is an idea, not the fact, and the fact of devil's job is not given in the context.", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 16, "e": 79, "c": 5}} +{"id": "108027n", "context": "The door opened and Severn stepped out.", "statement": "They were waiting for someone to open the door for them.", "entailment": [], "neutral": [{"annotator": 0, "id": "280-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what \"they\" were doing.", "self_corrected": false}, {"annotator": 1, "id": "280-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether they waited or whether they opened the door themselves.", "self_corrected": false}, {"annotator": 2, "id": "280-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Servern 
stepped out when the door opened, maybe he waited, maybe not.", "self_corrected": false}, {"annotator": 3, "id": "280-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if someone opened the door for Severn or he opened it himself", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 73, "c": 23, "e": 4}} +{"id": "95883e", "context": "Charles Geveden has introduced legislation that will increase the Access to Justice supplement on court filing fees.", "statement": "Charles Geveden initiated a law that will essentially lower court filing fees.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "370-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Increasing supplement is not a essential way to lower the fees.", "self_corrected": false}, {"annotator": 2, "id": "370-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Charles Geveden intiated a law wikk increase the court filling fees.", "self_corrected": false}, {"annotator": 3, "id": "370-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The law he initiated increased the access to justice supplement on court filling fees, meaning the court would need to pay more.", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 47, "c": 31, "n": 22}} +{"id": "26495e", "context": "Standard screens may not perform as well in these patient subgroups that may represent a considerable part of the ED population.", "statement": "The subgroups may not perform well in standard screens.", "entailment": [{"annotator": 0, "id": "880-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement talk about the possible poor performance of the subgroups in standard screens.", "self_corrected": false}, {"annotator": 3, "id": "880-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"standard screens may not perform well in subgroups \" and \" subgroups may not 
perform well in standard screens\" has the same meaning", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "880-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The screens do not perform well, not the subgroups.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "880-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, it is standard screens may not perform well in the subgroups.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 71, "n": 15, "c": 14}} +{"id": "67836n", "context": "Who are these sons of eggs?", "statement": "I wish they were daughters of eggs.", "entailment": [], "neutral": [{"annotator": 1, "id": "202-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what the speaker wishes.", "self_corrected": false}, {"annotator": 3, "id": "202-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context is asking about the sons of eggs. 
Statement is a wish about the daughters of eggs", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 34, "n": 65, "e": 1}} +{"id": "21340n", "context": "uh somewhat they're not my favorite team i am uh somewhat familiar with them", "statement": "They are the best team in the league, by they are not my favorite.", "entailment": [], "neutral": [{"annotator": 0, "id": "1499-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The best team in the league is not mentioned by the context.", "self_corrected": false}, {"annotator": 1, "id": "1499-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether they are the best team in the league.", "self_corrected": false}, {"annotator": 2, "id": "1499-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe they are the best, maybe they are not.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1499-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "I am only somewhat familiar with the team. So I may not know if they are the best team in the league", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 84, "c": 11, "e": 5}} +{"id": "114971e", "context": "They won't be killing off George Clooney's character at ER like they did to Jimmy Smits at NYPD . Instead, Dr. Doug Ross is being forced out over the next two episodes because the maverick heartthrob gives an unauthorized painkiller to a terminally ill boy (Thursday, 10 p.m.).", "statement": "George Clooney will not be getting fired from his TV show.", "entailment": [{"annotator": 0, "id": "1101-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "George Clooney's character will not be killed, which suggests that George Clooney will keep his job.", "self_corrected": false}, {"annotator": 3, "id": "1101-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "George Clooney's character at ER will be kept. 
So he will not be fired", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1101-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is about George Clooney's character in the context, not the actor himself.", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1101-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"Forced out\" means that he likely still will be fired.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 36, "e": 36, "n": 28}} +{"id": "103354n", "context": "The Varanasi Hindu University has an Art Museum with a superb collection of 16th-century Mughal miniatures, considered superior to the national collection in Delhi.", "statement": "The Varanasi Hindu University has an art museum on its campus which may be superior objectively to the national collection in Delhi.", "entailment": [{"annotator": 3, "id": "961-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because the university's museum has a superb collection of 16th-century Mughai miniatures, superior to the national one. 
Therefore, it could be considered superior in this collection compared to the national collection", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "961-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The location of the Varanasi Hindu University's museum is not mentioned.", "self_corrected": false}, {"annotator": 1, "id": "961-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what \"objectively superior\" means in this context and whether this really is the acse.", "self_corrected": false}, {"annotator": 2, "id": "961-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The location of the art museum of the Varanasi Hindu University is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "961-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Varanasi Hindu University's meseum is considered superior to the national collection in Delhi, which may not be objective.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 22, "e": 78}} +{"id": "11971n", "context": "In a six-year study, scientists fed dogs and other animals irradiated chicken and found no evidence of increased cancer or other toxic effects.", "statement": "Scientists gave animals irradiated chicken and they all lived as long as the rest of them.", "entailment": [{"annotator": 1, "id": "1326-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If there were no toxic effects, they should have lived as long as the others.", "self_corrected": false}, {"annotator": 3, "id": "1326-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "no evidence of increased cancer or other toxic effects are found. 
So they should live no different as other chicken", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1326-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "They might lived longer than the rest of them.", "self_corrected": false}, {"annotator": 2, "id": "1326-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No evidence of increased cancer or other toxic effects can make the test animals live as long as others, but the test animals may have other accidents which shorten their lifespan.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 52, "n": 44, "c": 4}} +{"id": "83722e", "context": "Whether a government postal service can engage in these kinds of negotiations deserves serious study.", "statement": "There is serious study needed to check.", "entailment": [{"annotator": 0, "id": "506-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that it needs serious study.", "self_corrected": false}, {"annotator": 1, "id": "506-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is loosely a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "506-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, there is serious study needed to check whether a government postal service can engage in these kinds of negotiations.", "self_corrected": false}, {"annotator": 3, "id": "506-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context states that the postal service deserves serious study. So there is serious study needed to be looked at", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 19, "e": 76, "c": 5}} +{"id": "38527n", "context": "will never be doused (Brit Hume, Fox News Sunday ; Tony Blankley, Late Edition ; Robert Novak, Capital Gang ; Tucker Carlson, The McLaughlin Group ). 
The middle way is best expressed by Howard Kurtz (NBC's Meet the Press )--he scolds Brill for undisclosed campaign contributions and for overstretching his legal case against Kenneth Starr but applauds him for casting light on the media.", "statement": "They wanted the public to know where the funds came from.", "entailment": [{"annotator": 0, "id": "325-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Praising him for casting light on the media shows that they want the public to know the truth.", "self_corrected": false}, {"annotator": 1, "id": "325-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They scolded for undisclosed campaign contributions, so they want the public to know where the money came from.", "self_corrected": false}, {"annotator": 3, "id": "325-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because they scolds Brill for undisclosed campain contributions. So they would want Brill to let public know where the fund comes from", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "325-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The funds are not mentioned in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 39, "n": 47, "c": 14}} +{"id": "59934n", "context": "Likewise, at their production decision reviews, these programs did not capture manufacturing and product reliability knowledge consistent with best practices.", "statement": "Their production decision reviews located an anomaly in the data.", "entailment": [{"annotator": 2, "id": "853-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because they find these programs did not capturing manufacturing and product reliability knowledge consistent with best practice.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "853-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not mention the anomalies in the data.", "self_corrected": false}, {"annotator": 1, "id": "853-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "An anomaly in the data is not mentioned.", 
"self_corrected": false}, {"annotator": 3, "id": "853-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is the inconsistence in the manufacutring and product reliability knowledge. Not an anomaly", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 54, "e": 30, "c": 16}} +{"id": "10916n", "context": "He'd gone a long way on what he'd found in one elementary book.", "statement": "He learned a lot from that elementary book.", "entailment": [{"annotator": 0, "id": "509-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both sentences suggest that he learned a lot from the book.", "self_corrected": false}, {"annotator": 1, "id": "509-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "509-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He spent a lot of time on what he found in the elementary book, so it must be very useful.", "self_corrected": false}, {"annotator": 3, "id": "509-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"had gone a long way\" means he had made a lot progress with what he found in one elemantary book", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 88, "n": 12}} +{"id": "34043c", "context": "The Gaiety Theatre in South King Street is worth visiting for its ornate d??cor.", "statement": "The Trump Tower is a terrible place to visit for ornate decor.", "entailment": [], "neutral": [{"annotator": 0, "id": "1134-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Trump Tower is not mentioned by the context.", "self_corrected": false}, {"annotator": 2, "id": "1134-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Trump 
Tower is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "1134-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "two different buildings", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1134-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "If it is worth visiting, it is not a terrible place to visit.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 62, "c": 38}} +{"id": "117089n", "context": "appropriate agency representatives, help resolve", "statement": "the right agency workers, help fix my security system", "entailment": [], "neutral": [{"annotator": 1, "id": "1124-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what should be resolved.", "self_corrected": false}, {"annotator": 2, "id": "1124-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the right agency workers may help fix the security system, may help resolve other problems", "self_corrected": false}, {"annotator": 3, "id": "1124-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what the agency workers help resolve", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 67, "e": 32, "c": 1}} +{"id": "121910c", "context": "If ancient writings give only a romanticized view, they do offer a more precise picture of Indo-Aryan society.", "statement": "Ancient writings show an accurate picture of Indo-Anryan society.", "entailment": [], "neutral": [{"annotator": 0, "id": "1317-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "More precise picture doesn't necessarily mean accurate picture.", "self_corrected": false}, {"annotator": 1, "id": "1317-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear that the picture is truly accurate, it is only more precise than some 
other writings.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1317-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, ancient writings give only a romanticized view.", "self_corrected": false}, {"annotator": 3, "id": "1317-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not an accurate picture offered, but only a \"more precise\" picture. It could still be not clear", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 17, "e": 60, "n": 23}} +{"id": "113668n", "context": "If necessary to meeting the restrictions imposed in the preceding sentence, the Administrator shall reduce, pro rata, the basic Phase II allowance allocations for each unit subject to the requirements of section 414.", "statement": "Section 414 helps balance allowance allocations for units.", "entailment": [{"annotator": 1, "id": "1228-entailment-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"subject to the requirements of section 414\" refers to \"reduce\", then Section 414 is involved in helping to balance the allowance.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1228-neutral-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "If \"subject to the requirements of section 414\" refers to \"unit\", then it is not clear whether Section 414 is involved in helping to balance the allowance.", "self_corrected": false}, {"annotator": 2, "id": "1228-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Section 414 require to reduce the allowance pro rata, so it can be balanced, or not balanced.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1228-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The requirements of section 414relate to the reduction of the basic Phase II allowance allocations, not the balance of allowance allocations.", "self_corrected": true}], "idk": [3], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 66, "n": 34}} +{"id": "128176e", "context": "The chart to which Reich refers was actually presented during 
Saxton's opening statement, hours before Reich testified, and did not look as Reich claims it did.", "statement": "Reich refers to a chart that he misunderstood.", "entailment": [{"annotator": 0, "id": "1333-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The fact that Reich was wrong about what he said about the charts he referred to shows that he misunderstood.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "1333-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether he misrepresented the chart because he misunderstood it. Maybe he did that on purpose.", "self_corrected": false}, {"annotator": 2, "id": "1333-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Reich refers to a chart wrongly, but maybe it is because he misunderstood it, maybe because he remembered incorrectly.", "self_corrected": false}, {"annotator": 3, "id": "1333-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It might be misunderstanding or Reich could also just remember it wrong", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 39, "e": 55, "c": 6}} +{"id": "122322e", "context": "well uh normally i like to to go out fishing in a boat and uh rather than like bank fishing and just like you try and catch anything that's swimming because i've had such problems of trying to catch any type of fish that uh i just really enjoy doing the boat type fishing", "statement": "I fish in the boat and try catching any fish because I have trouble catching certain types.", "entailment": [{"annotator": 1, "id": "71-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "71-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I have problems of cathing any type of fish, not just certain types", "self_corrected": false}], "idk": [0, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": 
[], "has_ambiguity": true, "chaosnli_labels": {"e": 79, "n": 15, "c": 6}} +{"id": "53619c", "context": "True devotees talk shop at even more specialized groups, such as one on Northeastern weather (ne.weather), whose recent conversation topics included the great blizzard of 1978 and the freak snowstorm of May 1977.", "statement": "Ne.weather is a general discussion group, not only about weather.", "entailment": [{"annotator": 1, "id": "245-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "ne.weather is a specialized discussion group focussing on weather", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "245-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, ne.weather is a specialized group, not a general discussion group.", "self_corrected": false}, {"annotator": 2, "id": "245-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, ne.weather is a specialized group.", "self_corrected": false}, {"annotator": 3, "id": "245-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is specialized in weather topics", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 29, "c": 64, "e": 7}} +{"id": "27287c", "context": "we were talking . Try to behave", "statement": "We are having an argument, come at me if you dare!", "entailment": [], "neutral": [{"annotator": 1, "id": "654-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "These are different statements.", "self_corrected": false}, {"annotator": 2, "id": "654-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Talking can be peaceful or a arguement.", "self_corrected": false}, {"annotator": 3, "id": "654-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context means rather that the other person should behave and be quiet. 
It is not aggressive like in the statement", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 58, "n": 41, "e": 1}} +{"id": "125700e", "context": "Don't forget to take a change of clothing and a towel.", "statement": "Remember to replace your towel and clothing.", "entailment": [{"annotator": 0, "id": "579-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Taking a change of clothing and a towel means taking an extra set of them, which implies the need to replace them with the extra set of clothes and towel.", "self_corrected": false}, {"annotator": 1, "id": "579-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "579-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, a change of clothing and a towel is for replacement.", "self_corrected": false}, {"annotator": 3, "id": "579-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 57, "n": 36, "c": 7}} +{"id": "96946n", "context": "Once or twice, but they seem more show than battle, said Adrin.", "statement": "Adrin said they liked to perform more than they did fight.", "entailment": [{"annotator": 0, "id": "584-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The statement is true because they seem more show than battle.", "self_corrected": false}, {"annotator": 1, "id": "584-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "584-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Their preference is not shown in the context.", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": 
{"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 21, "e": 74, "c": 5}} +{"id": "17660e", "context": "and not only that it it opens you to phone solicitations", "statement": "It also opens the door to move marketing calls.", "entailment": [{"annotator": 0, "id": "795-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Phone solicitations and move marketing calls have similar meanings.", "self_corrected": false}, {"annotator": 2, "id": "795-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, phone solicitation is marketing call.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "795-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It opens you to phoen solitications. It's not clear whether it also opens you to move marketing calls (whatever that is)", "self_corrected": false}, {"annotator": 3, "id": "795-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "irrelevant", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 62, "n": 36, "c": 2}} +{"id": "107252c", "context": "On the northwestern Alpine frontier, a new state had appeared on the scene, destined to lead the movement to a united Italy.", "statement": "The alpine frontier was separated from Italy by glaciers.", "entailment": [], "neutral": [{"annotator": 0, "id": "1382-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that Italy is on the northweatern Alpine frontier, Glaciers are not mentioned.", "self_corrected": false}, {"annotator": 1, "id": "1382-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are no glaciers mentioned.", "self_corrected": false}, {"annotator": 2, "id": "1382-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Glaciers are not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "1382-neutral-4", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what separating them", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 80, "c": 17, "e": 3}} +{"id": "113967e", "context": "'I don't know what happened, exactly.' I said.", "statement": "You aren't making sense.", "entailment": [], "neutral": [{"annotator": 0, "id": "905-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what \"you\" said.", "self_corrected": false}, {"annotator": 1, "id": "905-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "These are different statements.", "self_corrected": false}, {"annotator": 2, "id": "905-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I don't know what happened could because your words are nonsense, but also could because of others, like that things are too complicated.", "self_corrected": false}, {"annotator": 3, "id": "905-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 73, "c": 19, "e": 8}} +{"id": "129464c", "context": "It can entail prospective and retrospective designs and it permits synthesis of many individual case studies undertaken at different times and in different sites.", "statement": "It can entail prospective and retrospective designs for system redesigns.", "entailment": [], "neutral": [{"annotator": 0, "id": "567-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention system redesigns.", "self_corrected": false}, {"annotator": 1, "id": "567-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the designs are for system redesigns.", "self_corrected": false}, {"annotator": 2, "id": "567-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It permits synthesis of many individual case studies, maybe also contributes to system redesigns, but maybe not.", "self_corrected": false}, {"annotator": 3, "id": "567-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what the design is for", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 48, "n": 48, "c": 4}} +{"id": "82528c", "context": "you know maybe it just wasn't possible at all in the first place you know like the no new taxes thing you know that's uh with the economy going the way it is and everything that was nearly ridiculous thing to", "statement": "it's possible to have no new taxes with the way the economy is right now.", "entailment": [{"annotator": 1, "id": "1180-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}], "label_correction": true, "reason": "The speaker says that the \"new taxes thing\" was not possible with \"the economy going the way it is\"", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "1180-neutral-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context is talking about the economy in the past, not right now.", "self_corrected": false}], "contradiction": [], "idk": [0, 3], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 42, "n": 23, "e": 35}} +{"id": "134356n", "context": "You will remember my saying that it was wise to beware of people who were not telling you the truth.\"", "statement": "There might be dishonest people around here.", "entailment": [{"annotator": 0, "id": "854-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions to beware of dishonest people, which implies that there might be dishonest people here.", "self_corrected": false}, {"annotator": 1, "id": "854-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker warns someone of dishonest people, so likely there are some around.", "self_corrected": false}, {"annotator": 3, "id": "854-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "one should be aware of people who aren't telling the truth. 
So there are dishonest people around", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "854-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Dishonest people maybe are around here, maybe are somewhere else.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 74, "n": 24, "c": 2}} +{"id": "102075c", "context": "um-hum with the ice yeah", "statement": "With the sunshine and heat wave yes.", "entailment": [], "neutral": [{"annotator": 1, "id": "1555-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "These are different statements.", "self_corrected": false}, {"annotator": 3, "id": "1555-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 70, "n": 30}} +{"id": "96956c", "context": "You wonder whether he could win a general election coming out of the right lane of the Democratic Party.", "statement": "He will not run in a general election while he is a conservative Democrat.", "entailment": [], "neutral": [{"annotator": 1, "id": "1216-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Someone only asks themselves if he could win the general election. 
That does not say anything about its truth.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1216-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He is runing in a general election since \"you\" already wonder whether he could win it.", "self_corrected": false}, {"annotator": 2, "id": "1216-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, he run in a general election while he is under the right lane of the Democratic Party.", "self_corrected": false}, {"annotator": 3, "id": "1216-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is wondered if he could win a general election. So it is possible that he will run in a general election", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 23, "n": 70, "e": 7}} +{"id": "24163e", "context": "We have done that spectacularly.", "statement": "Spectacular results was the only way to describe the impact of our past work.", "entailment": [{"annotator": 3, "id": "521-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Our work has been spectacular. 
So the result of the work must be spectacular", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "521-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention otherr describtion than spectacular, so we don't if it is the only way.", "self_corrected": false}, {"annotator": 1, "id": "521-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear what they have done spectacularly.", "self_corrected": false}, {"annotator": 2, "id": "521-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Spectacular results was the one way to describe the impact of our past work, but there could be other ways, like historical.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 31, "e": 67, "c": 2}} +{"id": "31775c", "context": "well what station plays uh that type of music", "statement": "What TV station has documentaries about space travel?", "entailment": [], "neutral": [{"annotator": 1, "id": "990-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "These are different questions.", "self_corrected": false}, {"annotator": 3, "id": "990-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 74, "n": 26}} +{"id": "8487n", "context": "We always knew it was an outside chance.", "statement": "We were never assured of it happening in time and we knew this full well.", "entailment": [{"annotator": 1, "id": "419-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "419-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, outside chance means a small probability, so we never assured of it happening.", 
"self_corrected": false}, {"annotator": 3, "id": "419-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "We have always need it is a very small possibility. So we always knew it will most likely not happen", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "419-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context what was an outside chance.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 64, "c": 8, "n": 28}} +{"id": "46576e", "context": "Perhaps a further password would be required, or, at any rate, some proof of identity.", "statement": "Identity should be a minimum requirement.", "entailment": [{"annotator": 0, "id": "1538-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Identity is at any rate required, so it is a minimum requirement.", "self_corrected": false}, {"annotator": 1, "id": "1538-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1538-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, at any rate the proof of identity is needed.", "self_corrected": false}, {"annotator": 3, "id": "1538-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Whatever is needed, some proof of identity is needed. So it makes the identity a minimum requirement", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 68, "n": 25, "c": 7}} +{"id": "83247e", "context": "It's come back? 
cried Julius excitedly.", "statement": "They were excited to hear it will come back.", "entailment": [{"annotator": 2, "id": "910-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, Julius cried excitedly when heard it back.", "self_corrected": false}, {"annotator": 3, "id": "910-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Julius cried excitedly. So he is excited about it coming back", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "910-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions Julius, it is not clear who \"they\" are.", "self_corrected": false}, {"annotator": 1, "id": "910-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether \"they\" or only Julius was excited.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 37, "e": 59, "c": 4}} +{"id": "80109n", "context": "if it had rained any more in the last two weeks instead of planting Saint Augustine grass in the front yard i think i would have plowed everything under and had a rice field", "statement": "It has rained enough to flood everything here and make rice pattys.", "entailment": [], "neutral": [{"annotator": 1, "id": "360-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what \"make rice patty\"s means, but it shouldn't be entailed by having a rice field.", "self_corrected": true}, {"annotator": 3, "id": "360-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "it hasn't rained enough. 
If it had rained enough, I would have had a rice field", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "360-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context indicates that it hasn't reained enough.", "self_corrected": false}, {"annotator": 2, "id": "360-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, it has not rained enough in the last two weeks.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 24, "e": 51, "n": 25}} +{"id": "52854n", "context": "This was used for ceremonial purposes, allowing statues of the gods to be carried to the river for journeys to the west bank, or to the Luxor sanctuary.", "statement": "Statues were moved to Luxor for funerals and other ceremonies.", "entailment": [{"annotator": 3, "id": "901-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "ceremonial purposes were mentioned. Funeral is also a kind of ceremonial purpose. So the statues could also be used for that", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "901-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Statues were moved to Luxor or to the west bank.", "self_corrected": false}, {"annotator": 1, "id": "901-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear whether the statues were moved for funerals or only for other ceremonies.", "self_corrected": false}, {"annotator": 2, "id": "901-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe statues were moved to Luxor, or to the west bank.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 53, "n": 47}} +{"id": "133274n", "context": "(Imagine the difference between smoking a cigarette and injecting pure nicotine directly into a vein.)", "statement": "Smoking a cigarette is a lot like injecting pure nicotine.", "entailment": [], "neutral": [{"annotator": 0, "id": "1097-neutral-1", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't suggest the difference between smoking a cigarette and injecting pure nicotine.", "self_corrected": false}, {"annotator": 1, "id": "1097-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the difference is large or small according to the context.", "self_corrected": false}, {"annotator": 2, "id": "1097-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe there are lots of similarities between smoking a cigarette and injecting pure nocotine, but maybe they are very different.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1097-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, the context emphasizes the difference between smoking a cigarette and injecting pure nocotine.", "self_corrected": false}, {"annotator": 3, "id": "1097-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There should be a difference", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 17, "c": 47, "n": 36}} +{"id": "120070n", "context": "well do you know you have a ten limit a ten minute time limit well that's okay and then they come on and tell you and they tell you got five seconds to say good-bye", "statement": "You get a ten minute time limit, but sometimes you'll be told to end early.", "entailment": [{"annotator": 1, "id": "1540-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "1540-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if then come early or on time to tell one to end", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1540-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggests clearly that \"you\" have a ten minute time limit. 
The requirement for an early end is not valid unless explicitly stated in the context.", "self_corrected": false}, {"annotator": 2, "id": "1540-contradiction-2", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, you will not be told to end early, but will be asked to end it quickly.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 62, "n": 33, "c": 5}} +{"id": "129185n", "context": "Lincoln glared.", "statement": "The man was angry.", "entailment": [{"annotator": 1, "id": "1200-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If he glared, then he probably was angry.", "self_corrected": false}, {"annotator": 3, "id": "1200-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Glaring is a state out of anger", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1200-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Lincoln glared might out of anger or surprise.", "self_corrected": false}, {"annotator": 2, "id": "1200-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The man glared maybe because of anger, maybe because of others like terror.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 56, "n": 42, "c": 2}} +{"id": "49172n", "context": "These alone could have valuable uses.", "statement": "They may be valuable.", "entailment": [{"annotator": 0, "id": "233-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, they could be valuable.", "self_corrected": false}, {"annotator": 1, "id": "233-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "233-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": 
true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, \"could have valuable uses\" implies the probability of being valuable.", "self_corrected": false}, {"annotator": 3, "id": "233-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Valuable uses infer being valuable", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 86, "n": 13, "c": 1}} +{"id": "140782c", "context": "Generally, FGD systems tend to be constructed closer to the ground compared to SCR technology retrofits.", "statement": "FGD systems tend to replicate SCR systems.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1086-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly suggest the difference between FG and SCR systems, which shows that FGD is not a replication of SCR systems.", "self_corrected": false}, {"annotator": 1, "id": "1086-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They probably do not replicate SCR systems, because they are closer to the ground.", "self_corrected": false}, {"annotator": 2, "id": "1086-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, FGD systems are closer to the ground.", "self_corrected": false}, {"annotator": 3, "id": "1086-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "FGD systems tend to be closer to the ground; whereas SCR system not", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 33, "e": 9, "c": 58}} +{"id": "45443c", "context": "This confluence of a bad tax, a $1 billion reserve, a botched opposition campaign, and voters willing to call a bluff resulted in the I-695 victory.", "statement": "The I-695 failed in its campaign to help the people.", "entailment": [], "neutral": [{"annotator": 1, "id": "447-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether the I-695 helped 
people.", "self_corrected": false}, {"annotator": 3, "id": "447-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A series of negative factors contributed to I-695 success, but it can not be concluded that I-695 failed to help the people", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "447-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The campaign is not aimed to help the people, as described in the context. And the I-695 didn't fail.", "self_corrected": false}, {"annotator": 2, "id": "447-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, the I-695 succeeded.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 57, "e": 12, "n": 31}} +{"id": "22938e", "context": "Despite a recent renovation, the Meadows Mall is the least appealing of the three suburban malls.", "statement": "The Meadows Mall is not appealing.", "entailment": [{"annotator": 2, "id": "729-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, the Meadows Mall is the least appealing of the three suburban malls.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "729-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Meadows Mall is the least appealing of the three malls, which doesn't mean it is not appealing at all, it is just not more appealing than other two.", "self_corrected": false}, {"annotator": 1, "id": "729-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is only less appealing than the other two malls. 
It's not clear whether they all are appealing.", "self_corrected": false}, {"annotator": 2, "id": "729-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Meadows Mall is the least appealing of the three suburban malls, but maybe compared to other competitors, it is still appealing.", "self_corrected": false}, {"annotator": 3, "id": "729-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "THe Meadows Mall is the least appealing out of three malls. But it could be appealing, just not as appealling as the other two", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 4.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 66, "n": 31, "c": 3}} +{"id": "67571c", "context": "Everybody has this quote from NBA commissioner David You cannot strike your boss and still hold your job--unless you play in the NBA.", "statement": "NBA commissioner said he hates NBA players.", "entailment": [], "neutral": [{"annotator": 0, "id": "674-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether NBA commissioner said he hates NBA players.", "self_corrected": false}, {"annotator": 1, "id": "674-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not seem to be about whether the commissioner likes NBA players.", "self_corrected": false}, {"annotator": 2, "id": "674-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "NBA commissioner David said NBA players could strike their boss and still hold their job, as \"the boss\", he maybe hates NBA players, maybe not.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "674-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His quote ony shows that the boss does not have total power over his players in NBA. 
It does not convey his personal feelings over the players", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 55, "n": 44, "e": 1}} +{"id": "5087e", "context": "approaches to achieving missions vary considerably between agencies.", "statement": "Approaches to achieving missions might change a lot.", "entailment": [{"annotator": 0, "id": "706-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement show that there are different approaches to achieving missions.", "self_corrected": false}, {"annotator": 1, "id": "706-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If they vary between agencies, they might change a lot, e.g. if you move from one agencie to another.", "self_corrected": false}, {"annotator": 2, "id": "706-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, \"vary considerably\" implies \"change a lot.\"", "self_corrected": false}, {"annotator": 3, "id": "706-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A considerable change could be big", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 75, "n": 23, "c": 2}} +{"id": "23769n", "context": "Kom Ombo is an unusual temple in that it is dedicated to two gods.", "statement": "Rarely visited, Kom Ombo is a strange temple devoted to two gods.", "entailment": [], "neutral": [{"annotator": 0, "id": "1430-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if Kom Ombo is rarely visited.", "self_corrected": false}, {"annotator": 1, "id": "1430-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether Kom Ombo is rarely visited.", "self_corrected": false}, {"annotator": 2, "id": "1430-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't 
know how often the Kom Ombo temple is visited.", "self_corrected": false}, {"annotator": 3, "id": "1430-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about how many people visiting the Kom Ombo", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 64, "e": 36}} +{"id": "13760n", "context": "If they have overestimated how far the CPI is off, Boskin and his commission may institutionalize an underestimated CPI--guaranteeing a yearly, stealth tax increase.", "statement": "If they've overestimated how far the CPI is off, it will have horrific consequences.", "entailment": [], "neutral": [{"annotator": 1, "id": "1262-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The tax increase will come if CPI is underestimated and it's not clear whether this is horrific.", "self_corrected": false}, {"annotator": 3, "id": "1262-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not sure if a yearly, stealth tax increase counts as a horrific consequence", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 62, "e": 35, "c": 3}} +{"id": "2133n", "context": "The tomb guardian will unlock the gate to the tunnel and give you a candle to explore the small circular catacomb, but for what little you can see, it is hardly worth the effort.", "statement": "The tomb garden can give you a thorough tour of the catacombs.", "entailment": [], "neutral": [{"annotator": 1, "id": "25-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context speaks about the \"guardian\" not the \"garden\".", "self_corrected": false}, {"annotator": 2, "id": "25-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "A thorough tour is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "25-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The tomb guardian will open the gate for you and give you a candle, which implies that he will not give you a tour of 
the catacombs.", "self_corrected": false}, {"annotator": 3, "id": "25-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, he only gives a candle to explore the catacomb. And you can only see a little, which is not worth the effort", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 76, "e": 10, "n": 14}} +{"id": "117093n", "context": "Hong Kong has long been China's handiest window on the West, and the city is unrivaled in its commercial know-how and managerial expertise.", "statement": "Hong Kong is a great place to find commercial know-how if you are hiring someone new.", "entailment": [{"annotator": 0, "id": "1056-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context mentions that Hong Kong is a great place in its commercial know-how.", "self_corrected": false}, {"annotator": 1, "id": "1056-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "1056-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the city is experience in commercial know-how. 
So it is a good idea to find people in that area in Hong Kong", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1056-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Hiring someone new is not mentioned in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 54, "n": 44, "c": 2}} +{"id": "112402c", "context": "Although the accounting and reporting model needs to be updated, in my view, the current attest and assurance model is also out of date.", "statement": "The accounting model needs to be updated in addition to the acquisition model.", "entailment": [], "neutral": [{"annotator": 0, "id": "1236-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the acquisition model.", "self_corrected": false}, {"annotator": 1, "id": "1236-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the acquisition model has to be updated.", "self_corrected": false}, {"annotator": 2, "id": "1236-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The acquisition model is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1236-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is the attest and assurance model needs to be updated", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 43, "n": 44, "c": 13}} +{"id": "32754n", "context": "After shuttering the DOE, Clinton could depict himself as a crusader against waste and bureaucracy who succeeded where even Reagan failed.", "statement": "Reagan had tried to shutter the DOE but was unable to.", "entailment": [{"annotator": 0, "id": "136-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Clinton successfully shuttered the DOE while Reagan failed, as mentioned in the context. 
So the statement is true.", "self_corrected": false}, {"annotator": 1, "id": "136-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Reagan failed at shuttering the DOE, so he tried to do it.", "self_corrected": false}, {"annotator": 2, "id": "136-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, Reagan failed to shutter the DOE.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "136-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It was Clinton who successfully shuttering DOE", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"c": 11, "n": 25, "e": 64}} +{"id": "120896n", "context": "Tell me, how did those scribbled words on the envelope help you to discover that a will was made yesterday afternoon?\" Poirot smiled.", "statement": "How did you work out from that text that there was a new will?", "entailment": [{"annotator": 1, "id": "1441-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1441-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, you discoverd there was a new will form those scribbled words on the envelope.", "self_corrected": false}, {"annotator": 3, "id": "1441-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context is a question about how \"you\" figure out there is a will made yesterday from only a text of scribbled words. 
So we can know the will is still new from yesterday", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1441-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context only mentions a will, it is not clear whether the will is a new one or the only one.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 88, "n": 12}} +{"id": "25437n", "context": "Well, we will come in and interview the brave Dorcas.\" Dorcas was standing in the boudoir, her hands folded in front of her, and her grey hair rose in stiff waves under her white cap.", "statement": "Dorcas is well known for her bravery.", "entailment": [{"annotator": 2, "id": "48-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, \"brave Dorcas\" reveals her bravery.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "48-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear what is Dorcas well known for.", "self_corrected": false}, {"annotator": 1, "id": "48-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether Dorcas was known for her bravery or whether only the speaker thought she was brave.", "self_corrected": false}, {"annotator": 3, "id": "48-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether Dorcas is well known for her bravery or not", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 60, "n": 36, "c": 4}} +{"id": "56895c", "context": "The entire economy received a massive jump-start with the outbreak of the Korean War, with Japan ironically becoming the chief local supplier for an army it had battled so furiously just a few years earlier.", "statement": "Korea and Japan were not at war.", "entailment": [], "neutral": [{"annotator": 2, "id": "401-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Japan was the chief local supplier for an army in the Korean war, but we don't 
know whether Japanese army also involved in the war.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "401-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Japan had battled with Korea furiously.", "self_corrected": false}, {"annotator": 3, "id": "401-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They were a war between the two countries. And Japan even was the chief local supplier for Korean after the war", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 28, "n": 20, "e": 52}} +{"id": "43094c", "context": "Time 's cover package considers what makes a good school.", "statement": "Time's cover package is about how most college students have to deal with insane student loans.", "entailment": [], "neutral": [{"annotator": 2, "id": "1131-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Insane student loans are not discussed in the context.", "self_corrected": false}, {"annotator": 3, "id": "1131-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1131-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The topic of the Time's cover package mentioned in the statement is completely different from the one mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "1131-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The cover package is about good schools, not about student loans.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 50, "c": 49, "e": 1}} +{"id": "21834e", "context": "But the world is not run for the edification of tourists.", "statement": "The world does not try and morally subject to tourists.", "entailment": [{"annotator": 0, "id": "731-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, 
{"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Runing for the edification of tourists includes morally subjecting to tourists.", "self_corrected": false}, {"annotator": 2, "id": "731-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the world exists not for the tourists.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "731-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "irrelevant", "self_corrected": true}], "contradiction": [], "idk": [1], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 58, "n": 38, "c": 4}} +{"id": "139836n", "context": "The centralization dear to Richelieu and Louis XIV was becoming a reality.", "statement": "Louis XIV cared a lot about centralization of his country and people.", "entailment": [{"annotator": 0, "id": "449-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement mention that Louis XIV valued the centralization.", "self_corrected": false}, {"annotator": 1, "id": "449-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "449-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the centraliza dear to Louis XIV implies he cares a lot about centralization.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "449-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It was not mentioned whether Louis XIV cared about his people or not", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 66, "n": 34}} +{"id": "57454e", "context": "what does um is Robby Robin Williams does he have a funny part in the movie or is", "statement": "Is Robin Williams in the movie?", "entailment": [], "neutral": [{"annotator": 1, "id": "1204-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker seems to know that Robin Williams is in the movie, but not which part he has.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1204-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He is in the movie, the context asked about if he had a funny part in the movie", "self_corrected": false}], "idk": [0, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 45, "e": 40, "c": 15}} +{"id": "34573n", "context": "But the door was locked?\" These exclamations burst from us disjointedly.", "statement": "We chaotically exclaimed as we all jumped up in a frenzy, \"But the door wasn't unlocked?\"", "entailment": [{"annotator": 1, "id": "311-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"Was not unlocked?\" entails \"Was locked?\"", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "311-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn' mention that they jumped up.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "311-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The door was locked", "self_corrected": true}], "idk": [2], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 43, "n": 20, "c": 37}} +{"id": "52278e", "context": "Tuppence rose.", "statement": "Tuppence stood up.", "entailment": [{"annotator": 0, "id": "1129-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that Tuppence stood up.", "self_corrected": false}, {"annotator": 1, "id": "1129-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Rose\" entails \"stood up\".", "self_corrected": false}, {"annotator": 2, "id": "1129-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "A human \"rose\" means \"stood up\".", "self_corrected": false}, {"annotator": 3, "id": "1129-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the word\"rise\" means gets up/ stands up", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 11, "e": 88, "c": 1}} +{"id": "11618c", "context": "Enlarging the village was not desirable and most knew that Severn only desired wealth and a seat on the council of elders.", "statement": "Severn was happy being poor.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "123-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Severn desired wealth, so he was not happy beding poor.", "self_corrected": false}, {"annotator": 1, "id": "123-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Severn desired wealth\" so they were not \"happy being poor\".", "self_corrected": false}, {"annotator": 2, "id": "123-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Severn only desired wealth so he should be unhappy being poor.", "self_corrected": false}, {"annotator": 3, "id": "123-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Severn wants wealth", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 89, "n": 11}} +{"id": "118460n", "context": "and the other thing is the cost it's almost prohibitive to bring it to a dealer", "statement": "The cost of fixing it makes it hard to bring it to a dealer.", "entailment": [{"annotator": 0, "id": "585-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and statement talk about the large cost of fixing it.", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "585-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear 
whether \"fixing it\" increases the cost.", "self_corrected": false}, {"annotator": 2, "id": "585-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The cost makes it hard to bring it to a dealer, but it could be the cost of fixing, or the cost of something else, like transport.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "585-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It is the cost of bringing it to the dealer that is very expensive", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 76, "n": 22, "c": 2}} +{"id": "54383c", "context": "He knew how the Simulacra was supposed to develop.", "statement": "He didn't know about Sims.", "entailment": [], "neutral": [{"annotator": 1, "id": "513-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sims are not mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "513-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sims is not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "513-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He knew about Sims/Simulacra and how they were supposed to develop", "self_corrected": false}], "idk": [0], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 50, "n": 47, "e": 3}} +{"id": "123751n", "context": "i think that the people that are um have um a lower income which you automatically equate with lower education", "statement": "I think because you have lower income you are less educated.", "entailment": [{"annotator": 2, "id": "718-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, I think that lower income is equal to lower education.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "718-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], 
"label_correction": false, "reason": "It's not clear whether the speaker talks about other people associating lower income with lower education or whether they talk about themselves.", "self_corrected": false}, {"annotator": 3, "id": "718-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "There is no clear causal relation between the poorness and the education level. Either one could lead to the other", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "718-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context and the statement discuss just the opposite of the causal relationship between income and educational level.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 7, "e": 78, "n": 15}} +{"id": "43891n", "context": "GAO recommends that the Secretary of Defense revise policy and guidance", "statement": "GAO recommends that you eat 5 fruit/veg per day", "entailment": [], "neutral": [{"annotator": 0, "id": "761-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The recommendation of GAO in the statement is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "761-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Fruit/veg are not mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "761-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Diet is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "761-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 50, "n": 50}} +{"id": "113039e", "context": "In this respect, bringing Steve Jobs back to save Apple is like bringing Gen.", "statement": "Steve Jobs came back to Apple.", "entailment": [{"annotator": 2, "id": "1142-entailment-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, 
"makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because it describe Steve Jobs' back as \"bringing Gen\", so he came back to Apple and saved it like a General.", "self_corrected": false}, {"annotator": 3, "id": "1142-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Steve Jobs was brought back to save Apple. So he came back to Apple", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1142-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context doesn't suggest if Steve Jobs came back to Apple.", "self_corrected": false}, {"annotator": 1, "id": "1142-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker talks only about what it would be like if Steve Jobs returned to Apple. He doesn't assert that he really came back.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 40, "e": 42, "c": 18}} +{"id": "83900e", "context": "Ca'daan closed the door behind them and retied the not.", "statement": "Ca'daan closed the door as they entered, and bound it shut with rope.", "entailment": [{"annotator": 1, "id": "684-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The \"[k]not\" was probably made with rope.", "self_corrected": false}, {"annotator": 3, "id": "684-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "paraphrases", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "684-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention if Ca'daan bound the door shut with rope", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "684-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Ca'daan closed the door after they entered, not as they entered.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": 
["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 59, "n": 33, "c": 8}} +{"id": "10119e", "context": "Then he is very sure.", "statement": "He is very sure of himself.", "entailment": [{"annotator": 3, "id": "42-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "paraphrases", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "42-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what he's very sure of.", "self_corrected": false}, {"annotator": 1, "id": "42-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's unclear whether he is sure of himself or of something else.", "self_corrected": false}, {"annotator": 2, "id": "42-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He could be very sure of himself, or be sure of any other things.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 60, "n": 37, "c": 3}} +{"id": "21810n", "context": "you can get a hard copy of it and that's about it", "statement": "An email won't cut it.", "entailment": [{"annotator": 3, "id": "112-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Only one hard copy of it is allowed. 
So an E-mail does not meet the requirement and won't do the trick", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "112-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions the hard copy.", "self_corrected": true}, {"annotator": 1, "id": "112-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "E-mail is not mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "112-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Email is not mentioned in the context.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 46, "n": 43, "c": 11}} +{"id": "35700c", "context": "The Honorable Bill Archer, Chairman The Honorable Charles B. Rangel Ranking Minority Member Committee on Ways and Means House of Representatives", "statement": "Bill Archer has never held government office in his entire life.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 1, "id": "368-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Bill Archer was chairman of the house of representatives.", "self_corrected": false}, {"annotator": 2, "id": "368-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "No, Bill Archer at lease held government office as Chairman.", "self_corrected": false}], "idk": [0, 3], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 85, "n": 14, "e": 1}} +{"id": "109278n", "context": "Lawyers in their first three years of practice or who are inactive pay $90, and retired lawyers pay nothing.", "statement": "Lawyers pay $90 to be included in the directory.", "entailment": [], "neutral": [{"annotator": 1, "id": "278-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear for what lawyers pay $90.", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "278-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], 
"label_correction": false, "reason": "Not all lawyers are required to pay $90.", "self_corrected": false}, {"annotator": 2, "id": "278-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, retired lawyers pay nothing to be included in the directory.", "self_corrected": false}, {"annotator": 3, "id": "278-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "retired laywers do not pay anything. And it is not clear what the money is for.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 74, "c": 13, "e": 13}} +{"id": "138530e", "context": "It vibrated under his hand.", "statement": "It hummed quietly in his hand.", "entailment": [{"annotator": 1, "id": "1030-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"It vibrated\" probably entails \"it hummed\".", "self_corrected": false}, {"annotator": 2, "id": "1030-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, because if it \"vibrated\", it usually \"hummed quietly\".", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1030-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if the humming is quiet.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1030-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's vibrating not humming", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 36, "e": 43, "c": 21}} +{"id": "13387e", "context": "yeah i can believe that", "statement": "I agree with what you said.", "entailment": [{"annotator": 0, "id": "896-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that the speaker believe what was said.", "self_corrected": false}, 
{"annotator": 3, "id": "896-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "To Believe what has happened does not mean to agree with what has happened", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "896-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Belief does not entail agreement.", "self_corrected": false}, {"annotator": 2, "id": "896-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I believe something, maybe it is what you said, maybe it is anything else.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 70, "n": 28, "c": 2}} +{"id": "58557e", "context": "In the first instance, IRS would have no record of time before the person could get through to an agent and of discouraged callers.", "statement": "There is no recording of the time for callers.", "entailment": [], "neutral": [{"annotator": 1, "id": "815-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The \"would\" implies that this is about a hypothetical situation, not about a factual one.", "self_corrected": false}, {"annotator": 2, "id": "815-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no recording of the time for discouraged callers, but other callers could be recorded, or not.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "815-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "There is recording of the time after the person get through to an agent.", "self_corrected": false}, {"annotator": 3, "id": "815-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "There will be a record after the person get through to an agent", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 77, "n": 16, "c": 7}} +{"id": "36811c", "context": 
"This having come to his stepmother's ears, she taxed him with it on the afternoon before her death, and a quarrel ensued, part of which was overheard.", "statement": "A love affair sparked just moments before her death.", "entailment": [], "neutral": [{"annotator": 0, "id": "724-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if the context talks about A love affair.", "self_corrected": false}, {"annotator": 1, "id": "724-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There's no mention of a love affair.", "self_corrected": false}, {"annotator": 2, "id": "724-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "A love affair is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "724-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not known that it's about a love affair", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 7, "n": 40, "c": 53}} +{"id": "73840n", "context": "Hersheimmer \"WELL,\" said Tuppence, recovering herself, \"it really seems as though it were meant to be.\" Carter nodded.", "statement": "See, luck is real!", "entailment": [{"annotator": 0, "id": "1066-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The speaker seems to believe in luck, as she said that it seems as it were meant to be.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1066-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Luck is not mentioned.", "self_corrected": false}, {"annotator": 3, "id": "1066-neutral-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 70, "c": 10, "e": 20}} +{"id": "31249e", "context": "(And yes, he has said a few things that can, with some effort, be construed as support 
for supply-side economics.)", "statement": "It would take some work to construe the things as support for supply-side economics.", "entailment": [{"annotator": 0, "id": "629-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"With some effort\" has the same meaning as \"take some work\".", "self_corrected": false}, {"annotator": 1, "id": "629-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"with some effort\" implies \"it would take some work\".", "self_corrected": false}, {"annotator": 2, "id": "629-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, to construe the things as support for supply-side economics need some effort.", "self_corrected": false}, {"annotator": 3, "id": "629-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "What he said would need some effort, meaning work, to be construed as support", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 86, "c": 6, "n": 8}} +{"id": "91913n", "context": "This is one of the reasons we're growing too weak to fight the Satheri. 
\"What's wrong with a ceremony of worship, if you must worship your eggshell?\" Dave asked.", "statement": "Eggshell worship is the reason we're growing too weak to fight the Satheri, yet Dave asked about it.", "entailment": [], "neutral": [{"annotator": 0, "id": "1437-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify the reasons why they are growing too weak to fight the Satheri.", "self_corrected": false}, {"annotator": 1, "id": "1437-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether \"Eggshell worship\" is really put as a reason.", "self_corrected": false}, {"annotator": 2, "id": "1437-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reasons that we're growing too weak to fight the Satheri is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1437-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Dave meant if\"you\" can worship a eggshell then people can also have a ceremony of worship", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 60, "n": 36, "c": 4}} +{"id": "100895c", "context": "Is there adequate information for judging generalizability?", "statement": "Every output has some kind of resource.", "entailment": [], "neutral": [{"annotator": 1, "id": "181-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Output or resources are not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "181-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 78, "e": 2, "c": 20}} +{"id": "837n", "context": "The central features of the Results Act-strategic planning, performance measurement, and public reporting and accountability-can serve as powerful tools to help change the basic culture of government.", "statement": "The Results Act has strategic planning as a central 
feature for public organizations.", "entailment": [{"annotator": 1, "id": "1361-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"strategic planning\" is mentioned as one of the central features.", "self_corrected": false}, {"annotator": 3, "id": "1361-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Strategic planning is one of its central features", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "1361-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The central features of the Results Act are tools to help government, not public organizations.", "self_corrected": false}, {"annotator": 2, "id": "1361-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Results Act has strategic planning as a central feature, but the purpose is to help change the basic culture of government.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 2.0, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 37, "e": 61, "c": 2}} +{"id": "140005c", "context": "3 It should be noted that the toxicity (LC50) of a sample observed in a range-finding test may be significantly different from the toxicity observed in the follow-up chronic definitive test (1) the definitive test is longer; and (2) the test may be performed with a sample collected at a different time, and possibly differing significantly in the level of toxicity.", "statement": "The toxicity of a sample in the range-finding test will be exactly the same as the toxicity in the follow-up test.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "894-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The toxicity of a sample in a range-finding test may be different from the toxicity in the follow-up test, whereas the statement says the exact opposite.", "self_corrected": false}, {"annotator": 1, "id": "894-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says it \"may be significantly different\".", "self_corrected": false}, {"annotator": 2, "id": "894-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, 
the toxicity of a sample in the range-finding test may be significantly different from the toxicity in the follow-up test.", "self_corrected": false}, {"annotator": 3, "id": "894-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They are not the same due to the test time and samples collected at a different time", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 85, "e": 5, "n": 10}} +{"id": "38475c", "context": "In 1984, Clinton picked up rock groupie Connie Hamzy when she was sunbathing in a bikini by a hotel pool.", "statement": "Clinton kept her friends and relationships private in the 80s.", "entailment": [], "neutral": [{"annotator": 0, "id": "1018-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention if Clinton kept her friends and relationships private.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1018-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}], "label_correction": false, "reason": "No, Clition is male not female.", "self_corrected": true}, {"annotator": 3, "id": "1018-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He picked a one of his friends in a public hotel pool. 
So it was not private kept", "self_corrected": false}], "idk": [1], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 65, "c": 33, "e": 2}} +{"id": "141293e", "context": "oh wow no i just started about well five years ago i think", "statement": "It had started five years ago.", "entailment": [{"annotator": 0, "id": "1251-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that the speaker started five years ago.", "self_corrected": false}, {"annotator": 2, "id": "1251-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, I think I started five years ago.", "self_corrected": false}, {"annotator": 3, "id": "1251-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "I started something five years ago, so this thing or it was started five years ago", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1251-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"I started\" not \"it started\"", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 31, "e": 64, "c": 5}} +{"id": "30139n", "context": "oh that's not really important the the other stuff is just you know window dressing because we we've never ordered anything fact the the van that we've got we bought uh from an estate it was an estate trade uh it was almost brand new the the gentlemen who owned it had died", "statement": "We were very lucky to get the van given how new it was.", "entailment": [{"annotator": 0, "id": "1396-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is true that it's a lucky thing to get a new van.", "self_corrected": false}, {"annotator": 3, "id": "1396-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The van was almost brand new because the gentleman who owned it died. 
So it's almost not used", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1396-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Whether they were lucky would depend on the price which is not mentioned.", "self_corrected": false}, {"annotator": 2, "id": "1396-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The van was almost brand new, but it could be lucky to have or not at all.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 62, "n": 35, "c": 3}} +{"id": "30380c", "context": "The NYT , in its front-page coverage, says the plane was flying far lower than the rules for training missions allow.", "statement": "The NYT reported that training missions did allow for planes to fly that low.", "entailment": [{"annotator": 3, "id": "415-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The plane flew lower than the rules allowed, so the rules do not allow to fly that low", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "415-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was flying lower than the rules allow, which suggests that it was not allowed by rules for training missions.", "self_corrected": false}, {"annotator": 1, "id": "415-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The \"plane was flying far lower than the rules [...] 
allow\" implies that it was allowed to fly this low.", "self_corrected": true}, {"annotator": 2, "id": "415-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, NYT reported that training missions did not allow for planes to fly that low.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 66, "e": 28, "n": 6}} +{"id": "22436n", "context": "1 Now that each unit is fully staffed, the LSC Office of Program Performance and its state planning team contain over 260 years of experience in LSC-funded programs.", "statement": "The LSC has over 260 years of experience with their lawyers.", "entailment": [], "neutral": [{"annotator": 0, "id": "605-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The experience of LSC with their lawyers is not mentioned.", "self_corrected": false}, {"annotator": 1, "id": "605-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The experience is for the \"state planning team\" and not for \"lawyers\".", "self_corrected": false}, {"annotator": 2, "id": "605-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The LSC has over over 260 years of experience in LSC-funded programs, but it could be with their lawyers, or with other staffs, like interns.", "self_corrected": false}, {"annotator": 3, "id": "605-neutral-4", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "No info about the info but about LSC-funded program, which we do not know info about", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 45, "e": 43, "c": 12}} +{"id": "76037n", "context": "You did, didn't you?\"", "statement": "You didn't mean to do that, did you?", "entailment": [], "neutral": [{"annotator": 1, "id": "1513-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"mean to do that\" is not mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "1513-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": 
true}], "label_correction": false, "reason": "The deliberation is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1513-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Context is about whether an action has been done by someone, but the statement is about the intention to do it", "self_corrected": true}], "idk": [0], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 49, "c": 40, "e": 11}} +{"id": "28456n", "context": "A clean, wholesome-looking woman opened it.", "statement": "The woman was trying to be desecrate.", "entailment": [], "neutral": [{"annotator": 0, "id": "354-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention anything about desecration.", "self_corrected": false}, {"annotator": 1, "id": "354-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what the woman was trying to be.", "self_corrected": false}, {"annotator": 2, "id": "354-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The attempt of the woman is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "354-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, the woman is clean and wholesome-looking, not desecrate.", "self_corrected": false}, {"annotator": 3, "id": "354-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context is a compliment, statement is a negative comment", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 68, "c": 31, "e": 1}} +{"id": "46198c", "context": "How effectively DOD manages these funds will determine whether it receives a good return on its investment.", "statement": "The DOD is certain to have a bad return on these funds.", "entailment": [], "neutral": [{"annotator": 2, "id": "926-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, 
{"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The return on its investment can be bad or good.", "self_corrected": false}, {"annotator": 3, "id": "926-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known yet about the result, it depends on DOD's management", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "926-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The return is not certainly bad. The return is determined by how DOD manages these funds.", "self_corrected": false}, {"annotator": 1, "id": "926-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"How effectively [...] will determine\" implies that there is at least a chance to have a good return.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 52, "e": 3, "n": 45}} +{"id": "100136e", "context": "Challenges to Restore Public Confidence in", "statement": "Public confidence can be difficult to reestablish.", "entailment": [{"annotator": 0, "id": "755-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement suggest that it is possible that Public confidence is difficult to restore. 
This is true since the context only mentions that there are challenges, not how large the challenges are.", "self_corrected": false}, {"annotator": 1, "id": "755-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If there are \"challenges to restore public confidence\" then it can be \"difficult to reestablish\".", "self_corrected": false}, {"annotator": 3, "id": "755-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Challenge means to be a difficulty", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 31, "e": 68, "c": 1}} +{"id": "24103n", "context": "if the United States had used full conventional power.", "statement": "The United States is unable to maximize their potential.", "entailment": [], "neutral": [{"annotator": 1, "id": "1373-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maximizing potential is not mentioned.", "self_corrected": false}, {"annotator": 3, "id": "1373-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It it unknown if US is able to maximize the potential or not. 
Maybe US is able to, just will not", "self_corrected": false}], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 64, "c": 18, "e": 18}} +{"id": "142729c", "context": "What Ellison is doing here, as Hemingway did, is equating the process of becoming an artist with that of becoming a man.", "statement": "Ellison and Hemingway took different ways to compare becoming a man.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "307-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Ellison and Hemingway took the same way, not different ways.", "self_corrected": false}, {"annotator": 1, "id": "307-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They both compared the process to becoming an artist.", "self_corrected": false}, {"annotator": 2, "id": "307-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, both Ellison and Hemingway equated the process of becoming an artist with that of becoming a man.", "self_corrected": false}, {"annotator": 3, "id": "307-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They both equete becoming an artist with becoming an man. It means for them, when one becomes an artist, they also becomes a man. The way is the same", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 79, "n": 12, "e": 9}} +{"id": "96516e", "context": "As Ben Yagoda writes in the New York Times Book Review , somewhere along the way, Kidder must have decided not to write a book about Tommy O'Connor.", "statement": "A book was not written about Tommy O'Connor.", "entailment": [{"annotator": 2, "id": "1138-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, Kidder decided not to write a book about Tommy O'Connor.", "self_corrected": false}, {"annotator": 3, "id": "1138-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Kidder dicided not to write a book about O'Conner. 
So this book about him is not written", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1138-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Kidder didn't write a book about Tommy O'Connor doesn't mean others haven't.", "self_corrected": false}, {"annotator": 1, "id": "1138-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Other people might have written a book about Tommy O'Connor.", "self_corrected": false}, {"annotator": 2, "id": "1138-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maybe Kidder later wrote a book not about Tommy O'Connor, maybe he even did not write a book, so no book was written at all.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 73, "n": 22, "c": 5}} +{"id": "52761e", "context": "My unborn children will never appear on the Today show.", "statement": "No direct descendent of mine will ever be a guest of the Today show.", "entailment": [{"annotator": 3, "id": "446-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "my unborn children mean the children that are born by me. So they are direct descendent of mine. So my unborn children not being on the show means my direct descendent not being on the show", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "446-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The grandchild is also a direct descendent, we don't know if the speaker's grandchildren will appear on the Today show.", "self_corrected": false}, {"annotator": 1, "id": "446-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "There might be children that have already been born. These would not be \"unborn children\" but also \"direct descendent\".", "self_corrected": false}, {"annotator": 2, "id": "446-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Direct descendent include children and grandchildren. 
In the context, it is only confirmed that my children will not be a guest of the Today show, but my grandchildren could be on it.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 34, "e": 65, "c": 1}} +{"id": "21297n", "context": "He was crying like his mother had just walloped him.", "statement": "He was crying like his mother hit him with a spoon.", "entailment": [{"annotator": 0, "id": "345-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement describe how much he was crying.", "self_corrected": false}, {"annotator": 1, "id": "345-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Regarding the intensity of crying \"wallop\" is probably similar to hitting \"with a spoon\".", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "345-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His mother could hit him with a spoon, could with other things like stike or slippers.", "self_corrected": false}, {"annotator": 3, "id": "345-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known with what his mother hit him", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 74, "c": 2, "e": 24}} +{"id": "98487c", "context": "Julius nodded gravely.", "statement": "Julius loves to ask questions.", "entailment": [], "neutral": [{"annotator": 0, "id": "532-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if Julius loves to ask questions.", "self_corrected": false}, {"annotator": 1, "id": "532-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Questions are not mentioned.", "self_corrected": false}, {"annotator": 2, "id": "532-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": 
"Maybe Julius loves to ask questions, maybe not.", "self_corrected": false}, {"annotator": 3, "id": "532-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 80, "c": 20}} +{"id": "17179n", "context": "Lie back, and DON'T THINK.", "statement": "Lie back, and do not use your crazy mind.", "entailment": [{"annotator": 0, "id": "62-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context and the statement talk about stopping thinking.", "self_corrected": false}, {"annotator": 3, "id": "62-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"DON'T THINK\" implies not to overthink and relax in this context. So it is similar to \" not use your crazy mind\", which also means not to overthink", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "62-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear whether the mind is crazy.", "self_corrected": false}, {"annotator": 2, "id": "62-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Using crazy mind could be counted as thinking or not thinking, but dreaming.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 63, "n": 37}} +{"id": "132539e", "context": "Boca da Corrida Encumeada (moderate; 5 hours): views of Curral das Freiras and the valley of Ribeiro do Poco.", "statement": "Boca da Corrida Encumeada is a moderate text that takes 5 hours to complete.", "entailment": [{"annotator": 0, "id": "104-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"Moderate\" and \"5 hours: are two descriptions about Boca da Corrida Encumeada made in the statement which are mentioned in the context as well.", "self_corrected": true}, {"annotator": 3, "id": "104-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 
3, "makes_sense": true}], "label_correction": false, "reason": "It could be assumed that (moderate; 5 hours) is a short form of moderate text with 5 hours reading time", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 1, "id": "104-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Boca da Corrida Encumeada\" sounds more like a hike than a text.", "self_corrected": false}, {"annotator": 2, "id": "104-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Boca da Corrida Encumeada should be a route, not a text.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 2.0, "neutral": null}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 14, "n": 36, "e": 50}} +{"id": "23642n", "context": "The second half of the book dealt with the use of the true name.", "statement": "The first part dealt with the use of false names.", "entailment": [], "neutral": [{"annotator": 0, "id": "1064-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The first part is not mentioned by the context.", "self_corrected": false}, {"annotator": 1, "id": "1064-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear what the first part of the book is about.", "self_corrected": false}, {"annotator": 2, "id": "1064-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The first part is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "1064-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the first part of the book", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 91, "c": 5, "e": 4}} +{"id": "84781e", "context": "By coordinating policy development and awareness activities in this manner, she helps ensure that new risks and policies are communicated promptly and that employees are periodically reminded of existing policies through means such as monthly bulletins, an intranet web site, and presentations to new employees.", "statement": "She can find new risks with the awareness 
campaign.", "entailment": [], "neutral": [{"annotator": 1, "id": "282-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The awareness campaign is about communicating risks, not about finding new ones", "self_corrected": false}, {"annotator": 2, "id": "282-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Finding new risks could be a effect of awareness campaign, but it is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "282-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "She can communicate new risks with the awareness campaign insteading of finding new risks.", "self_corrected": false}, {"annotator": 3, "id": "282-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "She does not find new risks but to ensure that the new risks are dealt with correctly", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 31, "e": 57, "c": 12}} +{"id": "15771c", "context": "or just get out and walk uh or even jog a little although i don't do that regularly but Washington's a great place to do that", "statement": "\"I regularly go for a walk or a jog at Washington's.\"", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "246-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The speaker doesn't regularly go for a walk or a jog at Washington's.", "self_corrected": false}, {"annotator": 1, "id": "246-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"i don't [walk or jog] regularly\" implies that the speaker also does no do that at Washington's", "self_corrected": false}, {"annotator": 2, "id": "246-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, I don't go for a walk or a jog regularly.", "self_corrected": false}, {"annotator": 3, "id": "246-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context implies that \"i\" do not walk or job 
regularly", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 13, "c": 76, "e": 11}} +{"id": "134655c", "context": "Catch up on the Indian avant-garde and the bohemian people of Caletta at the Academy of Fine Arts on the southeast corner of the Maidan.", "statement": "The Academy of Fine Arts is located in Northern Maidan.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "471-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Academy of Fine Arts is located in Southern Maidan instead of Northern Maidan.", "self_corrected": false}, {"annotator": 1, "id": "471-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is located on the \"southeast corner\", so not in \"Northern\".", "self_corrected": false}, {"annotator": 2, "id": "471-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, The Academy of Fine Arts is located in Southeasten Maidan.", "self_corrected": false}, {"annotator": 3, "id": "471-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is located on the southeast corner of the Maidan", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 15, "c": 77, "e": 8}} +{"id": "33822n", "context": "Why shouldn't he be?", "statement": "He doesn't actually want to be that way.", "entailment": [], "neutral": [{"annotator": 0, "id": "788-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His thought is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "788-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether he wants to be like that.", "self_corrected": false}, {"annotator": 3, "id": "788-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about his intention", "self_corrected": false}], "contradiction": [], "idk": 
[2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 80, "c": 13, "e": 7}} +{"id": "125021c", "context": "Other functional components of the Postal Service are presumed here not to exhibit significant scale economies, although this has not been demonstrated.", "statement": "The Postal Service only operates very large scale economies.", "entailment": [], "neutral": [{"annotator": 0, "id": "884-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The word \"only\" is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "884-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the Postal Service operates economies at all, or what that should mean.", "self_corrected": true}, {"annotator": 2, "id": "884-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Other functional components of the Postal Service could operate not very large scale economies, but this has not been demonstrated, so it can also be very large.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "884-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They are presumed not to operate significant/large scale economies", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 48, "c": 48, "e": 4}} +{"id": "133005n", "context": "In May 1967, Gallup found that the number of people who said they intensely disliked RFK--who was also probably more intensely liked than any other practicing politician--was twice as high as the number who intensely disliked Johnson, the architect of the increasingly unpopular war in Vietnam.", "statement": "Due to his attitudes on cheesecake, RFK was more disliked than Johnson.", "entailment": [], "neutral": [{"annotator": 0, "id": "315-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The reason why RFK was more disliked than Johnson is not mentioned in the context.", "self_corrected": false}, {"annotator": 1, "id": "315-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "It is not clear whether RFK had an attitude towards cheescake or how that impacted his popularity.", "self_corrected": false}, {"annotator": 2, "id": "315-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "RFK's attitudes on cheesecake is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "315-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known that the dislikes on RFK is due to his attitudes on cheesecakes", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "315-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, RFK was more iked than Johnson.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 43, "n": 49, "e": 8}} +{"id": "65650e", "context": "She didn't listen.", "statement": "She did not listen to the noise.", "entailment": [], "neutral": [{"annotator": 0, "id": "310-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't suggest what she didn't listen to.", "self_corrected": false}, {"annotator": 1, "id": "310-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear whether she didn't listen to the noise or to something else.", "self_corrected": false}, {"annotator": 2, "id": "310-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe she did not listen to the noise, maybe she did not listen to vert important messages.", "self_corrected": false}, {"annotator": 3, "id": "310-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is unknown what she did not listen to", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 45, "e": 54, "c": 1}} +{"id": "3545n", "context": "Several of the organizations had professional and administrative staffs that provided analytical capabilities and 
facilitated their members' participation in the organization's activities.", "statement": "Organizations had mandatory bonding exercises for their members.", "entailment": [], "neutral": [{"annotator": 0, "id": "937-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if the activities are mandatory.", "self_corrected": false}, {"annotator": 1, "id": "937-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"mandatory boding exercises\" are not mentioned.", "self_corrected": false}, {"annotator": 2, "id": "937-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Organizations facilitated their members' participation in the organization's activities, but they maybe had mandatory bonding exercises, maybe not.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "937-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The members were facilitated to participate. So it is not mandatory but encouraged", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 15, "n": 77, "c": 8}} +{"id": "105196n", "context": "Indeed, said San'doro.", "statement": "They were certain.", "entailment": [{"annotator": 1, "id": "365-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"Indeed\" implies a level of certainty.", "self_corrected": false}, {"annotator": 3, "id": "365-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Indeed implies a positive acknowledging attitude.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "365-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions San'doro, it is unclear who \"they\" are.", "self_corrected": false}, {"annotator": 2, "id": "365-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In the context, there is only one person. 
\"They\" can refer to anyone.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 49, "n": 50, "c": 1}} +{"id": "144753e", "context": "When he's ready for a major strike, how many innocents do you suppose are going to suffer? To quote one of your contemporaries; 'The needs of the many outweigh the needs of the few.' '", "statement": "If he does a big strike, many people will suffer.", "entailment": [{"annotator": 0, "id": "222-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question in the context might be a rhetorical question which suggests exactly that many innocents will suffer.", "self_corrected": false}, {"annotator": 1, "id": "222-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"how many innocents do you suppose are going to suffer\" implies that \"many people will suffer\".", "self_corrected": false}, {"annotator": 3, "id": "222-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The question in the context implies that a major strike leads to suffering of many innocent people", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "222-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the question in the context is just a ordinary question, then we don't know if many people will suffer.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 62, "n": 36, "c": 2}} +{"id": "114458e", "context": "Mortifyingly enough, it is all the difficulty, the laziness, the pathetic formlessness in youth, the round peg in the square hole, the whatever do you want?", "statement": "Many youth are lazy.", "entailment": [{"annotator": 1, "id": "4-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"the laziness [...] 
in youth\" implies that \"many youth are lazy\".", "self_corrected": false}, {"annotator": 3, "id": "4-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The laziness in youth means youth being lazy", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "4-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not mention whether many young people are lazy.", "self_corrected": true}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 68, "n": 29, "c": 3}} +{"id": "49237c", "context": "'You burned down my house.'", "statement": "'Even though you tried to burn it down, my house is in perfect state.'", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "1566-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context suggest that the speaker's house was burned down, while the statement states that the house was not burned down.", "self_corrected": false}, {"annotator": 1, "id": "1566-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "After being burned down, the house will not be in perfect state.", "self_corrected": false}, {"annotator": 2, "id": "1566-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, you burned down my house means it already happened.", "self_corrected": false}, {"annotator": 3, "id": "1566-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "context entails the house was burned down", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 91, "n": 9}} +{"id": "70047c", "context": "What about the hole?\" They scanned the cliff-side narrowly.", "statement": "They looked from the top of the cliff for the hole.", "entailment": [{"annotator": 3, "id": "75-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They were scanning 
the cliff-side, so they were on the top of the cliff looking for the hole", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "75-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear whether they were on top of a cliff.", "self_corrected": false}, {"annotator": 2, "id": "75-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "They could look from at direction to the cliff-side, maybe from the top, maybe from the bottom.", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 33, "n": 57, "c": 10}} +{"id": "137319n", "context": "And she came to you?", "statement": "The person asked if the woman came to him.", "entailment": [{"annotator": 0, "id": "1383-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is the description of the question in the context.", "self_corrected": false}, {"annotator": 1, "id": "1383-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "1383-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "she' implies it was a woman.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1383-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"You\" can refer to a male or a female.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 81, "n": 18, "c": 1}} +{"id": "63013n", "context": "Although claims data provide the most accurate information about health care use, ensuring adequate follow-up for purposes of obtaining information from patient self-report is important because many people do not report alcohol-related events to insurance compa-nies.", "statement": "The insurance companies want to reduce medical payments by following-up to ensure patient was sober at the time of incident and intoxication may lead to a claim denial on reimbursement for medical expenses.", "entailment": [], "neutral": 
[{"annotator": 0, "id": "1161-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement seems to be a reason of why the insurance companies want to follow-up, that may just be one possibility.", "self_corrected": false}, {"annotator": 1, "id": "1161-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether it is the insurance companies that should do the follow-up or why.", "self_corrected": false}, {"annotator": 2, "id": "1161-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Intoxication may lead to a claim denial on reimbursement for medical expense, but it may not.", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 45, "e": 47, "c": 8}} +{"id": "79507e", "context": "An organization's activities, core processes, and resources must be aligned to support its mission and help it achieve its goals.", "statement": "An organization is successful if its activities, resources, and goals align.", "entailment": [], "neutral": [{"annotator": 0, "id": "837-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "What is mentioned in the statement may be a factor in an organization's success, but there might be others.", "self_corrected": false}, {"annotator": 1, "id": "837-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only says that these are required, not that they are sufficient.", "self_corrected": false}, {"annotator": 2, "id": "837-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "An organization's activities, rescources and goals align can help it achieve success, but the alignment can not promise the success.", "self_corrected": false}, {"annotator": 3, "id": "837-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Don't know if this is the only standard to measure if an organization is successful", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": 
["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 75, "n": 25}} +{"id": "121910e", "context": "If ancient writings give only a romanticized view, they do offer a more precise picture of Indo-Aryan society.", "statement": "Ancient writings don't show an accurate picture of Indo-Anryan society.", "entailment": [{"annotator": 2, "id": "835-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Yes, because ancient writings give only a romanticized view.", "self_corrected": false}, {"annotator": 3, "id": "835-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "An accurate picture of the society is given under the assumption that ancient writings give a romanticized view. So in reality, where this assumption does not hold, the picture is not accurate either", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "835-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The writings are \"more precise\" than something else. That doesn't imply that they are really accurate.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "835-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Ancient writings offer a more accurate picture of Ino-Aryan society.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"c": 63, "n": 18, "e": 19}} +{"id": "120323n", "context": "In the original, Reich is set up by his host and then ambushed by a hostile questioner named John, and when he tries to answer with an eloquent Mr. Smith speech (My fist is clenched.", "statement": "Reich's host is out to get him.", "entailment": [{"annotator": 0, "id": "850-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Reich's host is out to set him up. 
The word \"get\" in the statement could mean set up.", "self_corrected": false}, {"annotator": 1, "id": "850-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "His host set him up, so he is out to get him.", "self_corrected": false}, {"annotator": 2, "id": "850-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Yes, because Reich is set by his host.", "self_corrected": false}, {"annotator": 3, "id": "850-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He is set up by his host. So his host designed a trap for him, meanig his host is out to get him", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 0, "id": "850-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the the word \"get\" refers to physical, for example \"catch\", then the statement is false.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "n": 22, "c": 3}} +{"id": "50480n", "context": "But you will find it all right.\"", "statement": "You, I'm sure, will find it more than adequate.", "entailment": [{"annotator": 0, "id": "1548-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that it is fine.", "self_corrected": true}, {"annotator": 1, "id": "1548-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"find it all right\" implies \"more than adequate\"", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "1548-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It might be more than adequate, but also can be just adequate.", "self_corrected": false}, {"annotator": 3, "id": "1548-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "possible extraggeration.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, 
"neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 67, "n": 29, "c": 4}} +{"id": "123027n", "context": "uh high humidity", "statement": "Warm, sweaty temperatures.", "entailment": [{"annotator": 1, "id": "486-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"high humidity\" implies \"warm sweaty temperatures\"", "self_corrected": false}, {"annotator": 3, "id": "486-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases to high humidity", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "486-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It can be warm or cold.", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 72, "n": 26, "c": 2}} +{"id": "85279n", "context": "The almost midtown Massabielle quarter (faubourg de Massabielle), is sometimes described as the most picturesque in the city.", "statement": "The Massabielle quarter is a very touristy place.", "entailment": [], "neutral": [{"annotator": 0, "id": "161-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether the Massabielle quarter is very touristy or not", "self_corrected": false}, {"annotator": 1, "id": "161-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"picturesque\" does not necessarily imply \"touristy\".", "self_corrected": false}, {"annotator": 2, "id": "161-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could be a very touristy place because of its great beauty, but it also can be not touristy because of poor transportation, or it is not so famous.", "self_corrected": false}, {"annotator": 3, "id": "161-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "picturesque is not directly related to touristy", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 
4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 78, "e": 20, "c": 2}} +{"id": "121360c", "context": "The tip was hooked towards the edge, the same way the tips are hammered for knives used for slaughter.", "statement": "They were fragile and could not leave a scratch.", "entailment": [], "neutral": [{"annotator": 0, "id": "152-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if they were fragile.", "self_corrected": true}, {"annotator": 1, "id": "152-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It's not clear from the context whether \"they\" are fragile or not.", "self_corrected": false}, {"annotator": 3, "id": "152-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "irrelevant", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "152-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, because knives used for slaughter are usually sharp.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 52, "c": 47, "e": 1}} +{"id": "56582n", "context": "So far, however, the number of mail pieces lost to alternative bill-paying methods is too small to have any material impact on First-Class volume.", "statement": "Occasionally mail is lost but not often", "entailment": [{"annotator": 0, "id": "1356-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The number is too small indicates that mail loss is not often.", "self_corrected": false}, {"annotator": 2, "id": "1356-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because number of lost mail pieces is too small.", "self_corrected": false}, {"annotator": 3, "id": "1356-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "the number of mail lost is too small. 
So it means it is not often to lose the mails", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1356-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The context only talks about the impact of losses because of \"alternative bill-paying methods\". Mail could also be lost for other reasons.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 36, "e": 60, "c": 4}} +{"id": "46650n", "context": "The draft treaty was Tommy's bait.", "statement": "Tommy took the bait of the treaty.", "entailment": [], "neutral": [{"annotator": 0, "id": "336-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if Tommy took the bait of the treaty.", "self_corrected": false}, {"annotator": 1, "id": "336-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The treaty acts as bait for Tommy, but it is not clear whether he really took it.", "self_corrected": false}, {"annotator": 3, "id": "336-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known if Tommy took the bait.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "336-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, Tommy is the person who set the bait, but not the one took the bait.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 56, "c": 10, "e": 34}} +{"id": "81579e", "context": "All were prominent nationally known organizations.", "statement": "The only identified organizations were well-known.", "entailment": [{"annotator": 1, "id": "750-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "750-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention identified 
organizations.", "self_corrected": false}, {"annotator": 2, "id": "750-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The only identified organizations could be well-known or not well-known.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "750-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}], "label_correction": true, "reason": "All the organizations were well-known", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 78, "n": 18, "c": 4}} +{"id": "23414c", "context": "Why bother to sacrifice your lives for dirt farmers and slavers?", "statement": "No one cares about the dirt farmers and slaves.", "entailment": [], "neutral": [{"annotator": 0, "id": "523-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context indicates the speaker's attitude toward the dirt farmers and slaves, not everyone's attitude.", "self_corrected": true}, {"annotator": 1, "id": "523-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Judging by the context, the speaker probably does not care for \"dirt farmers\" and \"slavers\". It does not follow that they think that no one cares for them. 
Also the statement talks about \"slaves\" which are not mentioned at all.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "523-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The person at whom this question was directed at, cared about the farmers.", "self_corrected": false}], "idk": [2], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 48, "e": 40, "c": 12}} +{"id": "36715e", "context": "Jon twisted the man's wrist.", "statement": "Jon grabbed the man.", "entailment": [{"annotator": 0, "id": "526-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is clear that Jon grabbed the man by twisting his wrist.", "self_corrected": false}, {"annotator": 1, "id": "526-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In order to twist the wrist, Jon has to first grab the man.", "self_corrected": false}, {"annotator": 3, "id": "526-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "To twist, one would need to grab", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "526-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The man could be grabbed by Jon, but also could slip from him.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 76, "n": 22, "c": 2}} +{"id": "114492n", "context": "and the same is true of the drug hangover you know if you", "statement": "It's just like a drug hangover but worse.", "entailment": [], "neutral": [{"annotator": 0, "id": "512-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't contain any information about if it is worse.", "self_corrected": false}, {"annotator": 1, "id": "512-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that it's similar to \"drug hangover\" not \"worse\".", 
"self_corrected": false}, {"annotator": 2, "id": "512-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is similar to the drug hangover, but the extent can be worse or better.", "self_corrected": false}, {"annotator": 3, "id": "512-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about where a drug hangover is worse", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 23, "n": 66, "e": 11}} +{"id": "136097n", "context": "and going to school is also always very prohibitive now unless your parents are wealthy", "statement": "Wealthy parents are necessary for school.", "entailment": [{"annotator": 0, "id": "1525-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement talk about the necessity of wealthy parents.", "self_corrected": false}, {"annotator": 1, "id": "1525-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"going to school is [...] prohibitive [...] 
unless your parents are wealthy\" then \"wealthy parents are necessary for school\".", "self_corrected": false}, {"annotator": 3, "id": "1525-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The school are too expensive to go to, making wealthy parents necessary to pay for the tuition", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1525-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Wealthy parents can make going to school easier, but maybe without wealthy parents, it is still possible.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 28, "e": 61, "c": 11}} +{"id": "101253c", "context": "In his effort to build nationalism across Turkey in the 1920s, Ataterk instituted a campaign to suppress Kurdish identity that continues today.", "statement": "In 1942, Ataterk tried to build nationalism in Turkey.", "entailment": [], "neutral": [{"annotator": 1, "id": "284-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about the 1920s. 
It is not clear whether Ataterk still was politically active in the 1940s and whether he continued his campaign.", "self_corrected": false}, {"annotator": 2, "id": "284-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We only can sure that in 1920s, Ataterk tried to build nationalism in Turkey, but in 1942, maybe he tried, maybe not.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "284-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Ataterk tried to build nationalism in Turkey in the 1920s, not in 1942.", "self_corrected": false}, {"annotator": 3, "id": "284-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It should be in 1920s", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 62, "e": 22, "n": 16}} +{"id": "97520e", "context": "AC Green's pretty good", "statement": "AC Green is also a solid player.", "entailment": [{"annotator": 1, "id": "520-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"pretty good\" implies \"solid\".", "self_corrected": false}, {"annotator": 3, "id": "520-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "a solid player is a good player", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "520-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention AC Green's occupation.", "self_corrected": false}, {"annotator": 2, "id": "520-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "AC Green could be a player or do other jobs,", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 69, "n": 29, "c": 2}} +{"id": "97569e", "context": "Candle grease?", "statement": "Was it candle grease?", "entailment": [{"annotator": 0, "id": "120-entailment-1", "judgments": 
[{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement ask if it was candle grease.", "self_corrected": false}, {"annotator": 1, "id": "120-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "120-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "paraphrases", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 72, "n": 28}} +{"id": "23751c", "context": "Part of the reason for the difference in pieces per possible delivery may be due to the fact that five percent of possible residential deliveries are businesses, and it is thought, but not known, that a lesser percentage of possible deliveries on rural routes are businesses.", "statement": "We all know that the reason for a lesser percentage of possible deliveries on rural routes being businesses, is because of the fact that people prefer living in cities rather than rural areas.", "entailment": [], "neutral": [{"annotator": 0, "id": "292-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the reason why lesser percentage of possible deliveries on rural routes are businesses.", "self_corrected": false}, {"annotator": 1, "id": "292-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context whether \"we all know\" that. 
No group of people that could know something is mentioned in the context.", "self_corrected": false}, {"annotator": 2, "id": "292-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We don't know whether people prefer living in cities or in rural areas.", "self_corrected": false}, {"annotator": 3, "id": "292-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The fact that people prefer living in cities is not known", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 16, "n": 65, "c": 19}} +{"id": "103169n", "context": "'Dave Hanson, to whom nothing was impossible.' Well, we have a nearly impossible task: a task of engineering and building.", "statement": "This building job is almost impossible, even for an experienced engineer.", "entailment": [{"annotator": 0, "id": "783-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We can infer that Dave Hanson is an experienced enginneer since nothing was impossible to him. The task is impossible since it is impossible to Dave Hanson, an experienced engineer.", "self_corrected": false}, {"annotator": 1, "id": "783-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the task is \"nearly impossible\" then it is also \"almost impossible\" for an experienced engineer.", "self_corrected": false}], "neutral": [{"annotator": 3, "id": "783-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It should be the job of engineering and building together", "self_corrected": true}], "contradiction": [{"annotator": 2, "id": "783-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, because nothing was impossible to Dava Hanson.", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["contradiction", "neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 72, "n": 26, "c": 2}} +{"id": "134217e", "context": "uh-huh and is it true i mean is it um", "statement": "It's true.", "entailment": [{"annotator": 0, "id": "1231-entailment-1", "judgments": [{"annotator": 0, 
"makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement sugguest that it is true.", "self_corrected": true}, {"annotator": 1, "id": "1231-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 3, "id": "1231-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "context is a question", "self_corrected": true}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": [], "error_labels": ["entailment", "neutral"], "has_ambiguity": false, "chaosnli_labels": {"n": 42, "e": 50, "c": 8}} +{"id": "11601n", "context": "36 AC usage nationally for mercury control from power plants should be roughly proportional to the total MWe of coal-fired facilities that are equipped with the technology (this assumes an average capacity factor of 85 percent and other assumptions of Tables 4-4 and 4-5).", "statement": "Power plants' mercury control AC usage is higher than total MWe from coal facilities.", "entailment": [], "neutral": [{"annotator": 0, "id": "275-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that Power plants' mercury control AC usage is proportional to total MWe from coal facilities. It is not clear if the usage should be lower or higher than total MWe.", "self_corrected": false}, {"annotator": 1, "id": "275-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is \"proportional\". 
This could be higher or lower.", "self_corrected": false}, {"annotator": 2, "id": "275-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Power plants' mercury control AC usage is proportinal to the total MWe from coal facilities, so it could be more and could be less.", "self_corrected": false}, {"annotator": 3, "id": "275-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not mentioned if the AC usage is higher than total MWe from coal facilities, but only in roughly proportional.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 48, "n": 47, "e": 5}} +{"id": "46059c", "context": "The results of even the most well designed epidemiological studies are characterized by this type of uncertainty, though well-designed studies typically report narrower uncertainty bounds around the best estimate than do studies of lesser quality.", "statement": "All studies have the same amount of uncertainty to them.", "entailment": [{"annotator": 0, "id": "920-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "Well-designed studies and studies of lesser quality have different amount of uncertainty to them.", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 1, "id": "920-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"well-designed studies typically report narrower uncertainty bounds\" means that they have less uncertainty than other types of studies.", "self_corrected": false}, {"annotator": 2, "id": "920-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, well-designed studies typically report narrower uncertainty bounds around the best estimate than do studies of lesser quality.", "self_corrected": false}, {"annotator": 3, "id": "920-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "well-designed studies has less amount of uncertainty", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 61, "e": 27, "n": 12}} 
+{"id": "82156e", "context": "The great breathtaking Italian adventure remains the road.", "statement": "The road remains the Italy people want to see.", "entailment": [], "neutral": [{"annotator": 0, "id": "1077-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention what Italy people want to see.", "self_corrected": false}, {"annotator": 2, "id": "1077-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The road remains the great breathtaking Italian adventure, but Italy people could like adventure, could not.", "self_corrected": false}], "contradiction": [], "idk": [1, 3], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 41, "e": 54, "c": 5}} +{"id": "30894n", "context": "Earlier this week, the Pakistani paper Dawn ran an editorial about reports that Pakistani poppy growers are planning to recultivate opium on a bigger scale because they haven't received promised compensation for switching to other crops.", "statement": "Pakistani poppy growers are mad at the government.", "entailment": [{"annotator": 0, "id": "1479-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Pakistani poppy growers haven't received compensation, which could be a reason why they might be mad at the government. Planning to recultivate opium on a bigger scale could be a sideways indication that they're mad.", "self_corrected": false}, {"annotator": 2, "id": "1479-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Yes, because Pakistani poppy growers haven't received promised compensation.", "self_corrected": false}, {"annotator": 3, "id": "1479-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because the growers haven't received promised compensation from the government, so they can be mad at government because of it", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1479-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the poppy growers are really mad. 
Maybe they did not care much about the promised compensations.", "self_corrected": false}, {"annotator": 2, "id": "1479-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maybe Pakistani poppy are mad at the government, maybe they are not mad.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 44, "n": 54, "c": 2}} +{"id": "122645n", "context": "Then you're ready for the fray, either in the bustling great bazaars such as Delhi's Chandni Chowk or Mumbai's Bhuleshwar, or the more sedate ambience of grander shops and showrooms.", "statement": "All of the great bazaars are bustling at all times.", "entailment": [], "neutral": [{"annotator": 0, "id": "1503-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear if all of the great bazaars are bustling at all times. The context only mentions two.", "self_corrected": false}, {"annotator": 1, "id": "1503-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the bazaars are really bustling at all times or only at some times.", "self_corrected": false}, {"annotator": 2, "id": "1503-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe great bazaars are bustling at all times, maybe only at day time or at night.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1503-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "In the context, only some bustling great bazaars were named, but it does not mean all of the great bazaars are bustling", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 21, "n": 66, "c": 13}} +{"id": "26142n", "context": "The importer pays duties that are required by law", "statement": "Imported goods have duties", "entailment": [{"annotator": 0, "id": "1150-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Both the context and the statement show that imported products have duties.", "self_corrected": false}, 
{"annotator": 1, "id": "1150-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the importer has to pay duties then imported goods have duties on them.", "self_corrected": false}, {"annotator": 2, "id": "1150-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, Imported goods have duties that are required by law.", "self_corrected": false}], "neutral": [], "contradiction": [{"annotator": 3, "id": "1150-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The importers have duties not the imported goods", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": 1.0, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 9, "c": 3}} +{"id": "66225n", "context": "uh but you could fill a whole bunch of uh holes with these things i used to i used to advertise buying wheat pennies um i'd give a dollar a roll which two cents a piece which is basically overpriced", "statement": "I made a good dollar while selling them.", "entailment": [], "neutral": [{"annotator": 0, "id": "865-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if the speaker sold them.", "self_corrected": false}, {"annotator": 1, "id": "865-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear how well the selling went.", "self_corrected": false}, {"annotator": 3, "id": "865-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not clear context, but seems to be irrelevant to statement", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 31, "e": 29, "n": 40}} +{"id": "134103n", "context": "He walked out into the street and I followed.", "statement": "I followed him down the street.", "entailment": [{"annotator": 0, "id": "1057-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context and the statement talk about the 
speaker following him down the street.", "self_corrected": false}, {"annotator": 1, "id": "1057-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "1057-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He walked out on to street and I followed him. So naturally, I was also on the street, following him", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1057-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maybe I followed him down the street, maybe I followed him up.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "n": 22, "c": 3}} +{"id": "117177n", "context": "I guess history repeats itself, Jane.", "statement": "I truly think the prior situation shows history repeats itself.", "entailment": [{"annotator": 2, "id": "800-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "I truly think history repeats itself.", "self_corrected": false}, {"annotator": 3, "id": "800-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "paraphrases", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "800-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify what shows that history repeats itself.", "self_corrected": false}, {"annotator": 1, "id": "800-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether the speaker truly believes that history repeats itself. 
They could also use it figuratively.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 25, "e": 73, "c": 2}} +{"id": "78105e", "context": "Their supplies scarce, their harvest meager, and their spirit broken, they abandoned the fort in 1858.", "statement": "Their supplies remained very low and hard to maintain.", "entailment": [{"annotator": 0, "id": "1345-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is true because the context suggests that their supplies were scarce and they abandoned the fort.", "self_corrected": false}, {"annotator": 1, "id": "1345-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The supplies probably remained low because otherwise they might not have abandoned the fort.", "self_corrected": false}, {"annotator": 2, "id": "1345-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, their supplies were scarce and they abandoned the fort.", "self_corrected": false}, {"annotator": 3, "id": "1345-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "scarce means insufficient, so their supplies were low", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 81, "n": 18, "c": 1}} +{"id": "23280e", "context": "Sphinxes were guardian deitiesinEgyptianmythologyandthis was monumentalprotection,standing73 m (240 ft)longand20 m (66 feet) high.", "statement": "Sphinxes guarded people.", "entailment": [{"annotator": 0, "id": "774-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly suggests that Sphinxes were guardian deities.", "self_corrected": true}, {"annotator": 2, "id": "774-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, Sphinxes were guardian deities.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "774-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": 
true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context what exactly sphinxes guard.", "self_corrected": false}, {"annotator": 2, "id": "774-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "In Egyptian mythology, Sphinxes were guardian deities, but in reality, maybe they guarded people, maybe not.", "self_corrected": false}, {"annotator": 3, "id": "774-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what Sphinxes guarded, it could be people or a temple.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 45, "e": 52, "c": 3}} +{"id": "9393n", "context": "Next, you enter the vast and splendid Imperial Hall, with three handsome marble fountains, and a canopied throne from which the sultan would enjoy the music and dancing of his concubines.", "statement": "The sultan enjoyed drinking from the marble fountains in the Imperial Hall.", "entailment": [], "neutral": [{"annotator": 0, "id": "976-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't talk about the sultan drinking.", "self_corrected": false}, {"annotator": 1, "id": "976-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether the Sultan drank from the fountains.", "self_corrected": false}, {"annotator": 2, "id": "976-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe the sultan enjoyed drinking from the marble fountains, maybe he didn't like it.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "976-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "He enjoyed music and dancing there", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 70, "e": 7, "c": 23}} +{"id": "82510c", "context": "although the uh it's uh it we almost one day we painted the house to uh we painted we painted the whole inside and it had all this dark trim we thought 
uh you know we did the one wall but the other trim i'm trying to think i think i think we left most of it because it gets to be uh they don't do that in the newer houses now we don't the uh mold everything is white in a new house everything is white", "statement": "It took over a day to paint the house", "entailment": [], "neutral": [{"annotator": 1, "id": "720-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "If it took \"almost a day\" then it took less not more than one day.", "self_corrected": true}], "contradiction": [{"annotator": 0, "id": "720-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They took almost one day to paint the house, which is less than a day.", "self_corrected": false}, {"annotator": 2, "id": "720-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, it took almost one day to paint the house.", "self_corrected": false}, {"annotator": 3, "id": "720-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It took almost one day, so less than a day", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 3.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["contradiction"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"c": 47, "e": 23, "n": 30}} +{"id": "14459n", "context": "After their savage battles, the warriors recuperated through meditation in the peace of a Zen monastery rock garden.", "statement": "The warriors recuperated through mediation learned from monks.", "entailment": [], "neutral": [{"annotator": 0, "id": "556-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention from whom the warriors learned to meditate.", "self_corrected": false}, {"annotator": 1, "id": "556-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They only meditated in a monastery. 
It is not clear from whom they learned the meditation.", "self_corrected": false}, {"annotator": 2, "id": "556-neutral-3", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe there are monks in the Zen monastery rock garden, maybe there are not.", "self_corrected": false}, {"annotator": 3, "id": "556-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not known if they learned it from monks", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 34, "n": 62, "c": 4}} +{"id": "70711c", "context": "because otherwise it's too it gets if you start them when it's cooler in the spring then it gets too hot in the summer", "statement": "You should start them during Spring if you want them to be cool during the summer.", "entailment": [], "neutral": [{"annotator": 1, "id": "1133-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether \"they\" get too hot or whether it gets too hot for them.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1133-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, it gets too hot in the summer.", "self_corrected": false}, {"annotator": 3, "id": "1133-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "One should start in the spring, cause it's hot summer and it's cool in spring.", "self_corrected": false}], "idk": [0], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 38, "c": 40, "e": 22}} +{"id": "77893n", "context": "As he stepped across the threshold, Tommy brought the picture down with terrific force on his head.", "statement": "Tommy hurt his head bringing the picture down.", "entailment": [{"annotator": 0, "id": "1241-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is true since the picture that Tommy brought down hit him in the head.", "self_corrected": false}, {"annotator": 1, "id": "1241-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, 
{"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Tommy probably hurt his head because a picture hit his head with \"terrific force\".", "self_corrected": false}, {"annotator": 2, "id": "1241-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, Tommy hurt his head with the picture.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1241-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "In the context, Tommy hit another person, not himself, but the statement could be understood as Tommy hurting either himself or another person.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1241-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "He is not hurt but rather bad strong emotion", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 25, "e": 72, "c": 3}} +{"id": "20181n", "context": "What the judge really wants are the facts -- he wants to make a good decision, he said.", "statement": "In the end the judge made a bad decision since he imprisoned someone innocent.", "entailment": [], "neutral": [{"annotator": 0, "id": "137-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't suggest what kind of decision the judge made.", "self_corrected": false}, {"annotator": 1, "id": "137-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The judge could have made a bad decision even if his intentions were to only rely on facts.", "self_corrected": false}, {"annotator": 2, "id": "137-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "We only know that the judge wants to make a good decision, but the decision could actually be good or not.", "self_corrected": false}, {"annotator": 3, "id": "137-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about what happened in the end", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, 
"entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 27, "n": 70, "e": 3}} +{"id": "137715n", "context": "We still espouse a God-given right of human beings to use the environment for their benefit, says Barrett Duke of the Southern Baptists.", "statement": "Human beings are entitled to the environment.", "entailment": [{"annotator": 0, "id": "1584-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement refer to the right of human beings to use the environment.", "self_corrected": false}, {"annotator": 1, "id": "1584-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"God-given right of human beings to use the environment\" implies that humans \"are entitled to the environment\".", "self_corrected": false}, {"annotator": 3, "id": "1584-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because it's god given right. So human are born to have this right", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1584-neutral-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Barrett Duke of the Southern Baptists believes human beings are entitled to the environment, but the facts can be not.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 81, "n": 16, "c": 3}} +{"id": "109679n", "context": "The Palace of Jahangir is built around a square court with arches.", "statement": "The Palace of Jahangir houses a wonderful square court, complete with arches.", "entailment": [{"annotator": 1, "id": "914-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"is built around a square court with arches\" implies \"houses a wonderful square court [...] 
with arches", "self_corrected": false}, {"annotator": 2, "id": "914-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, the Palace of Jahangir is built around a square court with arches.", "self_corrected": false}, {"annotator": 3, "id": "914-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The place is built around the square, so the place houses it", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "914-neutral-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context whether the square court is wonderful or not.", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["entailment"], "error_labels": ["neutral"], "has_ambiguity": false, "chaosnli_labels": {"e": 86, "n": 14}} +{"id": "123891c", "context": "His proud reserve--a product of 40 years in the spotlight--is refreshing but does not bode well for his capacity to shepherd big ideas through Congress.", "statement": "He is way too loud.", "entailment": [{"annotator": 0, "id": "1341-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The word \"loud\" in the statement may have two kinds of meanings. 
If it refers to flashy, then it fits the description of him in the context.", "self_corrected": false}, {"annotator": 2, "id": "1341-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, his proud reserve is a product of 40 years in the spotlight, but does not bode well.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1341-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"loud\" refers to noisy, the statement may be true or false since the context doesn't mention anything about his voice.", "self_corrected": false}, {"annotator": 1, "id": "1341-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether he is too loud or whether his proudness shows in other ways.", "self_corrected": false}, {"annotator": 3, "id": "1341-neutral-3", "judgments": [{"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 56, "c": 40, "e": 4}} +{"id": "16086n", "context": "Unless the report is restricted by law or regulation, auditors should ensure that copies be made available for public inspection.", "statement": "This report is most likely restricted by law or regulation and should not be ensured.", "entailment": [{"annotator": 2, "id": "1176-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, being restricted by law or regulation is an exception under which copies of this report should not be made available for public inspection.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1176-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't suggest whether this report is restricted or not.", "self_corrected": false}, {"annotator": 1, "id": "1176-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only states what should happen if the report is not restricted. 
It does not say anything about whether it is restricted.", "self_corrected": false}, {"annotator": 3, "id": "1176-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about a specific report", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 38, "n": 52, "e": 10}} +{"id": "91650n", "context": "yep because it's when it's self propelled it's heavy yeah", "statement": "it's heavy when it's self propelled, in case you were wondering", "entailment": [{"annotator": 0, "id": "1267-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that it is heavy when it is self propelled.", "self_corrected": false}, {"annotator": 1, "id": "1267-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "1267-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Same sentence: it's heavy when it's self propelled", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 93, "n": 5, "c": 2}} +{"id": "85428e", "context": "Christ on a crutch, what does he have to do to lose your support, stab David Geffen with a kitchen knife?", "statement": "Your support is unwavering.", "entailment": [{"annotator": 1, "id": "346-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the only way to lose one person's support is to stab someone then the support is unwavering.", "self_corrected": false}, {"annotator": 2, "id": "346-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because it used exaggeration to prove that you will always support him.", "self_corrected": false}, {"annotator": 3, "id": "346-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, 
"reason": "\"Your\" support is so unwavering that he has to do something very extreme to lose your support", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 78, "n": 18, "c": 4}} +{"id": "118403n", "context": "oh really it wouldn't matter if we plant them when it was starting to get warmer", "statement": "It is better to plant when it is colder.", "entailment": [], "neutral": [{"annotator": 1, "id": "1596-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If it doesn't matter when they plant them when it gets warmer, then it is not better to plant when it is colder.", "self_corrected": false}, {"annotator": 2, "id": "1596-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "1) The question mark of context is missing, the end of the sentence could be a question mark. 2) Maybe it is better to plant when it is complete warm or even hot.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1596-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context clearly suggests that it is fine to plant when it is warmer.", "self_corrected": false}, {"annotator": 3, "id": "1596-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It doesn't matter if it's warmer or colder", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 41, "n": 52, "e": 7}} +{"id": "105561e", "context": "and they just put instructors out there and you you sign up for instruction and they just give you an arm band and if you see an instructor who's not doing anything you just tap him on the shoulder and ask him questions and they'll show you things", "statement": "The instructors are marked with armbands, and anytime you want to know anything, you just find one of them.", "entailment": [{"annotator": 1, "id": "827-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "827-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": 
true}], "label_correction": false, "reason": "The context only mentions that they will give the speaker an arm band; it is not clear if the instructors are marked with armbands.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "827-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, I am marked with armbands and I need instruction.", "self_corrected": false}, {"annotator": 3, "id": "827-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not anytime; the instructor has to be free first", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": ["entailment"], "has_ambiguity": true, "chaosnli_labels": {"n": 14, "e": 60, "c": 26}} +{"id": "11303n", "context": "'I see.'", "statement": "It was clear", "entailment": [{"annotator": 1, "id": "943-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "943-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If \"I see\" means literally to see", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "943-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maybe it is not clear but I understand it anyway.", "self_corrected": false}, {"annotator": 3, "id": "943-neutral-2", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant if \"I see\" means \"I understand\"", "self_corrected": false}], "contradiction": [], "idk": [0], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 67, "n": 31, "c": 2}} +{"id": "11534n", "context": "He found himself thinking in circles of worry and pulled himself back to his problem.", "statement": "He could not afford to get distracted from his problem.", "entailment": [], "neutral": [{"annotator": 0, "id": "1230-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Although the context mentions that he pulled himself back 
to his problem, it is not clear if he could afford to get distracted.", "self_corrected": false}, {"annotator": 1, "id": "1230-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "He might also focus on the problem to distract him from his worries. It is not clear whether the problem really was so important.", "self_corrected": false}, {"annotator": 2, "id": "1230-neutral-3", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "His problem may be related to money, also may not.", "self_corrected": false}, {"annotator": 3, "id": "1230-neutral-4", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Context means his way of thinking does not help him solve his problem", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 51, "n": 45, "c": 4}} +{"id": "46198n", "context": "How effectively DOD manages these funds will determine whether it receives a good return on its investment.", "statement": "These funds are for the purchase of five thousand tons of potatoes.", "entailment": [], "neutral": [{"annotator": 0, "id": "806-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not specify what these funds are used for.", "self_corrected": false}, {"annotator": 1, "id": "806-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The purpose of the funds is not mentioned at all.", "self_corrected": false}, {"annotator": 2, "id": "806-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The purchase of five thousand tons of potatoes is not given in the context.", "self_corrected": false}, {"annotator": 3, "id": "806-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 74, "c": 24, "e": 2}} +{"id": "125013e", "context": "yeah okay yeah those games are fun to watch you you you watch those games", "statement": "Those 
games are a lot of fun.", "entailment": [{"annotator": 0, "id": "221-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is clear from the context and statement that those games are fun.", "self_corrected": false}, {"annotator": 1, "id": "221-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the games are fun to watch then they are fun (at least in that way).", "self_corrected": false}, {"annotator": 2, "id": "221-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because those games are fun to watch.", "self_corrected": false}, {"annotator": 3, "id": "221-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The games are fun to watch, so they're a lot of fun", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 74, "n": 26}} +{"id": "44747n", "context": "Total volume grew 13.", "statement": "The expected increase was 10.", "entailment": [], "neutral": [{"annotator": 1, "id": "648-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It's not clear what the expected increase was.", "self_corrected": false}, {"annotator": 2, "id": "648-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The expected increase may be more or less than 10.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "648-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "According to the context, the increase was 13, not 10.", "self_corrected": false}, {"annotator": 3, "id": "648-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It was 13", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 
3, "n": 69, "c": 28}} +{"id": "19768c", "context": "Wear a nicely ventilated hat and keep to the shade in the street.", "statement": "The buildings are so low that there is no shade in the streets.", "entailment": [], "neutral": [{"annotator": 0, "id": "1270-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not mention if the buildings are low.", "self_corrected": false}, {"annotator": 2, "id": "1270-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe the buildings are not low, but the street is too wide.", "self_corrected": false}, {"annotator": 3, "id": "1270-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Irrelevant", "self_corrected": false}], "contradiction": [{"annotator": 1, "id": "1270-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "If the recommendation is to stay in the shade of the buildings then there are probably buildings casting shades. Otherwise the recommendation would not make sense.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 61, "n": 37, "e": 2}} +{"id": "53468n", "context": "But is the Internet so miraculous an advertising vehicle that Gross will be able to siphon off $400 per person from total ad spending of $1,000 per family--or persuade advertisers to spend an additional $400 to reach each of his customers?", "statement": "The internet is so great at advertising that is saved Gross money.", "entailment": [], "neutral": [{"annotator": 1, "id": "1048-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context asks the question of whether the Internet is so great at advertising, whereas the statement asserts it.", "self_corrected": false}, {"annotator": 3, "id": "1048-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Context is a question. 
It can not come to a conclusion as in the statement", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1048-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "No, Gross saved no money, but siphoned money from other people.", "self_corrected": true}], "idk": [0], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"e": 36, "n": 56, "c": 8}} +{"id": "136752n", "context": "The questions may need to be tailored to", "statement": "A majority of the questions referenced will need to be tailored to.", "entailment": [{"annotator": 1, "id": "1350-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1350-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify how many questions need to be tailored.", "self_corrected": false}, {"annotator": 2, "id": "1350-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe all the questions referenced will need to be tailored to.", "self_corrected": false}, {"annotator": 3, "id": "1350-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whether the majority of the questions or all of them", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"e": 43, "n": 54, "c": 3}} +{"id": "10724n", "context": "Traditionally, certain designs were reserved for royalty, but today elegant geometric or exuberant, stylized floral patterns are available to all.", "statement": "Designs once reserved for royalty cost more to buy.", "entailment": [], "neutral": [{"annotator": 0, "id": "1564-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There is no mention of designs' costs in the context.", "self_corrected": false}, {"annotator": 1, "id": "1564-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It doesn't say whether 
these designs are more expensive.", "self_corrected": false}, {"annotator": 2, "id": "1564-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The price of the designs once reserved for royalty is not mentioned in the context.", "self_corrected": false}, {"annotator": 3, "id": "1564-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about the cost", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 78, "c": 18, "e": 4}} +{"id": "17753n", "context": "The street ends at Taksim Square (Taksim Meydane), the heart of modern Istanbul, lined with luxurious five-star hotels and the glass-fronted Ataturk Cultural Centre (Ataturk Keleter Sarayy), also called the Opera House.", "statement": "The street is quite a luxurious one.", "entailment": [{"annotator": 0, "id": "844-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The description of the street in the context shows that it is indeed a luxurious one.", "self_corrected": false}, {"annotator": 3, "id": "844-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "On the street there are fancy centers and a Luxurious hotel", "self_corrected": true}], "neutral": [{"annotator": 1, "id": "844-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Taksim Square is luxurious. 
It is not clear whether the street leading to it is also.", "self_corrected": false}, {"annotator": 2, "id": "844-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The detail of the street itself is not mentioned in the context, we only know the end of the street.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 88, "n": 12}} +{"id": "130928n", "context": "Still, I guess that can be got over.", "statement": "There are some things that you need to ignore.", "entailment": [{"annotator": 3, "id": "1001-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Getting over means to move on and stop caring about this. Thus to ignore", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1001-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't say anything about whether there's something to ignore.", "self_corrected": false}, {"annotator": 1, "id": "1001-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Getting over something does not necsessarily mean that you should ignore the issues. 
You could also work through them.", "self_corrected": false}, {"annotator": 2, "id": "1001-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "There are some things that you can ignore, but it is not necessary.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 45, "e": 51, "c": 4}} +{"id": "66858n", "context": "Managing better requires that agencies have, and rely upon, sound financial and program information.", "statement": "Agencies that rely on information based on unsound financial information will have management problems.", "entailment": [{"annotator": 0, "id": "496-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context talks about the importance of sound financial information, o it is natural to deduce that if financial information is not sound, there will be problems.", "self_corrected": false}, {"annotator": 1, "id": "496-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If sound information is required for better managing then unsound information will lead to management problems.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "496-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Sound financial information will help manage the agencies better, but without it doesn't meant to have problems, maybe it just makes the process less efficient.", "self_corrected": false}, {"annotator": 3, "id": "496-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Good Management requires sound financial, but unsound financial does noch necessarily Leads to management problems", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 75, "c": 7, "n": 18}} +{"id": "28601n", "context": "Three more days went by in dreary inaction.", "statement": "The days passed by slowly.", "entailment": [{"annotator": 0, "id": "409-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The description\"dreary inaction\" 
implies that the days went by slowly.", "self_corrected": false}, {"annotator": 1, "id": "409-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 2, "id": "409-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The days may made people feel slow, maybe feel fast.", "self_corrected": false}, {"annotator": 3, "id": "409-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "Dreary inaction means nothing being down. It does not mean slowly", "self_corrected": true}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 78, "n": 21, "c": 1}} +{"id": "92774n", "context": "The party's broad aims were to support capitalist policies and to continue close ties with Britain and the rest of the Commonwealth.", "statement": "The party sought to establish ties with the United States.", "entailment": [], "neutral": [{"annotator": 0, "id": "265-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention anything about establishing ties with the United States.", "self_corrected": false}, {"annotator": 1, "id": "265-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The United States are not mentioned in the context.", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "265-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "No, the party had close ties with the United States already.", "self_corrected": true}, {"annotator": 3, "id": "265-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They aim to maintain ties with Britain and its commonwealth", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 46, "c": 53, "e": 1}} +{"id": "142604e", "context": "As the budgets, functions, and points of service of many 
government programs devolve to state and local government, private entities and nonprofit organizations, and other third parties, it may become harder for GAO to obtain the records it needs to complete audits and evaluations.", "statement": "Audits and evaluations are harder because it is more difficult for GAO to get the records.", "entailment": [{"annotator": 2, "id": "1208-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "True, because it may become harder for GAO to obtain the records it needs to complete audits and evaluations.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "1208-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context only mentions that it may be more difficult to obtain records, which is a possibility rather than an absolute thing.", "self_corrected": false}, {"annotator": 1, "id": "1208-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that it \"may become harder\" not that \"it is more difficult\". These are not the same things because the context describes a future state.", "self_corrected": false}, {"annotator": 2, "id": "1208-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It can be more difficult for GAO to get the records, but maybe it actually doesn't become harder for GAO ro obtain the records.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "1208-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "It was because the budget functions etc. 
of government programs further devolves that makes it harder", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"n": 19, "e": 81}} +{"id": "42860n", "context": "That's why we tried to kill you.", "statement": "That's one of the reasons we wanted to kill you.", "entailment": [{"annotator": 1, "id": "1298-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "1298-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Maybe it is the only reason that we wanted to kill you, maybe that is just one of the reasons.", "self_corrected": false}], "contradiction": [{"annotator": 0, "id": "1298-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context implies that there is one reason, while the statement suggests that therea are multiple reasons.", "self_corrected": false}, {"annotator": 3, "id": "1298-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "From the context we know it might be the only reason", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": 2.0, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["contradiction", "neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 17, "e": 75, "c": 8}} +{"id": "77299n", "context": "The inquiry expanded very quickly, however, from asking what technology failed to an examination of contextual influences, such as", "statement": "They moved they inquiries over from technology failing because they thought it may be something else.", "entailment": [{"annotator": 1, "id": "23-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "\"expanded [...] from asking what technology failed to [...] 
contextual influences\" means that they \"moved inquiries from technology failing because they thought it may be something else\".", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "23-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention the reason of moving inquiries over from technology failing.", "self_corrected": false}, {"annotator": 2, "id": "23-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe they thought it may be something else, maybe they just can not sure, what is the true reason.", "self_corrected": false}], "contradiction": [], "idk": [3], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 9, "e": 46, "n": 45}} +{"id": "13765c", "context": "it's just it's the morals of the people which i mean i guess we everybody's responsible for the society but if i had a child that that did things so bad it's not they don't care about anybody these people they're stealing from they're just the big bad rich guy", "statement": "I have no issue with people stealing from others.", "entailment": [{"annotator": 1, "id": "638-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"they don't care about anybody these people they're stealing from\" shows that the speaker has issues with people stealing.", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 3, "id": "638-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "I have no problem people stealing from bag rich people", "self_corrected": true}], "idk": [0, 2], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": [], "error_labels": ["contradiction", "entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 31, "n": 60, "e": 9}} +{"id": "139362e", "context": "Endorphins were flowing.", "statement": "My endorphins were flowing.", "entailment": [{"annotator": 1, "id": "1500-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "The statement is a paraphrase of the context.", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1500-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify whose endorphins were flowing.", "self_corrected": false}, 
{"annotator": 2, "id": "1500-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It could be my endorphins, but also could be yours or anyone's.", "self_corrected": false}, {"annotator": 3, "id": "1500-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about whose endorphins it is", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"n": 50, "e": 47, "c": 3}} +{"id": "116176c", "context": "Students of human misery can savor its underlying sadness and futility.", "statement": "Students of human misery will be delighted to see how sad it truly is.", "entailment": [{"annotator": 1, "id": "704-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "\"can savor\" implies \"will be delighted\".", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "704-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if the students will be delighted.", "self_corrected": true}, {"annotator": 2, "id": "704-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Students of human misery can \"savored\" that sadness, so maybe they are delighted to see that, maybe they are tortured by the disasters.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "704-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "Savor means to understand. 
Not to enjoy", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 1.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 69, "n": 26, "c": 5}} +{"id": "110234e", "context": "really oh i thought it was great yeah", "statement": "that was a nice experience", "entailment": [{"annotator": 0, "id": "612-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement say that it was good.", "self_corrected": false}, {"annotator": 1, "id": "612-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"it was great\" means that it was \"a nice experience\".", "self_corrected": false}, {"annotator": 2, "id": "612-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because I thought it was great", "self_corrected": false}], "neutral": [{"annotator": 2, "id": "612-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Maybe it was a great experience, maybe it was a great present or something else.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "612-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "He thought it was a great experience. But in the context there was an element of surprise. 
So it was not a great experience after all", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": 3.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 3.0, "neutral": 1.0}, "label_set_round_1": ["contradiction", "neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": ["contradiction"], "has_ambiguity": true, "chaosnli_labels": {"e": 81, "n": 19}} +{"id": "84781c", "context": "By coordinating policy development and awareness activities in this manner, she helps ensure that new risks and policies are communicated promptly and that employees are periodically reminded of existing policies through means such as monthly bulletins, an intranet web site, and presentations to new employees.", "statement": "There new employees are a risk.", "entailment": [], "neutral": [{"annotator": 0, "id": "356-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't mention whether new employees are a risk or not.", "self_corrected": false}, {"annotator": 1, "id": "356-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Not the employees are a risk but they should be made aware of the risks.", "self_corrected": false}, {"annotator": 2, "id": "356-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The situation of new employees is not given in the context.", "self_corrected": false}], "contradiction": [{"annotator": 3, "id": "356-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Nothing about the new employee being a risk", "self_corrected": true}], "idk": [], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 3.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral"], "error_labels": ["contradiction"], "has_ambiguity": false, "chaosnli_labels": {"n": 54, "c": 35, "e": 11}} +{"id": "41052n", "context": "HCFA published a Notice of Proposed Rulemaking on March 28, 1997 (62 Fed.", "statement": "HCFA tried to keep everyone informed about the rules they were making.", "entailment": [{"annotator": 0, "id": "1567-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Publishing a Notice is a way HCFA tried to keep everyone informed.", "self_corrected": false}, {"annotator": 3, "id": "1567-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They published the rules. 
So theoretically everyone can see it.", "self_corrected": false}], "neutral": [{"annotator": 1, "id": "1567-neutral-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "It is not clear whether they tried to keep everyone informed or whether they published the notice only because they had to.", "self_corrected": false}, {"annotator": 2, "id": "1567-neutral-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "HCFS published a Notice, but the notice may have tried to keep everyone informed, maybe just made it quitely.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 2.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 31, "e": 68, "c": 1}} +{"id": "128160n", "context": "Suddenly she started, and her face blanched.", "statement": "She moved swiftly, her face pale.", "entailment": [{"annotator": 1, "id": "93-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "93-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context doesn't specify what she started doing, it could be moving, thinking or talking.", "self_corrected": false}, {"annotator": 2, "id": "93-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "She \"started\" can mean she moved swiftly, but also can do other actions, like crying, singing, etc.", "self_corrected": false}, {"annotator": 3, "id": "93-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about her moving swiftly", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 3.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 7, "e": 78, "n": 15}} +{"id": "123038e", "context": "Reports on attestation engagements should state that the engagement was made in accordance with generally accepted government auditing standards.", "statement": "Details regarding validation engagements ought to express that the engagement was made as per by and large acknowledged government evaluating guidelines.", "entailment": [{"annotator": 0, "id": "945-entailment-1", 
"judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and statement suggest the same requirement for reports.", "self_corrected": false}, {"annotator": 3, "id": "945-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Generally accepted is synonym to large acknowledged", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 66, "n": 27, "c": 7}} +{"id": "98621n", "context": "In other cases, we must rely on survey approaches to estimate WTP, usually through a variant of the contingent valuation approach, which generally involves directly questioning respondents for their WTP in hypothetical market situations.", "statement": "Hypothetical market situations are uniform across all respondents.", "entailment": [], "neutral": [{"annotator": 1, "id": "1266-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}], "label_correction": false, "reason": "It is not clear whether they use the same market situtations for all people they ask.", "self_corrected": false}, {"annotator": 3, "id": "1266-neutral-2", "judgments": [{"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about uniformity across the respondents", "self_corrected": false}], "contradiction": [{"annotator": 2, "id": "1266-contradiction-1", "judgments": [{"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, if market situations are uniform, then \"a variant of the contingent valuation approach\" is not necessary.", "self_corrected": false}], "idk": [0], "label_count_round_1": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_count_round_2": {"contradiction": 1.0, "entailment": null, "neutral": 2.0}, "label_set_round_1": ["neutral", "contradiction"], "label_set_round_2": ["neutral", "contradiction"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"c": 39, "n": 54, "e": 7}} +{"id": "97926c", "context": "General Motors, for instance, lost $460 million to strikes in 1997, but investors treated the costs as a kind of extraordinary charge and valued the company as if the losses had never happened.", "statement": "GM lost a lot almost a million dollars in labor disputes.", "entailment": [], "neutral": [], "contradiction": [{"annotator": 0, "id": "530-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "GM lost more than a million dollars.", "self_corrected": false}, {"annotator": 1, "id": "530-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 
1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "GM lost over 460 million which is much more than one million.", "self_corrected": false}, {"annotator": 2, "id": "530-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No, GM lost $460 million in labor disputes, far more than a million.", "self_corrected": false}, {"annotator": 3, "id": "530-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They lost 460 million", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_count_round_2": {"contradiction": 4.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction"], "label_set_round_2": ["contradiction"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 72, "n": 17, "e": 11}} +{"id": "98561e", "context": "was it bad", "statement": "Was it not good?", "entailment": [{"annotator": 1, "id": "170-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 3, "id": "170-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Bad and not good are synonyms", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [0, 2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"e": 84, "n": 16}} +{"id": "86429n", "context": "Agencies may perform the analyses required by sections 603 and 604 in conjunction with or as part of any other agenda or analysis required by other law if such other analysis satisfies the provisions of these sections.", "statement": "There are many times when the agencies have gotten in trouble.", "entailment": [], "neutral": [{"annotator": 0, "id": "1163-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "It is not clear from the context if th eagencies have gotten in trouble.", "self_corrected": false}, {"annotator": 1, "id": "1163-neutral-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context does not say anything about trouble.", "self_corrected": false}, {"annotator": 2, "id": "1163-neutral-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": 
false}], "label_correction": false, "reason": "The conditions of trouble is not offered in the context.", "self_corrected": false}, {"annotator": 3, "id": "1163-neutral-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "No info about agencies in trouble", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_count_round_2": {"contradiction": null, "entailment": null, "neutral": 4.0}, "label_set_round_1": ["neutral"], "label_set_round_2": ["neutral"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"n": 77, "c": 20, "e": 3}} +{"id": "105911c", "context": "and to have children and just get a day care or someone to take care of it and not really have the bonding process that takes place with babies and stuff you know", "statement": "The children should not go to day car.", "entailment": [{"annotator": 1, "id": "1095-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "\"just get a day care [...] and not really have the bonding process\" sound like the speaker is opposed to day care.", "self_corrected": false}, {"annotator": 3, "id": "1095-entailment-2", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Because the bonding process will be missed", "self_corrected": true}], "neutral": [{"annotator": 0, "id": "1095-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The context says that it is not good if sending children to day care is the only way to take care of them, but it doesn't mention if children shouldn't go to day care at all.", "self_corrected": false}], "contradiction": [], "idk": [2], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 1.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"e": 37, "n": 50, "c": 13}} +{"id": "126486n", "context": "The entire setup has an anti-competitive, anti-entrepreneurial flavor that rewards political lobbying rather than good business practices.", "statement": "The setup has lead to increases in political lobbying.", "entailment": [{"annotator": 1, "id": "427-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "If the setup rewards political lobbying then it will likely lead to increases in lobbying.", "self_corrected": false}, {"annotator": 3, "id": "427-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Because political 
lobbying is rewarded", "self_corrected": false}], "neutral": [{"annotator": 0, "id": "427-neutral-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": false}], "label_correction": false, "reason": "Rewarding political lobbying does not necessarily mean an actual increase in political lobbying.", "self_corrected": false}], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_count_round_2": {"contradiction": null, "entailment": 2.0, "neutral": 1.0}, "label_set_round_1": ["neutral", "entailment"], "label_set_round_2": ["neutral", "entailment"], "error_labels": [], "has_ambiguity": true, "chaosnli_labels": {"n": 33, "e": 67}} +{"id": "127809n", "context": "I'm confused.", "statement": "Not all of it is very clear to me.", "entailment": [{"annotator": 0, "id": "1196-entailment-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "Both the context and the statement suggest that the speaker does not understand.", "self_corrected": false}, {"annotator": 1, "id": "1196-entailment-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The statement is a paraphrase of the context.", "self_corrected": false}, {"annotator": 2, "id": "1196-entailment-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "True, because \"confused\" means, at lease one thing is not very clear to me.", "self_corrected": false}, {"annotator": 3, "id": "1196-entailment-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "When someone is confused , then not everything is clear to him", "self_corrected": false}], "neutral": [], "contradiction": [], "idk": [], "label_count_round_1": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_count_round_2": {"contradiction": null, "entailment": 4.0, "neutral": null}, "label_set_round_1": ["entailment"], "label_set_round_2": ["entailment"], "error_labels": [], "has_ambiguity": false, "chaosnli_labels": {"c": 3, "e": 92, "n": 5}} +{"id": "28306c", "context": "They made little effort, despite the Jesuit presence in Asia, to convert local inhabitants to Christianity or to expand their territory into the interior.", "statement": "The Jesuit presence in Asia helped to convert local residents to Christianity, allowing them to expand their territory.", "entailment": [{"annotator": 0, "id": "22-entailment-1", "judgments": [{"annotator": 0, "makes_sense": false}, {"annotator": 1, "makes_sense": false}, {"annotator": 2, "makes_sense": false}, {"annotator": 3, "makes_sense": false}], "label_correction": true, "reason": "Both the context and the statement suggest that the speaker does not understand.", "self_corrected": true}], "neutral": [], "contradiction": [{"annotator": 0, "id": "22-contradiction-1", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, 
"makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Jesuit presence didn't make much effort to convert local residents to Christianity or to expand their territory.", "self_corrected": false}, {"annotator": 1, "id": "22-contradiction-2", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They did not try to expand their territory.", "self_corrected": true}, {"annotator": 2, "id": "22-contradiction-3", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "The Jesuit did not make effort to convert local residents to Christianity, or to expand their territory.", "self_corrected": false}, {"annotator": 3, "id": "22-contradiction-4", "judgments": [{"annotator": 0, "makes_sense": true}, {"annotator": 1, "makes_sense": true}, {"annotator": 2, "makes_sense": true}, {"annotator": 3, "makes_sense": true}], "label_correction": false, "reason": "They made little effort to convert the locals or to expand the their territory. So they did not help.", "self_corrected": false}], "idk": [], "label_count_round_1": {"contradiction": 4.0, "entailment": 1.0, "neutral": null}, "label_count_round_2": {"contradiction": 3.0, "entailment": null, "neutral": null}, "label_set_round_1": ["contradiction", "entailment"], "label_set_round_2": ["contradiction"], "error_labels": ["entailment"], "has_ambiguity": false, "chaosnli_labels": {"c": 69, "e": 14, "n": 17}}