---
dataset_info:
  features:
  - name: id
    dtype: string
  - name: subreddit
    dtype: string
  - name: title
    dtype: string
  - name: post
    dtype: string
  - name: summary
    dtype: string
  - name: query_token
    sequence: int64
  - name: query
    dtype: string
  - name: reference_response
    dtype: string
  - name: reference_response_token
    sequence: int64
  - name: reference_response_token_len
    dtype: int64
  - name: query_reference_response
    dtype: string
  - name: query_reference_response_token
    sequence: int64
  - name: query_reference_response_token_response_label
    sequence: int64
  - name: query_reference_response_token_len
    dtype: int64
  splits:
  - name: train
    num_bytes: 2125689249
    num_examples: 116722
  - name: validation
    num_bytes: 117437271
    num_examples: 6447
  - name: test
    num_bytes: 119410966
    num_examples: 6553
  download_size: 562087836
  dataset_size: 2362537486
---

# Dataset Card for "summarize_from_feedback_tldr_3_filtered_oai_preprocessing_1706381144"

[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
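
## Usage

A minimal sketch of loading the dataset and inspecting the schema above with the `datasets` library. The namespace `<org>` is a placeholder, not the actual Hub path; substitute the organization or user that hosts this dataset.

```python
from datasets import load_dataset

# <org> is a placeholder for the Hub namespace hosting this dataset.
ds = load_dataset("<org>/summarize_from_feedback_tldr_3_filtered_oai_preprocessing_1706381144")

example = ds["train"][0]
print(example["subreddit"], example["title"])   # source post metadata
print(example["query"][:200])                   # formatted prompt text
print(example["reference_response"])            # reference TL;DR summary
print(len(example["query_token"]))              # length of the tokenized prompt
```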