---
dataset_info:
features:
- name: day
dtype: string
- name: num_downloads
dtype: int64
splits:
- name: accelerate
num_bytes: 27390
num_examples: 1245
- name: datasets
num_bytes: 27390
num_examples: 1245
- name: diffusers
num_bytes: 16764
num_examples: 762
- name: evaluate
num_bytes: 19360
num_examples: 880
- name: gradio
num_bytes: 30756
num_examples: 1398
- name: huggingface_hub
num_bytes: 28270
num_examples: 1285
- name: optimum
num_bytes: 22374
num_examples: 1017
- name: peft
num_bytes: 11616
num_examples: 528
- name: pytorch_image_models
num_bytes: 30756
num_examples: 1398
- name: safetensors
num_bytes: 12826
num_examples: 583
- name: tokenizers
num_bytes: 30756
num_examples: 1398
- name: transformers
num_bytes: 31438
num_examples: 1429
- name: sentence_transformers
num_bytes: 5742
num_examples: 261
download_size: 171911
dataset_size: 295438
configs:
- config_name: default
data_files:
- split: accelerate
path: data/accelerate-*
- split: datasets
path: data/datasets-*
- split: diffusers
path: data/diffusers-*
- split: evaluate
path: data/evaluate-*
- split: gradio
path: data/gradio-*
- split: huggingface_hub
path: data/huggingface_hub-*
- split: optimum
path: data/optimum-*
- split: peft
path: data/peft-*
- split: pytorch_image_models
path: data/pytorch_image_models-*
- split: safetensors
path: data/safetensors-*
- split: tokenizers
path: data/tokenizers-*
- split: transformers
path: data/transformers-*
- split: sentence_transformers
path: data/sentence_transformers-*
---
# Dataset Card for "pip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)