<!-- Hub page chrome captured during export (file: pip/README.md, uploaded by lysandre,
     commit 3c9285e "Upload README.md with huggingface_hub", 1.74 kB); original file begins below. -->
---
# Dataset card metadata (YAML front matter) read by the Hugging Face Hub.
# NOTE(review): the page export dropped the `---` fences and left the UI label
# "metadata" in place of the opening one; fences restored so the block parses
# as front matter. All data values below are unchanged.
#
# dataset_info: schema and per-split sizes. Each split is one pip package;
# rows pair a `day` (string date) with that day's `num_downloads` (int64).
# The twelve num_bytes values sum to 253528, matching dataset_size.
dataset_info:
  features:
    - name: day
      dtype: string
    - name: num_downloads
      dtype: int64
  splits:
    - name: gradio
      num_bytes: 27742
      num_examples: 1261
    - name: safetensors
      num_bytes: 9812
      num_examples: 446
    - name: optimum
      num_bytes: 19360
      num_examples: 880
    - name: evaluate
      num_bytes: 16346
      num_examples: 743
    - name: huggingface_hub
      num_bytes: 25256
      num_examples: 1148
    - name: pytorch_image_models
      num_bytes: 27742
      num_examples: 1261
    - name: accelerate
      num_bytes: 24376
      num_examples: 1108
    - name: tokenizers
      num_bytes: 27742
      num_examples: 1261
    - name: transformers
      num_bytes: 28424
      num_examples: 1292
    - name: peft
      num_bytes: 8602
      num_examples: 391
    - name: diffusers
      num_bytes: 13750
      num_examples: 625
    - name: datasets
      num_bytes: 24376
      num_examples: 1108
  download_size: 148060
  dataset_size: 253528
# configs: maps each split to its Parquet shards under data/ (glob pattern).
configs:
  - config_name: default
    data_files:
      - split: accelerate
        path: data/accelerate-*
      - split: datasets
        path: data/datasets-*
      - split: diffusers
        path: data/diffusers-*
      - split: evaluate
        path: data/evaluate-*
      - split: gradio
        path: data/gradio-*
      - split: huggingface_hub
        path: data/huggingface_hub-*
      - split: optimum
        path: data/optimum-*
      - split: peft
        path: data/peft-*
      - split: pytorch_image_models
        path: data/pytorch_image_models-*
      - split: safetensors
        path: data/safetensors-*
      - split: tokenizers
        path: data/tokenizers-*
      - split: transformers
        path: data/transformers-*
---

# Dataset Card for "pip"

More Information needed