---
dataset_info:
features:
- name: day
dtype: string
- name: num_downloads
dtype: int64
splits:
- name: accelerate
num_bytes: 25630
num_examples: 1165
- name: datasets
num_bytes: 25630
num_examples: 1165
- name: diffusers
num_bytes: 15004
num_examples: 682
- name: evaluate
num_bytes: 17600
num_examples: 800
- name: gradio
num_bytes: 28996
num_examples: 1318
- name: huggingface_hub
num_bytes: 26510
num_examples: 1205
- name: optimum
num_bytes: 20614
num_examples: 937
- name: peft
num_bytes: 9856
num_examples: 448
- name: pytorch_image_models
num_bytes: 28996
num_examples: 1318
- name: safetensors
num_bytes: 11066
num_examples: 503
- name: tokenizers
num_bytes: 28996
num_examples: 1318
- name: transformers
num_bytes: 29678
num_examples: 1349
- name: sentence_transformers
num_bytes: 3982
num_examples: 181
download_size: 158967
dataset_size: 272558
configs:
- config_name: default
data_files:
- split: accelerate
path: data/accelerate-*
- split: datasets
path: data/datasets-*
- split: diffusers
path: data/diffusers-*
- split: evaluate
path: data/evaluate-*
- split: gradio
path: data/gradio-*
- split: huggingface_hub
path: data/huggingface_hub-*
- split: optimum
path: data/optimum-*
- split: peft
path: data/peft-*
- split: pytorch_image_models
path: data/pytorch_image_models-*
- split: safetensors
path: data/safetensors-*
- split: tokenizers
path: data/tokenizers-*
- split: transformers
path: data/transformers-*
- split: sentence_transformers
path: data/sentence_transformers-*
---
# Dataset Card for "pip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)