---
dataset_info:
features:
- name: day
dtype: string
- name: num_downloads
dtype: int64
splits:
- name: accelerate
num_bytes: 28688
num_examples: 1304
- name: datasets
num_bytes: 28688
num_examples: 1304
- name: diffusers
num_bytes: 18062
num_examples: 821
- name: evaluate
num_bytes: 20658
num_examples: 939
- name: gradio
num_bytes: 32054
num_examples: 1457
- name: huggingface_hub
num_bytes: 29568
num_examples: 1344
- name: optimum
num_bytes: 23672
num_examples: 1076
- name: peft
num_bytes: 12914
num_examples: 587
- name: pytorch_image_models
num_bytes: 32054
num_examples: 1457
- name: safetensors
num_bytes: 14124
num_examples: 642
- name: tokenizers
num_bytes: 32054
num_examples: 1457
- name: transformers
num_bytes: 32736
num_examples: 1488
- name: sentence_transformers
num_bytes: 7040
num_examples: 320
download_size: 180766
dataset_size: 312312
configs:
- config_name: default
data_files:
- split: accelerate
path: data/accelerate-*
- split: datasets
path: data/datasets-*
- split: diffusers
path: data/diffusers-*
- split: evaluate
path: data/evaluate-*
- split: gradio
path: data/gradio-*
- split: huggingface_hub
path: data/huggingface_hub-*
- split: optimum
path: data/optimum-*
- split: peft
path: data/peft-*
- split: pytorch_image_models
path: data/pytorch_image_models-*
- split: safetensors
path: data/safetensors-*
- split: tokenizers
path: data/tokenizers-*
- split: transformers
path: data/transformers-*
- split: sentence_transformers
path: data/sentence_transformers-*
---
# Dataset Card for "pip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)