datasetId
stringlengths 5
121
| author
stringlengths 2
42
| last_modified
unknown | downloads
int64 0
2.47M
| likes
int64 0
7k
| tags
sequencelengths 1
7.92k
| task_categories
sequencelengths 0
47
⌀ | createdAt
unknown | card
stringlengths 15
1.01M
|
---|---|---|---|---|---|---|---|---|
distil-whisper/meanwhile | distil-whisper | "2023-10-17T17:17:28Z" | 4,325 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2212.04356",
"region:us"
] | null | "2023-09-19T15:45:32Z" | ---
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
dataset_info:
features:
- name: audio
dtype: audio
- name: begin
dtype: string
- name: end
dtype: string
- name: text
dtype: string
splits:
- name: test
num_bytes: 58250833.0
num_examples: 64
download_size: 58229969
dataset_size: 58250833.0
---
# Dataset Card for "meanwhile"
This dataset consists of 64 segments from The Late Show with Stephen Colbert. This dataset was published as
part of the Whisper release by OpenAI. See page 19 of the [Whisper paper](https://arxiv.org/pdf/2212.04356.pdf)
for details. |
jkot/parliament_hearings_processed | jkot | "2023-04-25T08:53:38Z" | 4,321 | 1 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-04-21T10:06:00Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
dtype: string
splits:
- name: train
num_bytes: 51234859011.0
num_examples: 191455
- name: test
num_bytes: 762989296.0
num_examples: 2726
download_size: 51507735963
dataset_size: 51997848307.0
---
# Preprocessed parliament hearings ASR dataset to truecased form.
## Original dataset: https://lindat.mff.cuni.cz/repository/xmlui/handle/11234/1-3126
---
dataset_info:
features:
- name: id
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: string
splits:
- name: train
num_bytes: 53645064353.18
num_examples: 191455
- name: test
num_bytes: 740331298.0
num_examples: 2726
download_size: 51507379112
dataset_size: 54385395651.18
--- |
ybelkada/english_quotes_copy | ybelkada | "2023-04-04T06:13:26Z" | 4,277 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-04-04T06:13:24Z" | ---
dataset_info:
features:
- name: quote
dtype: string
- name: author
dtype: string
- name: tags
sequence: string
splits:
- name: train
num_bytes: 598359
num_examples: 2508
download_size: 349107
dataset_size: 598359
---
# Dataset Card for "english_quotes_copy"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
sailor2/sea-commoncrawl | sailor2 | "2024-12-04T08:10:42Z" | 4,277 | 0 | [
"license:odc-by",
"size_categories:100M<n<1B",
"format:json",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"region:us"
] | null | "2024-10-30T01:25:02Z" | ---
license: odc-by
--- |
autogluon/chronos_datasets | autogluon | "2025-01-03T10:46:31Z" | 4,274 | 31 | [
"task_categories:time-series-forecasting",
"task_ids:univariate-time-series-forecasting",
"task_ids:multivariate-time-series-forecasting",
"annotations_creators:no-annotation",
"source_datasets:original",
"license:other",
"size_categories:10M<n<100M",
"format:parquet",
"modality:tabular",
"modality:text",
"modality:timeseries",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2403.07815",
"region:us"
] | [
"time-series-forecasting"
] | "2024-06-22T15:59:58Z" | ---
annotations_creators:
- no-annotation
license: other
source_datasets:
- original
task_categories:
- time-series-forecasting
task_ids:
- univariate-time-series-forecasting
- multivariate-time-series-forecasting
pretty_name: Chronos datasets
dataset_info:
- config_name: dominick
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: im_0
dtype: int64
splits:
- name: train
num_bytes: 477140250
num_examples: 100014
download_size: 42290010
dataset_size: 477140250
- config_name: electricity_15min
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: consumption_kW
sequence: float64
splits:
- name: train
num_bytes: 670989988
num_examples: 370
download_size: 284497403
dataset_size: 670989988
license: CC BY 4.0
homepage: https://archive.ics.uci.edu/dataset/321/electricityloaddiagrams20112014
- config_name: ercot
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ns]
- name: target
sequence: float32
splits:
- name: train
num_examples: 8
download_size: 14504261
- config_name: exchange_rate
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float32
splits:
- name: train
num_examples: 8
download_size: 401501
license: MIT
homepage: https://github.com/laiguokun/multivariate-time-series-data/tree/master/exchange_rate
- config_name: m4_daily
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 160504176
num_examples: 4227
download_size: 65546675
dataset_size: 160504176
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m4_hourly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 5985544
num_examples: 414
download_size: 1336971
dataset_size: 5985544
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m4_monthly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 181372969
num_examples: 48000
download_size: 52772258
dataset_size: 181372969
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m4_quarterly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 39205397
num_examples: 24000
download_size: 13422579
dataset_size: 39205397
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m4_weekly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 5955806
num_examples: 359
download_size: 2556691
dataset_size: 5955806
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m4_yearly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: category
dtype: string
splits:
- name: train
num_bytes: 14410042
num_examples: 23000
download_size: 5488601
dataset_size: 14410042
homepage: https://github.com/Mcompetitions/M4-methods
- config_name: m5
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: item_id
dtype: string
- name: target
sequence: float32
- name: dept_id
dtype: string
- name: cat_id
dtype: string
- name: store_id
dtype: string
- name: state_id
dtype: string
splits:
- name: train
num_bytes: 574062630
num_examples: 30490
download_size: 78063286
dataset_size: 574062630
homepage: https://www.kaggle.com/competitions/m5-forecasting-accuracy/rules
- config_name: mexico_city_bikes
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 618999406
num_examples: 494
download_size: 103206946
dataset_size: 618999406
homepage: https://ecobici.cdmx.gob.mx/en/open-data/
- config_name: monash_australian_electricity
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 18484319
num_examples: 5
download_size: 16856156
dataset_size: 18484319
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_car_parts
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 2232790
num_examples: 2674
download_size: 70278
dataset_size: 2232790
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_cif_2016
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 115096
num_examples: 72
download_size: 70876
dataset_size: 115096
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_covid_deaths
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 907326
num_examples: 266
download_size: 58957
dataset_size: 907326
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_electricity_hourly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 135103443
num_examples: 321
download_size: 31139117
dataset_size: 135103443
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_electricity_weekly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 807315
num_examples: 321
download_size: 333563
dataset_size: 807315
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_fred_md
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 1248369
num_examples: 107
download_size: 412207
dataset_size: 1248369
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_hospital
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: int64
splits:
- name: train
num_examples: 767
download_size: 117038
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_kdd_cup_2018
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: city
dtype: string
- name: station
dtype: string
- name: measurement
dtype: string
splits:
- name: train
num_bytes: 47091540
num_examples: 270
download_size: 8780105
dataset_size: 47091540
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_london_smart_meters
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 2664567976
num_examples: 5560
download_size: 597389119
dataset_size: 2664567976
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m1_monthly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 907691
num_examples: 617
download_size: 244372
dataset_size: 907691
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m1_quarterly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 162961
num_examples: 203
download_size: 48439
dataset_size: 162961
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m1_yearly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 75679
num_examples: 181
download_size: 30754
dataset_size: 75679
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m3_monthly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 2708124
num_examples: 1428
download_size: 589699
dataset_size: 2708124
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m3_quarterly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 606428
num_examples: 756
download_size: 188543
dataset_size: 606428
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_m3_yearly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 305359
num_examples: 645
download_size: 100184
dataset_size: 305359
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_nn5_weekly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float32
splits:
- name: train
num_examples: 111
download_size: 64620
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_pedestrian_counts
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: int64
splits:
- name: train
num_bytes: 50118790
num_examples: 66
download_size: 12377357
dataset_size: 50118790
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_rideshare
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: source_location
dtype: string
- name: provider_name
dtype: string
- name: provider_service
dtype: string
- name: price_min
sequence: float64
- name: price_mean
sequence: float64
- name: price_max
sequence: float64
- name: distance_min
sequence: float64
- name: distance_mean
sequence: float64
- name: distance_max
sequence: float64
- name: surge_min
sequence: float64
- name: surge_mean
sequence: float64
- name: surge_max
sequence: float64
- name: api_calls
sequence: float64
- name: temp
sequence: float64
- name: rain
sequence: float64
- name: humidity
sequence: float64
- name: clouds
sequence: float64
- name: wind
sequence: float64
splits:
- name: train
num_bytes: 10819910
num_examples: 156
download_size: 781873
dataset_size: 10819910
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_saugeenday
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: T1
sequence: float64
splits:
- name: train
num_bytes: 379875
num_examples: 1
download_size: 222678
dataset_size: 379875
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_temperature_rain
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: t_mean
sequence: float64
- name: prcp_sum
sequence: float64
- name: t_max
sequence: float64
- name: t_min
sequence: float64
- name: fcst_0_dailypop
sequence: float64
- name: fcst_0_dailypop1
sequence: float64
- name: fcst_0_dailypop10
sequence: float64
- name: fcst_0_dailypop15
sequence: float64
- name: fcst_0_dailypop25
sequence: float64
- name: fcst_0_dailypop5
sequence: float64
- name: fcst_0_dailypop50
sequence: float64
- name: fcst_0_dailyprecip
sequence: float64
- name: fcst_0_dailyprecip10pct
sequence: float64
- name: fcst_0_dailyprecip25pct
sequence: float64
- name: fcst_0_dailyprecip50pct
sequence: float64
- name: fcst_0_dailyprecip75pct
sequence: float64
- name: fcst_1_dailypop
sequence: float64
- name: fcst_1_dailypop1
sequence: float64
- name: fcst_1_dailypop10
sequence: float64
- name: fcst_1_dailypop15
sequence: float64
- name: fcst_1_dailypop25
sequence: float64
- name: fcst_1_dailypop5
sequence: float64
- name: fcst_1_dailypop50
sequence: float64
- name: fcst_1_dailyprecip
sequence: float64
- name: fcst_1_dailyprecip10pct
sequence: float64
- name: fcst_1_dailyprecip25pct
sequence: float64
- name: fcst_1_dailyprecip50pct
sequence: float64
- name: fcst_1_dailyprecip75pct
sequence: float64
- name: fcst_2_dailypop
sequence: float64
- name: fcst_2_dailypop1
sequence: float64
- name: fcst_2_dailypop10
sequence: float64
- name: fcst_2_dailypop15
sequence: float64
- name: fcst_2_dailypop25
sequence: float64
- name: fcst_2_dailypop5
sequence: float64
- name: fcst_2_dailypop50
sequence: float64
- name: fcst_2_dailyprecip
sequence: float64
- name: fcst_2_dailyprecip10pct
sequence: float64
- name: fcst_2_dailyprecip25pct
sequence: float64
- name: fcst_2_dailyprecip50pct
sequence: float64
- name: fcst_2_dailyprecip75pct
sequence: float64
- name: fcst_3_dailypop
sequence: float64
- name: fcst_3_dailypop1
sequence: float64
- name: fcst_3_dailypop10
sequence: float64
- name: fcst_3_dailypop15
sequence: float64
- name: fcst_3_dailypop25
sequence: float64
- name: fcst_3_dailypop5
sequence: float64
- name: fcst_3_dailypop50
sequence: float64
- name: fcst_3_dailyprecip
sequence: float64
- name: fcst_3_dailyprecip10pct
sequence: float64
- name: fcst_3_dailyprecip25pct
sequence: float64
- name: fcst_3_dailyprecip50pct
sequence: float64
- name: fcst_3_dailyprecip75pct
sequence: float64
- name: fcst_4_dailypop
sequence: float64
- name: fcst_4_dailypop1
sequence: float64
- name: fcst_4_dailypop10
sequence: float64
- name: fcst_4_dailypop15
sequence: float64
- name: fcst_4_dailypop25
sequence: float64
- name: fcst_4_dailypop5
sequence: float64
- name: fcst_4_dailypop50
sequence: float64
- name: fcst_4_dailyprecip
sequence: float64
- name: fcst_4_dailyprecip10pct
sequence: float64
- name: fcst_4_dailyprecip25pct
sequence: float64
- name: fcst_4_dailyprecip50pct
sequence: float64
- name: fcst_4_dailyprecip75pct
sequence: float64
- name: fcst_5_dailypop
sequence: float64
- name: fcst_5_dailypop1
sequence: float64
- name: fcst_5_dailypop10
sequence: float64
- name: fcst_5_dailypop15
sequence: float64
- name: fcst_5_dailypop25
sequence: float64
- name: fcst_5_dailypop5
sequence: float64
- name: fcst_5_dailypop50
sequence: float64
- name: fcst_5_dailyprecip
sequence: float64
- name: fcst_5_dailyprecip10pct
sequence: float64
- name: fcst_5_dailyprecip25pct
sequence: float64
- name: fcst_5_dailyprecip50pct
sequence: float64
- name: fcst_5_dailyprecip75pct
sequence: float64
splits:
- name: train
num_bytes: 188598927
num_examples: 422
download_size: 44967856
dataset_size: 188598927
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_tourism_monthly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 1755434
num_examples: 366
download_size: 334951
dataset_size: 1755434
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_tourism_quarterly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 688817
num_examples: 427
download_size: 177407
dataset_size: 688817
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_tourism_yearly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 213954
num_examples: 518
download_size: 81479
dataset_size: 213954
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_traffic
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 241983226
num_examples: 862
download_size: 52748547
dataset_size: 241983226
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: monash_weather
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: subset
dtype: string
splits:
- name: train
num_bytes: 688598539
num_examples: 3010
download_size: 133164027
dataset_size: 688598539
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: nn5
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float32
splits:
- name: train
num_examples: 111
download_size: 203096
homepage: http://www.neural-forecasting-competition.com/downloads/NN5/datasets/download.htm
- config_name: solar
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: power_mw
sequence: float64
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: capacity_mw
dtype: float64
- name: subset
dtype: string
splits:
- name: train
num_bytes: 8689093932
num_examples: 5166
download_size: 1507924920
dataset_size: 8689093932
homepage: https://www.nrel.gov/grid/solar-power-data.html
- config_name: solar_1h
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: power_mw
sequence: float64
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: capacity_mw
dtype: float64
- name: subset
dtype: string
splits:
- name: train
num_bytes: 724361772
num_examples: 5166
download_size: 124515417
dataset_size: 724361772
homepage: https://www.nrel.gov/grid/solar-power-data.html
- config_name: taxi_1h
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: subset
dtype: string
- name: lat
dtype: float64
- name: lng
dtype: float64
splits:
- name: train
num_bytes: 28832500
num_examples: 2428
download_size: 2265297
dataset_size: 28832500
license: Apache 2.0
homepage: https://github.com/mbohlkeschneider/gluon-ts/tree/mv_release/datasets
- config_name: taxi_30min
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: subset
dtype: string
- name: lat
dtype: float64
- name: lng
dtype: float64
splits:
- name: train
num_bytes: 57560596
num_examples: 2428
download_size: 4541244
dataset_size: 57560596
license: Apache 2.0
homepage: https://github.com/mbohlkeschneider/gluon-ts/tree/mv_release/datasets
- config_name: training_corpus_kernel_synth_1m
features:
- name: target
sequence: float64
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
splits:
- name: train
num_examples: 1000000
download_size: 8313239368
- config_name: training_corpus_tsmixup_10m
features:
- name: target
sequence: float64
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
splits:
- name: train
num_examples: 10000000
download_size: 82189589906
- config_name: uber_tlc_daily
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: int64
splits:
- name: train
num_examples: 262
download_size: 84747
homepage: https://github.com/fivethirtyeight/uber-tlc-foil-response
- config_name: uber_tlc_hourly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: int64
splits:
- name: train
num_examples: 262
download_size: 1878515
homepage: https://github.com/fivethirtyeight/uber-tlc-foil-response
- config_name: ushcn_daily
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: state
dtype: string
- name: coop_id
dtype: int64
- name: PRCP
sequence: float64
- name: SNOW
sequence: float64
- name: SNWD
sequence: float64
- name: TMAX
sequence: float64
- name: TMIN
sequence: float64
splits:
- name: train
num_bytes: 2259905202
num_examples: 1218
download_size: 221089890
dataset_size: 2259905202
homepage: https://data.ess-dive.lbl.gov/portals/CDIAC
- config_name: weatherbench_daily
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float32
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: level
dtype: float64
- name: subset
dtype: string
splits:
- name: train
num_bytes: 39510157312
num_examples: 225280
download_size: 18924392742
dataset_size: 39510157312
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_10m_u_component_of_wind
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 7292845757
dataset_size: 8617472000
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_10m_v_component_of_wind
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 7292352508
dataset_size: 8617472000
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_2m_temperature
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 7276396852
dataset_size: 8617453568
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_geopotential
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 87305564613
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_potential_vorticity
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 92426240043
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_relative_humidity
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 94728788382
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_specific_humidity
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 85139896451
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_temperature
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 94081539079
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_toa_incident_solar_radiation
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 6057953007
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_total_cloud_cover
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 6628258398
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_total_precipitation
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: float64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 2048
download_size: 6473160755
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_u_component_of_wind
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 94801498563
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_v_component_of_wind
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 94800557482
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_hourly_vorticity
features:
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: target
sequence: float32
- name: level
dtype: int64
- name: timestamp
sequence: timestamp[ms]
- name: subset
dtype: string
- name: id
dtype: string
splits:
- name: train
num_examples: 26624
download_size: 94720960560
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: weatherbench_weekly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float32
- name: latitude
dtype: float64
- name: longitude
dtype: float64
- name: level
dtype: float64
- name: subset
dtype: string
splits:
- name: train
num_bytes: 5656029184
num_examples: 225280
download_size: 2243012083
dataset_size: 5656029184
license: MIT
homepage: https://github.com/pangeo-data/WeatherBench
- config_name: wiki_daily_100k
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
- name: page_name
dtype: string
splits:
- name: train
num_bytes: 4389782678
num_examples: 100000
download_size: 592554033
dataset_size: 4389782678
license: CC0
homepage: https://dumps.wikimedia.org/other/pageviews/readme.html
- config_name: wind_farms_daily
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 1919187
num_examples: 337
download_size: 598834
dataset_size: 1919187
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
- config_name: wind_farms_hourly
features:
- name: id
dtype: string
- name: timestamp
sequence: timestamp[ms]
- name: target
sequence: float64
splits:
- name: train
num_bytes: 45917027
num_examples: 337
download_size: 12333116
dataset_size: 45917027
license: CC BY 4.0
homepage: https://zenodo.org/communities/forecasting
configs:
- config_name: dominick
data_files:
- split: train
path: dominick/train-*
- config_name: electricity_15min
data_files:
- split: train
path: electricity_15min/train-*
- config_name: ercot
data_files:
- split: train
path: ercot/train-*
- config_name: exchange_rate
data_files:
- split: train
path: exchange_rate/train-*
- config_name: m4_daily
data_files:
- split: train
path: m4_daily/train-*
- config_name: m4_hourly
data_files:
- split: train
path: m4_hourly/train-*
- config_name: m4_monthly
data_files:
- split: train
path: m4_monthly/train-*
- config_name: m4_quarterly
data_files:
- split: train
path: m4_quarterly/train-*
- config_name: m4_weekly
data_files:
- split: train
path: m4_weekly/train-*
- config_name: m4_yearly
data_files:
- split: train
path: m4_yearly/train-*
- config_name: m5
data_files:
- split: train
path: m5/train-*
- config_name: mexico_city_bikes
data_files:
- split: train
path: mexico_city_bikes/train-*
- config_name: monash_australian_electricity
data_files:
- split: train
path: monash_australian_electricity/train-*
- config_name: monash_car_parts
data_files:
- split: train
path: monash_car_parts/train-*
- config_name: monash_cif_2016
data_files:
- split: train
path: monash_cif_2016/train-*
- config_name: monash_covid_deaths
data_files:
- split: train
path: monash_covid_deaths/train-*
- config_name: monash_electricity_hourly
data_files:
- split: train
path: monash_electricity_hourly/train-*
- config_name: monash_electricity_weekly
data_files:
- split: train
path: monash_electricity_weekly/train-*
- config_name: monash_fred_md
data_files:
- split: train
path: monash_fred_md/train-*
- config_name: monash_hospital
data_files:
- split: train
path: monash_hospital/train-*
- config_name: monash_kdd_cup_2018
data_files:
- split: train
path: monash_kdd_cup_2018/train-*
- config_name: monash_london_smart_meters
data_files:
- split: train
path: monash_london_smart_meters/train-*
- config_name: monash_m1_monthly
data_files:
- split: train
path: monash_m1_monthly/train-*
- config_name: monash_m1_quarterly
data_files:
- split: train
path: monash_m1_quarterly/train-*
- config_name: monash_m1_yearly
data_files:
- split: train
path: monash_m1_yearly/train-*
- config_name: monash_m3_monthly
data_files:
- split: train
path: monash_m3_monthly/train-*
- config_name: monash_m3_quarterly
data_files:
- split: train
path: monash_m3_quarterly/train-*
- config_name: monash_m3_yearly
data_files:
- split: train
path: monash_m3_yearly/train-*
- config_name: monash_nn5_weekly
data_files:
- split: train
path: monash_nn5_weekly/train-*
- config_name: monash_pedestrian_counts
data_files:
- split: train
path: monash_pedestrian_counts/train-*
- config_name: monash_rideshare
data_files:
- split: train
path: monash_rideshare/train-*
- config_name: monash_saugeenday
data_files:
- split: train
path: monash_saugeenday/train-*
- config_name: monash_temperature_rain
data_files:
- split: train
path: monash_temperature_rain/train-*
- config_name: monash_tourism_monthly
data_files:
- split: train
path: monash_tourism_monthly/train-*
- config_name: monash_tourism_quarterly
data_files:
- split: train
path: monash_tourism_quarterly/train-*
- config_name: monash_tourism_yearly
data_files:
- split: train
path: monash_tourism_yearly/train-*
- config_name: monash_traffic
data_files:
- split: train
path: monash_traffic/train-*
- config_name: monash_weather
data_files:
- split: train
path: monash_weather/train-*
- config_name: nn5
data_files:
- split: train
path: nn5/train-*
- config_name: solar
data_files:
- split: train
path: solar/train-*
- config_name: solar_1h
data_files:
- split: train
path: solar_1h/train-*
- config_name: taxi_1h
data_files:
- split: train
path: taxi_1h/train-*
- config_name: taxi_30min
data_files:
- split: train
path: taxi_30min/train-*
- config_name: training_corpus_kernel_synth_1m
data_files:
- split: train
path: training_corpus/kernel_synth_1m/train-*
- config_name: training_corpus_tsmixup_10m
data_files:
- split: train
path: training_corpus/tsmixup_10m/train-*
- config_name: uber_tlc_daily
data_files:
- split: train
path: uber_tlc_daily/train-*
- config_name: uber_tlc_hourly
data_files:
- split: train
path: uber_tlc_hourly/train-*
- config_name: ushcn_daily
data_files:
- split: train
path: ushcn_daily/train-*
- config_name: weatherbench_daily
data_files:
- split: train
path: weatherbench_daily/train-*
- config_name: weatherbench_hourly_10m_u_component_of_wind
data_files:
- split: train
path: weatherbench_hourly/10m_u_component_of_wind/train-*
- config_name: weatherbench_hourly_10m_v_component_of_wind
data_files:
- split: train
path: weatherbench_hourly/10m_v_component_of_wind/train-*
- config_name: weatherbench_hourly_2m_temperature
data_files:
- split: train
path: weatherbench_hourly/2m_temperature/train-*
- config_name: weatherbench_hourly_geopotential
data_files:
- split: train
path: weatherbench_hourly/geopotential/train-*
- config_name: weatherbench_hourly_potential_vorticity
data_files:
- split: train
path: weatherbench_hourly/potential_vorticity/train-*
- config_name: weatherbench_hourly_relative_humidity
data_files:
- split: train
path: weatherbench_hourly/relative_humidity/train-*
- config_name: weatherbench_hourly_specific_humidity
data_files:
- split: train
path: weatherbench_hourly/specific_humidity/train-*
- config_name: weatherbench_hourly_temperature
data_files:
- split: train
path: weatherbench_hourly/temperature/train-*
- config_name: weatherbench_hourly_toa_incident_solar_radiation
data_files:
- split: train
path: weatherbench_hourly/toa_incident_solar_radiation/train-*
- config_name: weatherbench_hourly_total_cloud_cover
data_files:
- split: train
path: weatherbench_hourly/total_cloud_cover/train-*
- config_name: weatherbench_hourly_total_precipitation
data_files:
- split: train
path: weatherbench_hourly/total_precipitation/train-*
- config_name: weatherbench_hourly_u_component_of_wind
data_files:
- split: train
path: weatherbench_hourly/u_component_of_wind/train-*
- config_name: weatherbench_hourly_v_component_of_wind
data_files:
- split: train
path: weatherbench_hourly/v_component_of_wind/train-*
- config_name: weatherbench_hourly_vorticity
data_files:
- split: train
path: weatherbench_hourly/vorticity/train-*
- config_name: weatherbench_weekly
data_files:
- split: train
path: weatherbench_weekly/train-*
- config_name: wiki_daily_100k
data_files:
- split: train
path: wiki_daily_100k/train-*
- config_name: wind_farms_daily
data_files:
- split: train
path: wind_farms_daily/train-*
- config_name: wind_farms_hourly
data_files:
- split: train
path: wind_farms_hourly/train-*
---
# Chronos datasets
Time series datasets used for training and evaluation of the [Chronos](https://github.com/amazon-science/chronos-forecasting) forecasting models.
Note that some Chronos datasets (`ETTh`, `ETTm`, `brazilian_cities_temperature` and `spanish_energy_and_weather`) that rely on a custom builder script are available in the companion repo [`autogluon/chronos_datasets_extra`](https://huggingface.co/datasets/autogluon/chronos_datasets_extra).
See the [paper](https://arxiv.org/abs/2403.07815) for more information.
## Data format and usage
All datasets satisfy the following high-level schema:
- Each dataset row corresponds to a single (univariate or multivariate) time series.
- There exists one column with name `id` and type `string` that contains the unique identifier of each time series.
- There exists one column of type `Sequence` with dtype `timestamp[ms]`. This column contains the timestamps of the observations. Timestamps are guaranteed to have a regular frequency that can be obtained with [`pandas.infer_freq`](https://pandas.pydata.org/docs/reference/api/pandas.infer_freq.html).
- There exists at least one column of type `Sequence` with numeric (`float`, `double`, or `int`) dtype. These columns can be interpreted as target time series.
- For each row, all columns of type `Sequence` have same length.
- Remaining columns of types other than `Sequence` (e.g., `string` or `float`) can be interpreted as static covariates.
Datasets can be loaded using the 🤗 [`datasets`](https://huggingface.co/docs/datasets/en/index) library
```python
import datasets
ds = datasets.load_dataset("autogluon/chronos_datasets", "m4_daily", split="train")
ds.set_format("numpy") # sequences returned as numpy arrays
```
> **NOTE:** The `train` split of all datasets contains the full time series and has no relation to the train/test split used in the Chronos paper.
Example entry in the `m4_daily` dataset
```python
>>> ds[0]
{'id': 'T000000',
'timestamp': array(['1994-03-01T12:00:00.000', '1994-03-02T12:00:00.000',
'1994-03-03T12:00:00.000', ..., '1996-12-12T12:00:00.000',
'1996-12-13T12:00:00.000', '1996-12-14T12:00:00.000'],
dtype='datetime64[ms]'),
'target': array([1017.1, 1019.3, 1017. , ..., 2071.4, 2083.8, 2080.6], dtype=float32),
'category': 'Macro'}
```
### Converting to pandas
We can easily convert data in such format to a long format data frame
```python
def to_pandas(ds: datasets.Dataset) -> "pd.DataFrame":
"""Convert dataset to long data frame format."""
sequence_columns = [col for col in ds.features if isinstance(ds.features[col], datasets.Sequence)]
return ds.to_pandas().explode(sequence_columns).infer_objects()
```
Example output
```python
>>> print(to_pandas(ds).head())
id timestamp target category
0 T000000 1994-03-01 12:00:00 1017.1 Macro
1 T000000 1994-03-02 12:00:00 1019.3 Macro
2 T000000 1994-03-03 12:00:00 1017.0 Macro
3 T000000 1994-03-04 12:00:00 1019.2 Macro
4 T000000 1994-03-05 12:00:00 1018.7 Macro
```
### Dealing with large datasets
Note that some datasets, such as subsets of WeatherBench, are extremely large (~100GB). To work with them efficiently, we recommend either loading them from disk (files will be downloaded to disk, but won't be all loaded into memory)
```python
ds = datasets.load_dataset("autogluon/chronos_datasets", "weatherbench_daily", keep_in_memory=False, split="train")
```
or, for the largest datasets like `weatherbench_hourly_temperature`, reading them in streaming format (chunks will be downloaded one at a time)
```python
ds = datasets.load_dataset("autogluon/chronos_datasets", "weatherbench_hourly_temperature", streaming=True, split="train")
```
## Chronos training corpus with TSMixup & KernelSynth
The training corpus used for training the Chronos models can be loaded via the configs `training_corpus_tsmixup_10m` (10M TSMixup augmentations of real-world data) and `training_corpus_kernel_synth_1m` (1M synthetic time series generated with KernelSynth), e.g.,
```python
ds = datasets.load_dataset("autogluon/chronos_datasets", "training_corpus_tsmixup_10m", streaming=True, split="train")
```
Note that since data in the training corpus was obtained by combining various synthetic & real-world time series, the timestamps contain dummy values that have no connection to the original data.
## License
Different datasets available in this collection are distributed under different open source licenses. Please see `ds.info.license` and `ds.info.homepage` for each individual dataset.
## Citation
If you find these datasets useful for your research, please consider citing the associated paper:
```markdown
@article{ansari2024chronos,
author = {Ansari, Abdul Fatir and Stella, Lorenzo and Turkmen, Caner and Zhang, Xiyuan and Mercado, Pedro and Shen, Huibin and Shchur, Oleksandr and Rangapuram, Syama Syndar and Pineda Arango, Sebastian and Kapoor, Shubham and Zschiegner, Jasper and Maddix, Danielle C. and Wang, Hao and Mahoney, Michael W. and Torkkola, Kari and Gordon Wilson, Andrew and Bohlke-Schneider, Michael and Wang, Yuyang},
title = {Chronos: Learning the Language of Time Series},
journal = {arXiv preprint arXiv:2403.07815},
year = {2024}
}
```
|
echo840/OCRBench | echo840 | "2024-12-18T11:03:09Z" | 4,267 | 11 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2305.07895",
"region:us"
] | null | "2024-03-24T04:33:58Z" | ---
dataset_info:
features:
- name: dataset
dtype: string
- name: question
dtype: string
- name: question_type
dtype: string
- name: answer
sequence: string
- name: image
dtype: image
splits:
- name: test
num_bytes: 85534416.0
num_examples: 1000
download_size: 67576988
dataset_size: 85534416.0
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
---
[Github](https://github.com/Yuliang-Liu/MultimodalOCR)|[Paper](https://arxiv.org/abs/2305.07895)
OCRBench has been accepted by [Science China Information Sciences](https://link.springer.com/article/10.1007/s11432-024-4235-6).
|
diffusers/test-arrays | diffusers | "2023-05-24T15:36:31Z" | 4,262 | 1 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2022-12-12T14:36:33Z" | ---
license: apache-2.0
---
|
occiglot/tokenizer-wiki-bench | occiglot | "2024-04-23T21:00:00Z" | 4,247 | 4 | [
"language:af",
"language:ar",
"language:bg",
"language:ca",
"language:cs",
"language:da",
"language:de",
"language:el",
"language:en",
"language:es",
"language:et",
"language:eu",
"language:fa",
"language:fi",
"language:fr",
"language:ga",
"language:he",
"language:hi",
"language:hr",
"language:hu",
"language:hy",
"language:id",
"language:it",
"language:ja",
"language:ko",
"language:lt",
"language:lv",
"language:mr",
"language:nl",
"language:no",
"language:pl",
"language:pt",
"language:ro",
"language:ru",
"language:sa",
"language:sk",
"language:sl",
"language:sr",
"language:sv",
"language:ta",
"language:te",
"language:tr",
"language:uk",
"language:ur",
"language:vi",
"license:mit",
"size_categories:10M<n<100M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2012.15613",
"region:us"
] | null | "2024-03-13T14:49:07Z" | ---
language:
- af
- ar
- bg
- ca
- cs
- da
- de
- el
- en
- es
- et
- eu
- fa
- fi
- fr
- ga
- he
- hi
- hr
- hu
- hy
- id
- it
- ja
- ko
- lt
- lv
- mr
- nl
- 'no'
- pl
- pt
- ro
- ru
- sa
- sk
- sl
- sr
- sv
- ta
- te
- tr
- uk
- ur
- vi
license: mit
pretty_name: Multilingual Tokenizer Wikipedia Benchmark
dataset_info:
- config_name: af
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 541481060
num_examples: 112518
- name: clean
num_bytes: 539551289.6071739
num_examples: 112117
download_size: 441191361
dataset_size: 1081032349.607174
- config_name: ar
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 7007645793
num_examples: 1219201
- name: clean
num_bytes: 6980694657.688122
num_examples: 1214512
download_size: 4415559180
dataset_size: 13988340450.688122
- config_name: bg
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2437923560
num_examples: 294275
- name: clean
num_bytes: 2433855866.6248918
num_examples: 293784
download_size: 1805069655
dataset_size: 4871779426.624891
- config_name: ca
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 4801022979
num_examples: 737409
- name: clean
num_bytes: 4766991732.959834
num_examples: 732182
download_size: 3884482903
dataset_size: 9568014711.959835
- config_name: cs
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3740905267
num_examples: 534044
- name: clean
num_bytes: 3730243864.91258
num_examples: 532522
download_size: 3671037924
dataset_size: 7471149131.9125805
- config_name: da
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1298277678
num_examples: 295347
- name: clean
num_bytes: 1292602738.074089
num_examples: 294056
download_size: 1782396281
dataset_size: 2590880416.074089
- config_name: de
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 23086869184
num_examples: 2845308
- name: clean
num_bytes: 23073148386.18474
num_examples: 2843617
download_size: 21942020975
dataset_size: 46160017570.18474
- config_name: el
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3002968703
num_examples: 226834
- name: clean
num_bytes: 2973684879.714972
num_examples: 224622
download_size: 2295250961
dataset_size: 5976653582.714972
- config_name: en
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 49746869820
num_examples: 6407814
- name: clean
num_bytes: 49560903666.851944
num_examples: 6383860
download_size: 40592018321
dataset_size: 99307773486.85194
- config_name: es
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 14759846818
num_examples: 1841155
- name: clean
num_bytes: 14536992695.618353
num_examples: 1813356
download_size: 12175892555
dataset_size: 29296839513.618355
- config_name: et
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1019050491
num_examples: 240397
- name: clean
num_bytes: 1016723262.6254404
num_examples: 239848
download_size: 1019164563
dataset_size: 2035773753.6254404
- config_name: eu
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1291195010
num_examples: 416347
- name: clean
num_bytes: 1265327506.262949
num_examples: 408006
download_size: 968840915
dataset_size: 2556522516.262949
- config_name: fa
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 4224898253
num_examples: 979869
- name: clean
num_bytes: 4213433450.6083264
num_examples: 977210
download_size: 2499698548
dataset_size: 8438331703.608326
- config_name: fi
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2600737260
num_examples: 561598
- name: clean
num_bytes: 2595874753.1481237
num_examples: 560548
download_size: 2642007766
dataset_size: 5196612013.148124
- config_name: fr
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 20069732840
num_examples: 2564646
- name: clean
num_bytes: 19942544382.860683
num_examples: 2548393
download_size: 16151551755
dataset_size: 40012277222.86069
- config_name: ga
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 142209710
num_examples: 59156
- name: clean
num_bytes: 141702470.68682805
num_examples: 58945
download_size: 121745838
dataset_size: 283912180.686828
- config_name: he
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 4776226234
num_examples: 333874
- name: clean
num_bytes: 4760232712.702708
num_examples: 332756
download_size: 3499530576
dataset_size: 9536458946.70271
- config_name: hi
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1452853579
num_examples: 163093
- name: clean
num_bytes: 1443152625.8779714
num_examples: 162004
download_size: 785363639
dataset_size: 2896006204.8779716
- config_name: hr
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1064630680
num_examples: 202848
- name: clean
num_bytes: 1053026432.3195693
num_examples: 200637
download_size: 1028743775
dataset_size: 2117657112.3195693
- config_name: hu
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3533169653
num_examples: 532427
- name: clean
num_bytes: 3510335279.8822336
num_examples: 528986
download_size: 3558613373
dataset_size: 7043504932.882234
- config_name: hy
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2568868378
num_examples: 303036
- name: clean
num_bytes: 2555898405.394963
num_examples: 301506
download_size: 1781142597
dataset_size: 5124766783.394962
- config_name: id
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2650288629
num_examples: 665622
- name: clean
num_bytes: 2630666948.280745
num_examples: 660694
download_size: 2040186206
dataset_size: 5280955577.280745
- config_name: it
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 12188918391
num_examples: 1833639
- name: clean
num_bytes: 12163279397.591763
num_examples: 1829782
download_size: 10368836428
dataset_size: 24352197788.591763
- config_name: ja
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 18752888787
num_examples: 1389467
- name: clean
num_bytes: 18684866617.717476
num_examples: 1384427
download_size: 15232900753
dataset_size: 37437755404.717476
- config_name: ko
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3160932689
num_examples: 647897
- name: clean
num_bytes: 3151741108.878351
num_examples: 646013
download_size: 3074385022
dataset_size: 6312673797.878351
- config_name: lt
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 781319902
num_examples: 211292
- name: clean
num_bytes: 777474168.616436
num_examples: 210252
download_size: 722780874
dataset_size: 1558794070.616436
- config_name: lv
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 531956241
num_examples: 123413
- name: clean
num_bytes: 530943303.00615007
num_examples: 123178
download_size: 700342420
dataset_size: 1062899544.00615
- config_name: mr
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 547060763
num_examples: 94133
- name: clean
num_bytes: 545450957.3914355
num_examples: 93856
download_size: 278141890
dataset_size: 1092511720.3914356
- config_name: nl
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 6191062892
num_examples: 2135977
- name: clean
num_bytes: 6177393712.697661
num_examples: 2131261
download_size: 5179824678
dataset_size: 12368456604.697662
- config_name: 'no'
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2437191515
num_examples: 617937
- name: clean
num_bytes: 2428893175.610127
num_examples: 615833
download_size: 2175299531
dataset_size: 4866084690.6101265
- config_name: pl
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 6864626419
num_examples: 1587721
- name: clean
num_bytes: 6861024883.335341
num_examples: 1586888
download_size: 6565864124
dataset_size: 13725651302.335342
- config_name: pt
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 6844185526
num_examples: 1112246
- name: clean
num_bytes: 6755821527.2502985
num_examples: 1097886
download_size: 5516209748
dataset_size: 13600007053.250298
- config_name: ro
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2023493174
num_examples: 442389
- name: clean
num_bytes: 2006866635.6197736
num_examples: 438754
download_size: 1652633599
dataset_size: 4030359809.619774
- config_name: ru
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 22550679128
num_examples: 1945063
- name: clean
num_bytes: 22439204702.844765
num_examples: 1935448
download_size: 18884603758
dataset_size: 44989883830.844765
- config_name: sa
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 146460109
num_examples: 12156
- name: clean
num_bytes: 145435996.68797302
num_examples: 12071
download_size: 95836795
dataset_size: 291896105.687973
- config_name: sk
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 977962245
num_examples: 242235
- name: clean
num_bytes: 976048590.4738994
num_examples: 241761
download_size: 1346611201
dataset_size: 1954010835.4738994
- config_name: sl
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1106532891
num_examples: 183006
- name: clean
num_bytes: 1097995332.4385757
num_examples: 181594
download_size: 1006028852
dataset_size: 2204528223.4385757
- config_name: sr
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3755288114
num_examples: 676605
- name: clean
num_bytes: 3735557179.0449376
num_examples: 673050
download_size: 2558022832
dataset_size: 7490845293.044937
- config_name: sv
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 4861956987
num_examples: 2574513
- name: clean
num_bytes: 4857071448.365948
num_examples: 2571926
download_size: 3512612936
dataset_size: 9719028435.365948
- config_name: ta
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1693909025
num_examples: 160651
- name: clean
num_bytes: 1682405487.85255
num_examples: 159560
download_size: 985318775
dataset_size: 3376314512.85255
- config_name: te
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 1556095028
num_examples: 87854
- name: clean
num_bytes: 1550320823.3066678
num_examples: 87528
download_size: 746686495
dataset_size: 3106415851.306668
- config_name: tr
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 2317236022
num_examples: 534988
- name: clean
num_bytes: 2301578085.336879
num_examples: 531373
download_size: 2055444454
dataset_size: 4618814107.336879
- config_name: uk
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 10935662610
num_examples: 1294720
- name: clean
num_bytes: 10860532296.947023
num_examples: 1285825
download_size: 8344390939
dataset_size: 21796194906.94702
- config_name: ur
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 918249794
num_examples: 200154
- name: clean
num_bytes: 912616078.225986
num_examples: 198926
download_size: 534834968
dataset_size: 1830865872.225986
- config_name: vi
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 3685585608
num_examples: 1288680
- name: clean
num_bytes: 3669872935.086358
num_examples: 1283186
download_size: 2646807342
dataset_size: 7355458543.086358
- config_name: zh
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: split_text
sequence: string
splits:
- name: train
num_bytes: 7820979602
num_examples: 1384748
- name: clean
num_bytes: 7781957954.689285
num_examples: 1377839
download_size: 6540517932
dataset_size: 15602937556.689285
configs:
- config_name: af
data_files:
- split: train
path: af/train-*
- split: clean
path: af/clean-*
- config_name: ar
data_files:
- split: train
path: ar/train-*
- split: clean
path: ar/clean-*
- config_name: bg
data_files:
- split: train
path: bg/train-*
- split: clean
path: bg/clean-*
- config_name: ca
data_files:
- split: train
path: ca/train-*
- split: clean
path: ca/clean-*
- config_name: cs
data_files:
- split: train
path: cs/train-*
- split: clean
path: cs/clean-*
- config_name: da
data_files:
- split: train
path: da/train-*
- split: clean
path: da/clean-*
- config_name: de
data_files:
- split: train
path: de/train-*
- split: clean
path: de/clean-*
- config_name: el
data_files:
- split: train
path: el/train-*
- split: clean
path: el/clean-*
- config_name: en
data_files:
- split: train
path: en/train-*
- split: clean
path: en/clean-*
- config_name: es
data_files:
- split: train
path: es/train-*
- split: clean
path: es/clean-*
- config_name: et
data_files:
- split: train
path: et/train-*
- split: clean
path: et/clean-*
- config_name: eu
data_files:
- split: train
path: eu/train-*
- split: clean
path: eu/clean-*
- config_name: fa
data_files:
- split: train
path: fa/train-*
- split: clean
path: fa/clean-*
- config_name: fi
data_files:
- split: train
path: fi/train-*
- split: clean
path: fi/clean-*
- config_name: fr
data_files:
- split: train
path: fr/train-*
- split: clean
path: fr/clean-*
- config_name: ga
data_files:
- split: train
path: ga/train-*
- split: clean
path: ga/clean-*
- config_name: he
data_files:
- split: train
path: he/train-*
- split: clean
path: he/clean-*
- config_name: hi
data_files:
- split: train
path: hi/train-*
- split: clean
path: hi/clean-*
- config_name: hr
data_files:
- split: train
path: hr/train-*
- split: clean
path: hr/clean-*
- config_name: hu
data_files:
- split: train
path: hu/train-*
- split: clean
path: hu/clean-*
- config_name: hy
data_files:
- split: train
path: hy/train-*
- split: clean
path: hy/clean-*
- config_name: id
data_files:
- split: train
path: id/train-*
- split: clean
path: id/clean-*
- config_name: it
data_files:
- split: train
path: it/train-*
- split: clean
path: it/clean-*
- config_name: ja
data_files:
- split: train
path: ja/train-*
- split: clean
path: ja/clean-*
- config_name: ko
data_files:
- split: train
path: ko/train-*
- split: clean
path: ko/clean-*
- config_name: lt
data_files:
- split: train
path: lt/train-*
- split: clean
path: lt/clean-*
- config_name: lv
data_files:
- split: train
path: lv/train-*
- split: clean
path: lv/clean-*
- config_name: mr
data_files:
- split: train
path: mr/train-*
- split: clean
path: mr/clean-*
- config_name: nl
data_files:
- split: train
path: nl/train-*
- split: clean
path: nl/clean-*
- config_name: 'no'
data_files:
- split: train
path: no/train-*
- split: clean
path: no/clean-*
- config_name: pl
data_files:
- split: train
path: pl/train-*
- split: clean
path: pl/clean-*
- config_name: pt
data_files:
- split: train
path: pt/train-*
- split: clean
path: pt/clean-*
- config_name: ro
data_files:
- split: train
path: ro/train-*
- split: clean
path: ro/clean-*
- config_name: ru
data_files:
- split: train
path: ru/train-*
- split: clean
path: ru/clean-*
- config_name: sa
data_files:
- split: train
path: sa/train-*
- split: clean
path: sa/clean-*
- config_name: sk
data_files:
- split: train
path: sk/train-*
- split: clean
path: sk/clean-*
- config_name: sl
data_files:
- split: train
path: sl/train-*
- split: clean
path: sl/clean-*
- config_name: sr
data_files:
- split: train
path: sr/train-*
- split: clean
path: sr/clean-*
- config_name: sv
data_files:
- split: train
path: sv/train-*
- split: clean
path: sv/clean-*
- config_name: ta
data_files:
- split: train
path: ta/train-*
- split: clean
path: ta/clean-*
- config_name: te
data_files:
- split: train
path: te/train-*
- split: clean
path: te/clean-*
- config_name: tr
data_files:
- split: train
path: tr/train-*
- split: clean
path: tr/clean-*
- config_name: uk
data_files:
- split: train
path: uk/train-*
- split: clean
path: uk/clean-*
- config_name: ur
data_files:
- split: train
path: ur/train-*
- split: clean
path: ur/clean-*
- config_name: vi
data_files:
- split: train
path: vi/train-*
- split: clean
path: vi/clean-*
- config_name: zh
data_files:
- split: train
path: zh/train-*
- split: clean
path: zh/clean-*
---
# Multilingual Tokenizer Benchmark
This dataset includes pre-processed Wikipedia data for tokenizer evaluation in [45 languages](https://huggingface.co/datasets/occiglot/tokenizer-wiki-bench/blob/main/README.md#supported-languages). We provide more information on the evaluation task in general in [this blogpost](https://occiglot.github.io/occiglot/posts/eu_tokenizer_perfomance/).
## Usage
The dataset allows us to easily calculate *tokenizer fertility* and the *proportion of continued words* on any of the supported languages. In the example below we take the Mistral tokenizer and evaluate its performance on Slovak.
```python
from transformers import AutoTokenizer
from datasets import load_dataset
import numpy as np
def calculate_metrics(tokens):
tmp = np.array([len(y) for y in tokens])
return {'fertility': np.mean(tmp), 'cont_prop': np.count_nonzero(tmp > 1) / tmp.shape[0]}
tokenizer_name = 'mistralai/Mistral-7B-v0.1'
language = 'sk' #Slovak
tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
ds = load_dataset('occiglot/tokenizer-wiki-bench', name=language, split='clean')
remove_columns = list(set(ds.column_names) - set(["text"]))
ds = ds.map(lambda x: {'tokens': tokenizer(x['split_text'], add_special_tokens=False)['input_ids']} ,num_proc=256, remove_columns=remove_columns, batched=False)
remove_columns = None  # keep all columns; the computed metrics are added alongside them
ds = ds.map(lambda x: calculate_metrics(x['tokens']), num_proc=256, remove_columns=remove_columns, batched=False)
df = ds.to_pandas()
print('Fertility: ', df.fertility.mean())
print('Prop. continued words:', df.cont_prop.mean())
```
## Dataset Creation
We loosely follow the approach of [Rust et al.](https://arxiv.org/abs/2012.15613), using the fast [UDPipe](https://ufal.mff.cuni.cz/udpipe) to pre-split documents into words and subsequently running the tokenizer over isolated words. For all languages we use the respective November 2023 snapshot from [Wikipedia](https://huggingface.co/datasets/wikimedia/wikipedia). Since Wikipedia, by nature, contains significantly more numbers and dates than other text, and most tokenizers split those into single digits, we filtered all lone-standing numbers from the documents. Additionally, we removed any documents that still contained non-parsed HTML code (less than 1%).
## Licensing
We release our curated benchmark and any associated code under [MIT](https://opensource.org/license/mit) license. However, depending on your use case, the licensing conditions of the original [Wikipedia data](https://huggingface.co/datasets/wikimedia/wikipedia#licensing-information) and [UDPipe](https://github.com/ufal/udpipe/tree/udpipe-2?tab=License-1-ov-file) may apply.
## Supported Languages
This dataset currently contains pre-processed data for the following languages:
| Language | Code |
|:-----------|:-------|
| Afrikaans | af |
| Arabic | ar |
| Armenian | hy |
| Basque | eu |
| Bulgarian | bg |
| Catalan | ca |
| Croatian | hr |
| Czech | cs |
| Danish | da |
| Dutch | nl |
| English | en |
| Estonian | et |
| Finnish | fi |
| French | fr |
| German | de |
| Greek | el |
| Hebrew | he |
| Hindi | hi |
| Hungarian | hu |
| Indonesian | id |
| Irish | ga |
| Italian | it |
| Japanese | ja |
| Korean | ko |
| Latvian | lv |
| Lithuanian | lt |
| Marathi | mr |
| Norwegian | no |
| Persian | fa |
| Polish | pl |
| Portuguese | pt |
| Romanian | ro |
| Russian | ru |
| Sanskrit | sa |
| Serbian | sr |
| Slovak | sk |
| Slovenian | sl |
| Spanish | es |
| Swedish | sv |
| Tamil | ta |
| Telugu | te |
| Turkish | tr |
| Ukrainian | uk |
| Urdu | ur |
| Vietnamese | vi | |
xinrongzhang2022/InfiniteBench | xinrongzhang2022 | "2024-10-08T01:59:10Z" | 4,240 | 27 | [
"region:us"
] | null | "2023-11-16T09:29:02Z" | ---
configs:
- config_name: default
data_files:
- split: passkey
path: "passkey.jsonl"
- split: kv_retrieval
path: "kv_retrieval.jsonl"
- split: number_string
path: "number_string.jsonl"
- split: code_run
path: "code_run.jsonl"
- split: code_debug
path: "code_debug.jsonl"
- split: math_find
path: "math_find.jsonl"
- split: math_calc
path: "math_calc.jsonl"
- split: longdialogue_qa_eng
path: "longdialogue_qa_eng.jsonl"
- split: longbook_qa_eng
path: "longbook_qa_eng.jsonl"
- split: longbook_sum_eng
path: "longbook_sum_eng.jsonl"
- split: longbook_choice_eng
path: "longbook_choice_eng.jsonl"
- split: longbook_qa_chn
path: "longbook_qa_chn.jsonl"
---
---
license: apache-2.0
---
---
## Usage
load with datasets
```
from datasets import load_dataset, Features, Value, Sequence
# Define the features schema
ft = Features({
"id": Value("int64"),
"context": Value("string"),
"input": Value("string"),
"answer": Sequence(Value("string")),
"options": Sequence(Value("string"))
})
# Load the dataset with the specified features
dataset = load_dataset("xinrongzhang2022/InfiniteBench", features=ft)
```
## Citation
Please cite us if you use $\infty$Bench.
```bibtex
@inproceedings{zhang-etal-2024-bench,
title = "$\infty${B}ench: Extending Long Context Evaluation Beyond 100{K} Tokens",
author = "Zhang, Xinrong and
Chen, Yingfa and
Hu, Shengding and
Xu, Zihang and
Chen, Junhao and
Hao, Moo and
Han, Xu and
Thai, Zhen and
Wang, Shuo and
Liu, Zhiyuan and
Sun, Maosong",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.acl-long.814",
pages = "15262--15277",
abstract = "Processing and reasoning over long contexts is crucial for many practical applications of Large Language Models (LLMs), such as document comprehension and agent construction. Despite recent strides in making LLMs process contexts with more than 100K tokens, there is currently a lack of a standardized benchmark to evaluate this long-context capability. Existing public benchmarks typically focus on contexts around 10K tokens, limiting the assessment and comparison of LLMs in processing longer contexts. In this paper, we propose $\infty$Bench, the first LLM benchmark featuring an average data length surpassing 100K tokens. $\infty$Bench comprises synthetic and realistic tasks spanning diverse domains in English and Chinese. The tasks in $\infty$Bench are designed to require an understanding of long dependencies in contexts and make simply retrieving a limited number of passages from contexts not sufficient for these tasks. Based on $\infty$Bench, we evaluate several state-of-the-art LLMs tailored for processing long contexts. The experimental results indicate that existing long-context LLMs still require significant advancements to process 100K+ contexts effectively. Furthermore, we present three intriguing analyses regarding the behavior of LLMs processing long context. Our code and data is released.",
} |
facebook/multilingual_librispeech | facebook | "2024-08-12T16:50:57Z" | 4,238 | 118 | [
"task_categories:automatic-speech-recognition",
"task_categories:text-to-speech",
"task_categories:text-to-audio",
"annotations_creators:expert-generated",
"language_creators:crowdsourced",
"language_creators:expert-generated",
"multilinguality:multilingual",
"source_datasets:original",
"language:de",
"language:nl",
"language:fr",
"language:it",
"language:es",
"language:pt",
"language:pl",
"language:en",
"license:cc-by-4.0",
"size_categories:1M<n<10M",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2012.03411",
"region:us"
] | [
"automatic-speech-recognition",
"text-to-speech",
"text-to-audio"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- expert-generated
language_creators:
- crowdsourced
- expert-generated
language:
- de
- nl
- fr
- it
- es
- pt
- pl
- en
license:
- cc-by-4.0
multilinguality:
- multilingual
size_categories:
- 100K<n<1M
source_datasets:
- original
task_categories:
- automatic-speech-recognition
- text-to-speech
- text-to-audio
paperswithcode_id: multilingual-librispeech
pretty_name: MultiLingual LibriSpeech
dataset_info:
- config_name: dutch
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 199959986
num_examples: 3095
- name: test
num_bytes: 199298575
num_examples: 3075
- name: train
num_bytes: 23931679031
num_examples: 374287
- name: 9_hours
num_bytes: 139884664.668
num_examples: 2153
- name: 1_hours
num_bytes: 15462181
num_examples: 234
download_size: 24376256629
dataset_size: 24486284437.668
- config_name: french
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 157923970.696
num_examples: 2416
- name: test
num_bytes: 158352158.582
num_examples: 2426
- name: train
num_bytes: 16984935842.04
num_examples: 258213
- name: 9_hours
num_bytes: 142796680.609
num_examples: 2167
- name: 1_hours
num_bytes: 15675831
num_examples: 241
download_size: 17381581776
dataset_size: 17459684482.927002
- config_name: german
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 224293581.302
num_examples: 3469
- name: test
num_bytes: 225756069.096
num_examples: 3394
- name: train
num_bytes: 31050881388
num_examples: 469942
- name: 9_hours
num_bytes: 142777983.118
num_examples: 2194
- name: 1_hours
num_bytes: 15714704
num_examples: 241
download_size: 31526161821
dataset_size: 31659423725.516
- config_name: italian
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 81607596.048
num_examples: 1248
- name: test
num_bytes: 83216752.046
num_examples: 1262
- name: train
num_bytes: 3896742625
num_examples: 59623
- name: 9_hours
num_bytes: 141671904.428
num_examples: 2173
- name: 1_hours
num_bytes: 15560398
num_examples: 240
download_size: 4200633596
dataset_size: 4218799275.522
- config_name: polish
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 32746725
num_examples: 512
- name: test
num_bytes: 33735044
num_examples: 520
- name: train
num_bytes: 1638889846
num_examples: 25043
- name: 9_hours
num_bytes: 142005461
num_examples: 2173
- name: 1_hours
num_bytes: 15681216
num_examples: 238
download_size: 1855342312
dataset_size: 1863058292
- config_name: portuguese
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 57533473
num_examples: 826
- name: test
num_bytes: 59141979
num_examples: 871
- name: train
num_bytes: 2518553713.946
num_examples: 37533
- name: 9_hours
num_bytes: 141641902.42
num_examples: 2116
- name: 1_hours
num_bytes: 15697139
num_examples: 236
download_size: 2780836500
dataset_size: 2792568207.366
- config_name: spanish
features:
- name: audio
dtype: audio
- name: original_path
dtype: string
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: transcript
dtype: string
- name: audio_duration
dtype: float64
- name: speaker_id
dtype: string
- name: chapter_id
dtype: string
- name: file
dtype: string
- name: id
dtype: string
splits:
- name: dev
num_bytes: 157804903.144
num_examples: 2408
- name: test
num_bytes: 158526899.32
num_examples: 2385
- name: train
num_bytes: 14562584188
num_examples: 220701
- name: 9_hours
num_bytes: 142473624.48
num_examples: 2110
- name: 1_hours
num_bytes: 15702048
num_examples: 233
download_size: 14971394533
dataset_size: 15037091662.944
configs:
- config_name: dutch
data_files:
- split: dev
path: dutch/dev-*
- split: test
path: dutch/test-*
- split: train
path: dutch/train-*
- split: 9_hours
path: dutch/9_hours-*
- split: 1_hours
path: dutch/1_hours-*
- config_name: french
data_files:
- split: dev
path: french/dev-*
- split: test
path: french/test-*
- split: train
path: french/train-*
- split: 9_hours
path: french/9_hours-*
- split: 1_hours
path: french/1_hours-*
- config_name: german
data_files:
- split: dev
path: german/dev-*
- split: test
path: german/test-*
- split: train
path: german/train-*
- split: 9_hours
path: german/9_hours-*
- split: 1_hours
path: german/1_hours-*
- config_name: italian
data_files:
- split: dev
path: italian/dev-*
- split: test
path: italian/test-*
- split: train
path: italian/train-*
- split: 9_hours
path: italian/9_hours-*
- split: 1_hours
path: italian/1_hours-*
- config_name: polish
data_files:
- split: dev
path: polish/dev-*
- split: test
path: polish/test-*
- split: train
path: polish/train-*
- split: 9_hours
path: polish/9_hours-*
- split: 1_hours
path: polish/1_hours-*
- config_name: portuguese
data_files:
- split: dev
path: portuguese/dev-*
- split: test
path: portuguese/test-*
- split: train
path: portuguese/train-*
- split: 9_hours
path: portuguese/9_hours-*
- split: 1_hours
path: portuguese/1_hours-*
- config_name: spanish
data_files:
- split: dev
path: spanish/dev-*
- split: test
path: spanish/test-*
- split: train
path: spanish/train-*
- split: 9_hours
path: spanish/9_hours-*
- split: 1_hours
path: spanish/1_hours-*
---
# Dataset Card for MultiLingual LibriSpeech
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [How to use](#how-to-use)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [MultiLingual LibriSpeech ASR corpus](http://www.openslr.org/94)
- **Repository:** [Needs More Information]
- **Paper:** [MLS: A Large-Scale Multilingual Dataset for Speech Research](https://arxiv.org/abs/2012.03411)
- **Leaderboard:** [🤗 Autoevaluate Leaderboard](https://huggingface.co/spaces/autoevaluate/leaderboards?dataset=facebook%2Fmultilingual_librispeech&only_verified=0&task=automatic-speech-recognition&config=-unspecified-&split=-unspecified-&metric=wer)
### Dataset Summary
This is a streamable version of the Multilingual LibriSpeech (MLS) dataset.
The data archives were restructured from the original ones from [OpenSLR](http://www.openslr.org/94) to make it easier to stream.
MLS dataset is a large multilingual corpus suitable for speech research. The dataset is derived from read audiobooks from LibriVox and consists of
8 languages - English, German, Dutch, Spanish, French, Italian, Portuguese, Polish. It includes about 44.5K hours of English and a total of about 6K hours for other languages.
### Supported Tasks and Leaderboards
- `automatic-speech-recognition`, `speaker-identification`: The dataset can be used to train a model for Automatic Speech Recognition (ASR). The model is presented with an audio file and asked to transcribe the audio file to written text. The most common evaluation metric is the word error rate (WER). The task has an active leaderboard which can be found at https://paperswithcode.com/dataset/multilingual-librispeech and ranks models based on their WER.
- `text-to-speech`, `text-to-audio`: The dataset can also be used to train a model for Text-To-Speech (TTS).
### Languages
The dataset is derived from read audiobooks from LibriVox and consists of 8 languages - English, German, Dutch, Spanish, French, Italian, Portuguese, Polish
### How to use
The `datasets` library allows you to load and pre-process your dataset in pure Python, at scale. The dataset can be downloaded and prepared in one call to your local drive by using the `load_dataset` function.
For example, to download the German config, simply specify the corresponding language config name (i.e., "german" for German):
```python
from datasets import load_dataset
mls = load_dataset("facebook/multilingual_librispeech", "german", split="train")
```
Using the datasets library, you can also stream the dataset on-the-fly by adding a `streaming=True` argument to the `load_dataset` function call. Loading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire dataset to disk.
```python
from datasets import load_dataset
mls = load_dataset("facebook/multilingual_librispeech", "german", split="train", streaming=True)
print(next(iter(mls)))
```
*Bonus*: create a [PyTorch dataloader](https://huggingface.co/docs/datasets/use_with_pytorch) directly with your own datasets (local/streamed).
Local:
```python
from datasets import load_dataset
from torch.utils.data import DataLoader
from torch.utils.data.sampler import BatchSampler, RandomSampler
mls = load_dataset("facebook/multilingual_librispeech", "german", split="train")
batch_sampler = BatchSampler(RandomSampler(mls), batch_size=32, drop_last=False)
dataloader = DataLoader(mls, batch_sampler=batch_sampler)
```
Streaming:
```python
from datasets import load_dataset
from torch.utils.data import DataLoader
mls = load_dataset("facebook/multilingual_librispeech", "german", split="train", streaming=True)
dataloader = DataLoader(mls, batch_size=32)
```
To find out more about loading and preparing audio datasets, head over to [hf.co/blog/audio-datasets](https://huggingface.co/blog/audio-datasets).
### Example scripts
Train your own CTC or Seq2Seq Automatic Speech Recognition models on MultiLingual Librispeech with `transformers` - [here](https://github.com/huggingface/transformers/tree/main/examples/pytorch/speech-recognition).
## Dataset Structure
### Data Instances
A typical data point comprises the path to the audio file, usually called `file` and its transcription, called `text`. Some additional information about the speaker and the passage which contains the transcription is provided.
```
{'file': '10900_6473_000030.flac',
'audio': {'path': '10900_6473_000030.flac',
'array': array([-1.52587891e-04, 6.10351562e-05, 0.00000000e+00, ...,
4.27246094e-04, 5.49316406e-04, 4.57763672e-04]),
'sampling_rate': 16000},
'text': 'więc czego chcecie odemnie spytałem wysłuchawszy tego zadziwiającego opowiadania broń nas stary człowieku broń zakrzyknęli równocześnie obaj posłowie\n',
'speaker_id': 10900,
'chapter_id': 6473,
'id': '10900_6473_000030'}
```
### Data Fields
- file: A filename .flac format.
- audio: A dictionary containing the audio filename, the decoded audio array, and the sampling rate. Note that when accessing the audio column: `dataset[0]["audio"]` the audio file is automatically decoded and resampled to `dataset.features["audio"].sampling_rate`. Decoding and resampling of a large number of audio files might take a significant amount of time. Thus it is important to first query the sample index before the `"audio"` column, *i.e.* `dataset[0]["audio"]` should **always** be preferred over `dataset["audio"][0]`.
- text: the transcription of the audio file.
- id: unique id of the data sample.
- speaker_id: unique id of the speaker. The same speaker id can be found for multiple data samples.
- chapter_id: id of the audiobook chapter which includes the transcription.
### Data Splits
| Number of samples | Train | Train.9h | Train.1h | Dev | Test |
| ----- | ------ | ----- | ---- | ---- | ---- |
| german | 469942 | 2194 | 241 | 3469 | 3394 |
| dutch | 374287 | 2153 | 234 | 3095 | 3075 |
| french | 258213 | 2167 | 241 | 2416 | 2426 |
| spanish | 220701 | 2110 | 233 | 2408 | 2385 |
| italian | 59623 | 2173 | 240 | 1248 | 1262 |
| portuguese | 37533 | 2116 | 236 | 826 | 871 |
| polish | 25043 | 2173 | 238 | 512 | 520 |
## Dataset Creation
### Curation Rationale
[Needs More Information]
### Source Data
#### Initial Data Collection and Normalization
[Needs More Information]
#### Who are the source language producers?
[Needs More Information]
### Annotations
#### Annotation process
[Needs More Information]
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
The dataset consists of people who have donated their voice online. You agree to not attempt to determine the identity of speakers in this dataset.
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[Needs More Information]
## Additional Information
### Dataset Curators
[Needs More Information]
### Licensing Information
Public Domain, Creative Commons Attribution 4.0 International Public License ([CC-BY-4.0](https://creativecommons.org/licenses/by/4.0/legalcode))
### Citation Information
```
@article{Pratap2020MLSAL,
title={MLS: A Large-Scale Multilingual Dataset for Speech Research},
author={Vineel Pratap and Qiantong Xu and Anuroop Sriram and Gabriel Synnaeve and Ronan Collobert},
journal={ArXiv},
year={2020},
volume={abs/2012.03411}
}
```
### Data Statistics
| Duration (h) | Train | Dev | Test |
|--------------|-----------|-------|-------|
| English | 44,659.74 | 15.75 | 15.55 |
| German | 1,966.51 | 14.28 | 14.29 |
| Dutch | 1,554.24 | 12.76 | 12.76 |
| French | 1,076.58 | 10.07 | 10.07 |
| Spanish | 917.68 | 9.99 | 10 |
| Italian | 247.38 | 5.18 | 5.27 |
| Portuguese | 160.96 | 3.64 | 3.74 |
| Polish | 103.65 | 2.08 | 2.14 |
| # Speakers | Train | | Dev | | Test | |
|------------|-------|------|-----|----|------|----|
| Gender | M | F | M | F | M | F |
| English | 2742 | 2748 | 21 | 21 | 21 | 21 |
| German | 81 | 95 | 15 | 15 | 15 | 15 |
| Dutch | 9 | 31 | 3 | 3 | 3 | 3 |
| French | 62 | 80 | 9 | 9 | 9 | 9 |
| Spanish | 36 | 50 | 10 | 10 | 10 | 10 |
| Italian | 22 | 43 | 5 | 5 | 5 | 5 |
| Portuguese | 26 | 16 | 5 | 5 | 5 | 5 |
| Polish | 6 | 5 | 2 | 2 | 2 | 2 |
| # Hours / Gender | Dev | | Test | |
|------------------|------|------|------|------|
| Gender | M | F | M | F |
| English | 7.76 | 7.99 | 7.62 | 7.93 |
| German | 7.06 | 7.22 | 7 | 7.29 |
| Dutch | 6.44 | 6.32 | 6.72 | 6.04 |
| French | 5.13 | 4.94 | 5.04 | 5.02 |
| Spanish | 4.91 | 5.08 | 4.78 | 5.23 |
| Italian | 2.5 | 2.68 | 2.38 | 2.9 |
| Portuguese | 1.84 | 1.81 | 1.83 | 1.9 |
| Polish | 1.12 | 0.95 | 1.09 | 1.05 |
### Contributions
Thanks to [@patrickvonplaten](https://github.com/patrickvonplaten) and [@polinaeterna](https://github.com/polinaeterna) for adding this dataset. |
zhaoyang9425/NoisyLibriSpeechDataset-MUSAN | zhaoyang9425 | "2023-09-14T12:29:19Z" | 4,234 | 1 | [
"language:en",
"license:afl-3.0",
"modality:audio",
"region:us",
"read book"
] | [
"noisy_speech_recognition"
] | "2023-09-11T14:31:43Z" | ---
license: afl-3.0
task_categories:
- noisy_speech_recognition
language:
- en
tags:
- read book
pretty_name: NoisyLibriSpeech_MUSAN
---
# Dataset Card for the Noisy LibriSpeech dataset
## Dataset Description
- **Homepage:** Coming Soon
- **Repository:** https://huggingface.co/datasets/zhaoyang9425/NoisyLibriSpeechDataset-MUSAN
- **Paper:** Coming Soon
- **Point of Contact:** [email protected]
### Dataset Summary
The noisy speech corpus is constructed by randomly sampling noise clips from the MUSAN noise dataset and adding them to the LibriSpeech dataset.
The Signal-to-Noise Ratio (SNR) levels are sampled uniformly from 0 dB, 5 dB, 10 dB, 15 dB, and 20 dB.
## Dataset Structure
Same structure as the LibriSpeech dataset.
|
notable12/AICamp-2023-Skin-Conditions-Dataset | notable12 | "2023-06-19T17:45:17Z" | 4,233 | 8 | [
"license:mit",
"size_categories:1K<n<10K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2023-06-15T18:26:23Z" | ---
license: mit
---
|
Anonymous-Uploader1/DUET | Anonymous-Uploader1 | "2024-09-12T15:19:21Z" | 4,214 | 1 | [
"language:en",
"region:us"
] | null | "2024-07-09T15:54:49Z" | ---
language:
- en
---
# Dataset Overview
This repository introduces a multi-modal dataset, **Dyadic User Engagement dataseT (DUET)**, which contains 12 two-person—or
dyadic—activities. These activities are adopted from **The Repertoire of Nonverbal Behavior: Categories, Origins, Usage, and Coding** by Paul Ekman et al.,
which allows us to distill the semantics embedded in bodily movements. Besides increasing the number, diversity, and quality of dyadic datasets, contextualizing
human activities has been proven to improve the performance of human activity recognition (HAR) tasks, as well as benefit downstream applications, such as
autonomous vehicles, smart homes, healthcare, and many more. The dataset is collected via Microsoft Azure Kinect v2 and consists of **14,400** samples, all of which come with
4 modalities: **RGB**, **depth**, **infrared (IR)**, and **3D skeleton joints**. The following sections detail the folder structure used to categorize our data, sample frames, and
the specifications of Microsoft Azure Kinect v2.
# Data Collection and Management
### Data modalities and data format
For the data collection, we use the high-quality and multimodal Azure Kinect, equipped with an RGB camera, a depth sensor, and an IR sensor. These sensors all operate
at 30 frames per second (FPS) for three seconds for each video sample, yielding 91 frames per sample. The specification of each data format varies depending on the
conventions commonly used in the research community: each RGB frame is captured with a resolution of **1,920x1,080** and is stored in a **.jpeg** format.
We record depth and IR sequences with a resolution of **640x576** and store them as 24-bit **.png** files. The skeleton joints of every sample video are stored in their
corresponding **.csv** files. Each file contains a **91x193** array, where each row represents a frame, and each column holds information related to that frame. The first
column records the timestamp of the frame, and the following 96 columns capture the <em>x</em>, <em>y</em>, and <em>z</em> coordinates of 32 joints of one subject
(as illustrated in Figure 1), measured as the distance (in millimeters) from the joint to the camera. For instance, the first three columns record the <em>x</em>,
<em>y</em>, and <em>z</em> values of the first joint. The order of the joints follows the joint index in [Azure Kinect Body Tracking Joints](https://learn.microsoft.com/en-us/previous-versions/azure/kinect-dk/body-joints).
The last 96 columns record the 32 joints of the other subject.
<p align="center" width="100%">
<img width="30%" src="./Figures/kinect_joints_enlarged_text.png">
Figure 1. 32 skeleton joints of a subject extracted using the Azure Kinect software development kit (SDK).
</p>
### Data acquisistion arrangement
After selecting the Azure Kinect as the multimodal sensing module, a setup for housing the sensor was needed to guarantee consistency throughout the experiment.
We built a sensing module, illustrated in Figure 2, that situates the Azure Kinect 84 inches above the ground and tilts it 37° forward to capture
the interactions with a full field of view and minimal occlusions.
<p align="center" width="100%">
<img width="33%" src="./Figures/testbed_configurations.png">
Figure 2. On the left, we have the bird's-eye view of the testbed configuration, whereas on the right is the sensing module used across the experiment.
</p>
Another important aspect of the experiment is the testbeds. Three locations across a US university campus are selected to carry out the experiment. As shown in
Figure 3, these include an open indoor space, a confined indoor space, and an outdoor space. These three locations are chosen (1) to enrich the variety
of backgrounds and (2) investigate the effects the ambient environment imposes on the sensors. One constraint of HAR datasets is the scarcity of diverse backgrounds,
which can lead to overfitting to background noise for deep learning models. The experiment is carried out at three distinct locations to improve the generalizability of
background noise. We also recognize that a contextualizable dataset should be suitable for a wide range of environments (e.g., parks, schools, nursing facilities, smart homes).
Collecting our dataset at different locations–especially outdoors–encourages the exploration of the direct and indirect effects the ambient environment imposes on the sensors and algorithms.
<p align="center" width="100%">
<img width="80%" src="./Figures/locations.png">
Figure 3. Data collection locations include, starting from the left to right, an open indoor space, a confined indoor space, and an open outdoor space.
</p>
Since the experiment is carried out at three locations, there is a need to ensure the collection process is repeatable. Towards this end, we designed a testbed arrangement,
shown in Figure 2, that was used across all three environments. In the testbed, volunteers are asked to perform each interaction for 40 repetitions in a rectangular area
taped to the ground. After each repetition, a beep would sound, instructing the subjects to rotate either clockwise or counterclockwise and proceed to the next repetition. This novel technique
collects data on the interactions from a wide array of perspectives with respect to the camera, diversifying the way interactions are captured and ameliorating the perspective invariance quality of deep learning algorithms.
### Subjects
A total of 15 male and eight female subjects participated in the experiments. The subjects were randomly paired to perform actions across the three locations.
The subjects' ages range from 23 to 42 years old with a mean of 27 years old and standard deviation of 4.01 years. The subjects' heights range from 165.1cm to 185.4cm with a
mean of 172.7cm and standard deviation of 8.46cm. The subjects' weights range from 55kg to 93kg with a mean of 69kg and standard deviation of 10.1kg.
### Folder structure
In this repository, we have 14,400 samples that comprise RGB, depth, IR, and 3D skeleton joints, which can be very complicated.
To provide simple access for users, we have organized our data into a folder structure, as shown in Figure 4. The folder structure comprises four layers:
(1) modality, (2) location combination, interaction label, and subject, (3) timestamps, and (4) image or csv files. Traversing through this structure,
we first classify the files based on their modality, including RGB, depth, IR, and 3D skeleton joints. The next layer classifies the location, interaction label,
and subject using six-digit codes, *LLIISS*. Here, *LL* stands for the location, which can be *CM* for the indoor open space, *CC* for the indoor confined space,
or *CL* for the outdoor space. Next, *II* denotes numbers ranging from 1–12, where each number corresponds to the enumeration of activities listed in the table below.
Last, *SS* identifies the subject pairs ranging from 1–10. It is worth noting that the same subject pair number in different locations does not represent the same pair. In fact, only *CCII02* and *CLII07*, *CCII01* and *CMII10*, and *CCII03* and *CMII05* share the same subject pairs, respectively. Also, as previously mentioned, we ask each pair of subjects to repeat an interaction for 40 times, all of which are recorded in the same video. To temporally segment each clip, we classify each time window by the start and finish time marks. For example, a folder named 40800222\_43800211 contains a recording starting from 40800222 and ending at 43800211. The clock, which generates the timestamps in milliseconds, begins once the Azure Kinect is connected. Every timestamp folder stores the clip of the corresponding time window, frame by frame, in which all frames are chronologically ordered by numbers ranging from 0–90.
<p align="center" width="100%">
<img width="60%" src="./Figures/folder_structure.png">
Figure 4. The data folder structure for our dataset, which is designed for easy user access. Here, RGB, depth, and IR modalities share an identidcal hierarchy, while
3D skeleton joint folders store all 3D coordinates of a sample clip in a single .csv file.
</p>
| Label ID | Dyadic interaction |
| :--------: | :------- |
| 1 | Waving in |
| 2 | Thumbs up |
| 3 | Waving |
| 4 | Pointing |
| 5 | Showing measurements |
| 6 | Nodding |
| 7 | Drawing circles in the air |
| 8 | Holding palms out |
| 9 | Twirling or scratching hair |
| 10 | Laughing |
| 11 | Arm crossing |
| 12 | Hugging |
<p align="center" width="100%">
Table 1. Activity labels and their corresponding interactions.
</p>
### Sample frames
Sample frames are provided in Figure 5 to visualize the differences between different modalities, each of which possesses
different strengths and weaknesses. RGB frames capture information-rich features like interaction, location, and characteristic features of subjects,
which are informative but fail to prioritize user privacy. However, since RGB frames compress the 3D world into a 2D plane, they often suffer from occlusion
and variation in perspective. On the other hand, 3D skeleton joints reveal the placement of each joint in the 3D space. The additional dimension gives 3D skeleton
joints a desirable perspective-invariant characteristic. Besides the 3D position of each joint, no further information indicative of the subject is conspicuous,
prioritizing the preservation of privacy. This feature is preferred by human-centered applications, such as smart homes, CPSIS, and elder care management.
Overall, the juxtaposition of different modalities exemplifies the inversely proportional relationship between privacy and value of information---the more information
a modality carries, the less user privacy it typically protects. We provide four modalities in our dataset that span this full spectrum to encourage both the exploration
of a single modality and the fusion of multiple modalities to strike a balance between privacy preservation and value of information.
<p align="center" width="100%">
<img width="80%" src="./Figures/example_frames.png">
Figure 5. Sample data of 12 interactions. Modalities presented are, from top row to bottom row: RGB, IR, depth, and 3D skeleton joints.
The 12 interactions are, from left to right: waving in, thumbs up, waving, pointing, showing measurements, nodding, drawing circles in the air,
holding palms out, twirling or scratching hair, laughing, arm crossing, and hugging.
</p>
### Cross-location and cross-subject evaluations
One of the motivations for creating DUET is to encourage the research community to study HAR in the context of dyadic, contextualizable interactions. Hence, there is a
need to provide a baseline training and test data split for algorithms to evaluate their performance. In addition to the basic cross-subject evaluation, we include a
cross-location evaluation. We recognize that applications leveraging dyadic, contextualizable interactions might occur in various locations, both indoor and outdoors.
Therefore, we include cross-location evaluation for HAR algorithm training to ensure resilience to location variation. For the cross-subject evaluation, we use
**CCII05**, **CCII07**, **CLII01**, **CLII05**, **CMII06**, and **CMII09** for the test data, and the remainder for the training data.
For cross-location evaluation, **CCIISS** is selected as the test data, while **CLIISS** and **CMIISS** are used as the training data. |
BramVanroy/wikipedia_culturax_dutch | BramVanroy | "2024-12-23T20:20:49Z" | 4,194 | 3 | [
"task_categories:text-generation",
"task_categories:text2text-generation",
"language:nl",
"size_categories:1B<n<10B",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2412.15450",
"region:us"
] | [
"text-generation",
"text2text-generation"
] | "2024-03-25T22:11:29Z" | ---
language:
- nl
size_categories:
- 10B<n<100B
task_categories:
- text-generation
- text2text-generation
pretty_name: Filtered CulturaX + Wikipedia for Dutch
dataset_info:
- config_name: 100M
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 738455828.5851797
num_examples: 1018200
- name: test
num_bytes: 7458534.414820259
num_examples: 10284
download_size: 411183119
dataset_size: 745914363.0
- config_name: 100k
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 745955.3074739829
num_examples: 1047
- name: test
num_bytes: 7124.692526017029
num_examples: 10
download_size: 366788
dataset_size: 753080.0
- config_name: 10B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 66539945646.34457
num_examples: 40176566
- name: test
num_bytes: 105996030.65543362
num_examples: 64000
download_size: 42132184504
dataset_size: 66645941677.0
- config_name: 10M
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 76734151.72157606
num_examples: 139851
- name: test
num_bytes: 774743.2784239326
num_examples: 1412
download_size: 37995388
dataset_size: 77508895.0
- config_name: 10k
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 72048.30379746835
num_examples: 78
- name: test
num_bytes: 5896
num_examples: 1
download_size: 47197
dataset_size: 77944.30379746835
- config_name: 15B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 99730049355.25276
num_examples: 59584123
- name: test
num_bytes: 107121206.74724333
num_examples: 64000
download_size: 63139415312
dataset_size: 99837170562.0
- config_name: 1B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 6797502496.392602
num_examples: 5102360
- name: test
num_bytes: 68660322.60739774
num_examples: 51538
download_size: 4260450464
dataset_size: 6866162819.0
- config_name: 1M
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 7442665.619329753
num_examples: 10694
- name: test
num_bytes: 75164.38067024625
num_examples: 108
download_size: 3845466
dataset_size: 7517830.0
- config_name: 20B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 132920704365.75093
num_examples: 78991679
- name: test
num_bytes: 107693939.24907027
num_examples: 64000
download_size: 84141456153
dataset_size: 133028398305.0
- config_name: 25B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 166111586295.01904
num_examples: 98399236
- name: test
num_bytes: 108040894.98094498
num_examples: 64000
download_size: 105147418131
dataset_size: 166219627190.0
- config_name: 30B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 199302582477.5805
num_examples: 117806793
- name: test
num_bytes: 108273597.41950662
num_examples: 64000
download_size: 126152714564
dataset_size: 199410856075.0
- config_name: 35B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 232493644456.181
num_examples: 137214350
- name: test
num_bytes: 108440503.81899258
num_examples: 64000
download_size: 147149925109
dataset_size: 232602084960.0
- config_name: 40B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 265684747781.7734
num_examples: 156621907
- name: test
num_bytes: 108566063.22660531
num_examples: 64000
download_size: 168152290262
dataset_size: 265793313845.0
- config_name: 45B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 298875877641.391
num_examples: 176029463
- name: test
num_bytes: 108663946.60903454
num_examples: 64000
download_size: 189159571162
dataset_size: 298984541588.0
- config_name: 50B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 332067028077.12775
num_examples: 195437020
- name: test
num_bytes: 108742395.87226707
num_examples: 64000
download_size: 210160621183
dataset_size: 332175770473.0
- config_name: 55B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 365258192681.75964
num_examples: 214844577
- name: test
num_bytes: 108806676.24034382
num_examples: 64000
download_size: 231164757019
dataset_size: 365366999358.0
- config_name: 5B
features:
- name: text
dtype: string
- name: url
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 33351938314.309906
num_examples: 20769009
- name: test
num_bytes: 102774477.69009268
num_examples: 64000
download_size: 21119808690
dataset_size: 33454712792.0
configs:
- config_name: 100M
data_files:
- split: train
path: 100M/train-*
- split: test
path: 100M/test-*
- config_name: 100k
data_files:
- split: train
path: 100k/train-*
- split: test
path: 100k/test-*
- config_name: 10B
data_files:
- split: train
path: 10B/train-*
- split: test
path: 10B/test-*
- config_name: 10M
data_files:
- split: train
path: 10M/train-*
- split: test
path: 10M/test-*
- config_name: 10k
data_files:
- split: train
path: 10k/train-*
- split: test
path: 10k/test-*
- config_name: 15B
data_files:
- split: train
path: 15B/train-*
- split: test
path: 15B/test-*
- config_name: 1B
data_files:
- split: train
path: 1B/train-*
- split: test
path: 1B/test-*
- config_name: 1M
data_files:
- split: train
path: 1M/train-*
- split: test
path: 1M/test-*
- config_name: 20B
data_files:
- split: train
path: 20B/train-*
- split: test
path: 20B/test-*
- config_name: 25B
data_files:
- split: train
path: 25B/train-*
- split: test
path: 25B/test-*
- config_name: 30B
data_files:
- split: train
path: 30B/train-*
- split: test
path: 30B/test-*
- config_name: 35B
data_files:
- split: train
path: 35B/train-*
- split: test
path: 35B/test-*
- config_name: 40B
data_files:
- split: train
path: 40B/train-*
- split: test
path: 40B/test-*
- config_name: 45B
data_files:
- split: train
path: 45B/train-*
- split: test
path: 45B/test-*
- config_name: 50B
data_files:
- split: train
path: 50B/train-*
- split: test
path: 50B/test-*
- config_name: 55B
data_files:
- split: train
path: 55B/train-*
- split: test
path: 55B/test-*
- config_name: 5B
data_files:
- split: train
path: 5B/train-*
- split: test
path: 5B/test-*
---
# Filtered CulturaX + Wikipedia for Dutch
This is a combined and filtered version of [CulturaX](https://huggingface.co/datasets/uonlp/CulturaX) and [Wikipedia](https://huggingface.co/datasets/wikimedia/wikipedia), only including Dutch. It is intended for the training of LLMs.
Different configs are available based on the number of tokens (see a section below with an overview). This can be useful if you want to know exactly how many tokens you have. Great for using as a streaming dataset, too. Tokens are counted as white-space tokens, so depending on your tokenizer, you'll likely end up with more tokens than indicated here.
Every config also has a test set (for validation) of 1% the total size of the dataset, with a minimum of 1 and a maximum of 64,000 samples (~16M tokens).
Wikipedia and CulturaX were shuffled before merging and the test set creation was also shuffled. Priority is given to Wikipedia to prioritize knowledge and cultural content, so the smaller configs will consist exclusively of Wikipedia and for the larger configs we augment with CulturaX. Every config builds further on the previous, so this means that every config contains the same data as the smaller ones and more HOWEVER their train/test splits are not the same, so the test set of one config may overlap with the training samples of another config. This is usually not a problem but just be aware that you do not train on one config's training set and test with another config's test set.
## Citation
If you use [Fietje](https://huggingface.co/BramVanroy/fietje-2) or the [CulturaX + Wikipedia filtered subset](https://huggingface.co/datasets/BramVanroy/wikipedia_culturax_dutch) in your work, please cite the following paper:
```bibtex
@misc{vanroy2024fietjeopenefficientllm,
title={Fietje: An open, efficient LLM for Dutch},
author={Bram Vanroy},
year={2024},
eprint={2412.15450},
archivePrefix={arXiv},
primaryClass={cs.CL},
url={https://arxiv.org/abs/2412.15450},
}
```
## Configs
### `10k` -- 79 samples -- 10,087 tokens
- ratio_wikipedia: 100.00%
- total_num_tokens: 10,087
- train_num_tokens: 9,205
- test_num_tokens: 882
- total_num_samples: 79
- train_num_samples: 78
- test_num_samples: 1
### `100k` -- 1,057 samples -- 100,075 tokens
- ratio_wikipedia: 100.00%
- total_num_tokens: 100,075
- train_num_tokens: 98,044
- test_num_tokens: 2,031
- total_num_samples: 1,057
- train_num_samples: 1,047
- test_num_samples: 10
### `1M` -- 10,802 samples -- 1,000,239 tokens
- ratio_wikipedia: 100.00%
- total_num_tokens: 1,000,239
- train_num_tokens: 991,119
- test_num_tokens: 9,120
- total_num_samples: 10,802
- train_num_samples: 10,694
- test_num_samples: 108
### `10M` -- 141,263 samples -- 10,000,022 tokens
- ratio_wikipedia: 100.00%
- total_num_tokens: 10,000,022
- train_num_tokens: 9,874,772
- test_num_tokens: 125,250
- total_num_samples: 141,263
- train_num_samples: 139,851
- test_num_samples: 1,412
### `100M` -- 1,028,484 samples -- 100,000,047 tokens
- ratio_wikipedia: 100.00%
- total_num_tokens: 100,000,047
- train_num_tokens: 99,013,372
- test_num_tokens: 986,675
- total_num_samples: 1,028,484
- train_num_samples: 1,018,200
- test_num_samples: 10,284
### `1B` -- 5,153,898 samples -- 1,000,000,187 tokens
- ratio_wikipedia: 61.21%
- total_num_tokens: 1,000,000,187
- train_num_tokens: 989,990,190
- test_num_tokens: 10,009,997
- total_num_samples: 5,153,898
- train_num_samples: 5,102,360
- test_num_samples: 51,538
### `5B` -- 20,833,009 samples -- 5,000,000,076 tokens
- ratio_wikipedia: 25.35%
- total_num_tokens: 5,000,000,076
- train_num_tokens: 4,984,493,654
- test_num_tokens: 15,506,422
- total_num_samples: 20,833,009
- train_num_samples: 20,769,009
- test_num_samples: 64,000
### `10B` -- 40,240,566 samples -- 10,000,000,115 tokens
- ratio_wikipedia: 18.41%
- total_num_tokens: 10,000,000,115
- train_num_tokens: 9,984,156,828
- test_num_tokens: 15,843,287
- total_num_samples: 40,240,566
- train_num_samples: 40,176,566
- test_num_samples: 64,000
### `15B` -- 59,648,123 samples -- 15,000,000,154 tokens
- ratio_wikipedia: 15.98%
- total_num_tokens: 15,000,000,154
- train_num_tokens: 14,983,970,518
- test_num_tokens: 16,029,636
- total_num_samples: 59,648,123
- train_num_samples: 59,584,123
- test_num_samples: 64,000
### `20B` -- 79,055,679 samples -- 20,000,000,009 tokens
- ratio_wikipedia: 14.75%
- total_num_tokens: 20,000,000,009
- train_num_tokens: 19,983,799,357
- test_num_tokens: 16,200,652
- total_num_samples: 79,055,679
- train_num_samples: 78,991,679
- test_num_samples: 64,000
### `25B` -- 98,463,236 samples -- 25,000,000,048 tokens
- ratio_wikipedia: 14.00%
- total_num_tokens: 25,000,000,048
- train_num_tokens: 24,983,765,326
- test_num_tokens: 16,234,722
- total_num_samples: 98,463,236
- train_num_samples: 98,399,236
- test_num_samples: 64,000
### `30B` -- 117,870,793 samples -- 30,000,000,087 tokens
- ratio_wikipedia: 13.50%
- total_num_tokens: 30,000,000,087
- train_num_tokens: 29,983,707,932
- test_num_tokens: 16,292,155
- total_num_samples: 117,870,793
- train_num_samples: 117,806,793
- test_num_samples: 64,000
### `35B` -- 137,278,350 samples -- 35,000,000,126 tokens
- ratio_wikipedia: 13.14%
- total_num_tokens: 35,000,000,126
- train_num_tokens: 34,983,914,739
- test_num_tokens: 16,085,387
- total_num_samples: 137,278,350
- train_num_samples: 137,214,350
- test_num_samples: 64,000
### `40B` -- 156,685,907 samples -- 40,000,000,165 tokens
- ratio_wikipedia: 12.87%
- total_num_tokens: 40,000,000,165
- train_num_tokens: 39,983,508,625
- test_num_tokens: 16,491,540
- total_num_samples: 156,685,907
- train_num_samples: 156,621,907
- test_num_samples: 64,000
### `45B` -- 176,093,463 samples -- 45,000,000,020 tokens
- ratio_wikipedia: 12.66%
- total_num_tokens: 45,000,000,020
- train_num_tokens: 44,983,608,118
- test_num_tokens: 16,391,902
- total_num_samples: 176,093,463
- train_num_samples: 176,029,463
- test_num_samples: 64,000
### `50B` -- 195,501,020 samples -- 50,000,000,059 tokens
- ratio_wikipedia: 12.49%
- total_num_tokens: 50,000,000,059
- train_num_tokens: 49,983,567,461
- test_num_tokens: 16,432,598
- total_num_samples: 195,501,020
- train_num_samples: 195,437,020
- test_num_samples: 64,000
### `55B` -- 214,908,577 samples -- 55,000,000,098 tokens
- ratio_wikipedia: 12.35%
- total_num_tokens: 55,000,000,098
- train_num_tokens: 54,983,723,278
- test_num_tokens: 16,276,820
- total_num_samples: 214,908,577
- train_num_samples: 214,844,577
- test_num_samples: 64,000
## Filtering
While CultruaX already has done a lot of filtering, some more filtering can be done to improve the quality of the corpus. These filters are described below.
The baseline ratios (punctuation, uppercase, digits) were calculated on the SONAR-500 corpus (excluding WRPEA WRPED WRUEA WRUED WRUEB).
**CulturaX**:
- removed documents that contain the text "rechten voorbehouden" or "rights reserved"
- remove documents whose URL contained "wikipedia.org" (because we include a cleaned version of Wikipedia ourselves)
- removed documents that contain a "bad word" (see the section below)
- removed documents that contain any non-latin characters. The idea is that "knowledge"-based information (e.g. original writing of a name) are allowed
when the data comes from Wikipedia, but not from any other webcrawl, to avoid unsollicited noise.
**CulturaX + Wikipedia**:
- removed documents where ratio of punctuation marks vs. non-whitespace characters is higher than 0.2
- removed documents where ratio of uppercase vs. non-whitespace characters is higher than 0.22
- removed documents where ratio of digits vs. non-whitespace characters is higher than 0.16
- removed documents where the average token length is < 2 or > 20
## Bad words
```python
BAD_PHRASES_DOC_LEVEL = {
# https://en.wikipedia.org/wiki/Dutch_profanity
"achterlijk",
"debiel",
"downie",
"idioot",
"kankerlijer",
"klere",
"kolere",
"minkukel",
"pestkop",
"pleuris",
"pleuritis",
"teringlijer",
"tyfuslijer",
"gadver",
"getver",
"godver",
"godskolere",
"godverork",
"graftak",
"kopvod",
"verdomme",
"anaalgeneraal",
"bitch",
"dikzak",
"flikker",
"fok",
"fuck",
"hoer",
"klootzak",
"klote",
"kreng",
"kringspiermusketier",
"kut",
"lamzak",
"lul",
"manwijf",
"matennaai",
"neuken",
"neuker",
"ouwehoer",
"reet",
"reetkever",
"reetridder",
"rotzak",
"schijt",
"shit",
"slet",
"slijmbal",
"slons",
"sodemieter",
"stoephoer",
"swaffel",
"teef",
"trut",
"tut",
"zak",
"uilskuiken",
"zeik",
"bamivreter",
"bosneger",
"neger",
"fransoos",
"geitenneuker",
"kaaskop",
"kakker",
"koelie",
"lijp",
"medelander",
"mocro",
"mof",
"nikker",
"poepchinees",
"roetmop",
"spaghettivreter",
"loempiavouwer",
"spanjool",
"spleetoog",
"tatta",
"tokkie",
"zandneger",
"zwartzak",
"halvezool",
"kenau",
"klootviool",
"knuppel",
"koekert",
"koekwaus",
"oelewapper",
"smeerlap",
"sukkel",
"sul",
"wappie",
"wijf",
"zooi",
# xxx (a.o. https://gitlab.com/yhavinga/c4nlpreproc/-/blob/master/clean/badwords_ennl.py?ref_type=heads)
"xxx",
"anal",
"blowjob",
"buttplug",
"cock",
"cunt",
"geil",
"sex", # Standaardnederlands = seks, maybe we catch some porn or socialmedia sites with this misspelling
"porn",
# extra
"nigger",
"nigga",
"hoerig",
"klojo",
}
```
## Config details
## License information
For CulturaX: https://huggingface.co/datasets/uonlp/CulturaX#license-information
For Wikipedia: https://huggingface.co/datasets/wikimedia/wikipedia#licensing-information |
DL3DV/DL3DV-Benchmark | DL3DV | "2024-03-06T04:11:00Z" | 4,193 | 22 | [
"size_categories:n>1T",
"region:us",
"3D vision",
"novel view synthesis",
"NeRF",
"3D Gaussian Splatting",
"Generalizable NeRF",
"Generative Methods",
"text-to-3d",
"image-to-3d"
] | null | "2023-12-31T12:23:57Z" | ---
tags:
- 3D vision
- novel view synthesis
- NeRF
- 3D Gaussian Splatting
- Generalizable NeRF
- Generative Methods
- text-to-3d
- image-to-3d
pretty_name: DL3DV
size_categories:
- n>1T
---
# DL3DV Benchmark Download Instructions
This repo contains all the benchmark data, including a README, License, colmaps/images (compatible to nerfstudio and 3D gaussian splatting), scene labels and the performances of methods reported in the paper (ZipNeRF, 3D GS, MipNeRF-360, nerfacto, Instant-NGP).
# Download
As the whole benchmark dataset is very big (~2.1T), we provide two ways to download: full benchmark dataset download or use a script to download a subset for memory sensitive cases.
## Full benchmark dataset download
If you have enough space (more than 2.1T), download the full benchmark is simple:
``` bash
# Make sure you have git-lfs installed
# (https://git-lfs.github.com/)
git lfs install
git clone https://huggingface.co/datasets/DL3DV/DL3DV-10K-Benchmark
```
## Script download
Sometimes you may just need to flexibly download a subset the benchmark, e.g. just download several scenes, or just need images with 960P resolution (images_4 level used in the paper). To provide this flexibiliy, we provide a [download.py](https://huggingface.co/datasets/DL3DV/DL3DV-10K-Benchmark/blob/main/download.py) script for use.
Use this [link](https://huggingface.co/datasets/DL3DV/DL3DV-10K-Benchmark/resolve/main/download.py?download=true) to download.
This download script provies several different options to use:
* Download the full dataset (which is equivalent to git clone method). In total 2.1T.
* Download the full dataset with only 960P images. In total 100~150G.
* Download with specific scene name (hash name)
### Environment Setup
The download script relies on `huggingface hub`, `tqdm`, and `pandas`. You can download by the following command in your python environment. The download script was
```bash
pip install huggingface_hub tqdm pandas
```
After downloading `huggingface_hub`, remember to login first to get ready for download.
```bash
# in terminal, use the following command and your huggingface token to login
huggingface-cli login
```
### Download the full benchmark
To download the full dataset, use this command:
``` bash
# Note, it is suggested to use --clean_cache flag as it saves space by cleaning the cache folder created by huggingface hub API.
python download.py --subset full --clean_cache
```
### Download the full benchmark with 960P resolution (same with the paper)
Not all the methods can handle multi-resolution. Some methods have assumptions on the input resolution. So the paper uses 960P.
``` bash
# Note, it is suggested to use --clean_cache flag as it saves space by cleaning the cache folder created by huggingface hub API.
python download.py --subset full --only_level4 --clean_cache
```
### Download with specific scene name (hash name)
There is a benchmark preview page in https://github.com/DL3DV-10K/Dataset. If you just need a specific hash (e.g. 0853979305f7ecb80bd8fc2c8df916410d471ef04ed5f1a64e9651baa41d7695), use the following command:
``` bash
# Note, it is suggested to use --clean_cache flag as it saves space by cleaning the cache folder created by huggingface hub API.
# e.g. a scene with hash 0853979305f7ecb80bd8fc2c8df916410d471ef04ed5f1a64e9651baa41d7695
python download.py --subset hash --hash 0853979305f7ecb80bd8fc2c8df916410d471ef04ed5f1a64e9651baa41d7695 --only_level4
``` |
statmt/cc100 | statmt | "2024-03-05T12:15:34Z" | 4,183 | 82 | [
"task_categories:text-generation",
"task_categories:fill-mask",
"task_ids:language-modeling",
"task_ids:masked-language-modeling",
"annotations_creators:no-annotation",
"language_creators:found",
"multilinguality:multilingual",
"source_datasets:original",
"language:af",
"language:am",
"language:ar",
"language:as",
"language:az",
"language:be",
"language:bg",
"language:bn",
"language:br",
"language:bs",
"language:ca",
"language:cs",
"language:cy",
"language:da",
"language:de",
"language:el",
"language:en",
"language:eo",
"language:es",
"language:et",
"language:eu",
"language:fa",
"language:ff",
"language:fi",
"language:fr",
"language:fy",
"language:ga",
"language:gd",
"language:gl",
"language:gn",
"language:gu",
"language:ha",
"language:he",
"language:hi",
"language:hr",
"language:ht",
"language:hu",
"language:hy",
"language:id",
"language:ig",
"language:is",
"language:it",
"language:ja",
"language:jv",
"language:ka",
"language:kk",
"language:km",
"language:kn",
"language:ko",
"language:ku",
"language:ky",
"language:la",
"language:lg",
"language:li",
"language:ln",
"language:lo",
"language:lt",
"language:lv",
"language:mg",
"language:mk",
"language:ml",
"language:mn",
"language:mr",
"language:ms",
"language:my",
"language:ne",
"language:nl",
"language:no",
"language:ns",
"language:om",
"language:or",
"language:pa",
"language:pl",
"language:ps",
"language:pt",
"language:qu",
"language:rm",
"language:ro",
"language:ru",
"language:sa",
"language:sc",
"language:sd",
"language:si",
"language:sk",
"language:sl",
"language:so",
"language:sq",
"language:sr",
"language:ss",
"language:su",
"language:sv",
"language:sw",
"language:ta",
"language:te",
"language:th",
"language:tl",
"language:tn",
"language:tr",
"language:ug",
"language:uk",
"language:ur",
"language:uz",
"language:vi",
"language:wo",
"language:xh",
"language:yi",
"language:yo",
"language:zh",
"language:zu",
"license:unknown",
"size_categories:10M<n<100M",
"arxiv:1911.02116",
"arxiv:1911.00359",
"region:us"
] | [
"text-generation",
"fill-mask"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- no-annotation
language_creators:
- found
language:
- af
- am
- ar
- as
- az
- be
- bg
- bn
- br
- bs
- ca
- cs
- cy
- da
- de
- el
- en
- eo
- es
- et
- eu
- fa
- ff
- fi
- fr
- fy
- ga
- gd
- gl
- gn
- gu
- ha
- he
- hi
- hr
- ht
- hu
- hy
- id
- ig
- is
- it
- ja
- jv
- ka
- kk
- km
- kn
- ko
- ku
- ky
- la
- lg
- li
- ln
- lo
- lt
- lv
- mg
- mk
- ml
- mn
- mr
- ms
- my
- ne
- nl
- 'no'
- ns
- om
- or
- pa
- pl
- ps
- pt
- qu
- rm
- ro
- ru
- sa
- sc
- sd
- si
- sk
- sl
- so
- sq
- sr
- ss
- su
- sv
- sw
- ta
- te
- th
- tl
- tn
- tr
- ug
- uk
- ur
- uz
- vi
- wo
- xh
- yi
- yo
- zh
- zu
language_bcp47:
- bn-Latn
- hi-Latn
- my-x-zawgyi
- ta-Latn
- te-Latn
- ur-Latn
- zh-Hans
- zh-Hant
license:
- unknown
multilinguality:
- multilingual
size_categories:
- 10M<n<100M
- 1M<n<10M
source_datasets:
- original
task_categories:
- text-generation
- fill-mask
task_ids:
- language-modeling
- masked-language-modeling
paperswithcode_id: cc100
pretty_name: CC-100
dataset_info:
- config_name: am
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 935440775
num_examples: 3124561
download_size: 138821056
dataset_size: 935440775
- config_name: sr
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 10299427460
num_examples: 35747957
download_size: 1578989320
dataset_size: 10299427460
- config_name: ka
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 10228918845
num_examples: 31708119
download_size: 1100446372
dataset_size: 10228918845
config_names:
- am
- sr
---
# Dataset Card for CC-100
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://data.statmt.org/cc-100/
- **Repository:** [More Information Needed]
- **Paper:** https://aclanthology.org/2020.acl-main.747/
- **Paper:** https://aclanthology.org/2020.lrec-1.494/
- **Paper:** https://arxiv.org/abs/1911.02116
- **Paper:** https://arxiv.org/abs/1911.00359
- **Leaderboard:** [More Information Needed]
- **Point of Contact:** [More Information Needed]
### Dataset Summary
This corpus is an attempt to recreate the dataset used for training XLM-R. This corpus comprises of monolingual data for 100+ languages and also includes data for romanized languages (indicated by *_rom). This was constructed using the urls and paragraph indices provided by the CC-Net repository by processing January-December 2018 Commoncrawl snapshots.
### Supported Tasks and Leaderboards
CC-100 is mainly intended to pretrain language models and word representations.
### Languages
The languages in the dataset are:
- af: Afrikaans (305M)
- am: Amharic (133M)
- ar: Arabic (5.4G)
- as: Assamese (7.6M)
- az: Azerbaijani (1.3G)
- be: Belarusian (692M)
- bg: Bulgarian (9.3G)
- bn: Bengali (860M)
- bn_rom: Bengali Romanized (164M)
- br: Breton (21M)
- bs: Bosnian (18M)
- ca: Catalan (2.4G)
- cs: Czech (4.4G)
- cy: Welsh (179M)
- da: Danish (12G)
- de: German (18G)
- el: Greek (7.4G)
- en: English (82G)
- eo: Esperanto (250M)
- es: Spanish (14G)
- et: Estonian (1.7G)
- eu: Basque (488M)
- fa: Persian (20G)
- ff: Fulah (3.1M)
- fi: Finnish (15G)
- fr: French (14G)
- fy: Frisian (38M)
- ga: Irish (108M)
- gd: Scottish Gaelic (22M)
- gl: Galician (708M)
- gn: Guarani (1.5M)
- gu: Gujarati (242M)
- ha: Hausa (61M)
- he: Hebrew (6.1G)
- hi: Hindi (2.5G)
- hi_rom: Hindi Romanized (129M)
- hr: Croatian (5.7G)
- ht: Haitian (9.1M)
- hu: Hungarian (15G)
- hy: Armenian (776M)
- id: Indonesian (36G)
- ig: Igbo (6.6M)
- is: Icelandic (779M)
- it: Italian (7.8G)
- ja: Japanese (15G)
- jv: Javanese (37M)
- ka: Georgian (1.1G)
- kk: Kazakh (889M)
- km: Khmer (153M)
- kn: Kannada (360M)
- ko: Korean (14G)
- ku: Kurdish (90M)
- ky: Kyrgyz (173M)
- la: Latin (609M)
- lg: Ganda (7.3M)
- li: Limburgish (2.2M)
- ln: Lingala (2.3M)
- lo: Lao (63M)
- lt: Lithuanian (3.4G)
- lv: Latvian (2.1G)
- mg: Malagasy (29M)
- mk: Macedonian (706M)
- ml: Malayalam (831M)
- mn: Mongolian (397M)
- mr: Marathi (334M)
- ms: Malay (2.1G)
- my: Burmese (46M)
- my_zaw: Burmese (Zawgyi) (178M)
- ne: Nepali (393M)
- nl: Dutch (7.9G)
- no: Norwegian (13G)
- ns: Northern Sotho (1.8M)
- om: Oromo (11M)
- or: Oriya (56M)
- pa: Punjabi (90M)
- pl: Polish (12G)
- ps: Pashto (107M)
- pt: Portuguese (13G)
- qu: Quechua (1.5M)
- rm: Romansh (4.8M)
- ro: Romanian (16G)
- ru: Russian (46G)
- sa: Sanskrit (44M)
- sc: Sardinian (143K)
- sd: Sindhi (67M)
- si: Sinhala (452M)
- sk: Slovak (6.1G)
- sl: Slovenian (2.8G)
- so: Somali (78M)
- sq: Albanian (1.3G)
- sr: Serbian (1.5G)
- ss: Swati (86K)
- su: Sundanese (15M)
- sv: Swedish (21G)
- sw: Swahili (332M)
- ta: Tamil (1.3G)
- ta_rom: Tamil Romanized (68M)
- te: Telugu (536M)
- te_rom: Telugu Romanized (79M)
- th: Thai (8.7G)
- tl: Tagalog (701M)
- tn: Tswana (8.0M)
- tr: Turkish (5.4G)
- ug: Uyghur (46M)
- uk: Ukrainian (14G)
- ur: Urdu (884M)
- ur_rom: Urdu Romanized (141M)
- uz: Uzbek (155M)
- vi: Vietnamese (28G)
- wo: Wolof (3.6M)
- xh: Xhosa (25M)
- yi: Yiddish (51M)
- yo: Yoruba (1.1M)
- zh-Hans: Chinese (Simplified) (14G)
- zh-Hant: Chinese (Traditional) (5.3G)
- zu: Zulu (4.3M)
## Dataset Structure
### Data Instances
An example from the `am` configuration:
```
{'id': '0', 'text': 'ተለዋዋጭ የግድግዳ አንግል ሙቅ አንቀሳቅሷል ቲ-አሞሌ አጥቅሼ ...\n'}
```
Each data point is a paragraph of text. The paragraphs are presented in the original (unshuffled) order. Documents are separated by a data point consisting of a single newline character.
### Data Fields
The data fields are:
- id: id of the example
- text: content as a string
### Data Splits
Sizes of some configurations:
| name |train|
|----------|----:|
|am|3124561|
|sr|35747957|
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
[More Information Needed]
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
The data comes from multiple web pages in a large variety of languages.
### Annotations
The dataset does not contain any additional annotations.
#### Annotation process
[N/A]
#### Who are the annotators?
[N/A]
### Personal and Sensitive Information
Being constructed from Common Crawl, personal and sensitive information might be present. This **must** be considered before training deep learning models with CC-100, specially in the case of text-generation models.
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
This dataset was prepared by [Statistical Machine Translation at the University of Edinburgh](https://www.statmt.org/ued/) using the [CC-Net](https://github.com/facebookresearch/cc_net) toolkit by Facebook Research.
### Licensing Information
Statistical Machine Translation at the University of Edinburgh makes no claims of intellectual property on the work of preparation of the corpus. By using this, you are also bound by the [Common Crawl terms of use](https://commoncrawl.org/terms-of-use/) in respect of the content contained in the dataset.
### Citation Information
Please cite the following if you found the resources in this corpus useful:
```bibtex
@inproceedings{conneau-etal-2020-unsupervised,
title = "Unsupervised Cross-lingual Representation Learning at Scale",
author = "Conneau, Alexis and
Khandelwal, Kartikay and
Goyal, Naman and
Chaudhary, Vishrav and
Wenzek, Guillaume and
Guzm{\'a}n, Francisco and
Grave, Edouard and
Ott, Myle and
Zettlemoyer, Luke and
Stoyanov, Veselin",
editor = "Jurafsky, Dan and
Chai, Joyce and
Schluter, Natalie and
Tetreault, Joel",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.acl-main.747",
doi = "10.18653/v1/2020.acl-main.747",
pages = "8440--8451",
abstract = "This paper shows that pretraining multilingual language models at scale leads to significant performance gains for a wide range of cross-lingual transfer tasks. We train a Transformer-based masked language model on one hundred languages, using more than two terabytes of filtered CommonCrawl data. Our model, dubbed XLM-R, significantly outperforms multilingual BERT (mBERT) on a variety of cross-lingual benchmarks, including +14.6{\%} average accuracy on XNLI, +13{\%} average F1 score on MLQA, and +2.4{\%} F1 score on NER. XLM-R performs particularly well on low-resource languages, improving 15.7{\%} in XNLI accuracy for Swahili and 11.4{\%} for Urdu over previous XLM models. We also present a detailed empirical analysis of the key factors that are required to achieve these gains, including the trade-offs between (1) positive transfer and capacity dilution and (2) the performance of high and low resource languages at scale. Finally, we show, for the first time, the possibility of multilingual modeling without sacrificing per-language performance; XLM-R is very competitive with strong monolingual models on the GLUE and XNLI benchmarks. We will make our code and models publicly available.",
}
```
```bibtex
@inproceedings{wenzek-etal-2020-ccnet,
title = "{CCN}et: Extracting High Quality Monolingual Datasets from Web Crawl Data",
author = "Wenzek, Guillaume and
Lachaux, Marie-Anne and
Conneau, Alexis and
Chaudhary, Vishrav and
Guzm{\'a}n, Francisco and
Joulin, Armand and
Grave, Edouard",
editor = "Calzolari, Nicoletta and
B{\'e}chet, Fr{\'e}d{\'e}ric and
Blache, Philippe and
Choukri, Khalid and
Cieri, Christopher and
Declerck, Thierry and
Goggi, Sara and
Isahara, Hitoshi and
Maegaard, Bente and
Mariani, Joseph and
Mazo, H{\'e}l{\`e}ne and
Moreno, Asuncion and
Odijk, Jan and
Piperidis, Stelios",
booktitle = "Proceedings of the Twelfth Language Resources and Evaluation Conference",
month = may,
year = "2020",
address = "Marseille, France",
publisher = "European Language Resources Association",
url = "https://aclanthology.org/2020.lrec-1.494",
pages = "4003--4012",
abstract = "Pre-training text representations have led to significant improvements in many areas of natural language processing. The quality of these models benefits greatly from the size of the pretraining corpora as long as its quality is preserved. In this paper, we describe an automatic pipeline to extract massive high-quality monolingual datasets from Common Crawl for a variety of languages. Our pipeline follows the data processing introduced in fastText (Mikolov et al., 2017; Grave et al., 2018), that deduplicates documents and identifies their language. We augment this pipeline with a filtering step to select documents that are close to high quality corpora like Wikipedia.",
language = "English",
ISBN = "979-10-95546-34-4",
}
```
### Contributions
Thanks to [@abhishekkrthakur](https://github.com/abhishekkrthakur) for adding this dataset. |
sraimund/MapPool | sraimund | "2024-09-02T14:29:18Z" | 4,172 | 1 | [
"license:cc-by-4.0",
"size_categories:10M<n<100M",
"format:parquet",
"modality:image",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-05-10T19:49:38Z" | ---
license: cc-by-4.0
---
# MapPool - Bubbling up an extremely large corpus of maps for AI
<img src="map_bubbles.png" alt="many small air bubbles containing colorful maps arising with light rays under the ocean (AI-generated image)" width="256"/>
MapPool is a dataset of 75 million potential maps and textual captions. It has been derived from [CommonPool](https://www.datacomp.ai/), a dataset consisting of 12 billion text-image pairs from the Internet. The images have been encoded by a vision transformer and classified into maps and non-maps by a support vector machine. This approach outperforms previous models and yields a validation accuracy of 98.5%. The MapPool dataset may help to train data-intensive architectures in order to establish vision and language foundation models specialized in maps. The analysis of the dataset and the exploration of the embedding space offers a large potential for future work.
## How is the data structured?
| Key | Meaning
|----------------------------------|----------
| uid | Unique identifier
| url | Link to the image
| text | Textual description of the image
| original_width / original_height | Dimensions of the image
| sha256 | Hash of the image (to verify if the image is the same as the one in the URL)
| l14_img | Embedding of the image (768 dimensions)
| l14_txt | Embedding of the textual description (768 dimensions)
| clip_l14_similarity_score | Similarity between the image and text (higher values indicate higher similarity)
## How can this repository be downloaded?
Simply use Git (or TortoiseGit):
```
git clone https://huggingface.co/datasets/sraimund/MapPool/
```
Alternatively use the HuggingFace API:
```python
import json
import os
from huggingface_hub import hf_hub_download
download_folder = "<your-download-folder>"
repo_id = "sraimund/MapPool"
# this file is given at the root of this repository
with open("file_list.json") as f:
file_list = json.load(f)
for part, files in file_list.items():
for file in files:
file_path = f"{download_folder}/{part}/{file}.parquet"
if os.path.exists(file_path):
continue
hf_hub_download(repo_type="dataset",
repo_id=repo_id,
filename=f"{part}/{file}.parquet",
local_dir=download_folder,
token=read_token)
```
About 225 GB of space are required. The amount doubles when using Git since the files are duplicated in the .git folder.
## How can the parquet files be read?
You can read parquet files with [pandas](https://pandas.pydata.org/):
```python
import pandas as pd
df = pd.read_parquet("<file_name>.parquet")
```
The pyarrow or fastparquet library is required additionally.
## How can the map images be downloaded?
You can download the map images with [img2dataset](https://github.com/rom1504/img2dataset).
```python
from img2dataset import download
download(
thread_count=64,
url_list="<file_name>.parquet",
output_folder="<folder_path>",
resize_mode="no",
output_format="files",
input_format="parquet",
url_col="url",
caption_col="text",
verify_hash=("sha256", "sha256"),
)
```
For Windows users:
```python
import multiprocessing as mp
from img2dataset import download
# a small patch is also needed: https://github.com/rom1504/img2dataset/issues/347
def main():
download(...)
if __name__ == "__main__":
multiprocessing.freeze_support()
main()
```
As the Internet is constantly changing, about two thirds of the original images (= 48 million) are still downloadable. 6TB of space are required to store them in their original formats and 100GB of space are needed when creating 128x128px thumbnails in the webm format with 60% quality. Downloading the images took 40 hours with 24 CPUs, 30GB RAM, and 40MB/s of network traffic on average.
## How was this dataset created?
MapPool has been created by classifying the image embeddings included in [CommonPool](https://huggingface.co/datasets/mlfoundations/datacomp_xlarge), which have been generated by two pre-trained vision transformers (ViTs). The [L/14 model](https://github.com/mlfoundations/open_clip) with more parameters and outputting 768-dimensional embeddings has been considered since it has achieved higher classification accuracies. In this work, different map classifiers (Table 1) from [scikit-learn](https://scikit-learn.org/) with the [Intel Extension](https://intel.github.io/scikit-learn-intelex) have been trained on the embeddings of 1,860 maps and 1,860 non-maps, and have been evaluated on 1,240 maps and 1,240 non-maps ([Schnürer et al. 2021](https://doi.org/10.1080/00087041.2020.1738112)). Only simple classification models have been considered due to their efficiency and as meaningful embeddings have already been created by the vision transformer.
| Model | Accuracy
|----------------------------------------------------------|----------
| Xception / InceptionResNetV2 (= Baseline) | 96.7
| ViT-L/14 + L2 distance to averaged embeddings | 96.7
| ViT-L/14 + Logistic Regression | 97.9
| ViT-L/14 + Multilayer Perceptron (3x256 units) | 98.2
| ViT-L/14 + Support Vector Machine (polynomial, degree 3) | 98.5
With the Support Vector Machine, 500,000 image embeddings could be classified within 10 seconds. Downloading, classifying the whole dataset, and uploading the results took about 50 hours with 10 CPUs, 120GB RAM, and 500MB/s of network traffic on average.
## Is the inference model available?
Yes, try it out and download it here: [https://huggingface.co/spaces/sraimund/MapPool](https://huggingface.co/spaces/sraimund/MapPool)
## What are the limitations?
A qualitative inspection of the detected maps looks promising; however, it is not known what the actual accuracy is. Especially the false negative rate is hard to estimate due to the high number of non-maps among the CommonPool images. Mixtures between natural images and maps (e.g., a map printed on a bag, a map in a park) have not been further examined.
Textual embeddings have not been considered in the separation process so far. The training dataset for the map classifier has a large visual variety, such as pictorial maps and 3D maps as well as sketches and paintings. However, the textual descriptions may be too biased since the training dataset originates only from one source.
## What are future research directions?
A detailed analysis of the content and metadata of maps in MapPool, potentially resulting in a search engine, is the subject of future work. Additionally, the visual and textual embedding space may be explored to refine the map classifier and to detect duplicates among the images. It can be examined whether training with map-only images leads to better results for cartographic tasks, for instance generating maps based on textual prompts, than with a mixture of maps and other images.
Feel free to contact [me](https://schnuerer.dev/contact/) in case you like to collaborate!
## Disclaimer
The creator is not responsible for the content of linked external websites and will not guarantee for any damage any content of these websites may cause.
## License
The dataset is published under the Creative Commons Attribution 4.0 license. Please respect the copyright of the original images when making use of MapPool.
## Citation
A [short paper](https://infoscience.epfl.ch/handle/20.500.14299/240495) is available.
```
@inproceedings{Schnürer_MapPool_2024, title={MapPool - Bubbling up an extremely large corpus of maps for AI}, author={Schnürer, Raimund}, year={2024}, url={https://infoscience.epfl.ch/handle/20.500.14299/240495}}
``` |
AI-MO/NuminaMath-CoT | AI-MO | "2024-11-25T05:31:43Z" | 4,166 | 330 | [
"task_categories:text-generation",
"language:en",
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us",
"aimo",
"math"
] | [
"text-generation"
] | "2024-07-15T20:14:23Z" | ---
dataset_info:
features:
- name: source
dtype: string
- name: problem
dtype: string
- name: solution
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 2495457595.0398345
num_examples: 859494
- name: test
num_bytes: 290340.31593470514
num_examples: 100
download_size: 1234351634
dataset_size: 2495747935.355769
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
license: apache-2.0
task_categories:
- text-generation
language:
- en
tags:
- aimo
- math
pretty_name: NuminaMath CoT
---
# Dataset Card for NuminaMath CoT
## Dataset Description
- **Homepage:** https://projectnumina.ai
- **Repository:** https://github.com/project-numina/aimo-progress-prize
- **Paper:** https://github.com/project-numina/aimo-progress-prize/blob/main/report/numina_dataset.pdf
- **Leaderboard:**
- **Point of Contact:** [Jia Li]([email protected])
### Dataset Summary
Approximately 860k math problems, where each solution is formatted in a Chain of Thought (CoT) manner. The sources of the dataset range from Chinese high school math exercises to US and international mathematics olympiad competition problems. The data were primarily collected from online exam paper PDFs and mathematics discussion forums. The processing steps include (a) OCR from the original PDFs, (b) segmentation into problem-solution pairs, (c) Translation into English, (d) realignment to produce a CoT reasoning format, and (e) final answer formatting.
### Source breakdown
| Source | Number of Samples |
| --- | --- |
| aops_forum | 30201 |
| amc_aime | 4072 |
| cn_k12 | 276591 |
| gsm8k | 7345 |
| math | 7478 |
| olympiads | 150581 |
| orca_math | 153334 |
| synthetic_amc | 62111 |
| synthetic_math | 167895 |
| **Total** | **859608** |
### Licensing Information
The dataset is available under the [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0).
### Citation Information
```
@misc{numina_math_datasets,
author = {Jia LI and Edward Beeching and Lewis Tunstall and Ben Lipkin and Roman Soletskyi and Shengyi Costa Huang and Kashif Rasul and Longhui Yu and Albert Jiang and Ziju Shen and Zihan Qin and Bin Dong and Li Zhou and Yann Fleureau and Guillaume Lample and Stanislas Polu},
title = {NuminaMath},
year = {2024},
publisher = {Numina},
journal = {Hugging Face repository},
howpublished = {\url{[https://huggingface.co/AI-MO/NuminaMath-CoT](https://github.com/project-numina/aimo-progress-prize/blob/main/report/numina_dataset.pdf)}}
}
``` |
KBlueLeaf/danbooru2023-webp-4Mpixel | KBlueLeaf | "2024-07-18T10:41:35Z" | 4,145 | 66 | [
"task_categories:image-classification",
"task_categories:zero-shot-image-classification",
"task_categories:text-to-image",
"language:en",
"license:mit",
"size_categories:1M<n<10M",
"format:webdataset",
"modality:image",
"modality:text",
"library:datasets",
"library:webdataset",
"library:mlcroissant",
"region:us",
"art",
"anime",
"not-for-all-audiences"
] | [
"image-classification",
"zero-shot-image-classification",
"text-to-image"
] | "2024-01-25T04:18:45Z" | ---
license: mit
task_categories:
- image-classification
- zero-shot-image-classification
- text-to-image
language:
- en
tags:
- art
- anime
- not-for-all-audiences
size_categories:
- 1M<n<10M
---
# Danbooru 2023 webp: A space-efficient version of Danbooru 2023
This dataset is a resized/re-encoded version of [danbooru2023](https://huggingface.co/datasets/nyanko7/danbooru2023).<br>
It removes the non-image/truncated files and resizes all images to a smaller size.
This dataset has already been updated to latest_id = 7,832,883.
Thx to DeepGHS!
**Notice**: the contents of the updates folder and deepghs/danbooru_newest-webp-4Mpixel have been merged into 2000~2999.tar, so you can safely ignore all the content in the updates folder!
---
## Details
This dataset employs few method to reduce the size and improve the efficiency.
### Size and Format
This dataset resizes all images that have more than 2048x2048 pixels to near 2048x2048 pixels with a bicubic algorithm.<br>
It also removes all images whose longer edge is larger than 16383 after resizing.<br>
(One reason is that webp doesn't allow that; another is that the aspect ratio is too large/small.)
This dataset encodes/saves all images as 90%-quality webp with the pillow library in Python,
which is half the size of 100%-quality lossy webp.
The total size of this dataset is around 1.3~1.4TB, which is less than 20% of the original file size.
### Webdataset
This dataset uses the webdataset library to save all the tar files; therefore, you can also use webdataset to load them easily, and this is the recommended way.
The `__key__` of each file is its id. You can use this id to query the [metadata database](https://huggingface.co/datasets/KBlueLeaf/danbooru2023-sqlite) easily.
|
bigcode/commitpackft | bigcode | "2023-08-20T07:13:43Z" | 4,141 | 62 | [
"language:code",
"license:mit",
"size_categories:100K<n<1M",
"modality:text",
"library:datasets",
"library:mlcroissant",
"arxiv:2308.07124",
"region:us"
] | null | "2023-06-27T06:54:48Z" | ---
license: mit
pretty_name: CommitPackFT
language:
- code
---
![Octopack](https://github.com/bigcode-project/octopack/blob/31f3320f098703c7910e43492c39366eeea68d83/banner.png?raw=true)
# Dataset Card for CommitPackFT
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Additional Information](#additional-information)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Repository:** https://github.com/bigcode-project/octopack
- **Paper:** [OctoPack: Instruction Tuning Code Large Language Models](https://arxiv.org/abs/2308.07124)
- **Point of Contact:** [Niklas Muennighoff](mailto:[email protected])
### Dataset Summary
> CommitPackFT is a 2GB filtered version of [CommitPack](https://huggingface.co/datasets/bigcode/commitpack) to contain only high-quality commit messages that resemble natural language instructions.
>
- **Creation:** The dataset can be recreated using instructions available [here](https://github.com/bigcode-project/octopack).
- **Languages:** 277
- **OctoPack🐙🎒:**
<table>
<tr>
    <th>Data</th>
<td><a href=https://huggingface.co/datasets/bigcode/commitpack>CommitPack</a></td>
<td>4TB of GitHub commits across 350 programming languages</td>
</tr>
<tr>
    <th></th>
<td><a href=https://huggingface.co/datasets/bigcode/commitpackft>CommitPackFT</a></td>
<td>Filtered version of CommitPack for high-quality commit messages that resemble instructions</td>
</tr>
<tr>
    <th>Model</th>
<td><a href=https://huggingface.co/bigcode/octocoder>OctoCoder</a></td>
<td>StarCoder (16B parameters) instruction tuned on CommitPackFT + OASST</td>
</tr>
<tr>
    <th></th>
<td><a href=https://huggingface.co/bigcode/octogeex>OctoGeeX</a></td>
<td>CodeGeeX2 (6B parameters) instruction tuned on CommitPackFT + OASST</td>
</tr>
<tr>
    <th>Evaluation</th>
<td><a href=https://huggingface.co/datasets/bigcode/humanevalpack>HumanEvalPack</a></td>
<td>Extension of OpenAI's HumanEval to cover 3 scenarios across 6 languages</td>
</tr>
</table>
## Dataset Structure
### Data Instances
An example looks as follows:
```json
{
'commit': '0c17311f7fd511f5dae8f8e4acc2dce1a2de3cf5',
'old_file': 'main.py',
'new_file': 'main.py',
'old_contents': "import numpy as np\nimport matplotlib.pyplot as plt\n\n# generate sample data\nx_data = np.linspace(-5, 5, 20)\ny_data = np.random.normal(0.0, 1.0, x_data.size)\n\nplt.plot(x_data, y_data, 'o')\nplt.show()\n",
'new_contents': "import math\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n# generate sample data\nx_data = np.linspace(-math.pi, math.pi, 30)\ny_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)\n\nplt.plot(x_data, y_data, 'o')\nplt.show()\n\n",
'subject': 'Change to sin() function with noise',
'message': 'Change to sin() function with noise\n',
'lang': 'Python',
'license': 'mit',
'repos': 'MorganR/basic-gaussian-process'
}
```
### Data Fields
The data fields are the same among all splits:
- `commit`: unique commit id
- `old_file`: name of the file before the commit
- `new_file`: name of the file after the commit
- `old_contents`: contents of the file before the commit
- `new_contents`: contents of the file after the commit
- `subject`: subject of the commit (this is used for all experiments in the paper)
- `message`: message of the commit (commonly the same as the subject)
- `lang`: programming language
- `license`: license of the repository the code stems from, one of `['mit', 'artistic-2.0', 'isc', 'cc0-1.0', 'epl-1.0', 'mpl-2.0', 'unlicense', 'unknown', 'apache-2.0', 'bsd-3-clause', 'agpl-3.0', 'lgpl-2.1', 'bsd-2-clause']`
- `repos`: name of the repository the code stems from (if multiple, they are comma separated)
### Data Splits
| Name | Megabytes | % of total | Samples | % of total |
| --- | --- | --- | --- | --- |
| total | 1545.02 | 100.0% | 702062 | 100.0% |
| ruby | 195.292 | 12.6401% | 69413 | 9.887% |
| yaml | 190.876 | 12.3543% | 114320 | 16.2835% |
| python | 132.68 | 8.5876% | 56025 | 7.9801% |
| markdown | 131.152 | 8.4887% | 62518 | 8.9049% |
| javascript | 125.008 | 8.091% | 52989 | 7.5476% |
| json | 86.744 | 5.6144% | 39777 | 5.6657% |
| shell | 66.864 | 4.3277% | 31217 | 4.4465% |
| text | 66.664 | 4.3148% | 46588 | 6.6359% |
| php | 60.22 | 3.8977% | 24791 | 3.5312% |
| java | 56.284 | 3.6429% | 20635 | 2.9392% |
| html | 48.42 | 3.1339% | 20214 | 2.8792% |
| c# | 26.84 | 1.7372% | 9346 | 1.3312% |
| xml | 23.676 | 1.5324% | 9337 | 1.3299% |
| html+erb | 23.104 | 1.4954% | 10910 | 1.554% |
| c | 21.08 | 1.3644% | 8506 | 1.2116% |
| ini | 21.04 | 1.3618% | 11360 | 1.6181% |
| coffeescript | 16.96 | 1.0977% | 5513 | 0.7853% |
| swift | 16.272 | 1.0532% | 4849 | 0.6907% |
| restructuredtext | 15.728 | 1.018% | 6560 | 0.9344% |
| typescript | 14.284 | 0.9245% | 5868 | 0.8358% |
| c++ | 14.136 | 0.9149% | 4992 | 0.711% |
| scss | 13.208 | 0.8549% | 6829 | 0.9727% |
| go | 12.132 | 0.7852% | 5004 | 0.7128% |
| scala | 11.184 | 0.7239% | 5040 | 0.7179% |
| haml | 10.74 | 0.6951% | 4415 | 0.6289% |
| css | 9.364 | 0.6061% | 5049 | 0.7192% |
| rust | 7.244 | 0.4689% | 2996 | 0.4267% |
| toml | 5.584 | 0.3614% | 3424 | 0.4877% |
| jsx | 5.5 | 0.356% | 2199 | 0.3132% |
| kotlin | 5.368 | 0.3474% | 2214 | 0.3154% |
| clojure | 5.068 | 0.328% | 2403 | 0.3423% |
| perl | 4.988 | 0.3228% | 2288 | 0.3259% |
| bitbake | 4.464 | 0.2889% | 1308 | 0.1863% |
| groovy | 4.168 | 0.2698% | 1486 | 0.2117% |
| twig | 3.956 | 0.256% | 1610 | 0.2293% |
| nix | 3.84 | 0.2485% | 1593 | 0.2269% |
| sql | 3.74 | 0.2421% | 2069 | 0.2947% |
| less | 3.724 | 0.241% | 1360 | 0.1937% |
| haskell | 3.308 | 0.2141% | 1389 | 0.1978% |
| handlebars | 3.292 | 0.2131% | 1429 | 0.2035% |
| unknown | 3.048 | 0.1973% | 1597 | 0.2275% |
| batchfile | 2.984 | 0.1931% | 1466 | 0.2088% |
| cucumber | 2.588 | 0.1675% | 976 | 0.139% |
| makefile | 2.528 | 0.1636% | 960 | 0.1367% |
| elixir | 2.348 | 0.152% | 1150 | 0.1638% |
| jade | 2.348 | 0.152% | 1119 | 0.1594% |
| cmake | 2.268 | 0.1468% | 981 | 0.1397% |
| powershell | 2.064 | 0.1336% | 991 | 0.1412% |
| slim | 2.056 | 0.1331% | 1052 | 0.1498% |
| emacs-lisp | 1.972 | 0.1276% | 1015 | 0.1446% |
| dart | 1.96 | 0.1269% | 765 | 0.109% |
| viml | 1.956 | 0.1266% | 1063 | 0.1514% |
| asciidoc | 1.864 | 0.1206% | 523 | 0.0745% |
| lua | 1.852 | 0.1199% | 920 | 0.131% |
| llvm | 1.6 | 0.1036% | 780 | 0.1111% |
| smarty | 1.588 | 0.1028% | 737 | 0.105% |
| diff | 1.48 | 0.0958% | 680 | 0.0969% |
| common-lisp | 1.448 | 0.0937% | 778 | 0.1108% |
| saltstack | 1.412 | 0.0914% | 617 | 0.0879% |
| vue | 1.384 | 0.0896% | 587 | 0.0836% |
| sass | 1.364 | 0.0883% | 705 | 0.1004% |
| fish | 1.328 | 0.086% | 813 | 0.1158% |
| erlang | 1.192 | 0.0772% | 480 | 0.0684% |
| freemarker | 1.028 | 0.0665% | 510 | 0.0726% |
| stylus | 0.948 | 0.0614% | 480 | 0.0684% |
| qml | 0.936 | 0.0606% | 368 | 0.0524% |
| hcl | 0.912 | 0.059% | 421 | 0.06% |
| html+django | 0.848 | 0.0549% | 399 | 0.0568% |
| mako | 0.756 | 0.0489% | 170 | 0.0242% |
| ada | 0.728 | 0.0471% | 265 | 0.0377% |
| ocaml | 0.704 | 0.0456% | 333 | 0.0474% |
| f# | 0.656 | 0.0425% | 254 | 0.0362% |
| elm | 0.62 | 0.0401% | 265 | 0.0377% |
| tex | 0.564 | 0.0365% | 307 | 0.0437% |
| rdoc | 0.552 | 0.0357% | 270 | 0.0385% |
| csv | 0.532 | 0.0344% | 375 | 0.0534% |
| protocol-buffer | 0.524 | 0.0339% | 181 | 0.0258% |
| smalltalk | 0.46 | 0.0298% | 284 | 0.0405% |
| arduino | 0.456 | 0.0295% | 225 | 0.032% |
| java-server-pages | 0.452 | 0.0293% | 173 | 0.0246% |
| scheme | 0.42 | 0.0272% | 213 | 0.0303% |
| groff | 0.396 | 0.0256% | 192 | 0.0273% |
| objective-c++ | 0.376 | 0.0243% | 86 | 0.0122% |
| desktop | 0.364 | 0.0236% | 186 | 0.0265% |
| factor | 0.356 | 0.023% | 113 | 0.0161% |
| crystal | 0.348 | 0.0225% | 182 | 0.0259% |
| rhtml | 0.348 | 0.0225% | 135 | 0.0192% |
| haxe | 0.344 | 0.0223% | 174 | 0.0248% |
| glsl | 0.34 | 0.022% | 164 | 0.0234% |
| gas | 0.336 | 0.0217% | 193 | 0.0275% |
| html+php | 0.332 | 0.0215% | 150 | 0.0214% |
| qmake | 0.32 | 0.0207% | 140 | 0.0199% |
| julia | 0.312 | 0.0202% | 180 | 0.0256% |
| cython | 0.308 | 0.0199% | 123 | 0.0175% |
| html+eex | 0.292 | 0.0189% | 135 | 0.0192% |
| tcl | 0.292 | 0.0189% | 103 | 0.0147% |
| org | 0.272 | 0.0176% | 136 | 0.0194% |
| perl6 | 0.268 | 0.0173% | 122 | 0.0174% |
| m4 | 0.264 | 0.0171% | 101 | 0.0144% |
| xslt | 0.256 | 0.0166% | 99 | 0.0141% |
| svg | 0.252 | 0.0163% | 169 | 0.0241% |
| nimrod | 0.236 | 0.0153% | 67 | 0.0095% |
| r | 0.228 | 0.0148% | 121 | 0.0172% |
| robotframework | 0.212 | 0.0137% | 85 | 0.0121% |
| racket | 0.196 | 0.0127% | 117 | 0.0167% |
| textile | 0.184 | 0.0119% | 61 | 0.0087% |
| assembly | 0.172 | 0.0111% | 105 | 0.015% |
| purescript | 0.172 | 0.0111% | 80 | 0.0114% |
| unity3d-asset | 0.156 | 0.0101% | 101 | 0.0144% |
| visual-basic | 0.152 | 0.0098% | 48 | 0.0068% |
| dm | 0.148 | 0.0096% | 16 | 0.0023% |
| pod | 0.148 | 0.0096% | 54 | 0.0077% |
| standard-ml | 0.148 | 0.0096% | 72 | 0.0103% |
| fortran | 0.144 | 0.0093% | 70 | 0.01% |
| gettext-catalog | 0.132 | 0.0085% | 72 | 0.0103% |
| idris | 0.132 | 0.0085% | 38 | 0.0054% |
| livescript | 0.128 | 0.0083% | 63 | 0.009% |
| xtend | 0.128 | 0.0083% | 55 | 0.0078% |
| actionscript | 0.12 | 0.0078% | 49 | 0.007% |
| vala | 0.116 | 0.0075% | 50 | 0.0071% |
| awk | 0.104 | 0.0067% | 52 | 0.0074% |
| ceylon | 0.1 | 0.0065% | 49 | 0.007% |
| jupyter-notebook | 0.1 | 0.0065% | 48 | 0.0068% |
| dockerfile | 0.096 | 0.0062% | 39 | 0.0056% |
| rouge | 0.096 | 0.0062% | 41 | 0.0058% |
| asp | 0.092 | 0.006% | 22 | 0.0031% |
| sqf | 0.092 | 0.006% | 45 | 0.0064% |
| edn | 0.088 | 0.0057% | 48 | 0.0068% |
| liquid | 0.088 | 0.0057% | 30 | 0.0043% |
| xquery | 0.084 | 0.0054% | 39 | 0.0056% |
| linker-script | 0.08 | 0.0052% | 37 | 0.0053% |
| mediawiki | 0.08 | 0.0052% | 33 | 0.0047% |
| parrot-internal-representation | 0.08 | 0.0052% | 23 | 0.0033% |
| solidity | 0.08 | 0.0052% | 37 | 0.0053% |
| json5 | 0.076 | 0.0049% | 33 | 0.0047% |
| systemverilog | 0.076 | 0.0049% | 35 | 0.005% |
| thrift | 0.076 | 0.0049% | 28 | 0.004% |
| groovy-server-pages | 0.072 | 0.0047% | 25 | 0.0036% |
| processing | 0.072 | 0.0047% | 35 | 0.005% |
| cuda | 0.068 | 0.0044% | 25 | 0.0036% |
| graphviz-dot | 0.068 | 0.0044% | 35 | 0.005% |
| inno-setup | 0.064 | 0.0041% | 16 | 0.0023% |
| api-blueprint | 0.06 | 0.0039% | 23 | 0.0033% |
| nsis | 0.06 | 0.0039% | 15 | 0.0021% |
| gentoo-ebuild | 0.056 | 0.0036% | 16 | 0.0023% |
| logtalk | 0.056 | 0.0036% | 21 | 0.003% |
| jasmin | 0.052 | 0.0034% | 9 | 0.0013% |
| literate-coffeescript | 0.052 | 0.0034% | 19 | 0.0027% |
| webidl | 0.052 | 0.0034% | 6 | 0.0009% |
| coldfusion-cfc | 0.048 | 0.0031% | 20 | 0.0028% |
| opencl | 0.048 | 0.0031% | 23 | 0.0033% |
| openscad | 0.048 | 0.0031% | 21 | 0.003% |
| pan | 0.048 | 0.0031% | 23 | 0.0033% |
| pascal | 0.048 | 0.0031% | 25 | 0.0036% |
| pony | 0.048 | 0.0031% | 16 | 0.0023% |
| turtle | 0.048 | 0.0031% | 21 | 0.003% |
| chapel | 0.044 | 0.0028% | 20 | 0.0028% |
| ioke | 0.044 | 0.0028% | 25 | 0.0036% |
| ooc | 0.044 | 0.0028% | 15 | 0.0021% |
| sparql | 0.044 | 0.0028% | 23 | 0.0033% |
| applescript | 0.04 | 0.0026% | 19 | 0.0027% |
| augeas | 0.04 | 0.0026% | 13 | 0.0019% |
| g-code | 0.04 | 0.0026% | 7 | 0.001% |
| mirah | 0.04 | 0.0026% | 16 | 0.0023% |
| capn-proto | 0.036 | 0.0023% | 12 | 0.0017% |
| digital-command-language | 0.036 | 0.0023% | 19 | 0.0027% |
| hy | 0.036 | 0.0023% | 12 | 0.0017% |
| logos | 0.036 | 0.0023% | 19 | 0.0027% |
| modelica | 0.036 | 0.0023% | 15 | 0.0021% |
| vcl | 0.036 | 0.0023% | 18 | 0.0026% |
| antlr | 0.032 | 0.0021% | 15 | 0.0021% |
| gdscript | 0.032 | 0.0021% | 9 | 0.0013% |
| graphql | 0.032 | 0.0021% | 17 | 0.0024% |
| hlsl | 0.032 | 0.0021% | 11 | 0.0016% |
| gnuplot | 0.028 | 0.0018% | 17 | 0.0024% |
| http | 0.028 | 0.0018% | 19 | 0.0027% |
| ninja | 0.028 | 0.0018% | 14 | 0.002% |
| oz | 0.028 | 0.0018% | 8 | 0.0011% |
| raml | 0.028 | 0.0018% | 9 | 0.0013% |
| aspectj | 0.024 | 0.0016% | 8 | 0.0011% |
| autohotkey | 0.024 | 0.0016% | 15 | 0.0021% |
| fancy | 0.024 | 0.0016% | 8 | 0.0011% |
| moonscript | 0.024 | 0.0016% | 10 | 0.0014% |
| piglatin | 0.024 | 0.0016% | 11 | 0.0016% |
| stata | 0.024 | 0.0016% | 10 | 0.0014% |
| urweb | 0.024 | 0.0016% | 6 | 0.0009% |
| xs | 0.024 | 0.0016% | 7 | 0.001% |
| yang | 0.024 | 0.0016% | 6 | 0.0009% |
| agda | 0.02 | 0.0013% | 10 | 0.0014% |
| coldfusion | 0.02 | 0.0013% | 9 | 0.0013% |
| emberscript | 0.02 | 0.0013% | 7 | 0.001% |
| latte | 0.02 | 0.0013% | 7 | 0.001% |
| literate-haskell | 0.02 | 0.0013% | 7 | 0.001% |
| postscript | 0.02 | 0.0013% | 9 | 0.0013% |
| scilab | 0.02 | 0.0013% | 10 | 0.0014% |
| tcsh | 0.02 | 0.0013% | 10 | 0.0014% |
| volt | 0.02 | 0.0013% | 9 | 0.0013% |
| apl | 0.016 | 0.001% | 7 | 0.001% |
| genshi | 0.016 | 0.001% | 3 | 0.0004% |
| jsonld | 0.016 | 0.001% | 6 | 0.0009% |
| krl | 0.016 | 0.001% | 4 | 0.0006% |
| lean | 0.016 | 0.001% | 3 | 0.0004% |
| lfe | 0.016 | 0.001% | 6 | 0.0009% |
| metal | 0.016 | 0.001% | 4 | 0.0006% |
| monkey | 0.016 | 0.001% | 4 | 0.0006% |
| mupad | 0.016 | 0.001% | 4 | 0.0006% |
| nesc | 0.016 | 0.001% | 7 | 0.001% |
| nit | 0.016 | 0.001% | 3 | 0.0004% |
| pike | 0.016 | 0.001% | 6 | 0.0009% |
| purebasic | 0.016 | 0.001% | 5 | 0.0007% |
| renpy | 0.016 | 0.001% | 3 | 0.0004% |
| vhdl | 0.016 | 0.001% | 5 | 0.0007% |
| xproc | 0.016 | 0.001% | 3 | 0.0004% |
| zephir | 0.016 | 0.001% | 4 | 0.0006% |
| apacheconf | 0.012 | 0.0008% | 2 | 0.0003% |
| boo | 0.012 | 0.0008% | 2 | 0.0003% |
| brainfuck | 0.012 | 0.0008% | 2 | 0.0003% |
| bro | 0.012 | 0.0008% | 3 | 0.0004% |
| cartocss | 0.012 | 0.0008% | 3 | 0.0004% |
| creole | 0.012 | 0.0008% | 2 | 0.0003% |
| csound | 0.012 | 0.0008% | 4 | 0.0006% |
| dylan | 0.012 | 0.0008% | 2 | 0.0003% |
| eagle | 0.012 | 0.0008% | 4 | 0.0006% |
| ecl | 0.012 | 0.0008% | 4 | 0.0006% |
| eiffel | 0.012 | 0.0008% | 2 | 0.0003% |
| flux | 0.012 | 0.0008% | 3 | 0.0004% |
| io | 0.012 | 0.0008% | 4 | 0.0006% |
| jsoniq | 0.012 | 0.0008% | 6 | 0.0009% |
| lilypond | 0.012 | 0.0008% | 6 | 0.0009% |
| lsl | 0.012 | 0.0008% | 3 | 0.0004% |
| mask | 0.012 | 0.0008% | 4 | 0.0006% |
| nginx | 0.012 | 0.0008% | 2 | 0.0003% |
| nu | 0.012 | 0.0008% | 2 | 0.0003% |
| pov-ray-sdl | 0.012 | 0.0008% | 5 | 0.0007% |
| ragel-in-ruby-host | 0.012 | 0.0008% | 4 | 0.0006% |
| slash | 0.012 | 0.0008% | 4 | 0.0006% |
| sourcepawn | 0.012 | 0.0008% | 3 | 0.0004% |
| squirrel | 0.012 | 0.0008% | 4 | 0.0006% |
| ston | 0.012 | 0.0008% | 6 | 0.0009% |
| uno | 0.012 | 0.0008% | 2 | 0.0003% |
| wisp | 0.012 | 0.0008% | 3 | 0.0004% |
| xbase | 0.012 | 0.0008% | 3 | 0.0004% |
| yacc | 0.012 | 0.0008% | 3 | 0.0004% |
| zig | 0.012 | 0.0008% | 4 | 0.0006% |
| abap | 0.008 | 0.0005% | 1 | 0.0001% |
| arc | 0.008 | 0.0005% | 2 | 0.0003% |
| ats | 0.008 | 0.0005% | 3 | 0.0004% |
| blitzmax | 0.008 | 0.0005% | 1 | 0.0001% |
| bluespec | 0.008 | 0.0005% | 2 | 0.0003% |
| c2hs-haskell | 0.008 | 0.0005% | 2 | 0.0003% |
| clean | 0.008 | 0.0005% | 1 | 0.0001% |
| dns-zone | 0.008 | 0.0005% | 2 | 0.0003% |
| forth | 0.008 | 0.0005% | 2 | 0.0003% |
| harbour | 0.008 | 0.0005% | 1 | 0.0001% |
| igor-pro | 0.008 | 0.0005% | 1 | 0.0001% |
| inform-7 | 0.008 | 0.0005% | 2 | 0.0003% |
| isabelle | 0.008 | 0.0005% | 2 | 0.0003% |
| jflex | 0.008 | 0.0005% | 1 | 0.0001% |
| literate-agda | 0.008 | 0.0005% | 1 | 0.0001% |
| maple | 0.008 | 0.0005% | 2 | 0.0003% |
| mathematica | 0.008 | 0.0005% | 1 | 0.0001% |
| module-management-system | 0.008 | 0.0005% | 1 | 0.0001% |
| mtml | 0.008 | 0.0005% | 2 | 0.0003% |
| netlinx | 0.008 | 0.0005% | 1 | 0.0001% |
| parrot-assembly | 0.008 | 0.0005% | 2 | 0.0003% |
| pawn | 0.008 | 0.0005% | 3 | 0.0004% |
| propeller-spin | 0.008 | 0.0005% | 1 | 0.0001% |
| pure-data | 0.008 | 0.0005% | 1 | 0.0001% |
| rebol | 0.008 | 0.0005% | 3 | 0.0004% |
| red | 0.008 | 0.0005% | 1 | 0.0001% |
| sage | 0.008 | 0.0005% | 1 | 0.0001% |
| sas | 0.008 | 0.0005% | 1 | 0.0001% |
| scaml | 0.008 | 0.0005% | 1 | 0.0001% |
| smt | 0.008 | 0.0005% | 3 | 0.0004% |
| supercollider | 0.008 | 0.0005% | 2 | 0.0003% |
| unrealscript | 0.008 | 0.0005% | 1 | 0.0001% |
| xpages | 0.008 | 0.0005% | 1 | 0.0001% |
## Additional Information
### Licensing Information
Each sample comes from a code repository with a permissive license. The license is provided by the `license` field for each sample.
### Citation Information
```bibtex
@article{muennighoff2023octopack,
title={OctoPack: Instruction Tuning Code Large Language Models},
author={Niklas Muennighoff and Qian Liu and Armel Zebaze and Qinkai Zheng and Binyuan Hui and Terry Yue Zhuo and Swayam Singh and Xiangru Tang and Leandro von Werra and Shayne Longpre},
journal={arXiv preprint arXiv:2308.07124},
year={2023}
}
``` |
mteb/biblenlp-corpus-mmteb | mteb | "2024-05-07T00:47:48Z" | 4,140 | 1 | [
"annotations_creators:no-annotation",
"language_creators:expert-generated",
"multilinguality:translation",
"multilinguality:multilingual",
"language:aai",
"language:aak",
"language:aau",
"language:aaz",
"language:abt",
"language:abx",
"language:aby",
"language:acf",
"language:acr",
"language:acu",
"language:adz",
"language:aer",
"language:aey",
"language:agd",
"language:agg",
"language:agm",
"language:agn",
"language:agr",
"language:agt",
"language:agu",
"language:aia",
"language:aii",
"language:aka",
"language:ake",
"language:alp",
"language:alq",
"language:als",
"language:aly",
"language:ame",
"language:amf",
"language:amk",
"language:amm",
"language:amn",
"language:amo",
"language:amp",
"language:amr",
"language:amu",
"language:amx",
"language:anh",
"language:anv",
"language:aoi",
"language:aoj",
"language:aom",
"language:aon",
"language:apb",
"language:ape",
"language:apn",
"language:apr",
"language:apu",
"language:apw",
"language:apz",
"language:arb",
"language:are",
"language:arl",
"language:arn",
"language:arp",
"language:asm",
"language:aso",
"language:ata",
"language:atb",
"language:atd",
"language:atg",
"language:att",
"language:auc",
"language:aui",
"language:auy",
"language:avt",
"language:awb",
"language:awk",
"language:awx",
"language:azb",
"language:azg",
"language:azz",
"language:bao",
"language:bba",
"language:bbb",
"language:bbr",
"language:bch",
"language:bco",
"language:bdd",
"language:bea",
"language:bef",
"language:bel",
"language:ben",
"language:beo",
"language:beu",
"language:bgs",
"language:bgt",
"language:bhg",
"language:bhl",
"language:big",
"language:bjk",
"language:bjp",
"language:bjr",
"language:bjv",
"language:bjz",
"language:bkd",
"language:bki",
"language:bkq",
"language:bkx",
"language:bla",
"language:blw",
"language:blz",
"language:bmh",
"language:bmk",
"language:bmr",
"language:bmu",
"language:bnp",
"language:boa",
"language:boj",
"language:bon",
"language:box",
"language:bpr",
"language:bps",
"language:bqc",
"language:bqp",
"language:bre",
"language:bsj",
"language:bsn",
"language:bsp",
"language:bss",
"language:buk",
"language:bus",
"language:bvd",
"language:bvr",
"language:bxh",
"language:byr",
"language:byx",
"language:bzd",
"language:bzh",
"language:bzj",
"language:caa",
"language:cab",
"language:cac",
"language:caf",
"language:cak",
"language:cao",
"language:cap",
"language:car",
"language:cav",
"language:cax",
"language:cbc",
"language:cbi",
"language:cbk",
"language:cbr",
"language:cbs",
"language:cbt",
"language:cbu",
"language:cbv",
"language:cco",
"language:ceb",
"language:cek",
"language:ces",
"language:cgc",
"language:cha",
"language:chd",
"language:chf",
"language:chk",
"language:chq",
"language:chz",
"language:cjo",
"language:cjv",
"language:ckb",
"language:cle",
"language:clu",
"language:cme",
"language:cmn",
"language:cni",
"language:cnl",
"language:cnt",
"language:cof",
"language:con",
"language:cop",
"language:cot",
"language:cpa",
"language:cpb",
"language:cpc",
"language:cpu",
"language:cpy",
"language:crn",
"language:crx",
"language:cso",
"language:csy",
"language:cta",
"language:cth",
"language:ctp",
"language:ctu",
"language:cub",
"language:cuc",
"language:cui",
"language:cuk",
"language:cut",
"language:cux",
"language:cwe",
"language:cya",
"language:daa",
"language:dad",
"language:dah",
"language:dan",
"language:ded",
"language:deu",
"language:dgc",
"language:dgr",
"language:dgz",
"language:dhg",
"language:dif",
"language:dik",
"language:dji",
"language:djk",
"language:djr",
"language:dob",
"language:dop",
"language:dov",
"language:dwr",
"language:dww",
"language:dwy",
"language:ebk",
"language:eko",
"language:emi",
"language:emp",
"language:eng",
"language:enq",
"language:epo",
"language:eri",
"language:ese",
"language:esk",
"language:etr",
"language:ewe",
"language:faa",
"language:fai",
"language:far",
"language:ffm",
"language:for",
"language:fra",
"language:fue",
"language:fuf",
"language:fuh",
"language:gah",
"language:gai",
"language:gam",
"language:gaw",
"language:gdn",
"language:gdr",
"language:geb",
"language:gfk",
"language:ghs",
"language:glk",
"language:gmv",
"language:gng",
"language:gnn",
"language:gnw",
"language:gof",
"language:grc",
"language:gub",
"language:guh",
"language:gui",
"language:guj",
"language:gul",
"language:gum",
"language:gun",
"language:guo",
"language:gup",
"language:gux",
"language:gvc",
"language:gvf",
"language:gvn",
"language:gvs",
"language:gwi",
"language:gym",
"language:gyr",
"language:hat",
"language:hau",
"language:haw",
"language:hbo",
"language:hch",
"language:heb",
"language:heg",
"language:hin",
"language:hix",
"language:hla",
"language:hlt",
"language:hmo",
"language:hns",
"language:hop",
"language:hot",
"language:hrv",
"language:hto",
"language:hub",
"language:hui",
"language:hun",
"language:hus",
"language:huu",
"language:huv",
"language:hvn",
"language:ian",
"language:ign",
"language:ikk",
"language:ikw",
"language:ilo",
"language:imo",
"language:inb",
"language:ind",
"language:ino",
"language:iou",
"language:ipi",
"language:isn",
"language:ita",
"language:iws",
"language:ixl",
"language:jac",
"language:jae",
"language:jao",
"language:jic",
"language:jid",
"language:jiv",
"language:jni",
"language:jpn",
"language:jvn",
"language:kan",
"language:kaq",
"language:kbc",
"language:kbh",
"language:kbm",
"language:kbq",
"language:kdc",
"language:kde",
"language:kdl",
"language:kek",
"language:ken",
"language:kew",
"language:kgf",
"language:kgk",
"language:kgp",
"language:khs",
"language:khz",
"language:kik",
"language:kiw",
"language:kiz",
"language:kje",
"language:kjn",
"language:kjs",
"language:kkc",
"language:kkl",
"language:klt",
"language:klv",
"language:kmg",
"language:kmh",
"language:kmk",
"language:kmo",
"language:kms",
"language:kmu",
"language:kne",
"language:knf",
"language:knj",
"language:knv",
"language:kos",
"language:kpf",
"language:kpg",
"language:kpj",
"language:kpr",
"language:kpw",
"language:kpx",
"language:kqa",
"language:kqc",
"language:kqf",
"language:kql",
"language:kqw",
"language:ksd",
"language:ksj",
"language:ksr",
"language:ktm",
"language:kto",
"language:kud",
"language:kue",
"language:kup",
"language:kvg",
"language:kvn",
"language:kwd",
"language:kwf",
"language:kwi",
"language:kwj",
"language:kyc",
"language:kyf",
"language:kyg",
"language:kyq",
"language:kyz",
"language:kze",
"language:lac",
"language:lat",
"language:lbb",
"language:lbk",
"language:lcm",
"language:leu",
"language:lex",
"language:lgl",
"language:lid",
"language:lif",
"language:lin",
"language:lit",
"language:llg",
"language:lug",
"language:luo",
"language:lww",
"language:maa",
"language:maj",
"language:mal",
"language:mam",
"language:maq",
"language:mar",
"language:mau",
"language:mav",
"language:maz",
"language:mbb",
"language:mbc",
"language:mbh",
"language:mbj",
"language:mbl",
"language:mbs",
"language:mbt",
"language:mca",
"language:mcb",
"language:mcd",
"language:mcf",
"language:mco",
"language:mcp",
"language:mcq",
"language:mcr",
"language:mdy",
"language:med",
"language:mee",
"language:mek",
"language:meq",
"language:met",
"language:meu",
"language:mgc",
"language:mgh",
"language:mgw",
"language:mhl",
"language:mib",
"language:mic",
"language:mie",
"language:mig",
"language:mih",
"language:mil",
"language:mio",
"language:mir",
"language:mit",
"language:miz",
"language:mjc",
"language:mkj",
"language:mkl",
"language:mkn",
"language:mks",
"language:mle",
"language:mlh",
"language:mlp",
"language:mmo",
"language:mmx",
"language:mna",
"language:mop",
"language:mox",
"language:mph",
"language:mpj",
"language:mpm",
"language:mpp",
"language:mps",
"language:mpt",
"language:mpx",
"language:mqb",
"language:mqj",
"language:msb",
"language:msc",
"language:msk",
"language:msm",
"language:msy",
"language:mti",
"language:mto",
"language:mux",
"language:muy",
"language:mva",
"language:mvn",
"language:mwc",
"language:mwe",
"language:mwf",
"language:mwp",
"language:mxb",
"language:mxp",
"language:mxq",
"language:mxt",
"language:mya",
"language:myk",
"language:myu",
"language:myw",
"language:myy",
"language:mzz",
"language:nab",
"language:naf",
"language:nak",
"language:nas",
"language:nay",
"language:nbq",
"language:nca",
"language:nch",
"language:ncj",
"language:ncl",
"language:ncu",
"language:ndg",
"language:ndj",
"language:nfa",
"language:ngp",
"language:ngu",
"language:nhe",
"language:nhg",
"language:nhi",
"language:nho",
"language:nhr",
"language:nhu",
"language:nhw",
"language:nhy",
"language:nif",
"language:nii",
"language:nin",
"language:nko",
"language:nld",
"language:nlg",
"language:nmw",
"language:nna",
"language:nnq",
"language:noa",
"language:nop",
"language:not",
"language:nou",
"language:npi",
"language:npl",
"language:nsn",
"language:nss",
"language:ntj",
"language:ntp",
"language:ntu",
"language:nuy",
"language:nvm",
"language:nwi",
"language:nya",
"language:nys",
"language:nyu",
"language:obo",
"language:okv",
"language:omw",
"language:ong",
"language:ons",
"language:ood",
"language:opm",
"language:ory",
"language:ote",
"language:otm",
"language:otn",
"language:otq",
"language:ots",
"language:pab",
"language:pad",
"language:pah",
"language:pan",
"language:pao",
"language:pes",
"language:pib",
"language:pio",
"language:pir",
"language:piu",
"language:pjt",
"language:pls",
"language:plu",
"language:pma",
"language:poe",
"language:poh",
"language:poi",
"language:pol",
"language:pon",
"language:por",
"language:poy",
"language:ppo",
"language:prf",
"language:pri",
"language:ptp",
"language:ptu",
"language:pwg",
"language:qub",
"language:quc",
"language:quf",
"language:quh",
"language:qul",
"language:qup",
"language:qvc",
"language:qve",
"language:qvh",
"language:qvm",
"language:qvn",
"language:qvs",
"language:qvw",
"language:qvz",
"language:qwh",
"language:qxh",
"language:qxn",
"language:qxo",
"language:rai",
"language:reg",
"language:rgu",
"language:rkb",
"language:rmc",
"language:rmy",
"language:ron",
"language:roo",
"language:rop",
"language:row",
"language:rro",
"language:ruf",
"language:rug",
"language:rus",
"language:rwo",
"language:sab",
"language:san",
"language:sbe",
"language:sbk",
"language:sbs",
"language:seh",
"language:sey",
"language:sgb",
"language:sgz",
"language:shj",
"language:shp",
"language:sim",
"language:sja",
"language:sll",
"language:smk",
"language:snc",
"language:snn",
"language:snp",
"language:snx",
"language:sny",
"language:som",
"language:soq",
"language:soy",
"language:spa",
"language:spl",
"language:spm",
"language:spp",
"language:sps",
"language:spy",
"language:sri",
"language:srm",
"language:srn",
"language:srp",
"language:srq",
"language:ssd",
"language:ssg",
"language:ssx",
"language:stp",
"language:sua",
"language:sue",
"language:sus",
"language:suz",
"language:swe",
"language:swh",
"language:swp",
"language:sxb",
"language:tac",
"language:taj",
"language:tam",
"language:tav",
"language:taw",
"language:tbc",
"language:tbf",
"language:tbg",
"language:tbl",
"language:tbo",
"language:tbz",
"language:tca",
"language:tcs",
"language:tcz",
"language:tdt",
"language:tee",
"language:tel",
"language:ter",
"language:tet",
"language:tew",
"language:tfr",
"language:tgk",
"language:tgl",
"language:tgo",
"language:tgp",
"language:tha",
"language:thd",
"language:tif",
"language:tim",
"language:tiw",
"language:tiy",
"language:tke",
"language:tku",
"language:tlf",
"language:tmd",
"language:tna",
"language:tnc",
"language:tnk",
"language:tnn",
"language:tnp",
"language:toc",
"language:tod",
"language:tof",
"language:toj",
"language:ton",
"language:too",
"language:top",
"language:tos",
"language:tpa",
"language:tpi",
"language:tpt",
"language:tpz",
"language:trc",
"language:tsw",
"language:ttc",
"language:tte",
"language:tuc",
"language:tue",
"language:tuf",
"language:tuo",
"language:tur",
"language:tvk",
"language:twi",
"language:txq",
"language:txu",
"language:tzj",
"language:tzo",
"language:ubr",
"language:ubu",
"language:udu",
"language:uig",
"language:ukr",
"language:uli",
"language:ulk",
"language:upv",
"language:ura",
"language:urb",
"language:urd",
"language:uri",
"language:urt",
"language:urw",
"language:usa",
"language:usp",
"language:uvh",
"language:uvl",
"language:vid",
"language:vie",
"language:viv",
"language:vmy",
"language:waj",
"language:wal",
"language:wap",
"language:wat",
"language:wbi",
"language:wbp",
"language:wed",
"language:wer",
"language:wim",
"language:wiu",
"language:wiv",
"language:wmt",
"language:wmw",
"language:wnc",
"language:wnu",
"language:wol",
"language:wos",
"language:wrk",
"language:wro",
"language:wrs",
"language:wsk",
"language:wuv",
"language:xav",
"language:xbi",
"language:xed",
"language:xla",
"language:xnn",
"language:xon",
"language:xsi",
"language:xtd",
"language:xtm",
"language:yaa",
"language:yad",
"language:yal",
"language:yap",
"language:yaq",
"language:yby",
"language:ycn",
"language:yka",
"language:yle",
"language:yml",
"language:yon",
"language:yor",
"language:yrb",
"language:yre",
"language:yss",
"language:yuj",
"language:yut",
"language:yuw",
"language:yva",
"language:zaa",
"language:zab",
"language:zac",
"language:zad",
"language:zai",
"language:zaj",
"language:zam",
"language:zao",
"language:zap",
"language:zar",
"language:zas",
"language:zat",
"language:zav",
"language:zaw",
"language:zca",
"language:zga",
"language:zia",
"language:ziw",
"language:zlm",
"language:zos",
"language:zpc",
"language:zpl",
"language:zpm",
"language:zpo",
"language:zpq",
"language:zpu",
"language:zpv",
"language:zpz",
"language:zsr",
"language:ztq",
"language:zty",
"language:zyp",
"language:be",
"language:br",
"language:cs",
"language:ch",
"language:zh",
"language:de",
"language:en",
"language:eo",
"language:fr",
"language:ht",
"language:he",
"language:hr",
"language:id",
"language:it",
"language:ja",
"language:la",
"language:nl",
"language:ru",
"language:sa",
"language:so",
"language:es",
"language:sr",
"language:sv",
"language:to",
"language:uk",
"language:vi",
"license:cc-by-4.0",
"license:other",
"size_categories:1M<n<10M",
"format:json",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"region:us"
] | null | "2024-05-05T22:41:26Z" | ---
annotations_creators:
- no-annotation
language_creators:
- expert-generated
language:
- aai
- aak
- aau
- aaz
- abt
- abx
- aby
- acf
- acr
- acu
- adz
- aer
- aey
- agd
- agg
- agm
- agn
- agr
- agt
- agu
- aia
- aii
- aka
- ake
- alp
- alq
- als
- aly
- ame
- amf
- amk
- amm
- amn
- amo
- amp
- amr
- amu
- amx
- anh
- anv
- aoi
- aoj
- aom
- aon
- apb
- ape
- apn
- apr
- apu
- apw
- apz
- arb
- are
- arl
- arn
- arp
- asm
- aso
- ata
- atb
- atd
- atg
- att
- auc
- aui
- auy
- avt
- awb
- awk
- awx
- azb
- azg
- azz
- bao
- bba
- bbb
- bbr
- bch
- bco
- bdd
- bea
- bef
- bel
- ben
- beo
- beu
- bgs
- bgt
- bhg
- bhl
- big
- bjk
- bjp
- bjr
- bjv
- bjz
- bkd
- bki
- bkq
- bkx
- bla
- blw
- blz
- bmh
- bmk
- bmr
- bmu
- bnp
- boa
- boj
- bon
- box
- bpr
- bps
- bqc
- bqp
- bre
- bsj
- bsn
- bsp
- bss
- buk
- bus
- bvd
- bvr
- bxh
- byr
- byx
- bzd
- bzh
- bzj
- caa
- cab
- cac
- caf
- cak
- cao
- cap
- car
- cav
- cax
- cbc
- cbi
- cbk
- cbr
- cbs
- cbt
- cbu
- cbv
- cco
- ceb
- cek
- ces
- cgc
- cha
- chd
- chf
- chk
- chq
- chz
- cjo
- cjv
- ckb
- cle
- clu
- cme
- cmn
- cni
- cnl
- cnt
- cof
- con
- cop
- cot
- cpa
- cpb
- cpc
- cpu
- cpy
- crn
- crx
- cso
- csy
- cta
- cth
- ctp
- ctu
- cub
- cuc
- cui
- cuk
- cut
- cux
- cwe
- cya
- daa
- dad
- dah
- dan
- ded
- deu
- dgc
- dgr
- dgz
- dhg
- dif
- dik
- dji
- djk
- djr
- dob
- dop
- dov
- dwr
- dww
- dwy
- ebk
- eko
- emi
- emp
- eng
- enq
- epo
- eri
- ese
- esk
- etr
- ewe
- faa
- fai
- far
- ffm
- for
- fra
- fue
- fuf
- fuh
- gah
- gai
- gam
- gaw
- gdn
- gdr
- geb
- gfk
- ghs
- glk
- gmv
- gng
- gnn
- gnw
- gof
- grc
- gub
- guh
- gui
- guj
- gul
- gum
- gun
- guo
- gup
- gux
- gvc
- gvf
- gvn
- gvs
- gwi
- gym
- gyr
- hat
- hau
- haw
- hbo
- hch
- heb
- heg
- hin
- hix
- hla
- hlt
- hmo
- hns
- hop
- hot
- hrv
- hto
- hub
- hui
- hun
- hus
- huu
- huv
- hvn
- ian
- ign
- ikk
- ikw
- ilo
- imo
- inb
- ind
- ino
- iou
- ipi
- isn
- ita
- iws
- ixl
- jac
- jae
- jao
- jic
- jid
- jiv
- jni
- jpn
- jvn
- kan
- kaq
- kbc
- kbh
- kbm
- kbq
- kdc
- kde
- kdl
- kek
- ken
- kew
- kgf
- kgk
- kgp
- khs
- khz
- kik
- kiw
- kiz
- kje
- kjn
- kjs
- kkc
- kkl
- klt
- klv
- kmg
- kmh
- kmk
- kmo
- kms
- kmu
- kne
- knf
- knj
- knv
- kos
- kpf
- kpg
- kpj
- kpr
- kpw
- kpx
- kqa
- kqc
- kqf
- kql
- kqw
- ksd
- ksj
- ksr
- ktm
- kto
- kud
- kue
- kup
- kvg
- kvn
- kwd
- kwf
- kwi
- kwj
- kyc
- kyf
- kyg
- kyq
- kyz
- kze
- lac
- lat
- lbb
- lbk
- lcm
- leu
- lex
- lgl
- lid
- lif
- lin
- lit
- llg
- lug
- luo
- lww
- maa
- maj
- mal
- mam
- maq
- mar
- mau
- mav
- maz
- mbb
- mbc
- mbh
- mbj
- mbl
- mbs
- mbt
- mca
- mcb
- mcd
- mcf
- mco
- mcp
- mcq
- mcr
- mdy
- med
- mee
- mek
- meq
- met
- meu
- mgc
- mgh
- mgw
- mhl
- mib
- mic
- mie
- mig
- mih
- mil
- mio
- mir
- mit
- miz
- mjc
- mkj
- mkl
- mkn
- mks
- mle
- mlh
- mlp
- mmo
- mmx
- mna
- mop
- mox
- mph
- mpj
- mpm
- mpp
- mps
- mpt
- mpx
- mqb
- mqj
- msb
- msc
- msk
- msm
- msy
- mti
- mto
- mux
- muy
- mva
- mvn
- mwc
- mwe
- mwf
- mwp
- mxb
- mxp
- mxq
- mxt
- mya
- myk
- myu
- myw
- myy
- mzz
- nab
- naf
- nak
- nas
- nay
- nbq
- nca
- nch
- ncj
- ncl
- ncu
- ndg
- ndj
- nfa
- ngp
- ngu
- nhe
- nhg
- nhi
- nho
- nhr
- nhu
- nhw
- nhy
- nif
- nii
- nin
- nko
- nld
- nlg
- nmw
- nna
- nnq
- noa
- nop
- not
- nou
- npi
- npl
- nsn
- nss
- ntj
- ntp
- ntu
- nuy
- nvm
- nwi
- nya
- nys
- nyu
- obo
- okv
- omw
- ong
- ons
- ood
- opm
- ory
- ote
- otm
- otn
- otq
- ots
- pab
- pad
- pah
- pan
- pao
- pes
- pib
- pio
- pir
- piu
- pjt
- pls
- plu
- pma
- poe
- poh
- poi
- pol
- pon
- por
- poy
- ppo
- prf
- pri
- ptp
- ptu
- pwg
- qub
- quc
- quf
- quh
- qul
- qup
- qvc
- qve
- qvh
- qvm
- qvn
- qvs
- qvw
- qvz
- qwh
- qxh
- qxn
- qxo
- rai
- reg
- rgu
- rkb
- rmc
- rmy
- ron
- roo
- rop
- row
- rro
- ruf
- rug
- rus
- rwo
- sab
- san
- sbe
- sbk
- sbs
- seh
- sey
- sgb
- sgz
- shj
- shp
- sim
- sja
- sll
- smk
- snc
- snn
- snp
- snx
- sny
- som
- soq
- soy
- spa
- spl
- spm
- spp
- sps
- spy
- sri
- srm
- srn
- srp
- srq
- ssd
- ssg
- ssx
- stp
- sua
- sue
- sus
- suz
- swe
- swh
- swp
- sxb
- tac
- taj
- tam
- tav
- taw
- tbc
- tbf
- tbg
- tbl
- tbo
- tbz
- tca
- tcs
- tcz
- tdt
- tee
- tel
- ter
- tet
- tew
- tfr
- tgk
- tgl
- tgo
- tgp
- tha
- thd
- tif
- tim
- tiw
- tiy
- tke
- tku
- tlf
- tmd
- tna
- tnc
- tnk
- tnn
- tnp
- toc
- tod
- tof
- toj
- ton
- too
- top
- tos
- tpa
- tpi
- tpt
- tpz
- trc
- tsw
- ttc
- tte
- tuc
- tue
- tuf
- tuo
- tur
- tvk
- twi
- txq
- txu
- tzj
- tzo
- ubr
- ubu
- udu
- uig
- ukr
- uli
- ulk
- upv
- ura
- urb
- urd
- uri
- urt
- urw
- usa
- usp
- uvh
- uvl
- vid
- vie
- viv
- vmy
- waj
- wal
- wap
- wat
- wbi
- wbp
- wed
- wer
- wim
- wiu
- wiv
- wmt
- wmw
- wnc
- wnu
- wol
- wos
- wrk
- wro
- wrs
- wsk
- wuv
- xav
- xbi
- xed
- xla
- xnn
- xon
- xsi
- xtd
- xtm
- yaa
- yad
- yal
- yap
- yaq
- yby
- ycn
- yka
- yle
- yml
- yon
- yor
- yrb
- yre
- yss
- yuj
- yut
- yuw
- yva
- zaa
- zab
- zac
- zad
- zai
- zaj
- zam
- zao
- zap
- zar
- zas
- zat
- zav
- zaw
- zca
- zga
- zia
- ziw
- zlm
- zos
- zpc
- zpl
- zpm
- zpo
- zpq
- zpu
- zpv
- zpz
- zsr
- ztq
- zty
- zyp
- be
- br
- cs
- ch
- zh
- de
- en
- eo
- fr
- ht
- he
- hr
- id
- it
- ja
- la
- nl
- ru
- sa
- so
- es
- sr
- sv
- to
- uk
- vi
license:
- cc-by-4.0
- other
multilinguality:
- translation
- multilingual
pretty_name: biblenlp-corpus-mmteb
size_categories:
- 1M<n<10M
configs:
- config_name: default
data_files:
- path: train/*.jsonl.gz
split: train
- path: test/*.jsonl.gz
split: test
- path: validation/*.jsonl.gz
split: validation
- config_name: eng_Latn-zac_Latn
data_files:
- path: train/eng_Latn-zac_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zac_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zac_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lit_Latn
data_files:
- path: train/eng_Latn-lit_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lit_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lit_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cgc_Latn
data_files:
- path: train/eng_Latn-cgc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cgc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cgc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-guh_Latn
data_files:
- path: train/eng_Latn-guh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-guh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-guh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ckb_Arab
data_files:
- path: train/eng_Latn-ckb_Arab.jsonl.gz
split: train
- path: test/eng_Latn-ckb_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-ckb_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-cop_Copt
data_files:
- path: train/eng_Latn-cop_Copt.jsonl.gz
split: train
- path: test/eng_Latn-cop_Copt.jsonl.gz
split: test
- path: validation/eng_Latn-cop_Copt.jsonl.gz
split: validation
- config_name: eng_Latn-lif_Deva
data_files:
- path: train/eng_Latn-lif_Deva.jsonl.gz
split: train
- path: test/eng_Latn-lif_Deva.jsonl.gz
split: test
- path: validation/eng_Latn-lif_Deva.jsonl.gz
split: validation
- config_name: eng_Latn-cwe_Latn
data_files:
- path: train/eng_Latn-cwe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cwe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cwe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kwj_Latn
data_files:
- path: train/eng_Latn-kwj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kwj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kwj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-srp_Latn
data_files:
- path: train/eng_Latn-srp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-srp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-srp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvn_Latn
data_files:
- path: train/eng_Latn-qvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tsw_Latn
data_files:
- path: train/eng_Latn-tsw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tsw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tsw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wro_Latn
data_files:
- path: train/eng_Latn-wro_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wro_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wro_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tod_Latn
data_files:
- path: train/eng_Latn-tod_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tod_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tod_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bco_Latn
data_files:
- path: train/eng_Latn-bco_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bco_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bco_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ikk_Latn
data_files:
- path: train/eng_Latn-ikk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ikk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ikk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tna_Latn
data_files:
- path: train/eng_Latn-tna_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tna_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tna_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-swp_Latn
data_files:
- path: train/eng_Latn-swp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-swp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-swp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agm_Latn
data_files:
- path: train/eng_Latn-agm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-con_Latn
data_files:
- path: train/eng_Latn-con_Latn.jsonl.gz
split: train
- path: test/eng_Latn-con_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-con_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sgz_Latn
data_files:
- path: train/eng_Latn-sgz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sgz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sgz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mwc_Latn
data_files:
- path: train/eng_Latn-mwc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mwc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mwc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-azb_Arab
data_files:
- path: train/eng_Latn-azb_Arab.jsonl.gz
split: train
- path: test/eng_Latn-azb_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-azb_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-aon_Latn
data_files:
- path: train/eng_Latn-aon_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aon_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aon_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mvn_Latn
data_files:
- path: train/eng_Latn-mvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mpj_Latn
data_files:
- path: train/eng_Latn-mpj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mpj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mpj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cot_Latn
data_files:
- path: train/eng_Latn-cot_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cot_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cot_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tuo_Latn
data_files:
- path: train/eng_Latn-tuo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tuo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tuo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-iou_Latn
data_files:
- path: train/eng_Latn-iou_Latn.jsonl.gz
split: train
- path: test/eng_Latn-iou_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-iou_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-big_Latn
data_files:
- path: train/eng_Latn-big_Latn.jsonl.gz
split: train
- path: test/eng_Latn-big_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-big_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apw_Latn
data_files:
- path: train/eng_Latn-apw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpx_Latn
data_files:
- path: train/eng_Latn-kpx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cui_Latn
data_files:
- path: train/eng_Latn-cui_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cui_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cui_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bnp_Latn
data_files:
- path: train/eng_Latn-bnp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bnp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bnp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ngp_Latn
data_files:
- path: train/eng_Latn-ngp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ngp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ngp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mkj_Latn
data_files:
- path: train/eng_Latn-mkj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mkj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mkj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-chf_Latn
data_files:
- path: train/eng_Latn-chf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-chf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-chf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tca_Latn
data_files:
- path: train/eng_Latn-tca_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tca_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tca_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-poh_Latn
data_files:
- path: train/eng_Latn-poh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-poh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-poh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ese_Latn
data_files:
- path: train/eng_Latn-ese_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ese_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ese_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-plu_Latn
data_files:
- path: train/eng_Latn-plu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-plu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-plu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-crn_Latn
data_files:
- path: train/eng_Latn-crn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-crn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-crn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mxt_Latn
data_files:
- path: train/eng_Latn-mxt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mxt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mxt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tnk_Latn
data_files:
- path: train/eng_Latn-tnk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tnk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tnk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zar_Latn
data_files:
- path: train/eng_Latn-zar_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zar_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zar_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sri_Latn
data_files:
- path: train/eng_Latn-sri_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sri_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sri_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pan_Guru
data_files:
- path: train/eng_Latn-pan_Guru.jsonl.gz
split: train
- path: test/eng_Latn-pan_Guru.jsonl.gz
split: test
- path: validation/eng_Latn-pan_Guru.jsonl.gz
split: validation
- config_name: eng_Latn-kik_Latn
data_files:
- path: train/eng_Latn-kik_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kik_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kik_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yby_Latn
data_files:
- path: train/eng_Latn-yby_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yby_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yby_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qup_Latn
data_files:
- path: train/eng_Latn-qup_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qup_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qup_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mco_Latn
data_files:
- path: train/eng_Latn-mco_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mco_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mco_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gux_Latn
data_files:
- path: train/eng_Latn-gux_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gux_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gux_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-spa_Latn
data_files:
- path: train/eng_Latn-spa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-spa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-spa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-heg_Latn
data_files:
- path: train/eng_Latn-heg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-heg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-heg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gwi_Latn
data_files:
- path: train/eng_Latn-gwi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gwi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gwi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ttc_Latn
data_files:
- path: train/eng_Latn-ttc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ttc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ttc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mqj_Latn
data_files:
- path: train/eng_Latn-mqj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mqj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mqj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pjt_Latn
data_files:
- path: train/eng_Latn-pjt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pjt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pjt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gui_Latn
data_files:
- path: train/eng_Latn-gui_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gui_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gui_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tel_Telu
data_files:
- path: train/eng_Latn-tel_Telu.jsonl.gz
split: train
- path: test/eng_Latn-tel_Telu.jsonl.gz
split: test
- path: validation/eng_Latn-tel_Telu.jsonl.gz
split: validation
- config_name: eng_Latn-lbb_Latn
data_files:
- path: train/eng_Latn-lbb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lbb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lbb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbr_Latn
data_files:
- path: train/eng_Latn-cbr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jvn_Latn
data_files:
- path: train/eng_Latn-jvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-huu_Latn
data_files:
- path: train/eng_Latn-huu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-huu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-huu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kyq_Latn
data_files:
- path: train/eng_Latn-kyq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kyq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kyq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lex_Latn
data_files:
- path: train/eng_Latn-lex_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lex_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lex_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lug_Latn
data_files:
- path: train/eng_Latn-lug_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lug_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lug_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tbc_Latn
data_files:
- path: train/eng_Latn-tbc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tbc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tbc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-srm_Latn
data_files:
- path: train/eng_Latn-srm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-srm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-srm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ztq_Latn
data_files:
- path: train/eng_Latn-ztq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ztq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ztq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-clu_Latn
data_files:
- path: train/eng_Latn-clu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-clu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-clu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wol_Latn
data_files:
- path: train/eng_Latn-wol_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wol_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wol_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wrk_Latn
data_files:
- path: train/eng_Latn-wrk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wrk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wrk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ssg_Latn
data_files:
- path: train/eng_Latn-ssg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ssg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ssg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tha_Thai
data_files:
- path: train/eng_Latn-tha_Thai.jsonl.gz
split: train
- path: test/eng_Latn-tha_Thai.jsonl.gz
split: test
- path: validation/eng_Latn-tha_Thai.jsonl.gz
split: validation
- config_name: eng_Latn-gub_Latn
data_files:
- path: train/eng_Latn-gub_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gub_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gub_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rop_Latn
data_files:
- path: train/eng_Latn-rop_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rop_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rop_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ind_Latn
data_files:
- path: train/eng_Latn-ind_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ind_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ind_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-urb_Latn
data_files:
- path: train/eng_Latn-urb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-urb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-urb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ziw_Latn
data_files:
- path: train/eng_Latn-ziw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ziw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ziw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-waj_Latn
data_files:
- path: train/eng_Latn-waj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-waj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-waj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tku_Latn
data_files:
- path: train/eng_Latn-tku_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tku_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tku_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pao_Latn
data_files:
- path: train/eng_Latn-pao_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pao_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pao_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tet_Latn
data_files:
- path: train/eng_Latn-tet_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tet_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tet_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-msc_Latn
data_files:
- path: train/eng_Latn-msc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-msc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-msc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wal_Ethi
data_files:
- path: train/eng_Latn-wal_Ethi.jsonl.gz
split: train
- path: test/eng_Latn-wal_Ethi.jsonl.gz
split: test
- path: validation/eng_Latn-wal_Ethi.jsonl.gz
split: validation
- config_name: eng_Latn-bmu_Latn
data_files:
- path: train/eng_Latn-bmu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bmu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bmu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yaq_Latn
data_files:
- path: train/eng_Latn-yaq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yaq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yaq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bgt_Latn
data_files:
- path: train/eng_Latn-bgt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bgt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bgt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-atb_Latn
data_files:
- path: train/eng_Latn-atb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-atb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-atb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apn_Latn
data_files:
- path: train/eng_Latn-apn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-por_Latn
data_files:
- path: train/eng_Latn-por_Latn.jsonl.gz
split: train
- path: test/eng_Latn-por_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-por_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-quf_Latn
data_files:
- path: train/eng_Latn-quf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-quf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-quf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-prf_Latn
data_files:
- path: train/eng_Latn-prf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-prf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-prf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ndj_Latn
data_files:
- path: train/eng_Latn-ndj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ndj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ndj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hub_Latn
data_files:
- path: train/eng_Latn-hub_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hub_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hub_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kvn_Latn
data_files:
- path: train/eng_Latn-kvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-swe_Latn
data_files:
- path: train/eng_Latn-swe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-swe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-swe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xtd_Latn
data_files:
- path: train/eng_Latn-xtd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xtd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xtd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-blz_Latn
data_files:
- path: train/eng_Latn-blz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-blz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-blz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-arb_Arab
data_files:
- path: train/eng_Latn-arb_Arab.jsonl.gz
split: train
- path: test/eng_Latn-arb_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-arb_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-gdr_Latn
data_files:
- path: train/eng_Latn-gdr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gdr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gdr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ksd_Latn
data_files:
- path: train/eng_Latn-ksd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ksd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ksd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-toj_Latn
data_files:
- path: train/eng_Latn-toj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-toj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-toj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-arp_Latn
data_files:
- path: train/eng_Latn-arp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-arp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-arp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cnt_Latn
data_files:
- path: train/eng_Latn-cnt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cnt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cnt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aoj_Latn
data_files:
- path: train/eng_Latn-aoj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aoj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aoj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-fai_Latn
data_files:
- path: train/eng_Latn-fai_Latn.jsonl.gz
split: train
- path: test/eng_Latn-fai_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-fai_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-far_Latn
data_files:
- path: train/eng_Latn-far_Latn.jsonl.gz
split: train
- path: test/eng_Latn-far_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-far_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ons_Latn
data_files:
- path: train/eng_Latn-ons_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ons_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ons_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-emi_Latn
data_files:
- path: train/eng_Latn-emi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-emi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-emi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yre_Latn
data_files:
- path: train/eng_Latn-yre_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yre_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yre_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpz_Latn
data_files:
- path: train/eng_Latn-zpz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yss_Latn
data_files:
- path: train/eng_Latn-yss_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yss_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yss_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kos_Latn
data_files:
- path: train/eng_Latn-kos_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kos_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kos_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-reg_Latn
data_files:
- path: train/eng_Latn-reg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-reg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-reg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rro_Latn
data_files:
- path: train/eng_Latn-rro_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rro_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rro_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apz_Latn
data_files:
- path: train/eng_Latn-apz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-boj_Latn
data_files:
- path: train/eng_Latn-boj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-boj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-boj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hla_Latn
data_files:
- path: train/eng_Latn-hla_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hla_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hla_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gyr_Latn
data_files:
- path: train/eng_Latn-gyr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gyr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gyr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ukr_Cyrl
data_files:
- path: train/eng_Latn-ukr_Cyrl.jsonl.gz
split: train
- path: test/eng_Latn-ukr_Cyrl.jsonl.gz
split: test
- path: validation/eng_Latn-ukr_Cyrl.jsonl.gz
split: validation
- config_name: eng_Latn-gvs_Latn
data_files:
- path: train/eng_Latn-gvs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gvs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gvs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mil_Latn
data_files:
- path: train/eng_Latn-mil_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mil_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mil_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gul_Latn
data_files:
- path: train/eng_Latn-gul_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gul_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gul_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ood_Latn
data_files:
- path: train/eng_Latn-ood_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ood_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ood_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ewe_Latn
data_files:
- path: train/eng_Latn-ewe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ewe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ewe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qul_Latn
data_files:
- path: train/eng_Latn-qul_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qul_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qul_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kbm_Latn
data_files:
- path: train/eng_Latn-kbm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kbm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kbm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mih_Latn
data_files:
- path: train/eng_Latn-mih_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mih_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mih_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-maq_Latn
data_files:
- path: train/eng_Latn-maq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-maq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-maq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bbr_Latn
data_files:
- path: train/eng_Latn-bbr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bbr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bbr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbu_Latn
data_files:
- path: train/eng_Latn-cbu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-meq_Latn
data_files:
- path: train/eng_Latn-meq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-meq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-meq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bmk_Latn
data_files:
- path: train/eng_Latn-bmk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bmk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bmk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hui_Latn
data_files:
- path: train/eng_Latn-hui_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hui_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hui_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tgl_Latn
data_files:
- path: train/eng_Latn-tgl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tgl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tgl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lgl_Latn
data_files:
- path: train/eng_Latn-lgl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lgl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lgl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpq_Latn
data_files:
- path: train/eng_Latn-zpq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mti_Latn
data_files:
- path: train/eng_Latn-mti_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mti_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mti_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pah_Latn
data_files:
- path: train/eng_Latn-pah_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pah_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pah_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nch_Latn
data_files:
- path: train/eng_Latn-nch_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nch_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nch_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mjc_Latn
data_files:
- path: train/eng_Latn-mjc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mjc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mjc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zty_Latn
data_files:
- path: train/eng_Latn-zty_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zty_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zty_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ksj_Latn
data_files:
- path: train/eng_Latn-ksj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ksj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ksj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nvm_Latn
data_files:
- path: train/eng_Latn-nvm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nvm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nvm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kyc_Latn
data_files:
- path: train/eng_Latn-kyc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kyc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kyc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bao_Latn
data_files:
- path: train/eng_Latn-bao_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bao_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bao_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zas_Latn
data_files:
- path: train/eng_Latn-zas_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zas_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zas_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-djr_Latn
data_files:
- path: train/eng_Latn-djr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-djr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-djr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bpr_Latn
data_files:
- path: train/eng_Latn-bpr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bpr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bpr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ndg_Latn
data_files:
- path: train/eng_Latn-ndg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ndg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ndg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ots_Latn
data_files:
- path: train/eng_Latn-ots_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ots_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ots_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-miz_Latn
data_files:
- path: train/eng_Latn-miz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-miz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-miz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cco_Latn
data_files:
- path: train/eng_Latn-cco_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cco_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cco_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbc_Latn
data_files:
- path: train/eng_Latn-mbc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-myy_Latn
data_files:
- path: train/eng_Latn-myy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-myy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-myy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-att_Latn
data_files:
- path: train/eng_Latn-att_Latn.jsonl.gz
split: train
- path: test/eng_Latn-att_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-att_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aly_Latn
data_files:
- path: train/eng_Latn-aly_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aly_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aly_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mgh_Latn
data_files:
- path: train/eng_Latn-mgh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mgh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mgh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mqb_Latn
data_files:
- path: train/eng_Latn-mqb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mqb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mqb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sps_Latn
data_files:
- path: train/eng_Latn-sps_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sps_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sps_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wbi_Latn
data_files:
- path: train/eng_Latn-wbi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wbi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wbi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rai_Latn
data_files:
- path: train/eng_Latn-rai_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rai_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rai_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-knf_Latn
data_files:
- path: train/eng_Latn-knf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-knf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-knf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-txq_Latn
data_files:
- path: train/eng_Latn-txq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-txq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-txq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cuk_Latn
data_files:
- path: train/eng_Latn-cuk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cuk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cuk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tew_Latn
data_files:
- path: train/eng_Latn-tew_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tew_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tew_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aia_Latn
data_files:
- path: train/eng_Latn-aia_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aia_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aia_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ghs_Latn
data_files:
- path: train/eng_Latn-ghs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ghs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ghs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kne_Latn
data_files:
- path: train/eng_Latn-kne_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kne_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kne_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-snp_Latn
data_files:
- path: train/eng_Latn-snp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-snp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-snp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yon_Latn
data_files:
- path: train/eng_Latn-yon_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yon_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yon_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rkb_Latn
data_files:
- path: train/eng_Latn-rkb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rkb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rkb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mam_Latn
data_files:
- path: train/eng_Latn-mam_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mam_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mam_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ffm_Latn
data_files:
- path: train/eng_Latn-ffm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ffm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ffm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tbo_Latn
data_files:
- path: train/eng_Latn-tbo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tbo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tbo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cta_Latn
data_files:
- path: train/eng_Latn-cta_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cta_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cta_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mca_Latn
data_files:
- path: train/eng_Latn-mca_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mca_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mca_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-smk_Latn
data_files:
- path: train/eng_Latn-smk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-smk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-smk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bqc_Latn
data_files:
- path: train/eng_Latn-bqc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bqc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bqc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-khz_Latn
data_files:
- path: train/eng_Latn-khz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-khz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-khz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ceb_Latn
data_files:
- path: train/eng_Latn-ceb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ceb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ceb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nyu_Latn
data_files:
- path: train/eng_Latn-nyu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nyu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nyu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hlt_Latn
data_files:
- path: train/eng_Latn-hlt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hlt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hlt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvw_Latn
data_files:
- path: train/eng_Latn-qvw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-poy_Latn
data_files:
- path: train/eng_Latn-poy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-poy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-poy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jiv_Latn
data_files:
- path: train/eng_Latn-jiv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jiv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jiv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mna_Latn
data_files:
- path: train/eng_Latn-mna_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mna_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mna_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xsi_Latn
data_files:
- path: train/eng_Latn-xsi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xsi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xsi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-crx_Latn
data_files:
- path: train/eng_Latn-crx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-crx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-crx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apb_Latn
data_files:
- path: train/eng_Latn-apb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-imo_Latn
data_files:
- path: train/eng_Latn-imo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-imo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-imo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-fue_Latn
data_files:
- path: train/eng_Latn-fue_Latn.jsonl.gz
split: train
- path: test/eng_Latn-fue_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-fue_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bhl_Latn
data_files:
- path: train/eng_Latn-bhl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bhl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bhl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tim_Latn
data_files:
- path: train/eng_Latn-tim_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tim_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tim_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mgw_Latn
data_files:
- path: train/eng_Latn-mgw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mgw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mgw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-taj_Deva
data_files:
- path: train/eng_Latn-taj_Deva.jsonl.gz
split: train
- path: test/eng_Latn-taj_Deva.jsonl.gz
split: test
- path: validation/eng_Latn-taj_Deva.jsonl.gz
split: validation
- config_name: eng_Latn-djk_Latn
data_files:
- path: train/eng_Latn-djk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-djk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-djk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ruf_Latn
data_files:
- path: train/eng_Latn-ruf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ruf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ruf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bqp_Latn
data_files:
- path: train/eng_Latn-bqp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bqp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bqp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-adz_Latn
data_files:
- path: train/eng_Latn-adz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-adz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-adz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bmr_Latn
data_files:
- path: train/eng_Latn-bmr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bmr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bmr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ata_Latn
data_files:
- path: train/eng_Latn-ata_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ata_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ata_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mio_Latn
data_files:
- path: train/eng_Latn-mio_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mio_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mio_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pad_Latn
data_files:
- path: train/eng_Latn-pad_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pad_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pad_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qxh_Latn
data_files:
- path: train/eng_Latn-qxh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qxh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qxh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tfr_Latn
data_files:
- path: train/eng_Latn-tfr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tfr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tfr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mie_Latn
data_files:
- path: train/eng_Latn-mie_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mie_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mie_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tpi_Latn
data_files:
- path: train/eng_Latn-tpi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tpi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tpi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-fuh_Latn
data_files:
- path: train/eng_Latn-fuh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-fuh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-fuh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dgr_Latn
data_files:
- path: train/eng_Latn-dgr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dgr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dgr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bch_Latn
data_files:
- path: train/eng_Latn-bch_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bch_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bch_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcr_Latn
data_files:
- path: train/eng_Latn-mcr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bkx_Latn
data_files:
- path: train/eng_Latn-bkx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bkx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bkx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agr_Latn
data_files:
- path: train/eng_Latn-agr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-chq_Latn
data_files:
- path: train/eng_Latn-chq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-chq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-chq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rwo_Latn
data_files:
- path: train/eng_Latn-rwo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rwo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rwo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-esk_Latn
data_files:
- path: train/eng_Latn-esk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-esk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-esk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpl_Latn
data_files:
- path: train/eng_Latn-zpl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bjr_Latn
data_files:
- path: train/eng_Latn-bjr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bjr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bjr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kiw_Latn
data_files:
- path: train/eng_Latn-kiw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kiw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kiw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-azg_Latn
data_files:
- path: train/eng_Latn-azg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-azg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-azg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbb_Latn
data_files:
- path: train/eng_Latn-mbb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-knj_Latn
data_files:
- path: train/eng_Latn-knj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-knj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-knj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cao_Latn
data_files:
- path: train/eng_Latn-cao_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cao_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cao_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dji_Latn
data_files:
- path: train/eng_Latn-dji_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dji_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dji_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bss_Latn
data_files:
- path: train/eng_Latn-bss_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bss_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bss_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bgs_Latn
data_files:
- path: train/eng_Latn-bgs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bgs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bgs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mek_Latn
data_files:
- path: train/eng_Latn-mek_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mek_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mek_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yuj_Latn
data_files:
- path: train/eng_Latn-yuj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yuj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yuj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tpt_Latn
data_files:
- path: train/eng_Latn-tpt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tpt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tpt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zos_Latn
data_files:
- path: train/eng_Latn-zos_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zos_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zos_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-blw_Latn
data_files:
- path: train/eng_Latn-blw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-blw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-blw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-viv_Latn
data_files:
- path: train/eng_Latn-viv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-viv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-viv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ikw_Latn
data_files:
- path: train/eng_Latn-ikw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ikw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ikw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tue_Latn
data_files:
- path: train/eng_Latn-tue_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tue_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tue_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-uvh_Latn
data_files:
- path: train/eng_Latn-uvh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-uvh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-uvh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yap_Latn
data_files:
- path: train/eng_Latn-yap_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yap_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yap_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nca_Latn
data_files:
- path: train/eng_Latn-nca_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nca_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nca_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-luo_Latn
data_files:
- path: train/eng_Latn-luo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-luo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-luo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tmd_Latn
data_files:
- path: train/eng_Latn-tmd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tmd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tmd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-txu_Latn
data_files:
- path: train/eng_Latn-txu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-txu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-txu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yor_Latn
data_files:
- path: train/eng_Latn-yor_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yor_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yor_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amx_Latn
data_files:
- path: train/eng_Latn-amx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-uli_Latn
data_files:
- path: train/eng_Latn-uli_Latn.jsonl.gz
split: train
- path: test/eng_Latn-uli_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-uli_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dov_Latn
data_files:
- path: train/eng_Latn-dov_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dov_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dov_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-huv_Latn
data_files:
- path: train/eng_Latn-huv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-huv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-huv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-msk_Latn
data_files:
- path: train/eng_Latn-msk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-msk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-msk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-twi_Latn
data_files:
- path: train/eng_Latn-twi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-twi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-twi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aer_Latn
data_files:
- path: train/eng_Latn-aer_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aer_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aer_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pib_Latn
data_files:
- path: train/eng_Latn-pib_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pib_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pib_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ter_Latn
data_files:
- path: train/eng_Latn-ter_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ter_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ter_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-eri_Latn
data_files:
- path: train/eng_Latn-eri_Latn.jsonl.gz
split: train
- path: test/eng_Latn-eri_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-eri_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cth_Latn
data_files:
- path: train/eng_Latn-cth_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cth_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cth_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dwr_Latn
data_files:
- path: train/eng_Latn-dwr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dwr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dwr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-srq_Latn
data_files:
- path: train/eng_Latn-srq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-srq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-srq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mmx_Latn
data_files:
- path: train/eng_Latn-mmx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mmx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mmx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cap_Latn
data_files:
- path: train/eng_Latn-cap_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cap_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cap_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ces_Latn
data_files:
- path: train/eng_Latn-ces_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ces_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ces_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cpu_Latn
data_files:
- path: train/eng_Latn-cpu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cpu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cpu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lww_Latn
data_files:
- path: train/eng_Latn-lww_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lww_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lww_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hix_Latn
data_files:
- path: train/eng_Latn-hix_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hix_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hix_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cab_Latn
data_files:
- path: train/eng_Latn-cab_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cab_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cab_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-fuf_Latn
data_files:
- path: train/eng_Latn-fuf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-fuf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-fuf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcf_Latn
data_files:
- path: train/eng_Latn-mcf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-shj_Latn
data_files:
- path: train/eng_Latn-shj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-shj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-shj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qwh_Latn
data_files:
- path: train/eng_Latn-qwh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qwh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qwh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zsr_Latn
data_files:
- path: train/eng_Latn-zsr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zsr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zsr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-daa_Latn
data_files:
- path: train/eng_Latn-daa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-daa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-daa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sus_Arab
data_files:
- path: train/eng_Latn-sus_Arab.jsonl.gz
split: train
- path: test/eng_Latn-sus_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-sus_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-lbk_Latn
data_files:
- path: train/eng_Latn-lbk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lbk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lbk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nii_Latn
data_files:
- path: train/eng_Latn-nii_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nii_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nii_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gvn_Latn
data_files:
- path: train/eng_Latn-gvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yaa_Latn
data_files:
- path: train/eng_Latn-yaa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yaa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yaa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-npi_Deva
data_files:
- path: train/eng_Latn-npi_Deva.jsonl.gz
split: train
- path: test/eng_Latn-npi_Deva.jsonl.gz
split: test
- path: validation/eng_Latn-npi_Deva.jsonl.gz
split: validation
- config_name: eng_Latn-uig_Latn
data_files:
- path: train/eng_Latn-uig_Latn.jsonl.gz
split: train
- path: test/eng_Latn-uig_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-uig_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mmo_Latn
data_files:
- path: train/eng_Latn-mmo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mmo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mmo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ktm_Latn
data_files:
- path: train/eng_Latn-ktm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ktm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ktm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agu_Latn
data_files:
- path: train/eng_Latn-agu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agg_Latn
data_files:
- path: train/eng_Latn-agg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ken_Latn
data_files:
- path: train/eng_Latn-ken_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ken_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ken_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-beu_Latn
data_files:
- path: train/eng_Latn-beu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-beu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-beu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cac_Latn
data_files:
- path: train/eng_Latn-cac_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cac_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cac_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-uri_Latn
data_files:
- path: train/eng_Latn-uri_Latn.jsonl.gz
split: train
- path: test/eng_Latn-uri_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-uri_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dah_Latn
data_files:
- path: train/eng_Latn-dah_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dah_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dah_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-otn_Latn
data_files:
- path: train/eng_Latn-otn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-otn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-otn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wos_Latn
data_files:
- path: train/eng_Latn-wos_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wos_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wos_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hin_Deva
data_files:
- path: train/eng_Latn-hin_Deva.jsonl.gz
split: train
- path: test/eng_Latn-hin_Deva.jsonl.gz
split: test
- path: validation/eng_Latn-hin_Deva.jsonl.gz
split: validation
- config_name: eng_Latn-ctu_Latn
data_files:
- path: train/eng_Latn-ctu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ctu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ctu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pes_Arab
data_files:
- path: train/eng_Latn-pes_Arab.jsonl.gz
split: train
- path: test/eng_Latn-pes_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-pes_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-tbf_Latn
data_files:
- path: train/eng_Latn-tbf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tbf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tbf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bsj_Latn
data_files:
- path: train/eng_Latn-bsj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bsj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bsj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aey_Latn
data_files:
- path: train/eng_Latn-aey_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aey_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aey_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qxn_Latn
data_files:
- path: train/eng_Latn-qxn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qxn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qxn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rug_Latn
data_files:
- path: train/eng_Latn-rug_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rug_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rug_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nwi_Latn
data_files:
- path: train/eng_Latn-nwi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nwi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nwi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-spl_Latn
data_files:
- path: train/eng_Latn-spl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-spl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-spl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kan_Knda
data_files:
- path: train/eng_Latn-kan_Knda.jsonl.gz
split: train
- path: test/eng_Latn-kan_Knda.jsonl.gz
split: test
- path: validation/eng_Latn-kan_Knda.jsonl.gz
split: validation
- config_name: eng_Latn-dif_Latn
data_files:
- path: train/eng_Latn-dif_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dif_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dif_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cpa_Latn
data_files:
- path: train/eng_Latn-cpa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cpa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cpa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mau_Latn
data_files:
- path: train/eng_Latn-mau_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mau_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mau_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ron_Latn
data_files:
- path: train/eng_Latn-ron_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ron_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ron_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dop_Latn
data_files:
- path: train/eng_Latn-dop_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dop_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dop_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hau_Latn
data_files:
- path: train/eng_Latn-hau_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hau_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hau_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gnn_Latn
data_files:
- path: train/eng_Latn-gnn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gnn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gnn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bsn_Latn
data_files:
- path: train/eng_Latn-bsn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bsn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bsn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpj_Latn
data_files:
- path: train/eng_Latn-kpj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wat_Latn
data_files:
- path: train/eng_Latn-wat_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wat_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wat_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-acr_Latn
data_files:
- path: train/eng_Latn-acr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-acr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-acr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-caf_Latn
data_files:
- path: train/eng_Latn-caf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-caf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-caf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dhg_Latn
data_files:
- path: train/eng_Latn-dhg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dhg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dhg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yml_Latn
data_files:
- path: train/eng_Latn-yml_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yml_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yml_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-atd_Latn
data_files:
- path: train/eng_Latn-atd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-atd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-atd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bbb_Latn
data_files:
- path: train/eng_Latn-bbb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bbb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bbb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cle_Latn
data_files:
- path: train/eng_Latn-cle_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cle_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cle_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-myk_Latn
data_files:
- path: train/eng_Latn-myk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-myk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-myk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bxh_Latn
data_files:
- path: train/eng_Latn-bxh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bxh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bxh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tpa_Latn
data_files:
- path: train/eng_Latn-tpa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tpa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tpa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-awk_Latn
data_files:
- path: train/eng_Latn-awk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-awk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-awk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gfk_Latn
data_files:
- path: train/eng_Latn-gfk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gfk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gfk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mph_Latn
data_files:
- path: train/eng_Latn-mph_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mph_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mph_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-csy_Latn
data_files:
- path: train/eng_Latn-csy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-csy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-csy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tgp_Latn
data_files:
- path: train/eng_Latn-tgp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tgp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tgp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zia_Latn
data_files:
- path: train/eng_Latn-zia_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zia_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zia_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-msm_Latn
data_files:
- path: train/eng_Latn-msm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-msm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-msm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kql_Latn
data_files:
- path: train/eng_Latn-kql_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kql_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kql_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wnu_Latn
data_files:
- path: train/eng_Latn-wnu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wnu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wnu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nin_Latn
data_files:
- path: train/eng_Latn-nin_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nin_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nin_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kmo_Latn
data_files:
- path: train/eng_Latn-kmo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kmo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kmo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mpx_Latn
data_files:
- path: train/eng_Latn-mpx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mpx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mpx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nas_Latn
data_files:
- path: train/eng_Latn-nas_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nas_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nas_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ulk_Latn
data_files:
- path: train/eng_Latn-ulk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ulk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ulk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ipi_Latn
data_files:
- path: train/eng_Latn-ipi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ipi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ipi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mgc_Latn
data_files:
- path: train/eng_Latn-mgc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mgc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mgc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ape_Latn
data_files:
- path: train/eng_Latn-ape_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ape_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ape_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tac_Latn
data_files:
- path: train/eng_Latn-tac_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tac_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tac_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bzd_Latn
data_files:
- path: train/eng_Latn-bzd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bzd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bzd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amm_Latn
data_files:
- path: train/eng_Latn-amm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mic_Latn
data_files:
- path: train/eng_Latn-mic_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mic_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mic_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sxb_Latn
data_files:
- path: train/eng_Latn-sxb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sxb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sxb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cux_Latn
data_files:
- path: train/eng_Latn-cux_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cux_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cux_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ixl_Latn
data_files:
- path: train/eng_Latn-ixl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ixl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ixl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nif_Latn
data_files:
- path: train/eng_Latn-nif_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nif_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nif_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-isn_Latn
data_files:
- path: train/eng_Latn-isn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-isn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-isn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cmn_Hans
data_files:
- path: train/eng_Latn-cmn_Hans.jsonl.gz
split: train
- path: test/eng_Latn-cmn_Hans.jsonl.gz
split: test
- path: validation/eng_Latn-cmn_Hans.jsonl.gz
split: validation
- config_name: eng_Latn-kyf_Latn
data_files:
- path: train/eng_Latn-kyf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kyf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kyf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cut_Latn
data_files:
- path: train/eng_Latn-cut_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cut_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cut_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lcm_Latn
data_files:
- path: train/eng_Latn-lcm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lcm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lcm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nya_Latn
data_files:
- path: train/eng_Latn-nya_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nya_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nya_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kjs_Latn
data_files:
- path: train/eng_Latn-kjs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kjs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kjs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ton_Latn
data_files:
- path: train/eng_Latn-ton_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ton_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ton_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvs_Latn
data_files:
- path: train/eng_Latn-qvs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ino_Latn
data_files:
- path: train/eng_Latn-ino_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ino_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ino_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zca_Latn
data_files:
- path: train/eng_Latn-zca_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zca_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zca_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xav_Latn
data_files:
- path: train/eng_Latn-xav_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xav_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xav_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jac_Latn
data_files:
- path: train/eng_Latn-jac_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jac_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jac_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-quc_Latn
data_files:
- path: train/eng_Latn-quc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-quc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-quc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-npl_Latn
data_files:
- path: train/eng_Latn-npl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-npl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-npl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-usa_Latn
data_files:
- path: train/eng_Latn-usa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-usa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-usa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kvg_Latn
data_files:
- path: train/eng_Latn-kvg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kvg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kvg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tee_Latn
data_files:
- path: train/eng_Latn-tee_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tee_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tee_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hot_Latn
data_files:
- path: train/eng_Latn-hot_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hot_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hot_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-acf_Latn
data_files:
- path: train/eng_Latn-acf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-acf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-acf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wiu_Latn
data_files:
- path: train/eng_Latn-wiu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wiu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wiu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rmc_Latn
data_files:
- path: train/eng_Latn-rmc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rmc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rmc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-snx_Latn
data_files:
- path: train/eng_Latn-snx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-snx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-snx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jpn_Jpan
data_files:
- path: train/eng_Latn-jpn_Jpan.jsonl.gz
split: train
- path: test/eng_Latn-jpn_Jpan.jsonl.gz
split: test
- path: validation/eng_Latn-jpn_Jpan.jsonl.gz
split: validation
- config_name: eng_Latn-tbg_Latn
data_files:
- path: train/eng_Latn-tbg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tbg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tbg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pwg_Latn
data_files:
- path: train/eng_Latn-pwg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pwg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pwg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhr_Latn
data_files:
- path: train/eng_Latn-nhr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mwe_Latn
data_files:
- path: train/eng_Latn-mwe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mwe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mwe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kbq_Latn
data_files:
- path: train/eng_Latn-kbq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kbq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kbq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-myw_Latn
data_files:
- path: train/eng_Latn-myw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-myw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-myw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jni_Latn
data_files:
- path: train/eng_Latn-jni_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jni_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jni_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-vmy_Latn
data_files:
- path: train/eng_Latn-vmy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-vmy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-vmy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpv_Latn
data_files:
- path: train/eng_Latn-zpv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-heb_Hebr
data_files:
- path: train/eng_Latn-heb_Hebr.jsonl.gz
split: train
- path: test/eng_Latn-heb_Hebr.jsonl.gz
split: test
- path: validation/eng_Latn-heb_Hebr.jsonl.gz
split: validation
- config_name: eng_Latn-bki_Latn
data_files:
- path: train/eng_Latn-bki_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bki_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bki_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mee_Latn
data_files:
- path: train/eng_Latn-mee_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mee_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mee_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aau_Latn
data_files:
- path: train/eng_Latn-aau_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aau_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aau_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aaz_Latn
data_files:
- path: train/eng_Latn-aaz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aaz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aaz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aoi_Latn
data_files:
- path: train/eng_Latn-aoi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aoi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aoi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-caa_Latn
data_files:
- path: train/eng_Latn-caa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-caa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-caa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zap_Latn
data_files:
- path: train/eng_Latn-zap_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zap_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zap_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amk_Latn
data_files:
- path: train/eng_Latn-amk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpo_Latn
data_files:
- path: train/eng_Latn-zpo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aom_Latn
data_files:
- path: train/eng_Latn-aom_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aom_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aom_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpu_Latn
data_files:
- path: train/eng_Latn-zpu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bdd_Latn
data_files:
- path: train/eng_Latn-bdd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bdd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bdd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-acu_Latn
data_files:
- path: train/eng_Latn-acu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-acu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-acu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-beo_Latn
data_files:
- path: train/eng_Latn-beo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-beo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-beo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cuc_Latn
data_files:
- path: train/eng_Latn-cuc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cuc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cuc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-otm_Latn
data_files:
- path: train/eng_Latn-otm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-otm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-otm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tos_Latn
data_files:
- path: train/eng_Latn-tos_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tos_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tos_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apu_Latn
data_files:
- path: train/eng_Latn-apu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jic_Latn
data_files:
- path: train/eng_Latn-jic_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jic_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jic_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cek_Latn
data_files:
- path: train/eng_Latn-cek_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cek_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cek_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tnp_Latn
data_files:
- path: train/eng_Latn-tnp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tnp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tnp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hns_Latn
data_files:
- path: train/eng_Latn-hns_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hns_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hns_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mpt_Latn
data_files:
- path: train/eng_Latn-mpt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mpt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mpt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kmg_Latn
data_files:
- path: train/eng_Latn-kmg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kmg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kmg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhg_Latn
data_files:
- path: train/eng_Latn-nhg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yle_Latn
data_files:
- path: train/eng_Latn-yle_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yle_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yle_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yka_Latn
data_files:
- path: train/eng_Latn-yka_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yka_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yka_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-maj_Latn
data_files:
- path: train/eng_Latn-maj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-maj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-maj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agt_Latn
data_files:
- path: train/eng_Latn-agt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-san_Latn
data_files:
- path: train/eng_Latn-san_Latn.jsonl.gz
split: train
- path: test/eng_Latn-san_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-san_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kew_Latn
data_files:
- path: train/eng_Latn-kew_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kew_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kew_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nop_Latn
data_files:
- path: train/eng_Latn-nop_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nop_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nop_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zyp_Latn
data_files:
- path: train/eng_Latn-zyp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zyp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zyp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvm_Latn
data_files:
- path: train/eng_Latn-qvm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mwp_Latn
data_files:
- path: train/eng_Latn-mwp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mwp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mwp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhw_Latn
data_files:
- path: train/eng_Latn-nhw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-als_Latn
data_files:
- path: train/eng_Latn-als_Latn.jsonl.gz
split: train
- path: test/eng_Latn-als_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-als_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cof_Latn
data_files:
- path: train/eng_Latn-cof_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cof_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cof_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-auy_Latn
data_files:
- path: train/eng_Latn-auy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-auy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-auy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pol_Latn
data_files:
- path: train/eng_Latn-pol_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pol_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pol_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-klt_Latn
data_files:
- path: train/eng_Latn-klt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-klt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-klt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-soy_Latn
data_files:
- path: train/eng_Latn-soy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-soy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-soy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ita_Latn
data_files:
- path: train/eng_Latn-ita_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ita_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ita_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-arn_Latn
data_files:
- path: train/eng_Latn-arn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-arn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-arn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tbz_Latn
data_files:
- path: train/eng_Latn-tbz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tbz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tbz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zaa_Latn
data_files:
- path: train/eng_Latn-zaa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zaa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zaa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-meu_Latn
data_files:
- path: train/eng_Latn-meu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-meu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-meu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zao_Latn
data_files:
- path: train/eng_Latn-zao_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zao_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zao_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mxp_Latn
data_files:
- path: train/eng_Latn-mxp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mxp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mxp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rgu_Latn
data_files:
- path: train/eng_Latn-rgu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rgu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rgu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tnn_Latn
data_files:
- path: train/eng_Latn-tnn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tnn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tnn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-uvl_Latn
data_files:
- path: train/eng_Latn-uvl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-uvl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-uvl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-eko_Latn
data_files:
- path: train/eng_Latn-eko_Latn.jsonl.gz
split: train
- path: test/eng_Latn-eko_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-eko_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wmt_Latn
data_files:
- path: train/eng_Latn-wmt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wmt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wmt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kup_Latn
data_files:
- path: train/eng_Latn-kup_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kup_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kup_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zai_Latn
data_files:
- path: train/eng_Latn-zai_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zai_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zai_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ebk_Latn
data_files:
- path: train/eng_Latn-ebk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ebk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ebk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-vie_Latn
data_files:
- path: train/eng_Latn-vie_Latn.jsonl.gz
split: train
- path: test/eng_Latn-vie_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-vie_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-azz_Latn
data_files:
- path: train/eng_Latn-azz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-azz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-azz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wbp_Latn
data_files:
- path: train/eng_Latn-wbp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wbp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wbp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tvk_Latn
data_files:
- path: train/eng_Latn-tvk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tvk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tvk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ote_Latn
data_files:
- path: train/eng_Latn-ote_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ote_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ote_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ubu_Latn
data_files:
- path: train/eng_Latn-ubu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ubu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ubu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zga_Latn
data_files:
- path: train/eng_Latn-zga_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zga_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zga_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bps_Latn
data_files:
- path: train/eng_Latn-bps_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bps_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bps_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-swh_Latn
data_files:
- path: train/eng_Latn-swh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-swh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-swh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ntp_Latn
data_files:
- path: train/eng_Latn-ntp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ntp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ntp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tav_Latn
data_files:
- path: train/eng_Latn-tav_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tav_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tav_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kms_Latn
data_files:
- path: train/eng_Latn-kms_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kms_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kms_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pio_Latn
data_files:
- path: train/eng_Latn-pio_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pio_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pio_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-guj_Gujr
data_files:
- path: train/eng_Latn-guj_Gujr.jsonl.gz
split: train
- path: test/eng_Latn-guj_Gujr.jsonl.gz
split: test
- path: validation/eng_Latn-guj_Gujr.jsonl.gz
split: validation
- config_name: eng_Latn-mbl_Latn
data_files:
- path: train/eng_Latn-mbl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aii_Syrc
data_files:
- path: train/eng_Latn-aii_Syrc.jsonl.gz
split: train
- path: test/eng_Latn-aii_Syrc.jsonl.gz
split: test
- path: validation/eng_Latn-aii_Syrc.jsonl.gz
split: validation
- config_name: eng_Latn-mle_Latn
data_files:
- path: train/eng_Latn-mle_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mle_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mle_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zam_Latn
data_files:
- path: train/eng_Latn-zam_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zam_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zam_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-urt_Latn
data_files:
- path: train/eng_Latn-urt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-urt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-urt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-guo_Latn
data_files:
- path: train/eng_Latn-guo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-guo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-guo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bkd_Latn
data_files:
- path: train/eng_Latn-bkd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bkd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bkd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rmy_Latn
data_files:
- path: train/eng_Latn-rmy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-rmy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-rmy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ong_Latn
data_files:
- path: train/eng_Latn-ong_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ong_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ong_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mlp_Latn
data_files:
- path: train/eng_Latn-mlp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mlp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mlp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mir_Latn
data_files:
- path: train/eng_Latn-mir_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mir_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mir_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-med_Latn
data_files:
- path: train/eng_Latn-med_Latn.jsonl.gz
split: train
- path: test/eng_Latn-med_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-med_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bef_Latn
data_files:
- path: train/eng_Latn-bef_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bef_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bef_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yuw_Latn
data_files:
- path: train/eng_Latn-yuw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yuw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yuw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ded_Latn
data_files:
- path: train/eng_Latn-ded_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ded_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ded_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ame_Latn
data_files:
- path: train/eng_Latn-ame_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ame_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ame_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-car_Latn
data_files:
- path: train/eng_Latn-car_Latn.jsonl.gz
split: train
- path: test/eng_Latn-car_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-car_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-chz_Latn
data_files:
- path: train/eng_Latn-chz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-chz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-chz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ubr_Latn
data_files:
- path: train/eng_Latn-ubr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ubr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ubr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mar_Deva
data_files:
- path: train/eng_Latn-mar_Deva.jsonl.gz
split: train
- path: test/eng_Latn-mar_Deva.jsonl.gz
split: test
- path: validation/eng_Latn-mar_Deva.jsonl.gz
split: validation
- config_name: eng_Latn-gun_Latn
data_files:
- path: train/eng_Latn-gun_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gun_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gun_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pir_Latn
data_files:
- path: train/eng_Latn-pir_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pir_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pir_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-inb_Latn
data_files:
- path: train/eng_Latn-inb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-inb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-inb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gym_Latn
data_files:
- path: train/eng_Latn-gym_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gym_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gym_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mit_Latn
data_files:
- path: train/eng_Latn-mit_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mit_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mit_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-enq_Latn
data_files:
- path: train/eng_Latn-enq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-enq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-enq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kqf_Latn
data_files:
- path: train/eng_Latn-kqf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kqf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kqf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbh_Latn
data_files:
- path: train/eng_Latn-mbh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xbi_Latn
data_files:
- path: train/eng_Latn-xbi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xbi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xbi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gof_Latn
data_files:
- path: train/eng_Latn-gof_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gof_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gof_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lat_Latn
data_files:
- path: train/eng_Latn-lat_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lat_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lat_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gah_Latn
data_files:
- path: train/eng_Latn-gah_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gah_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gah_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zav_Latn
data_files:
- path: train/eng_Latn-zav_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zav_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zav_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tnc_Latn
data_files:
- path: train/eng_Latn-tnc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tnc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tnc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aso_Latn
data_files:
- path: train/eng_Latn-aso_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aso_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aso_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cax_Latn
data_files:
- path: train/eng_Latn-cax_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cax_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cax_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xtm_Latn
data_files:
- path: train/eng_Latn-xtm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xtm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xtm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-llg_Latn
data_files:
- path: train/eng_Latn-llg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-llg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-llg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pls_Latn
data_files:
- path: train/eng_Latn-pls_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pls_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pls_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhi_Latn
data_files:
- path: train/eng_Latn-nhi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-leu_Latn
data_files:
- path: train/eng_Latn-leu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-leu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-leu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agn_Latn
data_files:
- path: train/eng_Latn-agn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hus_Latn
data_files:
- path: train/eng_Latn-hus_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hus_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hus_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hvn_Latn
data_files:
- path: train/eng_Latn-hvn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hvn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hvn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gup_Latn
data_files:
- path: train/eng_Latn-gup_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gup_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gup_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ncu_Latn
data_files:
- path: train/eng_Latn-ncu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ncu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ncu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ilo_Latn
data_files:
- path: train/eng_Latn-ilo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ilo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ilo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cjv_Latn
data_files:
- path: train/eng_Latn-cjv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cjv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cjv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbi_Latn
data_files:
- path: train/eng_Latn-cbi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sll_Latn
data_files:
- path: train/eng_Latn-sll_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sll_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sll_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gvf_Latn
data_files:
- path: train/eng_Latn-gvf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gvf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gvf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbk_Latn
data_files:
- path: train/eng_Latn-cbk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ctp_Latn
data_files:
- path: train/eng_Latn-ctp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ctp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ctp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-rus_Cyrl
data_files:
- path: train/eng_Latn-rus_Cyrl.jsonl.gz
split: train
- path: test/eng_Latn-rus_Cyrl.jsonl.gz
split: test
- path: validation/eng_Latn-rus_Cyrl.jsonl.gz
split: validation
- config_name: eng_Latn-zpc_Latn
data_files:
- path: train/eng_Latn-zpc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dww_Latn
data_files:
- path: train/eng_Latn-dww_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dww_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dww_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-haw_Latn
data_files:
- path: train/eng_Latn-haw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-haw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-haw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hrv_Latn
data_files:
- path: train/eng_Latn-hrv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hrv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hrv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bhg_Latn
data_files:
- path: train/eng_Latn-bhg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bhg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bhg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kyg_Latn
data_files:
- path: train/eng_Latn-kyg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kyg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kyg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-are_Latn
data_files:
- path: train/eng_Latn-are_Latn.jsonl.gz
split: train
- path: test/eng_Latn-are_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-are_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pma_Latn
data_files:
- path: train/eng_Latn-pma_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pma_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pma_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tcz_Latn
data_files:
- path: train/eng_Latn-tcz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tcz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tcz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mop_Latn
data_files:
- path: train/eng_Latn-mop_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mop_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mop_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-maa_Latn
data_files:
- path: train/eng_Latn-maa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-maa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-maa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-row_Latn
data_files:
- path: train/eng_Latn-row_Latn.jsonl.gz
split: train
- path: test/eng_Latn-row_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-row_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcp_Latn
data_files:
- path: train/eng_Latn-mcp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bjv_Latn
data_files:
- path: train/eng_Latn-bjv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bjv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bjv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dan_Latn
data_files:
- path: train/eng_Latn-dan_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dan_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dan_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpw_Latn
data_files:
- path: train/eng_Latn-kpw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yal_Latn
data_files:
- path: train/eng_Latn-yal_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yal_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yal_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yrb_Latn
data_files:
- path: train/eng_Latn-yrb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yrb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yrb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-piu_Latn
data_files:
- path: train/eng_Latn-piu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-piu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-piu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kqa_Latn
data_files:
- path: train/eng_Latn-kqa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kqa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kqa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-buk_Latn
data_files:
- path: train/eng_Latn-buk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-buk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-buk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-taw_Latn
data_files:
- path: train/eng_Latn-taw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-taw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-taw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bzj_Latn
data_files:
- path: train/eng_Latn-bzj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bzj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bzj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-boa_Latn
data_files:
- path: train/eng_Latn-boa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-boa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-boa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sua_Latn
data_files:
- path: train/eng_Latn-sua_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sua_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sua_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mwf_Latn
data_files:
- path: train/eng_Latn-mwf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mwf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mwf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-deu_Latn
data_files:
- path: train/eng_Latn-deu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-deu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-deu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ura_Latn
data_files:
- path: train/eng_Latn-ura_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ura_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ura_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aui_Latn
data_files:
- path: train/eng_Latn-aui_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aui_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aui_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wim_Latn
data_files:
- path: train/eng_Latn-wim_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wim_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wim_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bzh_Latn
data_files:
- path: train/eng_Latn-bzh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bzh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bzh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nld_Latn
data_files:
- path: train/eng_Latn-nld_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nld_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nld_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ory_Orya
data_files:
- path: train/eng_Latn-ory_Orya.jsonl.gz
split: train
- path: test/eng_Latn-ory_Orya.jsonl.gz
split: test
- path: validation/eng_Latn-ory_Orya.jsonl.gz
split: validation
- config_name: eng_Latn-ppo_Latn
data_files:
- path: train/eng_Latn-ppo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ppo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ppo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-epo_Latn
data_files:
- path: train/eng_Latn-epo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-epo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-epo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hop_Latn
data_files:
- path: train/eng_Latn-hop_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hop_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hop_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gdn_Latn
data_files:
- path: train/eng_Latn-gdn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gdn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gdn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tlf_Latn
data_files:
- path: train/eng_Latn-tlf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tlf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tlf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tiw_Latn
data_files:
- path: train/eng_Latn-tiw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tiw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tiw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sja_Latn
data_files:
- path: train/eng_Latn-sja_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sja_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sja_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kdl_Latn
data_files:
- path: train/eng_Latn-kdl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kdl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kdl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-chk_Latn
data_files:
- path: train/eng_Latn-chk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-chk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-chk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kdc_Latn
data_files:
- path: train/eng_Latn-kdc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kdc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kdc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gng_Latn
data_files:
- path: train/eng_Latn-gng_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gng_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gng_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nko_Latn
data_files:
- path: train/eng_Latn-nko_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nko_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nko_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wer_Latn
data_files:
- path: train/eng_Latn-wer_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wer_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wer_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mhl_Latn
data_files:
- path: train/eng_Latn-mhl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mhl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mhl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-toc_Latn
data_files:
- path: train/eng_Latn-toc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-toc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-toc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbs_Latn
data_files:
- path: train/eng_Latn-cbs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qub_Latn
data_files:
- path: train/eng_Latn-qub_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qub_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qub_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-auc_Latn
data_files:
- path: train/eng_Latn-auc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-auc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-auc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpr_Latn
data_files:
- path: train/eng_Latn-kpr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hat_Latn
data_files:
- path: train/eng_Latn-hat_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hat_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hat_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sny_Latn
data_files:
- path: train/eng_Latn-sny_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sny_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sny_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-byr_Latn
data_files:
- path: train/eng_Latn-byr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-byr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-byr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-emp_Latn
data_files:
- path: train/eng_Latn-emp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-emp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-emp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kwi_Latn
data_files:
- path: train/eng_Latn-kwi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kwi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kwi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gum_Latn
data_files:
- path: train/eng_Latn-gum_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gum_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gum_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbj_Latn
data_files:
- path: train/eng_Latn-mbj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sey_Latn
data_files:
- path: train/eng_Latn-sey_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sey_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sey_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-alp_Latn
data_files:
- path: train/eng_Latn-alp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-alp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-alp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gmv_Latn
data_files:
- path: train/eng_Latn-gmv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gmv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gmv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-poe_Latn
data_files:
- path: train/eng_Latn-poe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-poe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-poe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-etr_Latn
data_files:
- path: train/eng_Latn-etr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-etr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-etr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-abt_Latn
data_files:
- path: train/eng_Latn-abt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-abt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-abt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tuf_Latn
data_files:
- path: train/eng_Latn-tuf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tuf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tuf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dob_Latn
data_files:
- path: train/eng_Latn-dob_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dob_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dob_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nys_Latn
data_files:
- path: train/eng_Latn-nys_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nys_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nys_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-srn_Latn
data_files:
- path: train/eng_Latn-srn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-srn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-srn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpf_Latn
data_files:
- path: train/eng_Latn-kpf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbt_Latn
data_files:
- path: train/eng_Latn-mbt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-stp_Latn
data_files:
- path: train/eng_Latn-stp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-stp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-stp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-trc_Latn
data_files:
- path: train/eng_Latn-trc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-trc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-trc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-for_Latn
data_files:
- path: train/eng_Latn-for_Latn.jsonl.gz
split: train
- path: test/eng_Latn-for_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-for_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yad_Latn
data_files:
- path: train/eng_Latn-yad_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yad_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yad_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cme_Latn
data_files:
- path: train/eng_Latn-cme_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cme_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cme_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bba_Latn
data_files:
- path: train/eng_Latn-bba_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bba_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bba_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mxq_Latn
data_files:
- path: train/eng_Latn-mxq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mxq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mxq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dwy_Latn
data_files:
- path: train/eng_Latn-dwy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dwy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dwy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-atg_Latn
data_files:
- path: train/eng_Latn-atg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-atg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-atg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ncj_Latn
data_files:
- path: train/eng_Latn-ncj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ncj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ncj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mpm_Latn
data_files:
- path: train/eng_Latn-mpm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mpm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mpm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kqc_Latn
data_files:
- path: train/eng_Latn-kqc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kqc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kqc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-knv_Latn
data_files:
- path: train/eng_Latn-knv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-knv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-knv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-upv_Latn
data_files:
- path: train/eng_Latn-upv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-upv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-upv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yut_Latn
data_files:
- path: train/eng_Latn-yut_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yut_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yut_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kje_Latn
data_files:
- path: train/eng_Latn-kje_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kje_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kje_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-okv_Latn
data_files:
- path: train/eng_Latn-okv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-okv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-okv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tof_Latn
data_files:
- path: train/eng_Latn-tof_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tof_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tof_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-faa_Latn
data_files:
- path: train/eng_Latn-faa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-faa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-faa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mya_Latn
data_files:
- path: train/eng_Latn-mya_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mya_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mya_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hto_Latn
data_files:
- path: train/eng_Latn-hto_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hto_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hto_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wiv_Latn
data_files:
- path: train/eng_Latn-wiv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wiv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wiv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-vid_Latn
data_files:
- path: train/eng_Latn-vid_Latn.jsonl.gz
split: train
- path: test/eng_Latn-vid_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-vid_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xla_Latn
data_files:
- path: train/eng_Latn-xla_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xla_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xla_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-snn_Latn
data_files:
- path: train/eng_Latn-snn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-snn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-snn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ycn_Latn
data_files:
- path: train/eng_Latn-ycn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ycn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ycn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcb_Latn
data_files:
- path: train/eng_Latn-mcb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mal_Mlym
data_files:
- path: train/eng_Latn-mal_Mlym.jsonl.gz
split: train
- path: test/eng_Latn-mal_Mlym.jsonl.gz
split: test
- path: validation/eng_Latn-mal_Mlym.jsonl.gz
split: validation
- config_name: eng_Latn-sgb_Latn
data_files:
- path: train/eng_Latn-sgb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sgb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sgb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-box_Latn
data_files:
- path: train/eng_Latn-box_Latn.jsonl.gz
split: train
- path: test/eng_Latn-box_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-box_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amu_Latn
data_files:
- path: train/eng_Latn-amu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cni_Latn
data_files:
- path: train/eng_Latn-cni_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cni_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cni_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-byx_Latn
data_files:
- path: train/eng_Latn-byx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-byx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-byx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-udu_Latn
data_files:
- path: train/eng_Latn-udu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-udu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-udu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jid_Latn
data_files:
- path: train/eng_Latn-jid_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jid_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jid_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nlg_Latn
data_files:
- path: train/eng_Latn-nlg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nlg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nlg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wuv_Latn
data_files:
- path: train/eng_Latn-wuv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wuv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wuv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mto_Latn
data_files:
- path: train/eng_Latn-mto_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mto_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mto_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcd_Latn
data_files:
- path: train/eng_Latn-mcd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bus_Latn
data_files:
- path: train/eng_Latn-bus_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bus_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bus_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-glk_Arab
data_files:
- path: train/eng_Latn-glk_Arab.jsonl.gz
split: train
- path: test/eng_Latn-glk_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-glk_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-too_Latn
data_files:
- path: train/eng_Latn-too_Latn.jsonl.gz
split: train
- path: test/eng_Latn-too_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-too_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mpp_Latn
data_files:
- path: train/eng_Latn-mpp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mpp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mpp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zaj_Latn
data_files:
- path: train/eng_Latn-zaj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zaj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zaj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-yva_Latn
data_files:
- path: train/eng_Latn-yva_Latn.jsonl.gz
split: train
- path: test/eng_Latn-yva_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-yva_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-awx_Latn
data_files:
- path: train/eng_Latn-awx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-awx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-awx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ian_Latn
data_files:
- path: train/eng_Latn-ian_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ian_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ian_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-otq_Latn
data_files:
- path: train/eng_Latn-otq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-otq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-otq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-fra_Latn
data_files:
- path: train/eng_Latn-fra_Latn.jsonl.gz
split: train
- path: test/eng_Latn-fra_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-fra_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zlm_Latn
data_files:
- path: train/eng_Latn-zlm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zlm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zlm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ptu_Latn
data_files:
- path: train/eng_Latn-ptu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ptu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ptu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-apr_Latn
data_files:
- path: train/eng_Latn-apr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-apr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-apr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zab_Latn
data_files:
- path: train/eng_Latn-zab_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zab_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zab_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kwf_Latn
data_files:
- path: train/eng_Latn-kwf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kwf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kwf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cya_Latn
data_files:
- path: train/eng_Latn-cya_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cya_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cya_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nna_Latn
data_files:
- path: train/eng_Latn-nna_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nna_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nna_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wnc_Latn
data_files:
- path: train/eng_Latn-wnc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wnc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wnc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dad_Latn
data_files:
- path: train/eng_Latn-dad_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dad_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dad_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-opm_Latn
data_files:
- path: train/eng_Latn-opm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-opm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-opm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zaw_Latn
data_files:
- path: train/eng_Latn-zaw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zaw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zaw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-agd_Latn
data_files:
- path: train/eng_Latn-agd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-agd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-agd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qxo_Latn
data_files:
- path: train/eng_Latn-qxo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qxo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qxo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ign_Latn
data_files:
- path: train/eng_Latn-ign_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ign_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ign_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cak_Latn
data_files:
- path: train/eng_Latn-cak_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cak_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cak_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-top_Latn
data_files:
- path: train/eng_Latn-top_Latn.jsonl.gz
split: train
- path: test/eng_Latn-top_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-top_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tgk_Cyrl
data_files:
- path: train/eng_Latn-tgk_Cyrl.jsonl.gz
split: train
- path: test/eng_Latn-tgk_Cyrl.jsonl.gz
split: test
- path: validation/eng_Latn-tgk_Cyrl.jsonl.gz
split: validation
- config_name: eng_Latn-awb_Latn
data_files:
- path: train/eng_Latn-awb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-awb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-awb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cnl_Latn
data_files:
- path: train/eng_Latn-cnl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cnl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cnl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kgp_Latn
data_files:
- path: train/eng_Latn-kgp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kgp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kgp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-khs_Latn
data_files:
- path: train/eng_Latn-khs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-khs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-khs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-abx_Latn
data_files:
- path: train/eng_Latn-abx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-abx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-abx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mcq_Latn
data_files:
- path: train/eng_Latn-mcq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mcq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mcq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-chd_Latn
data_files:
- path: train/eng_Latn-chd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-chd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-chd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kud_Latn
data_files:
- path: train/eng_Latn-kud_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kud_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kud_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nak_Latn
data_files:
- path: train/eng_Latn-nak_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nak_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nak_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mux_Latn
data_files:
- path: train/eng_Latn-mux_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mux_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mux_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tzo_Latn
data_files:
- path: train/eng_Latn-tzo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tzo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tzo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mav_Latn
data_files:
- path: train/eng_Latn-mav_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mav_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mav_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-avt_Latn
data_files:
- path: train/eng_Latn-avt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-avt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-avt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bjz_Latn
data_files:
- path: train/eng_Latn-bjz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bjz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bjz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ptp_Latn
data_files:
- path: train/eng_Latn-ptp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ptp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ptp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gnw_Latn
data_files:
- path: train/eng_Latn-gnw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gnw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gnw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cub_Latn
data_files:
- path: train/eng_Latn-cub_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cub_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cub_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hmo_Latn
data_files:
- path: train/eng_Latn-hmo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hmo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hmo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kkl_Latn
data_files:
- path: train/eng_Latn-kkl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kkl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kkl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nou_Latn
data_files:
- path: train/eng_Latn-nou_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nou_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nou_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bre_Latn
data_files:
- path: train/eng_Latn-bre_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bre_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bre_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sim_Latn
data_files:
- path: train/eng_Latn-sim_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sim_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sim_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sbk_Latn
data_files:
- path: train/eng_Latn-sbk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sbk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sbk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nsn_Latn
data_files:
- path: train/eng_Latn-nsn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nsn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nsn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mva_Latn
data_files:
- path: train/eng_Latn-mva_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mva_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mva_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kkc_Latn
data_files:
- path: train/eng_Latn-kkc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kkc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kkc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gvc_Latn
data_files:
- path: train/eng_Latn-gvc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gvc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gvc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jao_Latn
data_files:
- path: train/eng_Latn-jao_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jao_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jao_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kek_Latn
data_files:
- path: train/eng_Latn-kek_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kek_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kek_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nfa_Latn
data_files:
- path: train/eng_Latn-nfa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nfa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nfa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lid_Latn
data_files:
- path: train/eng_Latn-lid_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lid_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lid_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kmu_Latn
data_files:
- path: train/eng_Latn-kmu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kmu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kmu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hbo_Hebr
data_files:
- path: train/eng_Latn-hbo_Hebr.jsonl.gz
split: train
- path: test/eng_Latn-hbo_Hebr.jsonl.gz
split: test
- path: validation/eng_Latn-hbo_Hebr.jsonl.gz
split: validation
- config_name: eng_Latn-bkq_Latn
data_files:
- path: train/eng_Latn-bkq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bkq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bkq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mig_Latn
data_files:
- path: train/eng_Latn-mig_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mig_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mig_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-jae_Latn
data_files:
- path: train/eng_Latn-jae_Latn.jsonl.gz
split: train
- path: test/eng_Latn-jae_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-jae_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ben_Beng
data_files:
- path: train/eng_Latn-ben_Beng.jsonl.gz
split: train
- path: test/eng_Latn-ben_Beng.jsonl.gz
split: test
- path: validation/eng_Latn-ben_Beng.jsonl.gz
split: validation
- config_name: eng_Latn-spy_Latn
data_files:
- path: train/eng_Latn-spy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-spy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-spy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bvd_Latn
data_files:
- path: train/eng_Latn-bvd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bvd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bvd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bvr_Latn
data_files:
- path: train/eng_Latn-bvr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bvr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bvr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kto_Latn
data_files:
- path: train/eng_Latn-kto_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kto_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kto_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amn_Latn
data_files:
- path: train/eng_Latn-amn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-spp_Latn
data_files:
- path: train/eng_Latn-spp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-spp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-spp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ncl_Latn
data_files:
- path: train/eng_Latn-ncl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ncl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ncl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tdt_Latn
data_files:
- path: train/eng_Latn-tdt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tdt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tdt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-urw_Latn
data_files:
- path: train/eng_Latn-urw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-urw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-urw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-shp_Latn
data_files:
- path: train/eng_Latn-shp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-shp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-shp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-met_Latn
data_files:
- path: train/eng_Latn-met_Latn.jsonl.gz
split: train
- path: test/eng_Latn-met_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-met_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pon_Latn
data_files:
- path: train/eng_Latn-pon_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pon_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pon_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tiy_Latn
data_files:
- path: train/eng_Latn-tiy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tiy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tiy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhy_Latn
data_files:
- path: train/eng_Latn-nhy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cpc_Latn
data_files:
- path: train/eng_Latn-cpc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cpc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cpc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bel_Cyrl
data_files:
- path: train/eng_Latn-bel_Cyrl.jsonl.gz
split: train
- path: test/eng_Latn-bel_Cyrl.jsonl.gz
split: test
- path: validation/eng_Latn-bel_Cyrl.jsonl.gz
split: validation
- config_name: eng_Latn-cbv_Latn
data_files:
- path: train/eng_Latn-cbv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pab_Latn
data_files:
- path: train/eng_Latn-pab_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pab_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pab_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dgz_Latn
data_files:
- path: train/eng_Latn-dgz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dgz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dgz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-pri_Latn
data_files:
- path: train/eng_Latn-pri_Latn.jsonl.gz
split: train
- path: test/eng_Latn-pri_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-pri_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hch_Latn
data_files:
- path: train/eng_Latn-hch_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hch_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hch_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wed_Latn
data_files:
- path: train/eng_Latn-wed_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wed_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wed_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-suz_Latn
data_files:
- path: train/eng_Latn-suz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-suz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-suz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-usp_Latn
data_files:
- path: train/eng_Latn-usp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-usp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-usp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mkl_Latn
data_files:
- path: train/eng_Latn-mkl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mkl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mkl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbt_Latn
data_files:
- path: train/eng_Latn-cbt_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbt_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbt_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kpg_Latn
data_files:
- path: train/eng_Latn-kpg_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kpg_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kpg_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kue_Latn
data_files:
- path: train/eng_Latn-kue_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kue_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kue_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sbs_Latn
data_files:
- path: train/eng_Latn-sbs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sbs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sbs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvz_Latn
data_files:
- path: train/eng_Latn-qvz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-seh_Latn
data_files:
- path: train/eng_Latn-seh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-seh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-seh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wrs_Latn
data_files:
- path: train/eng_Latn-wrs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wrs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wrs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-snc_Latn
data_files:
- path: train/eng_Latn-snc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-snc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-snc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bjp_Latn
data_files:
- path: train/eng_Latn-bjp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bjp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bjp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kyz_Latn
data_files:
- path: train/eng_Latn-kyz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kyz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kyz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-noa_Latn
data_files:
- path: train/eng_Latn-noa_Latn.jsonl.gz
split: train
- path: test/eng_Latn-noa_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-noa_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ssx_Latn
data_files:
- path: train/eng_Latn-ssx_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ssx_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ssx_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nbq_Latn
data_files:
- path: train/eng_Latn-nbq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nbq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nbq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-msb_Latn
data_files:
- path: train/eng_Latn-msb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-msb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-msb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sue_Latn
data_files:
- path: train/eng_Latn-sue_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sue_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sue_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-asm_Beng
data_files:
- path: train/eng_Latn-asm_Beng.jsonl.gz
split: train
- path: test/eng_Latn-asm_Beng.jsonl.gz
split: test
- path: validation/eng_Latn-asm_Beng.jsonl.gz
split: validation
- config_name: eng_Latn-som_Latn
data_files:
- path: train/eng_Latn-som_Latn.jsonl.gz
split: train
- path: test/eng_Latn-som_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-som_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xon_Latn
data_files:
- path: train/eng_Latn-xon_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xon_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xon_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvh_Latn
data_files:
- path: train/eng_Latn-qvh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mib_Latn
data_files:
- path: train/eng_Latn-mib_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mib_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mib_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wap_Latn
data_files:
- path: train/eng_Latn-wap_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wap_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wap_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gai_Latn
data_files:
- path: train/eng_Latn-gai_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gai_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gai_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mkn_Latn
data_files:
- path: train/eng_Latn-mkn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mkn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mkn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xnn_Latn
data_files:
- path: train/eng_Latn-xnn_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xnn_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xnn_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amf_Latn
data_files:
- path: train/eng_Latn-amf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhe_Latn
data_files:
- path: train/eng_Latn-nhe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kmk_Latn
data_files:
- path: train/eng_Latn-kmk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kmk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kmk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tam_Taml
data_files:
- path: train/eng_Latn-tam_Taml.jsonl.gz
split: train
- path: test/eng_Latn-tam_Taml.jsonl.gz
split: test
- path: validation/eng_Latn-tam_Taml.jsonl.gz
split: validation
- config_name: eng_Latn-nab_Latn
data_files:
- path: train/eng_Latn-nab_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nab_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nab_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dik_Latn
data_files:
- path: train/eng_Latn-dik_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dik_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dik_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cpy_Latn
data_files:
- path: train/eng_Latn-cpy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cpy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cpy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-arl_Latn
data_files:
- path: train/eng_Latn-arl_Latn.jsonl.gz
split: train
- path: test/eng_Latn-arl_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-arl_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tuc_Latn
data_files:
- path: train/eng_Latn-tuc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tuc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tuc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ngu_Latn
data_files:
- path: train/eng_Latn-ngu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ngu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ngu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zad_Latn
data_files:
- path: train/eng_Latn-zad_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zad_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zad_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kmh_Latn
data_files:
- path: train/eng_Latn-kmh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kmh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kmh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-quh_Latn
data_files:
- path: train/eng_Latn-quh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-quh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-quh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-alq_Latn
data_files:
- path: train/eng_Latn-alq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-alq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-alq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-grc_Grek
data_files:
- path: train/eng_Latn-grc_Grek.jsonl.gz
split: train
- path: test/eng_Latn-grc_Grek.jsonl.gz
split: test
- path: validation/eng_Latn-grc_Grek.jsonl.gz
split: validation
- config_name: eng_Latn-kaq_Latn
data_files:
- path: train/eng_Latn-kaq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kaq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kaq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zpm_Latn
data_files:
- path: train/eng_Latn-zpm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zpm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zpm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-obo_Latn
data_files:
- path: train/eng_Latn-obo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-obo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-obo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cha_Latn
data_files:
- path: train/eng_Latn-cha_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cha_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cha_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mbs_Latn
data_files:
- path: train/eng_Latn-mbs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mbs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mbs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-poi_Latn
data_files:
- path: train/eng_Latn-poi_Latn.jsonl.gz
split: train
- path: test/eng_Latn-poi_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-poi_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-spm_Latn
data_files:
- path: train/eng_Latn-spm_Latn.jsonl.gz
split: train
- path: test/eng_Latn-spm_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-spm_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cpb_Latn
data_files:
- path: train/eng_Latn-cpb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cpb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cpb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-omw_Latn
data_files:
- path: train/eng_Latn-omw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-omw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-omw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-klv_Latn
data_files:
- path: train/eng_Latn-klv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-klv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-klv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sbe_Latn
data_files:
- path: train/eng_Latn-sbe_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sbe_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sbe_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ntu_Latn
data_files:
- path: train/eng_Latn-ntu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ntu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ntu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-zat_Latn
data_files:
- path: train/eng_Latn-zat_Latn.jsonl.gz
split: train
- path: test/eng_Latn-zat_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-zat_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bsp_Latn
data_files:
- path: train/eng_Latn-bsp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bsp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bsp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mps_Latn
data_files:
- path: train/eng_Latn-mps_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mps_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mps_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mks_Latn
data_files:
- path: train/eng_Latn-mks_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mks_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mks_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bmh_Latn
data_files:
- path: train/eng_Latn-bmh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bmh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bmh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tpz_Latn
data_files:
- path: train/eng_Latn-tpz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tpz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tpz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amr_Latn
data_files:
- path: train/eng_Latn-amr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cjo_Latn
data_files:
- path: train/eng_Latn-cjo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cjo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cjo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ksr_Latn
data_files:
- path: train/eng_Latn-ksr_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ksr_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ksr_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tgo_Latn
data_files:
- path: train/eng_Latn-tgo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tgo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tgo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tke_Latn
data_files:
- path: train/eng_Latn-tke_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tke_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tke_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lac_Latn
data_files:
- path: train/eng_Latn-lac_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lac_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lac_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nhu_Latn
data_files:
- path: train/eng_Latn-nhu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nhu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nhu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ssd_Latn
data_files:
- path: train/eng_Latn-ssd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ssd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ssd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bon_Latn
data_files:
- path: train/eng_Latn-bon_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bon_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bon_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cso_Latn
data_files:
- path: train/eng_Latn-cso_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cso_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cso_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-naf_Latn
data_files:
- path: train/eng_Latn-naf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-naf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-naf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kbh_Latn
data_files:
- path: train/eng_Latn-kbh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kbh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kbh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-hun_Latn
data_files:
- path: train/eng_Latn-hun_Latn.jsonl.gz
split: train
- path: test/eng_Latn-hun_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-hun_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tte_Latn
data_files:
- path: train/eng_Latn-tte_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tte_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tte_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amo_Latn
data_files:
- path: train/eng_Latn-amo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kiz_Latn
data_files:
- path: train/eng_Latn-kiz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kiz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kiz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wsk_Latn
data_files:
- path: train/eng_Latn-wsk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wsk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wsk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kwd_Latn
data_files:
- path: train/eng_Latn-kwd_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kwd_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kwd_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-geb_Latn
data_files:
- path: train/eng_Latn-geb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-geb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-geb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mdy_Latn
data_files:
- path: train/eng_Latn-mdy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mdy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mdy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kgk_Latn
data_files:
- path: train/eng_Latn-kgk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kgk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kgk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kqw_Latn
data_files:
- path: train/eng_Latn-kqw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kqw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kqw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gaw_Latn
data_files:
- path: train/eng_Latn-gaw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gaw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gaw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mox_Latn
data_files:
- path: train/eng_Latn-mox_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mox_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mox_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cav_Latn
data_files:
- path: train/eng_Latn-cav_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cav_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cav_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tzj_Latn
data_files:
- path: train/eng_Latn-tzj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tzj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tzj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kze_Latn
data_files:
- path: train/eng_Latn-kze_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kze_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kze_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-not_Latn
data_files:
- path: train/eng_Latn-not_Latn.jsonl.gz
split: train
- path: test/eng_Latn-not_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-not_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-lin_Latn
data_files:
- path: train/eng_Latn-lin_Latn.jsonl.gz
split: train
- path: test/eng_Latn-lin_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-lin_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-urd_Arab
data_files:
- path: train/eng_Latn-urd_Arab.jsonl.gz
split: train
- path: test/eng_Latn-urd_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-urd_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-mlh_Latn
data_files:
- path: train/eng_Latn-mlh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mlh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mlh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-muy_Latn
data_files:
- path: train/eng_Latn-muy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-muy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-muy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-iws_Latn
data_files:
- path: train/eng_Latn-iws_Latn.jsonl.gz
split: train
- path: test/eng_Latn-iws_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-iws_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tur_Latn
data_files:
- path: train/eng_Latn-tur_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tur_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tur_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-gam_Latn
data_files:
- path: train/eng_Latn-gam_Latn.jsonl.gz
split: train
- path: test/eng_Latn-gam_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-gam_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kbc_Latn
data_files:
- path: train/eng_Latn-kbc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kbc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kbc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kgf_Latn
data_files:
- path: train/eng_Latn-kgf_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kgf_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kgf_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-maz_Latn
data_files:
- path: train/eng_Latn-maz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-maz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-maz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nss_Latn
data_files:
- path: train/eng_Latn-nss_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nss_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nss_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ake_Latn
data_files:
- path: train/eng_Latn-ake_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ake_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ake_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nuy_Latn
data_files:
- path: train/eng_Latn-nuy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nuy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nuy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bjk_Latn
data_files:
- path: train/eng_Latn-bjk_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bjk_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bjk_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mzz_Latn
data_files:
- path: train/eng_Latn-mzz_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mzz_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mzz_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-msy_Latn
data_files:
- path: train/eng_Latn-msy_Latn.jsonl.gz
split: train
- path: test/eng_Latn-msy_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-msy_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-anh_Latn
data_files:
- path: train/eng_Latn-anh_Latn.jsonl.gz
split: train
- path: test/eng_Latn-anh_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-anh_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-bea_Latn
data_files:
- path: train/eng_Latn-bea_Latn.jsonl.gz
split: train
- path: test/eng_Latn-bea_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-bea_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-ntj_Latn
data_files:
- path: train/eng_Latn-ntj_Latn.jsonl.gz
split: train
- path: test/eng_Latn-ntj_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-ntj_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-anv_Latn
data_files:
- path: train/eng_Latn-anv_Latn.jsonl.gz
split: train
- path: test/eng_Latn-anv_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-anv_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-xed_Latn
data_files:
- path: train/eng_Latn-xed_Latn.jsonl.gz
split: train
- path: test/eng_Latn-xed_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-xed_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nho_Latn
data_files:
- path: train/eng_Latn-nho_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nho_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nho_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-cbc_Latn
data_files:
- path: train/eng_Latn-cbc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-cbc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-cbc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qve_Latn
data_files:
- path: train/eng_Latn-qve_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qve_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qve_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-amp_Latn
data_files:
- path: train/eng_Latn-amp_Latn.jsonl.gz
split: train
- path: test/eng_Latn-amp_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-amp_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-qvc_Latn
data_files:
- path: train/eng_Latn-qvc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-qvc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-qvc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aka_Latn
data_files:
- path: train/eng_Latn-aka_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aka_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aka_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aby_Latn
data_files:
- path: train/eng_Latn-aby_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aby_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aby_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-myu_Latn
data_files:
- path: train/eng_Latn-myu_Latn.jsonl.gz
split: train
- path: test/eng_Latn-myu_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-myu_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aak_Arab
data_files:
- path: train/eng_Latn-aak_Arab.jsonl.gz
split: train
- path: test/eng_Latn-aak_Arab.jsonl.gz
split: test
- path: validation/eng_Latn-aak_Arab.jsonl.gz
split: validation
- config_name: eng_Latn-soq_Latn
data_files:
- path: train/eng_Latn-soq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-soq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-soq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tif_Latn
data_files:
- path: train/eng_Latn-tif_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tif_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tif_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-aai_Latn
data_files:
- path: train/eng_Latn-aai_Latn.jsonl.gz
split: train
- path: test/eng_Latn-aai_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-aai_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-nnq_Latn
data_files:
- path: train/eng_Latn-nnq_Latn.jsonl.gz
split: train
- path: test/eng_Latn-nnq_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-nnq_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-sab_Latn
data_files:
- path: train/eng_Latn-sab_Latn.jsonl.gz
split: train
- path: test/eng_Latn-sab_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-sab_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-wmw_Latn
data_files:
- path: train/eng_Latn-wmw_Latn.jsonl.gz
split: train
- path: test/eng_Latn-wmw_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-wmw_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-dgc_Latn
data_files:
- path: train/eng_Latn-dgc_Latn.jsonl.gz
split: train
- path: test/eng_Latn-dgc_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-dgc_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-roo_Latn
data_files:
- path: train/eng_Latn-roo_Latn.jsonl.gz
split: train
- path: test/eng_Latn-roo_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-roo_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-tcs_Latn
data_files:
- path: train/eng_Latn-tcs_Latn.jsonl.gz
split: train
- path: test/eng_Latn-tcs_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-tcs_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-mxb_Latn
data_files:
- path: train/eng_Latn-mxb_Latn.jsonl.gz
split: train
- path: test/eng_Latn-mxb_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-mxb_Latn.jsonl.gz
split: validation
- config_name: eng_Latn-kde_Latn
data_files:
- path: train/eng_Latn-kde_Latn.jsonl.gz
split: train
- path: test/eng_Latn-kde_Latn.jsonl.gz
split: test
- path: validation/eng_Latn-kde_Latn.jsonl.gz
split: validation
---
This dataset pre-computes all English-centric directions from [bible-nlp/biblenlp-corpus](https://huggingface.co/datasets/bible-nlp/biblenlp-corpus), and as a result, loading is significantly faster.
Loading example:
```python
>>> from datasets import load_dataset
>>> dataset = load_dataset("davidstap/biblenlp-corpus-mmteb", "eng-arb", trust_remote_code=True)
>>> dataset
DatasetDict({
train: Dataset({
features: ['eng', 'arb'],
num_rows: 28723
})
validation: Dataset({
features: ['eng', 'arb'],
num_rows: 1578
})
test: Dataset({
features: ['eng', 'arb'],
num_rows: 1551
})
})
>>>
```
Note that in all possible configurations, `eng` comes before the other language.
|
KShivendu/dbpedia-entities-openai-1M | KShivendu | "2024-02-19T08:24:43Z" | 4,126 | 20 | [
"task_categories:feature-extraction",
"language:en",
"license:mit",
"size_categories:1M<n<10M",
"format:parquet",
"modality:text",
"modality:timeseries",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"feature-extraction"
] | "2023-06-20T22:29:43Z" | ---
license: mit
dataset_info:
features:
- name: _id
dtype: string
- name: title
dtype: string
- name: text
dtype: string
- name: openai
sequence: float32
splits:
- name: train
num_bytes: 12383152
num_examples: 1000000
download_size: 12383152
dataset_size: 1000000
language:
- en
task_categories:
- feature-extraction
pretty_name: OpenAI 1M with DBPedia Entities
size_categories:
- 1M<n<10M
---
1M OpenAI Embeddings -- 1536 dimensions
Created: June 2023.
Text used for Embedding: title (string) + text (string)
Embedding Model: text-embedding-ada-002
First used for the pgvector vs VectorDB (Qdrant) benchmark: https://nirantk.com/writing/pgvector-vs-qdrant/
### Future work
We are planning to take this up to 10M (and possibly 100M) vectors. Contact [@KShivendu_](https://twitter.com/KShivendu_) on Twitter or email [email protected] if you want to help :)
### Credits:
This dataset was generated from the first 1M entries of https://huggingface.co/datasets/BeIR/dbpedia-entity |
allenai/tulu-3-sft-mixture | allenai | "2024-12-02T19:48:33Z" | 4,124 | 96 | [
"task_categories:other",
"annotations_creators:crowdsourced",
"annotations_creators:expert-generated",
"annotations_creators:machine-generated",
"multilinguality:multilingual",
"source_datasets:allenai/coconot",
"source_datasets:ai2-adapt-dev/flan_v2_converted",
"source_datasets:HuggingFaceH4/no_robots",
"source_datasets:OpenAssistant/oasst1",
"source_datasets:allenai/tulu-3-personas-math",
"source_datasets:allenai/tulu-3-sft-personas-math-grade",
"source_datasets:allenai/tulu-3-sft-personas-code",
"source_datasets:allenai/tulu-3-personas-algebra",
"source_datasets:allenai/tulu-3-sft-personas-instruction-following",
"source_datasets:AI-MO/NuminaMath-TIR",
"source_datasets:allenai/wildguardmix",
"source_datasets:allenai/wildjailbreak",
"source_datasets:allenai/tulu-3-hard-coded",
"source_datasets:CohereForAI/aya_dataset",
"source_datasets:allenai/WildChat-1M",
"source_datasets:LipengCS/Table-GPT",
"source_datasets:allenai/SciRIFF",
"source_datasets:theblackcat102/evol-codealpaca-v1",
"language:amh",
"language:arb",
"language:ary",
"language:ars",
"language:acq",
"language:arz",
"language:apc",
"language:ben",
"language:ceb",
"language:dan",
"language:deu",
"language:ell",
"language:eng",
"language:eus",
"language:fil",
"language:fin",
"language:fra",
"language:gle",
"language:guj",
"language:hat",
"language:hau",
"language:hin",
"language:hun",
"language:ibo",
"language:ind",
"language:ita",
"language:jav",
"language:jpn",
"language:kan",
"language:kir",
"language:kor",
"language:kur",
"language:lit",
"language:mal",
"language:mar",
"language:mlg",
"language:msa",
"language:mya",
"language:nep",
"language:nld",
"language:nso",
"language:nya",
"language:pan",
"language:pes",
"language:pol",
"language:por",
"language:pus",
"language:rus",
"language:sin",
"language:sna",
"language:snd",
"language:som",
"language:spa",
"language:sqi",
"language:srp",
"language:sun",
"language:swa",
"language:swe",
"language:tam",
"language:tel",
"language:tha",
"language:tur",
"language:ukr",
"language:urd",
"language:vie",
"language:wol",
"language:xho",
"language:yor",
"language:zho",
"language:zul",
"license:odc-by",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"other"
] | "2024-11-08T03:56:36Z" | ---
annotations_creators:
- crowdsourced
- expert-generated
- machine-generated
language:
- amh
- arb
- ary
- ars
- acq
- arz
- apc
- ben
- ceb
- dan
- deu
- ell
- eng
- eus
- fil
- fin
- fra
- gle
- guj
- hat
- hau
- hin
- hun
- ibo
- ind
- ita
- jav
- jpn
- kan
- kir
- kor
- kur
- lit
- mal
- mar
- mlg
- msa
- mya
- nep
- nld
- nso
- nya
- pan
- pes
- pol
- por
- pus
- rus
- sin
- sna
- snd
- som
- spa
- sqi
- srp
- sun
- swa
- swe
- tam
- tel
- tha
- tur
- ukr
- urd
- vie
- wol
- xho
- yor
- zho
- zul
license: odc-by
multilinguality:
- multilingual
size_categories:
- 100K<n<1M
source_datasets:
- allenai/coconot
- ai2-adapt-dev/flan_v2_converted
- HuggingFaceH4/no_robots
- OpenAssistant/oasst1
- allenai/tulu-3-personas-math
- allenai/tulu-3-sft-personas-math-grade
- allenai/tulu-3-sft-personas-code
- allenai/tulu-3-personas-algebra
- allenai/tulu-3-sft-personas-instruction-following
- AI-MO/NuminaMath-TIR
- allenai/wildguardmix
- allenai/wildjailbreak
- allenai/tulu-3-hard-coded
- CohereForAI/aya_dataset
- allenai/WildChat-1M
- LipengCS/Table-GPT
- allenai/SciRIFF
- theblackcat102/evol-codealpaca-v1
task_categories:
- other
dataset_info:
features:
- name: id
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 2914250826.5647593
num_examples: 939343
download_size: 1412954868
dataset_size: 2914250826.5647593
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
<img src="https://huggingface.co/datasets/allenai/blog-images/resolve/main/tulu-3/Tulu3-logo.png" alt="Tulu3 banner" width="400" style="margin-left:'auto' margin-right:'auto' display:'block'"/>
# Tulu 3 SFT Mixture
*Note that this collection is licensed under ODC-BY-1.0 license; different licenses apply to subsets of the data. Some portions of the dataset are non-commercial. We present the mixture as a research artifact.*
The Tulu 3 SFT mixture was used to train the [Tulu 3 series of models](https://huggingface.co/collections/allenai/tulu-3-models-673b8e0dc3512e30e7dc54f5).
It contains 939,344 samples from the following sets:
- [CoCoNot](https://huggingface.co/datasets/allenai/coconot) (ODC-BY-1.0), 10,983 prompts (Brahman et al., 2024)
- [FLAN v2](https://github.com/google-research/FLAN/tree/main) via [`ai2-adapt-dev/flan_v2_converted`](https://huggingface.co/datasets/ai2-adapt-dev/flan_v2_converted), 89,982 prompts (Longpre et al., 2023)
- [No Robots](https://huggingface.co/datasets/HuggingFaceH4/no_robots) (CC-BY-NC-4.0), 9,500 prompts (Rajani et al. 2023)
- [OpenAssistant Guanaco](https://huggingface.co/datasets/OpenAssistant/oasst1) (Apache 2.0), 7,132 prompts (Kopf et al., 2024)
- [Tulu 3 Persona MATH](https://huggingface.co/datasets/allenai/tulu-3-personas-math) (ODC-BY-1.0), 149,960 prompts
- [Tulu 3 Persona GSM](https://huggingface.co/datasets/allenai/tulu-3-sft-personas-math-grade) (ODC-BY-1.0), 49,980 prompts
- [Tulu 3 Persona Python](https://huggingface.co/datasets/allenai/tulu-3-sft-personas-code) (ODC-BY-1.0), 34,999 prompts
- [Tulu 3 Persona Algebra](https://huggingface.co/datasets/allenai/tulu-3-personas-algebra) (ODC-BY-1.0), 20,000 prompts
- [Tulu 3 Persona IF](https://huggingface.co/datasets/allenai/tulu-3-sft-personas-instruction-following) (ODC-BY-1.0), 29,980 prompts
- [NuminaMath-TIR](https://huggingface.co/datasets/AI-MO/NuminaMath-TIR) (Apache 2.0), 64,312 prompts (Beeching et al. 2024)
- [Tulu 3 WildGuardMix](https://huggingface.co/datasets/allenai/wildguardmix) (Apache 2.0), 50,000 prompts (Han et al., 2024)
- [Tulu 3 WildJailbreak](https://huggingface.co/datasets/allenai/wildjailbreak) (ODC-BY-1.0), 50,000 prompts (Wildteaming, 2024)
- [Tulu 3 Hardcoded](https://huggingface.co/datasets/allenai/tulu-3-hard-coded) (CC-BY-4.0), 240 prompts
- [Aya](https://huggingface.co/datasets/CohereForAI/aya_dataset) (Apache 2.0), 100,000 prompts (Singh et al., 2024)
- [WildChat GPT-4](https://huggingface.co/datasets/allenai/WildChat-1M) (ODC-BY-1.0), 100,000 prompts (Zhao et al., 2024)
- [TableGPT](https://huggingface.co/datasets/LipengCS/Table-GPT) (MIT), 5,000 prompts (Zha et al., 2023)
- [SciRIFF](https://huggingface.co/datasets/allenai/SciRIFF) (ODC-BY-1.0), 10,000 prompts (Wadden et al., 2024)
- [Evol CodeAlpaca](https://huggingface.co/datasets/theblackcat102/evol-codealpaca-v1) (Apache 2.0), 107,276 prompts (Luo et al., 2023)
## Dataset Structure
Each example in the dataset contains the standard instruction-tuning data points as follow:
- `id` (str): a unique identifier
- `messages` (list): message format used for supervised fine-tuning (this contains user prompt and assistant responses)
- `source` (str): the source dataset for the given sample
### Model Family
| **Stage** | **Llama 3.1 8B** | **Llama 3.1 70B** |
|----------------------|----------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------|
| **Base Model** | [meta-llama/Llama-3.1-8B](https://huggingface.co/meta-llama/Llama-3.1-8B) | [meta-llama/Llama-3.1-70B](https://huggingface.co/meta-llama/Llama-3.1-70B) |
| **SFT** | [allenai/Llama-3.1-Tulu-3-8B-SFT](https://huggingface.co/allenai/Llama-3.1-Tulu-3-8B-SFT) | [allenai/Llama-3.1-Tulu-3-70B-SFT](https://huggingface.co/allenai/Llama-3.1-Tulu-3-70B-SFT) |
| **DPO** | [allenai/Llama-3.1-Tulu-3-8B-DPO](https://huggingface.co/allenai/Llama-3.1-Tulu-3-8B-DPO) | [allenai/Llama-3.1-Tulu-3-70B-DPO](https://huggingface.co/allenai/Llama-3.1-Tulu-3-70B-DPO) |
| **Final Models (RLVR)** | [allenai/Llama-3.1-Tulu-3-8B](https://huggingface.co/allenai/Llama-3.1-Tulu-3-8B) | [allenai/Llama-3.1-Tulu-3-70B](https://huggingface.co/allenai/Llama-3.1-Tulu-3-70B) |
| **Reward Model (RM)**| [allenai/Llama-3.1-Tulu-3-8B-RM](https://huggingface.co/allenai/Llama-3.1-Tulu-3-8B-RM) | (Same as 8B) |
## License
This dataset is licensed under ODC-BY-1.0. It is intended for research and educational use in accordance with Ai2's [Responsible Use Guidelines](https://allenai.org/responsible-use). This dataset includes output data generated from third party models that are subject to separate terms governing their use. For more information on license and terms, consult each subset linked above.
## Citation
If Tülu3 or any of the related materials were helpful to your work, please cite:
```
@article{lambert2024tulu3,
title = {Tülu 3: Pushing Frontiers in Open Language Model Post-Training},
author = {
Nathan Lambert and
Jacob Morrison and
Valentina Pyatkin and
Shengyi Huang and
Hamish Ivison and
Faeze Brahman and
Lester James V. Miranda and
Alisa Liu and
Nouha Dziri and
Shane Lyu and
Yuling Gu and
Saumya Malik and
Victoria Graf and
Jena D. Hwang and
Jiangjiang Yang and
Ronan Le Bras and
Oyvind Tafjord and
Chris Wilhelm and
Luca Soldaini and
Noah A. Smith and
Yizhong Wang and
Pradeep Dasigi and
Hannaneh Hajishirzi
},
year = {2024},
email = {[email protected]}
}
``` |
sasha/prof_images_blip__stabilityai-stable-diffusion-2 | sasha | "2023-06-03T00:58:31Z" | 4,119 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-06-03T00:55:54Z" | ---
dataset_info:
features:
- name: images
dtype: image
- name: embeddings
sequence: float32
splits:
- name: courier
num_bytes: 4257204.0
num_examples: 100
- name: aide
num_bytes: 3491678.0
num_examples: 100
- name: police_officer
num_bytes: 4252502.0
num_examples: 100
- name: purchasing_agent
num_bytes: 5285492.0
num_examples: 100
- name: metal_worker
num_bytes: 5097159.0
num_examples: 100
- name: financial_analyst
num_bytes: 5261660.0
num_examples: 100
- name: stocker
num_bytes: 5700830.0
num_examples: 100
- name: it_specialist
num_bytes: 5731031.0
num_examples: 100
- name: writer
num_bytes: 4309299.0
num_examples: 100
- name: accountant
num_bytes: 4689435.0
num_examples: 100
- name: coach
num_bytes: 4537870.0
num_examples: 100
- name: painter
num_bytes: 3686294.0
num_examples: 100
- name: real_estate_broker
num_bytes: 5241610.0
num_examples: 100
- name: truck_driver
num_bytes: 4955237.0
num_examples: 100
- name: data_entry_keyer
num_bytes: 6112746.0
num_examples: 100
- name: computer_support_specialist
num_bytes: 5187781.0
num_examples: 100
- name: cook
num_bytes: 3905039.0
num_examples: 100
- name: interior_designer
num_bytes: 4970044.0
num_examples: 100
- name: nutritionist
num_bytes: 4904269.0
num_examples: 100
- name: designer
num_bytes: 5618928.0
num_examples: 100
- name: maid
num_bytes: 3653112.0
num_examples: 100
- name: producer
num_bytes: 5532083.0
num_examples: 100
- name: executive_assistant
num_bytes: 5181631.0
num_examples: 100
- name: logistician
num_bytes: 5623819.0
num_examples: 100
- name: tractor_operator
num_bytes: 5566239.0
num_examples: 100
- name: doctor
num_bytes: 3922429.0
num_examples: 100
- name: inventory_clerk
num_bytes: 5675119.0
num_examples: 100
- name: sheet_metal_worker
num_bytes: 4932393.0
num_examples: 100
- name: groundskeeper
num_bytes: 5624913.0
num_examples: 100
- name: electrical_engineer
num_bytes: 5486843.0
num_examples: 100
- name: physical_therapist
num_bytes: 4416383.0
num_examples: 100
- name: insurance_agent
num_bytes: 4503029.0
num_examples: 100
- name: aerospace_engineer
num_bytes: 5005814.0
num_examples: 100
- name: psychologist
num_bytes: 4751138.0
num_examples: 100
- name: financial_advisor
num_bytes: 4616805.0
num_examples: 100
- name: printing_press_operator
num_bytes: 4885677.0
num_examples: 100
- name: architect
num_bytes: 4694972.0
num_examples: 100
- name: dental_hygienist
num_bytes: 4051984.0
num_examples: 100
- name: artist
num_bytes: 4093686.0
num_examples: 100
- name: office_worker
num_bytes: 4984173.0
num_examples: 100
- name: ceo
num_bytes: 4753603.0
num_examples: 100
- name: taxi_driver
num_bytes: 4839205.0
num_examples: 100
- name: librarian
num_bytes: 5209270.0
num_examples: 100
- name: author
num_bytes: 4326443.0
num_examples: 100
- name: plumber
num_bytes: 5004142.0
num_examples: 100
- name: construction_worker
num_bytes: 5173177.0
num_examples: 100
- name: clergy
num_bytes: 3852512.0
num_examples: 100
- name: electrician
num_bytes: 5239521.0
num_examples: 100
- name: jailer
num_bytes: 5032189.0
num_examples: 100
- name: credit_counselor
num_bytes: 4814481.0
num_examples: 100
- name: scientist
num_bytes: 4363783.0
num_examples: 100
- name: drywall_installer
num_bytes: 4174819.0
num_examples: 100
- name: school_bus_driver
num_bytes: 4998022.0
num_examples: 100
- name: dental_assistant
num_bytes: 4140296.0
num_examples: 100
- name: fitness_instructor
num_bytes: 4416504.0
num_examples: 100
- name: detective
num_bytes: 4583678.0
num_examples: 100
- name: hairdresser
num_bytes: 4463307.0
num_examples: 100
- name: welder
num_bytes: 4918374.0
num_examples: 100
- name: pharmacy_technician
num_bytes: 4661790.0
num_examples: 100
- name: compliance_officer
num_bytes: 4845349.0
num_examples: 100
- name: singer
num_bytes: 4638247.0
num_examples: 100
- name: tutor
num_bytes: 3678185.0
num_examples: 100
- name: language_pathologist
num_bytes: 5254361.0
num_examples: 100
- name: medical_records_specialist
num_bytes: 5634506.0
num_examples: 100
- name: sales_manager
num_bytes: 5056132.0
num_examples: 100
- name: industrial_engineer
num_bytes: 5172786.0
num_examples: 100
- name: manager
num_bytes: 4931846.0
num_examples: 100
- name: mechanic
num_bytes: 4613784.0
num_examples: 100
- name: postal_worker
num_bytes: 4441299.0
num_examples: 100
- name: computer_systems_analyst
num_bytes: 6079263.0
num_examples: 100
- name: salesperson
num_bytes: 5024667.0
num_examples: 100
- name: office_clerk
num_bytes: 4755914.0
num_examples: 100
- name: claims_appraiser
num_bytes: 4478395.0
num_examples: 100
- name: security_guard
num_bytes: 5230558.0
num_examples: 100
- name: interviewer
num_bytes: 4656965.0
num_examples: 100
- name: dispatcher
num_bytes: 5291584.0
num_examples: 100
- name: lawyer
num_bytes: 4277664.0
num_examples: 100
- name: marketing_manager
num_bytes: 5252505.0
num_examples: 100
- name: customer_service_representative
num_bytes: 4896010.0
num_examples: 100
- name: software_developer
num_bytes: 5547030.0
num_examples: 100
- name: mover
num_bytes: 4568118.0
num_examples: 100
- name: supervisor
num_bytes: 4923186.0
num_examples: 100
- name: paralegal
num_bytes: 4687033.0
num_examples: 100
- name: graphic_designer
num_bytes: 5759315.0
num_examples: 100
- name: dentist
num_bytes: 3970331.0
num_examples: 100
- name: roofer
num_bytes: 5495073.0
num_examples: 100
- name: public_relations_specialist
num_bytes: 5073442.0
num_examples: 100
- name: engineer
num_bytes: 4250666.0
num_examples: 100
- name: occupational_therapist
num_bytes: 3855692.0
num_examples: 100
- name: manicurist
num_bytes: 4064118.0
num_examples: 100
- name: cleaner
num_bytes: 5095971.0
num_examples: 100
- name: facilities_manager
num_bytes: 5247879.0
num_examples: 100
- name: repair_worker
num_bytes: 5113432.0
num_examples: 100
- name: cashier
num_bytes: 5215203.0
num_examples: 100
- name: baker
num_bytes: 4610706.0
num_examples: 100
- name: market_research_analyst
num_bytes: 5593811.0
num_examples: 100
- name: health_technician
num_bytes: 4477836.0
num_examples: 100
- name: veterinarian
num_bytes: 4197438.0
num_examples: 100
- name: underwriter
num_bytes: 4662113.0
num_examples: 100
- name: mechanical_engineer
num_bytes: 5722417.0
num_examples: 100
- name: janitor
num_bytes: 5390408.0
num_examples: 100
- name: pilot
num_bytes: 3829685.0
num_examples: 100
- name: therapist
num_bytes: 3859109.0
num_examples: 100
- name: director
num_bytes: 4096085.0
num_examples: 100
- name: wholesale_buyer
num_bytes: 6300854.0
num_examples: 100
- name: air_conditioning_installer
num_bytes: 5741602.0
num_examples: 100
- name: butcher
num_bytes: 5332426.0
num_examples: 100
- name: machinery_mechanic
num_bytes: 5057875.0
num_examples: 100
- name: event_planner
num_bytes: 5039870.0
num_examples: 100
- name: carpet_installer
num_bytes: 5903738.0
num_examples: 100
- name: musician
num_bytes: 4431405.0
num_examples: 100
- name: civil_engineer
num_bytes: 5463535.0
num_examples: 100
- name: farmer
num_bytes: 4772114.0
num_examples: 100
- name: financial_manager
num_bytes: 5187071.0
num_examples: 100
- name: childcare_worker
num_bytes: 4326077.0
num_examples: 100
- name: clerk
num_bytes: 4268877.0
num_examples: 100
- name: machinist
num_bytes: 4593820.0
num_examples: 100
- name: firefighter
num_bytes: 4414229.0
num_examples: 100
- name: photographer
num_bytes: 4056745.0
num_examples: 100
- name: file_clerk
num_bytes: 5925494.0
num_examples: 100
- name: bus_driver
num_bytes: 4714778.0
num_examples: 100
- name: fast_food_worker
num_bytes: 4778503.0
num_examples: 100
- name: bartender
num_bytes: 5407515.0
num_examples: 100
- name: computer_programmer
num_bytes: 5961791.0
num_examples: 100
- name: pharmacist
num_bytes: 5103862.0
num_examples: 100
- name: nursing_assistant
num_bytes: 3969985.0
num_examples: 100
- name: career_counselor
num_bytes: 4578703.0
num_examples: 100
- name: mental_health_counselor
num_bytes: 4915411.0
num_examples: 100
- name: network_administrator
num_bytes: 6576986.0
num_examples: 100
- name: teacher
num_bytes: 4112341.0
num_examples: 100
- name: dishwasher
num_bytes: 4582691.0
num_examples: 100
- name: teller
num_bytes: 4580341.0
num_examples: 100
- name: teaching_assistant
num_bytes: 4310141.0
num_examples: 100
- name: payroll_clerk
num_bytes: 5104922.0
num_examples: 100
- name: laboratory_technician
num_bytes: 4772940.0
num_examples: 100
- name: social_assistant
num_bytes: 4975461.0
num_examples: 100
- name: radiologic_technician
num_bytes: 4614401.0
num_examples: 100
- name: social_worker
num_bytes: 4143912.0
num_examples: 100
- name: nurse
num_bytes: 3251197.0
num_examples: 100
- name: receptionist
num_bytes: 4962877.0
num_examples: 100
- name: carpenter
num_bytes: 4367834.0
num_examples: 100
- name: correctional_officer
num_bytes: 5027428.0
num_examples: 100
- name: community_manager
num_bytes: 5327391.0
num_examples: 100
- name: massage_therapist
num_bytes: 4309573.0
num_examples: 100
- name: head_cook
num_bytes: 4488723.0
num_examples: 100
- name: plane_mechanic
num_bytes: 4650683.0
num_examples: 100
download_size: 729196101
dataset_size: 705285705.0
---
# Dataset Card for "prof_images_blip__stabilityai-stable-diffusion-2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
asahi417/seamless-align-enA-hiA.speaker-embedding.hubert-xl | asahi417 | "2024-06-14T00:55:39Z" | 4,115 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-06-11T14:40:50Z" | ---
dataset_info:
- config_name: subset_1
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 11456903470
num_examples: 2295
download_size: 11490865608
dataset_size: 11456903470
- config_name: subset_10
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8497469033
num_examples: 2026
download_size: 8525108036
dataset_size: 8497469033
- config_name: subset_11
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8184086859
num_examples: 1984
download_size: 8211671614
dataset_size: 8184086859
- config_name: subset_12
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8267732668
num_examples: 2004
download_size: 8293785190
dataset_size: 8267732668
- config_name: subset_13
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7813596787
num_examples: 1931
download_size: 7836272058
dataset_size: 7813596787
- config_name: subset_14
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8181837081
num_examples: 1980
download_size: 8209357137
dataset_size: 8181837081
- config_name: subset_15
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7654872944
num_examples: 1959
download_size: 7681446772
dataset_size: 7654872944
- config_name: subset_16
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7951105030
num_examples: 2001
download_size: 7977767633
dataset_size: 7951105030
- config_name: subset_17
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8382711574
num_examples: 2022
download_size: 8410409589
dataset_size: 8382711574
- config_name: subset_18
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7937878932
num_examples: 1988
download_size: 7964412330
dataset_size: 7937878932
- config_name: subset_19
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7973944881
num_examples: 1965
download_size: 7996531378
dataset_size: 7973944881
- config_name: subset_2
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 11434325392
num_examples: 2335
download_size: 11468346317
dataset_size: 11434325392
- config_name: subset_20
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8072628489
num_examples: 1971
download_size: 8099998069
dataset_size: 8072628489
- config_name: subset_21
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7842776502
num_examples: 1976
download_size: 7869318616
dataset_size: 7842776502
- config_name: subset_22
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8231015149
num_examples: 2018
download_size: 8258687492
dataset_size: 8231015149
- config_name: subset_23
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7882339323
num_examples: 1981
download_size: 7908693869
dataset_size: 7882339323
- config_name: subset_24
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7881574967
num_examples: 1970
download_size: 7907947016
dataset_size: 7881574967
- config_name: subset_25
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7839009922
num_examples: 1971
download_size: 7861902434
dataset_size: 7839009922
- config_name: subset_26
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7761654985
num_examples: 1933
download_size: 7787970361
dataset_size: 7761654985
- config_name: subset_27
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7799225980
num_examples: 1956
download_size: 7824102193
dataset_size: 7799225980
- config_name: subset_28
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8012410192
num_examples: 1958
download_size: 8039972646
dataset_size: 8012410192
- config_name: subset_29
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7970556536
num_examples: 1948
download_size: 7995093736
dataset_size: 7970556536
- config_name: subset_3
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 10667678399
num_examples: 2282
download_size: 10700911929
dataset_size: 10667678399
- config_name: subset_30
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7755516455
num_examples: 1934
download_size: 7781841837
dataset_size: 7755516455
- config_name: subset_31
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7709626192
num_examples: 1938
download_size: 7733846345
dataset_size: 7709626192
- config_name: subset_32
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7945619371
num_examples: 1964
download_size: 7972076500
dataset_size: 7945619371
- config_name: subset_33
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7755861573
num_examples: 1956
download_size: 7782331103
dataset_size: 7755861573
- config_name: subset_34
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7551263495
num_examples: 1870
download_size: 7577309125
dataset_size: 7551263495
- config_name: subset_35
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7717436607
num_examples: 1892
download_size: 7743548116
dataset_size: 7717436607
- config_name: subset_36
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7972270125
num_examples: 1912
download_size: 7998248708
dataset_size: 7972270125
- config_name: subset_37
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7822660355
num_examples: 1922
download_size: 7848953597
dataset_size: 7822660355
- config_name: subset_38
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7648323640
num_examples: 1887
download_size: 7672540009
dataset_size: 7648323640
- config_name: subset_39
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7720982705
num_examples: 1896
download_size: 7746345910
dataset_size: 7720982705
- config_name: subset_4
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 10118452202
num_examples: 2220
download_size: 10150858575
dataset_size: 10118452202
- config_name: subset_40
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7763705486
num_examples: 1882
download_size: 7787502164
dataset_size: 7763705486
- config_name: subset_41
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7925622100
num_examples: 1923
download_size: 7949886137
dataset_size: 7925622100
- config_name: subset_42
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7914446049
num_examples: 1915
download_size: 7940181806
dataset_size: 7914446049
- config_name: subset_43
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7903526901
num_examples: 1917
download_size: 7928322534
dataset_size: 7903526901
- config_name: subset_44
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7760785372
num_examples: 1889
download_size: 7786831231
dataset_size: 7760785372
- config_name: subset_45
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7755318564
num_examples: 1895
download_size: 7780195794
dataset_size: 7755318564
- config_name: subset_46
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7891268067
num_examples: 1889
download_size: 7917296265
dataset_size: 7891268067
- config_name: subset_47
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7830294204
num_examples: 1911
download_size: 7855216144
dataset_size: 7830294204
- config_name: subset_48
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7757327084
num_examples: 1894
download_size: 7783491998
dataset_size: 7757327084
- config_name: subset_49
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8105982269
num_examples: 1929
download_size: 8133082537
dataset_size: 8105982269
- config_name: subset_5
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 9677243533
num_examples: 2202
download_size: 9708698623
dataset_size: 9677243533
- config_name: subset_50
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7911297714
num_examples: 1889
download_size: 7937313855
dataset_size: 7911297714
- config_name: subset_51
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7706356062
num_examples: 1863
download_size: 7732372407
dataset_size: 7706356062
- config_name: subset_52
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8029195296
num_examples: 1945
download_size: 8056701679
dataset_size: 8029195296
- config_name: subset_53
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7802394024
num_examples: 1911
download_size: 7828599203
dataset_size: 7802394024
- config_name: subset_54
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8217068361
num_examples: 1964
download_size: 8244613479
dataset_size: 8217068361
- config_name: subset_55
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7989823846
num_examples: 1899
download_size: 8015859513
dataset_size: 7989823846
- config_name: subset_56
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7936306692
num_examples: 1936
download_size: 7962535595
dataset_size: 7936306692
- config_name: subset_57
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8070979492
num_examples: 1920
download_size: 8098287819
dataset_size: 8070979492
- config_name: subset_58
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8086481027
num_examples: 1938
download_size: 8113795491
dataset_size: 8086481027
- config_name: subset_59
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8072039128
num_examples: 1907
download_size: 8099286684
dataset_size: 8072039128
- config_name: subset_6
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8967711636
num_examples: 2123
download_size: 8996427746
dataset_size: 8967711636
- config_name: subset_60
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7809441672
num_examples: 1874
download_size: 7835394925
dataset_size: 7809441672
- config_name: subset_61
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8014199101
num_examples: 1904
download_size: 8041493095
dataset_size: 8014199101
- config_name: subset_62
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8133084804
num_examples: 1887
download_size: 8160250943
dataset_size: 8133084804
- config_name: subset_63
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7917859266
num_examples: 1861
download_size: 7941183053
dataset_size: 7917859266
- config_name: subset_64
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7976741792
num_examples: 1893
download_size: 8002761471
dataset_size: 7976741792
- config_name: subset_65
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7748461869
num_examples: 1886
download_size: 7774522745
dataset_size: 7748461869
- config_name: subset_66
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8092233702
num_examples: 1918
download_size: 8119594840
dataset_size: 8092233702
- config_name: subset_67
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8225948764
num_examples: 1908
download_size: 8252110533
dataset_size: 8225948764
- config_name: subset_68
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8160512685
num_examples: 1939
download_size: 8186718449
dataset_size: 8160512685
- config_name: subset_69
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8319151151
num_examples: 1932
download_size: 8346403985
dataset_size: 8319151151
- config_name: subset_7
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8907865283
num_examples: 2085
download_size: 8936625360
dataset_size: 8907865283
- config_name: subset_70
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8071509655
num_examples: 1887
download_size: 8098655261
dataset_size: 8071509655
- config_name: subset_71
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8293741230
num_examples: 1932
download_size: 8320952581
dataset_size: 8293741230
- config_name: subset_72
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8285879747
num_examples: 1916
download_size: 8313129863
dataset_size: 8285879747
- config_name: subset_73
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8262272112
num_examples: 1900
download_size: 8289363323
dataset_size: 8262272112
- config_name: subset_74
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7997453485
num_examples: 1899
download_size: 8023483189
dataset_size: 7997453485
- config_name: subset_75
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8262038636
num_examples: 1927
download_size: 8289252589
dataset_size: 8262038636
- config_name: subset_76
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8232282368
num_examples: 1912
download_size: 8259407860
dataset_size: 8232282368
- config_name: subset_77
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8305966259
num_examples: 1922
download_size: 8333172713
dataset_size: 8305966259
- config_name: subset_78
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8058199324
num_examples: 1909
download_size: 8085492874
dataset_size: 8058199324
- config_name: subset_79
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8328483268
num_examples: 1948
download_size: 8355816563
dataset_size: 8328483268
- config_name: subset_8
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8540940621
num_examples: 2083
download_size: 8569954705
dataset_size: 8540940621
- config_name: subset_80
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7992578739
num_examples: 1877
download_size: 8018456103
dataset_size: 7992578739
- config_name: subset_81
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8215049205
num_examples: 1898
download_size: 8242226727
dataset_size: 8215049205
- config_name: subset_82
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8192856985
num_examples: 1896
download_size: 8219934679
dataset_size: 8192856985
- config_name: subset_83
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8058929502
num_examples: 1878
download_size: 8085975218
dataset_size: 8058929502
- config_name: subset_84
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8161411475
num_examples: 1902
download_size: 8188601913
dataset_size: 8161411475
- config_name: subset_85
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8473572248
num_examples: 1984
download_size: 8499559293
dataset_size: 8473572248
- config_name: subset_86
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8526355265
num_examples: 1930
download_size: 8554702136
dataset_size: 8526355265
- config_name: subset_87
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7955386516
num_examples: 1849
download_size: 7981125257
dataset_size: 7955386516
- config_name: subset_88
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7865388552
num_examples: 1849
download_size: 7890537353
dataset_size: 7865388552
- config_name: subset_89
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8258633632
num_examples: 1894
download_size: 8285721378
dataset_size: 8258633632
- config_name: subset_9
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8493615469
num_examples: 2022
download_size: 8521092296
dataset_size: 8493615469
- config_name: subset_90
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 8001091016
num_examples: 1893
download_size: 8028255143
dataset_size: 8001091016
- config_name: subset_91
features:
- name: line_no
dtype: int64
- name: enA.id
dtype: string
- name: enA.laser_score
dtype: float64
- name: hiA.id
dtype: string
- name: hiA.laser_score
dtype: float64
- name: enA.audio.speaker_embedding
sequence: float32
- name: enA.audio.speaker_embedding.full
sequence:
sequence: float32
- name: hiA.audio.speaker_embedding
sequence: float32
- name: hiA.audio.speaker_embedding.full
sequence:
sequence: float32
splits:
- name: train
num_bytes: 7886456072
num_examples: 1820
download_size: 7912099547
dataset_size: 7886456072
configs:
- config_name: subset_1
data_files:
- split: train
path: subset_1/train-*
- config_name: subset_10
data_files:
- split: train
path: subset_10/train-*
- config_name: subset_11
data_files:
- split: train
path: subset_11/train-*
- config_name: subset_12
data_files:
- split: train
path: subset_12/train-*
- config_name: subset_13
data_files:
- split: train
path: subset_13/train-*
- config_name: subset_14
data_files:
- split: train
path: subset_14/train-*
- config_name: subset_15
data_files:
- split: train
path: subset_15/train-*
- config_name: subset_16
data_files:
- split: train
path: subset_16/train-*
- config_name: subset_17
data_files:
- split: train
path: subset_17/train-*
- config_name: subset_18
data_files:
- split: train
path: subset_18/train-*
- config_name: subset_19
data_files:
- split: train
path: subset_19/train-*
- config_name: subset_2
data_files:
- split: train
path: subset_2/train-*
- config_name: subset_20
data_files:
- split: train
path: subset_20/train-*
- config_name: subset_21
data_files:
- split: train
path: subset_21/train-*
- config_name: subset_22
data_files:
- split: train
path: subset_22/train-*
- config_name: subset_23
data_files:
- split: train
path: subset_23/train-*
- config_name: subset_24
data_files:
- split: train
path: subset_24/train-*
- config_name: subset_25
data_files:
- split: train
path: subset_25/train-*
- config_name: subset_26
data_files:
- split: train
path: subset_26/train-*
- config_name: subset_27
data_files:
- split: train
path: subset_27/train-*
- config_name: subset_28
data_files:
- split: train
path: subset_28/train-*
- config_name: subset_29
data_files:
- split: train
path: subset_29/train-*
- config_name: subset_3
data_files:
- split: train
path: subset_3/train-*
- config_name: subset_30
data_files:
- split: train
path: subset_30/train-*
- config_name: subset_31
data_files:
- split: train
path: subset_31/train-*
- config_name: subset_32
data_files:
- split: train
path: subset_32/train-*
- config_name: subset_33
data_files:
- split: train
path: subset_33/train-*
- config_name: subset_34
data_files:
- split: train
path: subset_34/train-*
- config_name: subset_35
data_files:
- split: train
path: subset_35/train-*
- config_name: subset_36
data_files:
- split: train
path: subset_36/train-*
- config_name: subset_37
data_files:
- split: train
path: subset_37/train-*
- config_name: subset_38
data_files:
- split: train
path: subset_38/train-*
- config_name: subset_39
data_files:
- split: train
path: subset_39/train-*
- config_name: subset_4
data_files:
- split: train
path: subset_4/train-*
- config_name: subset_40
data_files:
- split: train
path: subset_40/train-*
- config_name: subset_41
data_files:
- split: train
path: subset_41/train-*
- config_name: subset_42
data_files:
- split: train
path: subset_42/train-*
- config_name: subset_43
data_files:
- split: train
path: subset_43/train-*
- config_name: subset_44
data_files:
- split: train
path: subset_44/train-*
- config_name: subset_45
data_files:
- split: train
path: subset_45/train-*
- config_name: subset_46
data_files:
- split: train
path: subset_46/train-*
- config_name: subset_47
data_files:
- split: train
path: subset_47/train-*
- config_name: subset_48
data_files:
- split: train
path: subset_48/train-*
- config_name: subset_49
data_files:
- split: train
path: subset_49/train-*
- config_name: subset_5
data_files:
- split: train
path: subset_5/train-*
- config_name: subset_50
data_files:
- split: train
path: subset_50/train-*
- config_name: subset_51
data_files:
- split: train
path: subset_51/train-*
- config_name: subset_52
data_files:
- split: train
path: subset_52/train-*
- config_name: subset_53
data_files:
- split: train
path: subset_53/train-*
- config_name: subset_54
data_files:
- split: train
path: subset_54/train-*
- config_name: subset_55
data_files:
- split: train
path: subset_55/train-*
- config_name: subset_56
data_files:
- split: train
path: subset_56/train-*
- config_name: subset_57
data_files:
- split: train
path: subset_57/train-*
- config_name: subset_58
data_files:
- split: train
path: subset_58/train-*
- config_name: subset_59
data_files:
- split: train
path: subset_59/train-*
- config_name: subset_6
data_files:
- split: train
path: subset_6/train-*
- config_name: subset_60
data_files:
- split: train
path: subset_60/train-*
- config_name: subset_61
data_files:
- split: train
path: subset_61/train-*
- config_name: subset_62
data_files:
- split: train
path: subset_62/train-*
- config_name: subset_63
data_files:
- split: train
path: subset_63/train-*
- config_name: subset_64
data_files:
- split: train
path: subset_64/train-*
- config_name: subset_65
data_files:
- split: train
path: subset_65/train-*
- config_name: subset_66
data_files:
- split: train
path: subset_66/train-*
- config_name: subset_67
data_files:
- split: train
path: subset_67/train-*
- config_name: subset_68
data_files:
- split: train
path: subset_68/train-*
- config_name: subset_69
data_files:
- split: train
path: subset_69/train-*
- config_name: subset_7
data_files:
- split: train
path: subset_7/train-*
- config_name: subset_70
data_files:
- split: train
path: subset_70/train-*
- config_name: subset_71
data_files:
- split: train
path: subset_71/train-*
- config_name: subset_72
data_files:
- split: train
path: subset_72/train-*
- config_name: subset_73
data_files:
- split: train
path: subset_73/train-*
- config_name: subset_74
data_files:
- split: train
path: subset_74/train-*
- config_name: subset_75
data_files:
- split: train
path: subset_75/train-*
- config_name: subset_76
data_files:
- split: train
path: subset_76/train-*
- config_name: subset_77
data_files:
- split: train
path: subset_77/train-*
- config_name: subset_78
data_files:
- split: train
path: subset_78/train-*
- config_name: subset_79
data_files:
- split: train
path: subset_79/train-*
- config_name: subset_8
data_files:
- split: train
path: subset_8/train-*
- config_name: subset_80
data_files:
- split: train
path: subset_80/train-*
- config_name: subset_81
data_files:
- split: train
path: subset_81/train-*
- config_name: subset_82
data_files:
- split: train
path: subset_82/train-*
- config_name: subset_83
data_files:
- split: train
path: subset_83/train-*
- config_name: subset_84
data_files:
- split: train
path: subset_84/train-*
- config_name: subset_85
data_files:
- split: train
path: subset_85/train-*
- config_name: subset_86
data_files:
- split: train
path: subset_86/train-*
- config_name: subset_87
data_files:
- split: train
path: subset_87/train-*
- config_name: subset_88
data_files:
- split: train
path: subset_88/train-*
- config_name: subset_89
data_files:
- split: train
path: subset_89/train-*
- config_name: subset_9
data_files:
- split: train
path: subset_9/train-*
- config_name: subset_90
data_files:
- split: train
path: subset_90/train-*
- config_name: subset_91
data_files:
- split: train
path: subset_91/train-*
---
|
IWSLT/iwslt2017 | IWSLT | "2023-04-05T10:07:51Z" | 4,111 | 35 | [
"task_categories:translation",
"annotations_creators:crowdsourced",
"language_creators:expert-generated",
"multilinguality:translation",
"source_datasets:original",
"language:ar",
"language:de",
"language:en",
"language:fr",
"language:it",
"language:ja",
"language:ko",
"language:nl",
"language:ro",
"language:zh",
"license:cc-by-nc-nd-4.0",
"size_categories:1M<n<10M",
"region:us"
] | [
"translation"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- crowdsourced
language:
- ar
- de
- en
- fr
- it
- ja
- ko
- nl
- ro
- zh
language_creators:
- expert-generated
license:
- cc-by-nc-nd-4.0
multilinguality:
- translation
pretty_name: IWSLT 2017
size_categories:
- 1M<n<10M
source_datasets:
- original
task_categories:
- translation
task_ids: []
paperswithcode_id: iwslt-2017
dataset_info:
- config_name: iwslt2017-en-it
features:
- name: translation
dtype:
translation:
languages:
- en
- it
splits:
- name: train
num_bytes: 46647925
num_examples: 231619
- name: test
num_bytes: 305246
num_examples: 1566
- name: validation
num_bytes: 200023
num_examples: 929
download_size: 329391132
dataset_size: 47153194
- config_name: iwslt2017-en-nl
features:
- name: translation
dtype:
translation:
languages:
- en
- nl
splits:
- name: train
num_bytes: 42843933
num_examples: 237240
- name: test
num_bytes: 311646
num_examples: 1777
- name: validation
num_bytes: 197814
num_examples: 1003
download_size: 329391132
dataset_size: 43353393
- config_name: iwslt2017-en-ro
features:
- name: translation
dtype:
translation:
languages:
- en
- ro
splits:
- name: train
num_bytes: 44129950
num_examples: 220538
- name: test
num_bytes: 316790
num_examples: 1678
- name: validation
num_bytes: 205028
num_examples: 914
download_size: 329391132
dataset_size: 44651768
- config_name: iwslt2017-it-en
features:
- name: translation
dtype:
translation:
languages:
- it
- en
splits:
- name: train
num_bytes: 46647925
num_examples: 231619
- name: test
num_bytes: 305246
num_examples: 1566
- name: validation
num_bytes: 200023
num_examples: 929
download_size: 329391132
dataset_size: 47153194
- config_name: iwslt2017-it-nl
features:
- name: translation
dtype:
translation:
languages:
- it
- nl
splits:
- name: train
num_bytes: 43033168
num_examples: 233415
- name: test
num_bytes: 309725
num_examples: 1669
- name: validation
num_bytes: 197774
num_examples: 1001
download_size: 329391132
dataset_size: 43540667
- config_name: iwslt2017-it-ro
features:
- name: translation
dtype:
translation:
languages:
- it
- ro
splits:
- name: train
num_bytes: 44485169
num_examples: 217551
- name: test
num_bytes: 314974
num_examples: 1643
- name: validation
num_bytes: 204989
num_examples: 914
download_size: 329391132
dataset_size: 45005132
- config_name: iwslt2017-nl-en
features:
- name: translation
dtype:
translation:
languages:
- nl
- en
splits:
- name: train
num_bytes: 42843933
num_examples: 237240
- name: test
num_bytes: 311646
num_examples: 1777
- name: validation
num_bytes: 197814
num_examples: 1003
download_size: 329391132
dataset_size: 43353393
- config_name: iwslt2017-nl-it
features:
- name: translation
dtype:
translation:
languages:
- nl
- it
splits:
- name: train
num_bytes: 43033168
num_examples: 233415
- name: test
num_bytes: 309725
num_examples: 1669
- name: validation
num_bytes: 197774
num_examples: 1001
download_size: 329391132
dataset_size: 43540667
- config_name: iwslt2017-nl-ro
features:
- name: translation
dtype:
translation:
languages:
- nl
- ro
splits:
- name: train
num_bytes: 41338738
num_examples: 206920
- name: test
num_bytes: 320952
num_examples: 1680
- name: validation
num_bytes: 202380
num_examples: 913
download_size: 329391132
dataset_size: 41862070
- config_name: iwslt2017-ro-en
features:
- name: translation
dtype:
translation:
languages:
- ro
- en
splits:
- name: train
num_bytes: 44129950
num_examples: 220538
- name: test
num_bytes: 316790
num_examples: 1678
- name: validation
num_bytes: 205028
num_examples: 914
download_size: 329391132
dataset_size: 44651768
- config_name: iwslt2017-ro-it
features:
- name: translation
dtype:
translation:
languages:
- ro
- it
splits:
- name: train
num_bytes: 44485169
num_examples: 217551
- name: test
num_bytes: 314974
num_examples: 1643
- name: validation
num_bytes: 204989
num_examples: 914
download_size: 329391132
dataset_size: 45005132
- config_name: iwslt2017-ro-nl
features:
- name: translation
dtype:
translation:
languages:
- ro
- nl
splits:
- name: train
num_bytes: 41338738
num_examples: 206920
- name: test
num_bytes: 320952
num_examples: 1680
- name: validation
num_bytes: 202380
num_examples: 913
download_size: 329391132
dataset_size: 41862070
- config_name: iwslt2017-ar-en
features:
- name: translation
dtype:
translation:
languages:
- ar
- en
splits:
- name: train
num_bytes: 56481059
num_examples: 231713
- name: test
num_bytes: 2014296
num_examples: 8583
- name: validation
num_bytes: 241206
num_examples: 888
download_size: 27748780
dataset_size: 58736561
- config_name: iwslt2017-de-en
features:
- name: translation
dtype:
translation:
languages:
- de
- en
splits:
- name: train
num_bytes: 42608380
num_examples: 206112
- name: test
num_bytes: 1608474
num_examples: 8079
- name: validation
num_bytes: 210975
num_examples: 888
download_size: 16758320
dataset_size: 44427829
- config_name: iwslt2017-en-ar
features:
- name: translation
dtype:
translation:
languages:
- en
- ar
splits:
- name: train
num_bytes: 56481059
num_examples: 231713
- name: test
num_bytes: 2014296
num_examples: 8583
- name: validation
num_bytes: 241206
num_examples: 888
download_size: 29333173
dataset_size: 58736561
- config_name: iwslt2017-en-de
features:
- name: translation
dtype:
translation:
languages:
- en
- de
splits:
- name: train
num_bytes: 42608380
num_examples: 206112
- name: test
num_bytes: 1608474
num_examples: 8079
- name: validation
num_bytes: 210975
num_examples: 888
download_size: 16758334
dataset_size: 44427829
- config_name: iwslt2017-en-fr
features:
- name: translation
dtype:
translation:
languages:
- en
- fr
splits:
- name: train
num_bytes: 49273286
num_examples: 232825
- name: test
num_bytes: 1767465
num_examples: 8597
- name: validation
num_bytes: 207579
num_examples: 890
download_size: 27699724
dataset_size: 51248330
- config_name: iwslt2017-en-ja
features:
- name: translation
dtype:
translation:
languages:
- en
- ja
splits:
- name: train
num_bytes: 48204987
num_examples: 223108
- name: test
num_bytes: 1809007
num_examples: 8469
- name: validation
num_bytes: 208124
num_examples: 871
download_size: 26983602
dataset_size: 50222118
- config_name: iwslt2017-en-ko
features:
- name: translation
dtype:
translation:
languages:
- en
- ko
splits:
- name: train
num_bytes: 51678043
num_examples: 230240
- name: test
num_bytes: 1869793
num_examples: 8514
- name: validation
num_bytes: 219295
num_examples: 879
download_size: 19364776
dataset_size: 53767131
- config_name: iwslt2017-en-zh
features:
- name: translation
dtype:
translation:
languages:
- en
- zh
splits:
- name: train
num_bytes: 44271004
num_examples: 231266
- name: test
num_bytes: 1605527
num_examples: 8549
- name: validation
num_bytes: 202537
num_examples: 879
download_size: 27597071
dataset_size: 46079068
- config_name: iwslt2017-fr-en
features:
- name: translation
dtype:
translation:
languages:
- fr
- en
splits:
- name: train
num_bytes: 49273286
num_examples: 232825
- name: test
num_bytes: 1767465
num_examples: 8597
- name: validation
num_bytes: 207579
num_examples: 890
download_size: 26880731
dataset_size: 51248330
- config_name: iwslt2017-ja-en
features:
- name: translation
dtype:
translation:
languages:
- ja
- en
splits:
- name: train
num_bytes: 48204987
num_examples: 223108
- name: test
num_bytes: 1809007
num_examples: 8469
- name: validation
num_bytes: 208124
num_examples: 871
download_size: 26190859
dataset_size: 50222118
- config_name: iwslt2017-ko-en
features:
- name: translation
dtype:
translation:
languages:
- ko
- en
splits:
- name: train
num_bytes: 51678043
num_examples: 230240
- name: test
num_bytes: 1869793
num_examples: 8514
- name: validation
num_bytes: 219295
num_examples: 879
download_size: 19364733
dataset_size: 53767131
- config_name: iwslt2017-zh-en
features:
- name: translation
dtype:
translation:
languages:
- zh
- en
splits:
- name: train
num_bytes: 44271004
num_examples: 231266
- name: test
num_bytes: 1605527
num_examples: 8549
- name: validation
num_bytes: 202537
num_examples: 879
download_size: 26849290
dataset_size: 46079068
---
# Dataset Card for IWSLT 2017
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [https://sites.google.com/site/iwsltevaluation2017/TED-tasks](https://sites.google.com/site/iwsltevaluation2017/TED-tasks)
- **Repository:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Paper:** [Overview of the IWSLT 2017 Evaluation Campaign](https://aclanthology.org/2017.iwslt-1.1/)
- **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Size of downloaded dataset files:** 4.24 GB
- **Size of the generated dataset:** 1.14 GB
- **Total amount of disk used:** 5.38 GB
### Dataset Summary
The IWSLT 2017 Multilingual Task addresses text translation, including zero-shot translation, with a single MT system
across all directions including English, German, Dutch, Italian and Romanian. As unofficial task, conventional
bilingual text translation is offered between English and Arabic, French, Japanese, Chinese, German and Korean.
### Supported Tasks and Leaderboards
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Languages
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Dataset Structure
### Data Instances
#### iwslt2017-ar-en
- **Size of downloaded dataset files:** 27.75 MB
- **Size of the generated dataset:** 58.74 MB
- **Total amount of disk used:** 86.49 MB
An example of 'train' looks as follows.
```
This example was too long and was cropped:
{
"translation": "{\"ar\": \"لقد طرت في \\\"القوات الجوية \\\" لمدة ثمان سنوات. والآن أجد نفسي مضطرا لخلع حذائي قبل صعود الطائرة!\", \"en\": \"I flew on Air ..."
}
```
#### iwslt2017-de-en
- **Size of downloaded dataset files:** 16.76 MB
- **Size of the generated dataset:** 44.43 MB
- **Total amount of disk used:** 61.18 MB
An example of 'train' looks as follows.
```
{
"translation": {
"de": "Es ist mir wirklich eine Ehre, zweimal auf dieser Bühne stehen zu dürfen. Tausend Dank dafür.",
"en": "And it's truly a great honor to have the opportunity to come to this stage twice; I'm extremely grateful."
}
}
```
#### iwslt2017-en-ar
- **Size of downloaded dataset files:** 29.33 MB
- **Size of the generated dataset:** 58.74 MB
- **Total amount of disk used:** 88.07 MB
An example of 'train' looks as follows.
```
This example was too long and was cropped:
{
"translation": "{\"ar\": \"لقد طرت في \\\"القوات الجوية \\\" لمدة ثمان سنوات. والآن أجد نفسي مضطرا لخلع حذائي قبل صعود الطائرة!\", \"en\": \"I flew on Air ..."
}
```
#### iwslt2017-en-de
- **Size of downloaded dataset files:** 16.76 MB
- **Size of the generated dataset:** 44.43 MB
- **Total amount of disk used:** 61.18 MB
An example of 'validation' looks as follows.
```
{
"translation": {
"de": "Die nächste Folie, die ich Ihnen zeige, ist eine Zeitrafferaufnahme was in den letzten 25 Jahren passiert ist.",
"en": "The next slide I show you will be a rapid fast-forward of what's happened over the last 25 years."
}
}
```
#### iwslt2017-en-fr
- **Size of downloaded dataset files:** 27.69 MB
- **Size of the generated dataset:** 51.24 MB
- **Total amount of disk used:** 78.94 MB
An example of 'validation' looks as follows.
```
{
"translation": {
"en": "But this understates the seriousness of this particular problem because it doesn't show the thickness of the ice.",
"fr": "Mais ceci tend à amoindrir le problème parce qu'on ne voit pas l'épaisseur de la glace."
}
}
```
### Data Fields
The data fields are the same among all splits.
#### iwslt2017-ar-en
- `translation`: a multilingual `string` variable, with possible languages including `ar`, `en`.
#### iwslt2017-de-en
- `translation`: a multilingual `string` variable, with possible languages including `de`, `en`.
#### iwslt2017-en-ar
- `translation`: a multilingual `string` variable, with possible languages including `en`, `ar`.
#### iwslt2017-en-de
- `translation`: a multilingual `string` variable, with possible languages including `en`, `de`.
#### iwslt2017-en-fr
- `translation`: a multilingual `string` variable, with possible languages including `en`, `fr`.
### Data Splits
| name |train |validation|test|
|---------------|-----:|---------:|---:|
|iwslt2017-ar-en|231713| 888|8583|
|iwslt2017-de-en|206112| 888|8079|
|iwslt2017-en-ar|231713| 888|8583|
|iwslt2017-en-de|206112| 888|8079|
|iwslt2017-en-fr|232825| 890|8597|
## Dataset Creation
### Curation Rationale
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the source language producers?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Annotations
#### Annotation process
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the annotators?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Personal and Sensitive Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
Creative Commons BY-NC-ND
See the [TED Talks Usage Policy](https://www.ted.com/about/our-organization/our-policies-terms/ted-talks-usage-policy).
### Citation Information
```
@inproceedings{cettolo-etal-2017-overview,
title = "Overview of the {IWSLT} 2017 Evaluation Campaign",
author = {Cettolo, Mauro and
Federico, Marcello and
Bentivogli, Luisa and
Niehues, Jan and
St{\"u}ker, Sebastian and
Sudoh, Katsuhito and
Yoshino, Koichiro and
Federmann, Christian},
booktitle = "Proceedings of the 14th International Conference on Spoken Language Translation",
month = dec # " 14-15",
year = "2017",
address = "Tokyo, Japan",
publisher = "International Workshop on Spoken Language Translation",
url = "https://aclanthology.org/2017.iwslt-1.1",
pages = "2--14",
}
```
### Contributions
Thanks to [@thomwolf](https://github.com/thomwolf), [@Narsil](https://github.com/Narsil) for adding this dataset. |
bigcode/starcoderdata | bigcode | "2023-05-16T10:05:48Z" | 4,109 | 410 | [
"task_categories:text-generation",
"language_creators:crowdsourced",
"language_creators:expert-generated",
"multilinguality:multilingual",
"language:code",
"license:other",
"size_categories:100M<n<1B",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"text-generation"
] | "2023-03-30T12:02:21Z" | ---
annotations_creators: []
language_creators:
- crowdsourced
- expert-generated
language:
- code
license:
- other
multilinguality:
- multilingual
pretty_name: The-Stack
size_categories:
- unknown
source_datasets: []
task_categories:
- text-generation
extra_gated_prompt: >-
## Terms of Use for The Stack
The Stack dataset is a collection of source code in over 300 programming
languages. We ask that you read and acknowledge the following points before
using the dataset:
1. The Stack is a collection of source code from repositories with various
licenses. Any use of all or part of the code gathered in The Stack must abide
by the terms of the original licenses, including attribution clauses when
relevant. We facilitate this by providing provenance information for each data
point.
2. The Stack is regularly updated to enact validated data removal requests. By
clicking on "Access repository", you agree to update your own version of The
Stack to the most recent usable version specified by the maintainers in [the
following
thread](https://huggingface.co/datasets/bigcode/the-stack/discussions/7). If
you have questions about dataset versions and allowed uses, please also ask
them in the dataset’s [community
discussions](https://huggingface.co/datasets/bigcode/the-stack/discussions/new).
We will also notify users via email when the latest usable version changes.
3. To host, share, or otherwise provide access to The Stack dataset, you must
include [these Terms of
Use](https://huggingface.co/datasets/bigcode/the-stack#terms-of-use-for-the-stack)
and require users to agree to it.
By clicking on "Access repository" below, you accept that your contact
information (email address and username) can be shared with the dataset
maintainers as well.
extra_gated_fields:
Email: text
I have read the License and agree with its terms: checkbox
---
# StarCoder Training Dataset
## Dataset description
This is the dataset used for training [StarCoder](https://huggingface.co/bigcode/starcoder) and [StarCoderBase](https://huggingface.co/bigcode/starcoderbase). It contains 783GB of code in 86 programming languages, and includes 54GB GitHub Issues + 13GB Jupyter notebooks in scripts and text-code pairs,
and 32GB of GitHub commits, which is approximately 250 Billion tokens.
## Dataset creation
The creation and filtering of The Stack is explained in the [original dataset](https://huggingface.co/datasets/bigcode/the-stack-dedup), we additionally decontaminate and clean all 86 programming
languages in the dataset, in addition to GitHub issues, Jupyter Notebooks and GitHub commits. We also apply near-deduplication and remove PII; all details are mentioned in our [Paper: 💫 StarCoder, May The Source Be With You](https://drive.google.com/file/d/1cN-b9GnWtHzQRoE7M7gAEyivY0kl4BYs/view)
## How to use the dataset
```python
from datasets import load_dataset
# to load python for example
ds = load_dataset("bigcode/starcoderdata", data_dir="python", split="train")
```
GitHub issues, GitHub commits and Jupyter notebooks subsets have different columns from the rest, so loading the entire dataset at once may fail; we suggest loading programming languages separately from these categories.
````
jupyter-scripts-dedup-filtered
jupyter-structured-clean-dedup
github-issues-filtered-structured
git-commits-cleaned
````
|
wmt/wmt14 | wmt | "2024-04-03T09:05:59Z" | 4,098 | 15 | [
"task_categories:translation",
"annotations_creators:no-annotation",
"language_creators:found",
"multilinguality:translation",
"source_datasets:extended|europarl_bilingual",
"source_datasets:extended|giga_fren",
"source_datasets:extended|news_commentary",
"source_datasets:extended|un_multi",
"source_datasets:extended|hind_encorp",
"language:cs",
"language:de",
"language:en",
"language:fr",
"language:hi",
"language:ru",
"license:unknown",
"size_categories:10M<n<100M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"translation"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- no-annotation
language_creators:
- found
language:
- cs
- de
- en
- fr
- hi
- ru
license:
- unknown
multilinguality:
- translation
size_categories:
- 10M<n<100M
source_datasets:
- extended|europarl_bilingual
- extended|giga_fren
- extended|news_commentary
- extended|un_multi
- extended|hind_encorp
task_categories:
- translation
task_ids: []
paperswithcode_id: wmt-2014
pretty_name: WMT14
dataset_info:
- config_name: cs-en
features:
- name: translation
dtype:
translation:
languages:
- cs
- en
splits:
- name: train
num_bytes: 280992026
num_examples: 953621
- name: validation
num_bytes: 702465
num_examples: 3000
- name: test
num_bytes: 757809
num_examples: 3003
download_size: 168878237
dataset_size: 282452300
- config_name: de-en
features:
- name: translation
dtype:
translation:
languages:
- de
- en
splits:
- name: train
num_bytes: 1358406800
num_examples: 4508785
- name: validation
num_bytes: 736407
num_examples: 3000
- name: test
num_bytes: 777326
num_examples: 3003
download_size: 818467512
dataset_size: 1359920533
- config_name: fr-en
features:
- name: translation
dtype:
translation:
languages:
- fr
- en
splits:
- name: train
num_bytes: 14752522252
num_examples: 40836715
- name: validation
num_bytes: 744439
num_examples: 3000
- name: test
num_bytes: 838849
num_examples: 3003
download_size: 7777527744
dataset_size: 14754105540
- config_name: hi-en
features:
- name: translation
dtype:
translation:
languages:
- hi
- en
splits:
- name: train
num_bytes: 1936003
num_examples: 32863
- name: validation
num_bytes: 181457
num_examples: 520
- name: test
num_bytes: 1075008
num_examples: 2507
download_size: 1583004
dataset_size: 3192468
- config_name: ru-en
features:
- name: translation
dtype:
translation:
languages:
- ru
- en
splits:
- name: train
num_bytes: 433209078
num_examples: 1486965
- name: validation
num_bytes: 977938
num_examples: 3000
- name: test
num_bytes: 1087738
num_examples: 3003
download_size: 223537244
dataset_size: 435274754
configs:
- config_name: cs-en
data_files:
- split: train
path: cs-en/train-*
- split: validation
path: cs-en/validation-*
- split: test
path: cs-en/test-*
- config_name: de-en
data_files:
- split: train
path: de-en/train-*
- split: validation
path: de-en/validation-*
- split: test
path: de-en/test-*
- config_name: fr-en
data_files:
- split: train
path: fr-en/train-*
- split: validation
path: fr-en/validation-*
- split: test
path: fr-en/test-*
- config_name: hi-en
data_files:
- split: train
path: hi-en/train-*
- split: validation
path: hi-en/validation-*
- split: test
path: hi-en/test-*
- config_name: ru-en
data_files:
- split: train
path: ru-en/train-*
- split: validation
path: ru-en/validation-*
- split: test
path: ru-en/test-*
---
# Dataset Card for "wmt14"
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [http://www.statmt.org/wmt14/translation-task.html](http://www.statmt.org/wmt14/translation-task.html)
- **Repository:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Paper:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Size of downloaded dataset files:** 1.70 GB
- **Size of the generated dataset:** 282.95 MB
- **Total amount of disk used:** 1.98 GB
### Dataset Summary
<div class="course-tip course-tip-orange bg-gradient-to-br dark:bg-gradient-to-r before:border-orange-500 dark:before:border-orange-800 from-orange-50 dark:from-gray-900 to-white dark:to-gray-950 border border-orange-50 text-orange-700 dark:text-gray-400">
<p><b>Warning:</b> There are issues with the Common Crawl corpus data (<a href="https://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz">training-parallel-commoncrawl.tgz</a>):</p>
<ul>
<li>Non-English files contain many English sentences.</li>
<li>Their "parallel" sentences in English are not aligned: they are uncorrelated with their counterpart.</li>
</ul>
<p>We have contacted the WMT organizers, and in response, they have indicated that they do not have plans to update the Common Crawl corpus data. Their rationale pertains to the expectation that such data has been superseded, primarily by CCMatrix, and to some extent, by ParaCrawl datasets.</p>
</div>
Translation dataset based on the data from statmt.org.
Versions exist for different years using a combination of data
sources. The base `wmt` allows you to create a custom dataset by choosing
your own data/language pair. This can be done as follows:
```python
from datasets import inspect_dataset, load_dataset_builder
inspect_dataset("wmt14", "path/to/scripts")
builder = load_dataset_builder(
"path/to/scripts/wmt_utils.py",
language_pair=("fr", "de"),
subsets={
datasets.Split.TRAIN: ["commoncrawl_frde"],
datasets.Split.VALIDATION: ["euelections_dev2019"],
},
)
# Standard version
builder.download_and_prepare()
ds = builder.as_dataset()
# Streamable version
ds = builder.as_streaming_dataset()
```
### Supported Tasks and Leaderboards
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Languages
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Dataset Structure
### Data Instances
#### cs-en
- **Size of downloaded dataset files:** 1.70 GB
- **Size of the generated dataset:** 282.95 MB
- **Total amount of disk used:** 1.98 GB
An example of 'train' looks as follows.
```
```
### Data Fields
The data fields are the same among all splits.
#### cs-en
- `translation`: a multilingual `string` variable, with possible languages including `cs`, `en`.
### Data Splits
|name |train |validation|test|
|-----|-----:|---------:|---:|
|cs-en|953621| 3000|3003|
## Dataset Creation
### Curation Rationale
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the source language producers?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Annotations
#### Annotation process
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the annotators?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Personal and Sensitive Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Citation Information
```
@InProceedings{bojar-EtAl:2014:W14-33,
  author = {Bojar, Ondrej and Buck, Christian and Federmann, Christian and Haddow, Barry and Koehn, Philipp and Leveling, Johannes and Monz, Christof and Pecina, Pavel and Post, Matt and Saint-Amand, Herve and Soricut, Radu and Specia, Lucia and Tamchyna, Ale{\v{s}}},
title = {Findings of the 2014 Workshop on Statistical Machine Translation},
booktitle = {Proceedings of the Ninth Workshop on Statistical Machine Translation},
month = {June},
year = {2014},
address = {Baltimore, Maryland, USA},
publisher = {Association for Computational Linguistics},
pages = {12--58},
url = {http://www.aclweb.org/anthology/W/W14/W14-3302}
}
```
### Contributions
Thanks to [@thomwolf](https://github.com/thomwolf), [@patrickvonplaten](https://github.com/patrickvonplaten) for adding this dataset. |
lmqg/qg_koquad | lmqg | "2022-12-02T18:53:42Z" | 4,083 | 8 | [
"task_categories:text-generation",
"task_ids:language-modeling",
"multilinguality:monolingual",
"source_datasets:squad_es",
"language:ko",
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"modality:text",
"library:datasets",
"library:mlcroissant",
"arxiv:2210.03992",
"region:us",
"question-generation"
] | [
"text-generation"
] | "2022-06-02T23:42:21Z" | ---
license: cc-by-4.0
pretty_name: KorQuAD for question generation
language: ko
multilinguality: monolingual
size_categories: 10K<n<100K
source_datasets: squad_es
task_categories:
- text-generation
task_ids:
- language-modeling
tags:
- question-generation
---
# Dataset Card for "lmqg/qg_korquad"
## Dataset Description
- **Repository:** [https://github.com/asahi417/lm-question-generation](https://github.com/asahi417/lm-question-generation)
- **Paper:** [https://arxiv.org/abs/2210.03992](https://arxiv.org/abs/2210.03992)
- **Point of Contact:** [Asahi Ushio](http://asahiushio.com/)
### Dataset Summary
This is a subset of [QG-Bench](https://github.com/asahi417/lm-question-generation/blob/master/QG_BENCH.md#datasets), a unified question generation benchmark proposed in
["Generative Language Models for Paragraph-Level Question Generation: A Unified Benchmark and Evaluation, EMNLP 2022 main conference"](https://arxiv.org/abs/2210.03992).
This is a modified version of [KorQuAD](https://huggingface.co/datasets/squad_kor_v1) for question generation (QG) task.
Since the original dataset only contains training/validation sets, we manually sampled a test set from the training set;
the test set has no paragraph overlap with the training set.
### Supported Tasks and Leaderboards
* `question-generation`: The dataset is assumed to be used to train a model for question generation.
Success on this task is typically measured by achieving a high BLEU4/METEOR/ROUGE-L/BERTScore/MoverScore (see our paper for more in detail).
### Languages
Korean (ko)
## Dataset Structure
An example of 'train' looks as follows.
```
{
"question": "함수해석학이 주목하는 탐구는?",
"paragraph": "변화에 대한 이해와 묘사는 자연과학에 있어서 일반적인 주제이며, 미적분학은 변화를 탐구하는 강력한 도구로서 발전되었다. 함수는 변화하는 양을 묘사함에 있어서 중추적인 개념으로써 떠오르게 된다. 실수와 실변수로 구성된 함수의 엄밀한 탐구가 실해석학이라는 분야로 알려지게 되었고, 복소수에 대한 이와 같은 탐구분야는 복소해석학이라고 한다. 함수해석학은 함수의 공간(특히 무한차원)의 탐구에 주목한다. 함수해석학의 많은 응용분야 중 하나가 양자역학이다. 많은 문제들이 자연스럽게 양과 그 양의 변화율의 관계로 귀착되고, 이러한 문제들이 미분방정식으로 다루어진다. 자연의 많은 현상들이 동역학계로 기술될 수 있다. 혼돈 이론은 이러한 예측 불가능한 현상을 탐구하는 데 상당한 기여를 한다.",
"answer": "함수의 공간(특히 무한차원)의 탐구",
"sentence": "함수해석학은 함수의 공간(특히 무한차원)의 탐구 에 주목한다.",
"paragraph_sentence": '변화에 대한 이해와 묘사는 자연과학에 있어서 일반적인 주제이며, 미적분학은 변화를 탐구하는 강력한 도구로서 발전되었다. 함수는 변화하는 양을 묘사함에 있어서 중추적인 개념으로써 떠오르게 된다. 실수와 실변수로 구성된 함수의 엄밀한 탐구가 실해석학이라는 분야로 알려지게 되었고, 복소수에 대한 이와 같은 탐구 분야는 복소해석학이라고 한다. <hl> 함수해석학은 함수의 공간(특히 무한차원)의 탐구 에 주목한다. <hl> 함수해석학의 많은 응용분야 중 하나가 양자역학이다. 많은 문제들이 자연스럽게 양과 그 양의 변화율의 관계로 귀착되고, 이러한 문제들이 미분방정식으로 다루어진다. 자연의 많은 현상들이 동역학계로 기술될 수 있다. 혼돈 이론은 이러한 예측 불가능한 현상을 탐구하는 데 상당한 기여를 한다.',
"paragraph_answer": '변화에 대한 이해와 묘사는 자연과학에 있어서 일반적인 주제이며, 미적분학은 변화를 탐구하는 강력한 도구로서 발전되었다. 함수는 변화하는 양을 묘사함에 있어서 중추적인 개념으로써 떠오르게 된다. 실수와 실변수로 구성된 함수의 엄밀한 탐구가 실해석학이라는 분야로 알려지게 되었고, 복소수에 대한 이와 같은 탐구 분야는 복소해석학이라고 한다. 함수해석학은 <hl> 함수의 공간(특히 무한차원)의 탐구 <hl>에 주목한다. 함수해석학의 많은 응용분야 중 하나가 양자역학이다. 많은 문제들이 자연스럽게 양과 그 양의 변화율의 관계로 귀착되고, 이러한 문제들이 미분방정식으로 다루어진다. 자연의 많은 현상들이 동역학계로 기술될 수 있다. 혼돈 이론은 이러한 예측 불가능한 현상을 탐구하는 데 상당한 기여를 한다.',
"sentence_answer": "함수해석학은 <hl> 함수의 공간(특히 무한차원)의 탐구 <hl> 에 주목한다."
}
```
The data fields are the same among all splits.
- `question`: a `string` feature.
- `paragraph`: a `string` feature.
- `answer`: a `string` feature.
- `sentence`: a `string` feature.
- `paragraph_answer`: a `string` feature, which is same as the paragraph but the answer is highlighted by a special token `<hl>`.
- `paragraph_sentence`: a `string` feature, which is same as the paragraph but a sentence containing the answer is highlighted by a special token `<hl>`.
- `sentence_answer`: a `string` feature, which is same as the sentence but the answer is highlighted by a special token `<hl>`.
Each of `paragraph_answer`, `paragraph_sentence`, and `sentence_answer` feature is assumed to be used to train a question generation model,
but with different information. The `paragraph_answer` and `sentence_answer` features are for answer-aware question generation and
`paragraph_sentence` feature is for sentence-aware question generation.
## Data Splits
|train|validation|test |
|----:|---------:|----:|
|54556| 5766 |5766 |
## Citation Information
```
@inproceedings{ushio-etal-2022-generative,
title = "{G}enerative {L}anguage {M}odels for {P}aragraph-{L}evel {Q}uestion {G}eneration: {A} {U}nified {B}enchmark and {E}valuation",
author = "Ushio, Asahi and
Alva-Manchego, Fernando and
Camacho-Collados, Jose",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, U.A.E.",
publisher = "Association for Computational Linguistics",
}
``` |
deepghs/danbooru2023-webp-4Mpixel_index | deepghs | "2024-07-18T13:27:22Z" | 4,080 | 3 | [
"task_categories:image-classification",
"task_categories:image-to-image",
"task_categories:text-to-image",
"language:en",
"language:ja",
"license:mit",
"size_categories:1M<n<10M",
"region:us"
] | [
"image-classification",
"image-to-image",
"text-to-image"
] | "2024-05-31T07:35:02Z" | ---
license: mit
task_categories:
- image-classification
- image-to-image
- text-to-image
language:
- en
- ja
size_categories:
- 1M<n<10M
---
Index files of [KBlueLeaf/danbooru2023-webp-4Mpixel](https://huggingface.co/datasets/KBlueLeaf/danbooru2023-webp-4Mpixel).
You can download images from KBlueLeaf/danbooru2023-webp-4Mpixel with [cheesechaser](https://github.com/deepghs/cheesechaser).
```python
from cheesechaser.datapool import DanbooruWebpDataPool
pool = DanbooruWebpDataPool()
# download danbooru images with webp format, to directory /data/danbooru_webp
pool.batch_download_to_directory(
resource_ids=range(6000000, 6001000),
dst_dir='/data/danbooru_webp',
max_workers=12,
)
```
|
mteb/twentynewsgroups-clustering | mteb | "2022-09-27T19:13:51Z" | 4,076 | 0 | [
"language:en",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-04-07T13:46:04Z" | ---
language:
- en
--- |
mteb/scidocs-reranking | mteb | "2022-09-27T19:11:31Z" | 4,067 | 0 | [
"language:en",
"size_categories:1K<n<10K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-04-19T12:15:26Z" | ---
language:
- en
--- |
jkot/merged_preprocessed_parliament_commonvoice | jkot | "2023-05-01T13:35:28Z" | 4,042 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-05-01T10:37:03Z" | ---
dataset_info:
features:
- name: input_features
sequence:
sequence: float32
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 210499135424
num_examples: 219101
- name: test
num_bytes: 11099630080
num_examples: 11555
download_size: 65027813279
dataset_size: 221598765504
---
# Dataset Card for "merged_preprocessed_parliament_commonvoice"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
mteb/summeval | mteb | "2022-09-27T19:14:10Z" | 4,022 | 7 | [
"language:en",
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-06-21T13:37:10Z" | ---
language:
- en
---
# SummEval
The annotations include summaries generated by 16 models from 100 source news articles (1600 examples in total).
Each of the summaries was annotated by 5 independent crowdsource workers and 3 independent experts (8 annotations in total).
Summaries were evaluated across 4 dimensions: coherence, consistency, fluency, relevance.
Each source news article comes with the original reference from the CNN/DailyMail dataset and 10 additional crowdsourced reference summaries.
For this dataset, we averaged the 3 **expert** annotations to get the human scores.
source: https://github.com/Yale-LILY/SummEval |
yzwang/X2I-subject-driven | yzwang | "2024-12-14T12:33:09Z" | 3,990 | 3 | [
"task_categories:text-to-image",
"task_categories:image-to-image",
"language:en",
"license:apache-2.0",
"size_categories:1M<n<10M",
"arxiv:2409.11340",
"region:us"
] | [
"text-to-image",
"image-to-image"
] | "2024-12-01T09:43:36Z" | ---
license: apache-2.0
task_categories:
- text-to-image
- image-to-image
language:
- en
size_categories:
- 1M<n<10M
---
# X2I Dataset
* Project Page: [https://vectorspacelab.github.io/OmniGen/](https://vectorspacelab.github.io/OmniGen/)
* Github: [https://github.com/VectorSpaceLab/OmniGen](https://github.com/VectorSpaceLab/OmniGen)
* Paper: [https://arxiv.org/abs/2409.11340](https://arxiv.org/abs/2409.11340)
* Model: [https://huggingface.co/Shitao/OmniGen-v1](https://huggingface.co/Shitao/OmniGen-v1)
To achieve robust multi-task processing capabilities, it is essential to train the **OmniGen** on large-scale and diverse datasets. However, in the field of unified image generation, a readily available dataset has yet to emerge. For this reason, we have curated a large-scale **unified image generation** dataset with unified format for the **first time**, which we refer to as the **X2I dataset**, meaning **"anything to image"**.
| Task| Dataset|
| :-------- | :-------- |
| Multi-modal Instruction| [X2I-mm-instruction](https://huggingface.co/datasets/yzwang/X2I-mm-instruction) |
| Subject-driven Editing | [X2I-subject-driven](https://huggingface.co/datasets/yzwang/X2I-subject-driven) |
| In-context Learning | [X2I-in-context-learning](https://huggingface.co/datasets/yzwang/X2I-in-context-learning) |
| Computer Vision | [X2I-computer-vision](https://huggingface.co/datasets/yzwang/X2I-computer-vision) |
| Text to Image Generation| [X2I-text-to-image](https://huggingface.co/datasets/yzwang/X2I-text-to-image) |
## X2I-subject-driven
- **Web-Image**
A self-built subject-driven editing dataset with 36,316 & 45,425 & 111,734 samples.
```python
## meta file: web-image-1.jsonl && web-image-2.jsonl && web-image-3.jsonl
cd retrieval
tar -zxvf download_images.tar.gz
tar -zxvf download_images_two.tar.gz
```
- **GRIT-Entity**
A subject-driven editing dataset with 1,708,742 samples.
```python
## meta file: grit-entity.jsonl
cd grit/images1
tar -zxvf 00034.tar.gz
# tar -zxvf 00066.tar.gz
# ...
cd grit/images2
tar -zxvf 00034.tar.gz
# tar -zxvf 00066.tar.gz
# ...
cd grit/images3
tar -zxvf 00168.tar.gz
# tar -zxvf 00187.tar.gz
# ...
```
- **GRIT-Entity-New**
A self-built subject-driven editing dataset with 676,603 samples. This dataset is smaller than GRIT-Entity but has higher quality.
```python
## meta file: grit-entity-new.jsonl
cd character
tar -xzvf character.tar.gz
cd human/human2
tar -xzvf human2.tar.gz
cd human/human3
tar -xzvf human3.tar.gz
cd single
cat single.tar.gz.* | tar -xzvf -
cd double
cat double.tar.gz.* | tar -xzvf -
cd triple
cat triple.tar.gz.* | tar -xzvf -
``` |
linagora/linto-dataset-audio-ar-tn-augmented | linagora | "2024-12-19T08:43:21Z" | 3,983 | 3 | [
"task_categories:automatic-speech-recognition",
"task_categories:text-to-speech",
"task_categories:text-to-audio",
"language:ar",
"license:cc-by-4.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2309.11327",
"region:us"
] | [
"automatic-speech-recognition",
"text-to-speech",
"text-to-audio"
] | "2024-09-11T12:07:47Z" | ---
language:
- ar
task_categories:
- automatic-speech-recognition
- text-to-speech
- text-to-audio
license: cc-by-4.0
version: 1.0
dataset_info:
- config_name: default
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
- config_name: ApprendreLeTunisienVCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 839147756.322
num_examples: 6146
download_size: 798894474
dataset_size: 839147756.322
- config_name: MASC_NoiseLess
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 1798927453.0
num_examples: 48
download_size: 1508394957
dataset_size: 1798927453.0
- config_name: MASC_NoiseLess_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 6297517576.0
num_examples: 336
download_size: 5218109270
dataset_size: 6297517576.0
- config_name: OneStory_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 2948770377.0
num_examples: 216
download_size: 2745380587
dataset_size: 2948770377.0
- config_name: TunSwitchCS_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 16211221231.134
num_examples: 37639
download_size: 18870351203
dataset_size: 16211221231.134
- config_name: TunSwitchTO_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 5926536342.08
num_examples: 15365
download_size: 5236455978
dataset_size: 5926536342.08
- config_name: Youtube_AbdelAzizErwi_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 39027242686.0
num_examples: 125
download_size: 30064752032
dataset_size: 39027242686.0
- config_name: Youtube_BayariBilionaireVCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 1557801334.0
num_examples: 30
download_size: 1524983572
dataset_size: 1557801334.0
- config_name: Youtube_DiwanFM_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 12125888408.0
num_examples: 252
download_size: 11966562052
dataset_size: 12125888408.0
- config_name: Youtube_HkeyetTounsiaMensia_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 3883840637.0
num_examples: 35
download_size: 3803268888
dataset_size: 3883840637.0
- config_name: Youtube_LobnaMajjedi_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 2126737013.0
num_examples: 14
download_size: 2045521265
dataset_size: 2126737013.0
- config_name: Youtube_MohamedKhammessi_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 3850743255.0
num_examples: 14
download_size: 3803407855
dataset_size: 3850743255.0
- config_name: Youtube_Shorts_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 8401284864.0
num_examples: 945
download_size: 8279119035
dataset_size: 8401284864.0
- config_name: Youtube_TNScrapped_V1_NoiseLess
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 2510511859.0
num_examples: 52
download_size: 2163493076
dataset_size: 2510511859.0
- config_name: Youtube_TNScrapped_V1_NoiseLess_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 8973984541.0
num_examples: 364
download_size: 7561296937
dataset_size: 8973984541.0
- config_name: Youtube_TV_VCA
features:
- name: audio_id
dtype: string
- name: audio
dtype: audio
- name: segments
list:
- name: end
dtype: float64
- name: start
dtype: float64
- name: transcript
dtype: string
- name: transcript_raw
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 1357183734.0
num_examples: 28
download_size: 1317232730
dataset_size: 1357183734.0
configs:
- config_name: default
default: true
data_files:
- split: train
path: data/*/train/train-*
- config_name: ApprendreLeTunisienVCA
data_files:
- split: train
path: data/ApprendreLeTunisien_VCA/train/train-*
- config_name: MASC_NoiseLess
data_files:
- split: train
path: data/MASC_NoiseLess/train/train-*
- config_name: MASC_NoiseLess_VCA
data_files:
- split: train
path: data/MASC_NoiseLess_VCA/train/train-*
- config_name: OneStoryVCA
data_files:
- split: train
path: data/OneStory_VCA/train/train-*
- config_name: TunSwitchCS_VCA
data_files:
- split: train
path: data/TunSwitchCS_VCA/train/train-*
- config_name: TunSwitchTO_VCA
data_files:
- split: train
path: data/TunSwitchTO_VCA/train/train-*
- config_name: Youtube_AbdelAzizErwi_VCA
data_files:
- split: train
path: data/Youtube_AbdelAzizErwi_VCA/train/train-*
- config_name: Youtube_BayariBilionaireVCA
data_files:
- split: train
path: data/Youtube_BayariBilionaire_VCA/train/train-*
- config_name: Youtube_DiwanFM_VCA
data_files:
- split: train
path: data/Youtube_DiwanFM_VCA/train/train-*
- config_name: Youtube_HkeyetTounsiaMensia_VCA
data_files:
- split: train
path: data/Youtube_HkeyetTounsiaMensia_VCA/train/train-*
- config_name: Youtube_LobnaMajjedi_VCA
data_files:
- split: train
path: data/Youtube_LobnaMajjedi_VCA/train/train-*
- config_name: Youtube_MohamedKhammessi_VCA
data_files:
- split: train
path: data/Youtube_MohamedKhammessi_VCA/train/train-*
- config_name: Youtube_Shorts_VCA
data_files:
- split: train
path: data/Youtube_Shorts_VCA/train/train-*
- config_name: Youtube_TNScrapped_V1_NoiseLess
data_files:
- split: train
path: data/Youtube_TNScrapped_V1_NoiseLess/train/train-*
- config_name: Youtube_TNScrapped_V1_NoiseLess_VCA
data_files:
- split: train
path: data/Youtube_TNScrapped_V1_NoiseLess_VCA/train/train-*
- config_name: Youtube_TV_VCA
data_files:
- split: train
path: data/Youtube_TV_VCA/train/train-*
---
# LinTO DataSet Audio for Arabic Tunisian Augmented <br />*A collection of Tunisian dialect audio and its annotations for STT task*
This is the augmented dataset used to train the LinTO Tunisian-dialect code-switching STT model [linagora/linto-asr-ar-tn](https://huggingface.co/linagora/linto-asr-ar-tn).
* [Dataset Summary](#dataset-summary)
* [Dataset composition](#dataset-composition)
* [Sources](#sources)
* [Content Types](#content-types)
* [Languages and Dialects](#languages-and-dialects)
* [Example use (python)](#example-use-python)
* [License](#license)
* [Citations](#citations)
## Dataset Summary
The **LinTO DataSet Audio for Arabic Tunisian Augmented** is a dataset that builds on [**LinTO DataSet Audio for Arabic Tunisian**](https://huggingface.co/datasets/linagora/linto-dataset-audio-ar-tn), using a subset of the original audio data. Augmentation techniques, including noise reduction and SoftVC VITS Singing Voice Conversion (SVC), have been applied to enhance the dataset for improved performance in Arabic Tunisian Automatic Speech Recognition (ASR) tasks.
## Dataset Composition:
The **LinTO DataSet Audio for Arabic Tunisian Augmented** comprises a diverse range of augmented audio samples using different techniques. Below is a breakdown of the dataset’s composition:
### Sources
| **subset** | **audio duration** | **labeled audio duration** | **# audios** | **# segments** | **# words** | **# characters** |
| --- | --- | --- | --- | --- | --- | --- |
| ApprendreLeTunisienVCA | 2h 40m 6s | 2h 40m 6s | 6146 | 6146 | 8078 | 36687 |
| MASC_NoiseLess | 2h 49m 56s | 1h 38m 17s | 48 | 1742 | 11909 | 59876 |
| MASC_NoiseLess_VCA | 19h 49m 31s | 11h 27m 59s | 336 | 12194 | 83377 | 411999 |
| OneStoryVCA | 9h 16m 51s | 9h 7m 32s | 216 | 2964 | 73962 | 341670 |
| TunSwitchCS_VCA | 59h 39m 10s | 59h 39m 10s | 37639 | 37639 | 531727 | 2760268 |
| TunSwitchTO_VCA | 18h 57m 34s | 18h 57m 34s | 15365 | 15365 | 129304 | 659295 |
| Youtube_AbdelAzizErwi_VCA | 122h 51m 1s | 109h 32m 39s | 125 | 109700 | 657720 | 3117170 |
| Youtube_BayariBilionaireVCA | 4h 54m 8s | 4h 35m 25s | 30 | 5400 | 39065 | 199155 |
| Youtube_DiwanFM_VCA | 38h 10m 6s | 28h 18m 58s | 252 | 32690 | 212170 | 1066464 |
| Youtube_HkeyetTounsiaMensia_VCA | 12h 13m 29s | 9h 53m 22s | 35 | 10626 | 73696 | 360990 |
| Youtube_LobnaMajjedi_VCA | 6h 41m 38s | 6h 12m 31s | 14 | 6202 | 42938 | 211512 |
| Youtube_MohamedKhammessi_VCA | 12h 7m 7s | 10h 58m 21s | 14 | 12775 | 92512 | 448987 |
| Youtube_Shorts_VCA | 26h 26m 25s | 23h 45m 25s | 945 | 14154 | 201138 | 1021713 |
| Youtube_TNScrapped_V1_NoiseLess | 4h 2m 9s | 2h 31m 05s | 52 | 2538 | 18777 | 92530 |
| Youtube_TNScrapped_V1_NoiseLess_VCA | 28h 15m 1s | 17h 37m 36s | 364 | 17766 | 132587 | 642292 |
| Youtube_TV_VCA | 4h 16m 16s | 3h 40m 56s | 28 | 4676 | 33376 | 311500 |
| **TOTAL** | **373h 10m 28s** | **320h 36m 58s** | **61609** | **292257** | **2342336** | **11742108** |
### Data Proccessing:
- **Noise Reduction**: Applying techniques to minimize background noise and enhance audio clarity for better model performance. For this, we used **Deezer [Spleeter](https://github.com/deezer/spleeter)**, a library with pretrained models, to separate vocals from music.
- **Voice Conversion**: Modifying speaker characteristics (e.g., pitch) through voice conversion techniques to simulate diverse speaker profiles and enrich the dataset. For this, we chose **SoftVC VITS Singing Voice Conversion** ([SVC](https://github.com/voicepaw/so-vits-svc-fork)) to alter the original voices using 7 different pretrained models.
The image below shows the difference between the original and the augmented audio:
![Wave Interface](img.png)
- The first row shows the original waveform.
- The second row shows the audio after noise reduction.
- The last row shows the audio with voice conversion augmentation.
### Content Types
- **FootBall**: Includes recordings of football news and reviews.
- **Documentaries**: Audio from documentaries about history and nature.
- **Podcasts**: Conversations and discussions from various podcast episodes.
- **Authors**: Audio recordings of authors reading or discussing different stories: horror, children's literature, life lessons, and others.
- **Lessons**: Learning resources for the Tunisian dialect.
- **Others**: Mixed recordings with various subjects.
### Languages and Dialects
- **Tunisian Arabic**: The primary focus of the dataset, including Tunisian Arabic and some Modern Standard Arabic (MSA).
- **French**: Some instances of French code-switching.
- **English**: Some instances of English code-switching.
### Characteristics
- **Audio Duration**: The dataset contains more than 373 hours of audio recordings, of which over 320 hours are labeled.
- **Segments Duration**: This dataset contains segments, each with a duration of less than 30 seconds.
- **Labeled Data**: Includes annotations and transcriptions for a significant portion of the audio content.
### Data Distribution
- **Training Set**: Includes a diverse range of augmented audio with 5 to 7 different voices, as well as noise reduction applied to two datasets.
## Example use (python)
- **Load the dataset in python**:
```python
from datasets import load_dataset
# dataset will be loaded as a DatasetDict of train and test
dataset = load_dataset("linagora/linto-dataset-audio-ar-tn-augmented")
```
Check the content of the dataset:
```python
example = dataset['train'][0]
audio_array = example['audio']["array"]
segments = example['segments']
transcription = example['transcript']
print(f"Audio array: {audio_array}")
print(f"Segments: {segments}")
print(f"Transcription: {transcription}")
```
**Example**
```bash
Audio array: [0. 0. 0. ... 0. 0. 0.]
Transcription: أسبقية قبل أنا ما وصلت خممت فيه كيما باش نحكيو من بعد إلا ما أنا كإنطريبرنور كباعث مشروع صارولي برشا مشاكل فالجستين و صارولي مشاكل مع لعباد لي كانت موفرتلي اللوجسيل ولا اللوجسيل أوف لنيه ولا لوجسيل بيراتي
segments: [{'end': 14.113, 'start': 0.0, 'transcript': 'أسبقية قبل أنا ما وصلت خممت فيه كيما باش نحكيو من بعد إلا ما أنا كإنطريبرنور كباعث مشروع صارولي برشا مشاكل فالجستين و صارولي مشاكل مع لعباد لي كانت موفرتلي اللوجسيل ولا اللوجسيل أوف لنيه ولا لوجسيل بيراتي'}]
```
## License
Given that some of the corpora used for training and evaluation are available only under CC-BY-4.0 licenses, we have chosen to license the entire dataset under CC-BY-4.0.
## Citations
When using the **LinTO DataSet Audio for Arabic Tunisian** corpus, please cite this page:
```bibtex
@misc{linagora2024Linto-tn,
author = {Hedi Naouara and Jérôme Louradour and Jean-Pierre Lorré},
title = {LinTO Audio and Textual Datasets to Train and Evaluate Automatic Speech Recognition in Tunisian Arabic Dialect},
year = {2024},
month = {October},
note = {Good Data Workshop, AAAI 2025},
howpublished = {\url{https://huggingface.co/linagora/linto-asr-ar-tn-0.1}},
}
```
```bibtex
@misc{abdallah2023leveraging,
title={Leveraging Data Collection and Unsupervised Learning for Code-switched Tunisian Arabic Automatic Speech Recognition},
author={Ahmed Amine Ben Abdallah and Ata Kabboudi and Amir Kanoun and Salah Zaiem},
year={2023},
eprint={2309.11327},
archivePrefix={arXiv},
primaryClass={eess.AS}
}
```
```bibtex
@data{e1qb-jv46-21,
doi = {10.21227/e1qb-jv46},
url = {https://dx.doi.org/10.21227/e1qb-jv46},
author = {Al-Fetyani, Mohammad and Al-Barham, Muhammad and Abandah, Gheith and Alsharkawi, Adham and Dawas, Maha},
publisher = {IEEE Dataport},
title = {MASC: Massive Arabic Speech Corpus},
year = {2021} }
```
|
amitness/logits-mt-it-en-128 | amitness | "2023-09-27T10:27:21Z" | 3,977 | 0 | [
"size_categories:10M<n<100M",
"format:parquet",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-09-25T19:22:48Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: input_ids
sequence: int32
- name: token_type_ids
sequence: int8
- name: attention_mask
sequence: int8
- name: labels
sequence: int64
- name: teacher_logits
sequence:
sequence: float64
- name: teacher_indices
sequence:
sequence: int64
- name: teacher_mask_indices
sequence: int64
splits:
- name: train
num_bytes: 184467361976
num_examples: 40721350
- name: test
num_bytes: 32556394204
num_examples: 7186121
download_size: 0
dataset_size: 217023756180
---
# Dataset Card for "logits-mt-it-en-128"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
sal4ahm/RealCQA | sal4ahm | "2024-09-09T18:14:20Z" | 3,954 | 5 | [
"license:mit",
"size_categories:10K<n<100K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"arxiv:2308.01979",
"region:us"
] | null | "2024-02-01T17:18:07Z" | ---
license: mit
---
# RealCQA: Real-World Complex Question Answering Dataset
This repository contains the dataset used in the paper "[RealCQA: Scientific Chart Question Answering as a Test-Bed for First-Order Logic](https://arxiv.org/pdf/2308.01979)" (ICDAR 2023). The dataset is designed to facilitate research in complex question answering, involving a diverse set of real-world images and associated textual question-answer pairs.
## Dataset Overview
The RealCQA dataset consists of 28,266 images, and corresponding 2 million question-answer pairs organized into three complementary subsets. Each image is accompanied by a JSON file containing one or more question blocks. The dataset is structured to address a range of question-answering tasks that require an understanding of the visual content.
### Dataset Structure
The dataset is organized into the following folders:
- **Images**
- `images`: Contains the first 10,000 images.
- `images2`: Contains the next 10,000 images.
- `images3`: Contains the remaining 8,266 images.
- **JSON Files**
- `jsons`: Contains the JSON files corresponding to the images in the `images` folder.
- `jsons2`: Contains the JSON files corresponding to the images in the `images2` folder.
- `jsons3`: Contains the JSON files corresponding to the images in the `images3` folder.
- **QA Files**
These are the QA created in our proposed dataset.
- `qa`: Contains the QA files corresponding to the images in the `images` folder.
- `qa2`: Contains the QA files corresponding to the images in the `images2` folder.
- `qa3`: Contains the QA files corresponding to the images in the `images3` folder.
### File Details
- **Images**: JPEG files named in the format `PMCxxxxxx_abc.jpg`, where `xxxxxx` represents the PubMed Central ID and `abc` represents an identifier specific to the image.
- **JSON Files**: JSON files named in the same format as the images. These are groundtruth annotations from the https://chartinfo.github.io challenge, they provide annotations for chart type, text(OCR), text location, text type (axis/tick/legend), data used to plot the chart.
- **QA Files**: QA files named in the same format as the images. Each QA file is a list of question blocks associated with the corresponding image we created in our proposed dataset.
#### QA Structure
Each QA file contains a list of question blocks in the following format:
```json
[
{
"taxonomy id": "2j",
"QID": "16",
"question": "Are all the bars in the chart visually horizontal?",
"answer": "no",
"answer_type": "Binary",
"qa_id": "XbUzFtjqsEOF",
"PMC_ID": "PMC8439477___g003"
},
{
"taxonomy id": "1a",
"QID": "7a",
"question": "What is the type of chart?",
"answer": "Vertical Bar chart",
"answer_type": "String",
"qa_id": "wzcdDijkrHtt",
"PMC_ID": "PMC8439477___g003"
}
]
```
### Dataset Loader
To facilitate loading and using the dataset, we provide a custom dataset loader script, `dataset.py`. This script defines a PyTorch `Dataset` class to handle loading, preprocessing, and batching of the images and question-answer pairs.
#### How to Use the Dataset Loader
1. **Setup and Requirements**
Ensure you have the following Python packages installed:
```bash
pip install torch torchvision Pillow
```
2. **Dataset Loader Script**
Use the provided `dataset.py` to load the dataset. The script is designed to load the dataset efficiently and handle both training and testing cases.
```python
from dataset import RQADataset
from torch.utils.data import DataLoader
dataset = RQADataset(data_dir='.', split='train') # split='test' for RQA9357 split used in the paper
# Test loading a single item
print(f"Number of samples in dataset: {len(dataset)}")
sample = dataset[0]
print("Sample data:", sample)
# Initialize DataLoader
dataloader = DataLoader(dataset, batch_size=4, collate_fn=RQADataset.custom_collate)
# Test DataLoader
for batch in dataloader:
print("Batch data:", batch)
break # Load only one batch for testing
```
### Citation
If you use this dataset in your research, please cite the following paper:
```bibtex
@InProceedings{10.1007/978-3-031-41682-8_5,
author="Ahmed, Saleem
and Jawade, Bhavin
and Pandey, Shubham
and Setlur, Srirangaraj
and Govindaraju, Venu",
editor="Fink, Gernot A.
and Jain, Rajiv
and Kise, Koichi
and Zanibbi, Richard",
title="RealCQA: Scientific Chart Question Answering as a Test-Bed for First-Order Logic",
booktitle="Document Analysis and Recognition - ICDAR 2023",
year="2023",
publisher="Springer Nature Switzerland",
address="Cham",
pages="66--83",
abstract="We present a comprehensive study of chart visual question-answering(QA) task, to address the challenges faced in comprehending and extracting data from chart visualizations within documents. Despite efforts to tackle this problem using synthetic charts, solutions are limited by the shortage of annotated real-world data. To fill this gap, we introduce a benchmark and dataset for chart visual QA on real-world charts, offering a systematic analysis of the task and a novel taxonomy for template-based chart question creation. Our contribution includes the introduction of a new answer type, `list', with both ranked and unranked variations. Our study is conducted on a real-world chart dataset from scientific literature, showcasing higher visual complexity compared to other works. Our focus is on template-based QA and how it can serve as a standard for evaluating the first-order logic capabilities of models. The results of our experiments, conducted on a real-world out-of-distribution dataset, provide a robust evaluation of large-scale pre-trained models and advance the field of chart visual QA and formal logic verification for neural networks in general. Our code and dataset is publicly available (https://github.com/cse-ai-lab/RealCQA).",
isbn="978-3-031-41682-8"
}
```
### License
This dataset is licensed under the [MIT License](LICENSE). By using this dataset, you agree to abide by its terms and conditions.
### Contact
For any questions or issues, please contact the authors of the paper or open an issue in this repository. |
allenai/s2-naip | allenai | "2024-05-31T21:06:47Z" | 3,921 | 17 | [
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:webdataset",
"modality:image",
"modality:text",
"library:datasets",
"library:webdataset",
"library:mlcroissant",
"region:us"
] | null | "2024-03-06T03:10:43Z" | ---
license: apache-2.0
---
AI2-S2-NAIP is a remote sensing dataset consisting of aligned NAIP, Sentinel-2, Sentinel-1, and Landsat images spanning the entire continental US.
Data is divided into tiles.
Each tile spans 512x512 pixels at 1.25 m/pixel in one of the 10 UTM projections covering the continental US.
At each tile, the following data is available:
- [National Agriculture Imagery Program (NAIP)](https://www.usgs.gov/centers/eros/science/usgs-eros-archive-aerial-photography-national-agriculture-imagery-program-naip): an image from 2019-2021 at 1.25 m/pixel (512x512).
- [Sentinel-2 (L1C)](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-2): between 16 and 32 images captured within a few months of the NAIP image at 10 m/pixel (64x64).
- [Sentinel-1](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-1): between 2 and 8 images captured within a few months of the NAIP image at 10 m/pixel (64x64).
- [Landsat-8/9](https://www.usgs.gov/landsat-missions/landsat-8): 4 images captured in the same year as the NAIP image at 10 m/pixel (64x64).
- [OpenStreetMap](https://www.openstreetmap.org): a GeoJSON containing buildings, roads, and 30 other categories. It uses pixel coordinates relative to the 512x512 NAIP image.
- [WorldCover](https://worldcover2021.esa.int/): the 2021 land cover image at 10 m/pixel (64x64).
AI2-S2-NAIP is applicable to several supervised and unsupervised tasks in remote sensing, including super-resolution (e.g. NAIP -> Sentinel-2), segmentation and detection (e.g. NAIP or Sentinel-2 -> OpenStreetMap or WorldCover), and multi-modal masked autoencoder pre-training.
For questions or feedback about AI2-S2-NAIP, please open an issue on Github at https://github.com/allenai/satlas.
![Example images for one tile in the dataset.](example_images/combined.png)
Structure
---------
Once extracted, the dataset contains the different data types in different folders.
Each folder contains files named by a tile ID, which consists of the UTM projection, column, and row.
The column and row are based on tiles that are 512x512 pixels with pixel coordinates at 1.25 m/pixel, e.g. `32612_960_-6049.png` spans (614400, -3871360) to (615040, -3870720) in EPSG:32612 projection units.
Here is an example of NAIP data:
```
naip/
32612_960_-6049.png
32612_960_-6050.png
32612_960_-6051.png
...
```
And an example of Sentinel-2 data:
```
sentinel2/
32612_960_-6049_16.tif
32612_960_-6049_32.tif
32612_960_-6049_8.tif
32612_960_-6050_16.tif
...
```
The Sentinel-2, Sentinel-1, and Landsat images are GeoTIFFS so they contain georeference metadata.
Other data does not have georeference metadata, but data at each tile is aligned, so the georeference metadata from the above images is applicable to the other data as well with only a resolution shift.
Mapping Longitude and Latitude to Tile
--------------------------------------
Here is an example of mapping longitude and latitude to a tile.
First install packages:
pip install rasterio shapely utm
Then launch Python shell:
from rasterio.crs import CRS
from rasterio.warp import transform_geom
import shapely
import utm
# Define source location.
src_crs = CRS.from_epsg(4326)
src_point = shapely.Point(-122.331711, 47.648450)
# Get UTM zone.
_, _, zone_suffix, _ = utm.from_latlon(src_point.y, src_point.x)
epsg_code = 32600 + zone_suffix
dst_crs = CRS.from_epsg(epsg_code)
# Transform to UTM CRS.
dst_point = transform_geom(src_crs, dst_crs, src_point)
dst_point = shapely.geometry.shape(dst_point)
# dst_point is in projection coordinates (meters).
# Now convert to pixel coordinates at 1.25 m/pixel.
col = int(dst_point.x/1.25)
row = int(dst_point.y/-1.25)
# Print the prefix for the image filenames.
print(f"{epsg_code}_{col//512}_{row//512}")
# Print the prefix for the tar filenames to know which one to download.
# These group together many 1.25 m/pixel 512x512 tiles into one tar file.
print(f"{epsg_code}_{col//512//32}_{row//512//32}")
So then you would download the tar file from the second prefix, extract it, and look at the file with name matching the first prefix.
See visualize_tile.py for example of visualizing the data at a particular tile.
Sentinel-2
----------
The 10 m/pixel (`_8.tif`), 20 m/pixel (`_16.tif`), and 60 m/pixel (`_32.tif`) bands are stored separately.
Pixel values are the L1C 16-bit values.
The band order is as follows:
- _8.tif (64x64): B02, B03, B04, B08
- _16.tif (32x32): B05, B06, B07, B8A, B11, B12
- _32.tif (16x16): B01, B09, B10
The GeoTIFFs contain multiple images concatenated along the channel axis.
The CSV shows the original Sentinel-2 scene ID of each image.
Sentinel-1
----------
The Sentinel-1 bands are 10 m/pixel and ordered VV then VH.
Only IW VV+VH scenes are used.
The pixel values are 32-bit floating point values representing decibels 10*log10(x).
We obtain the radiometric-calibrated and terrain-corrected images from Google Earth Engine so see
https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S1_GRD for details.
The GeoTIFFs contain multiple images concatenated along the channel axis.
The CSV shows the original Sentinel-1 scene ID of each image.
NAIP
----
The NAIP image is 512x512 with four 8-bit bands: R, G, B, IR.
It is encoded as PNG but the IR is alpha mask so cannot be visualized correctly in image viewer without removing the alpha mask.
There are two NAIP images available, one under "naip" (2019-2022) and one under "oldnaip" (2015-2018).
The CSV shows the original NAIP scene ID of each image.
Landsat
-------
We include OLI-TIRS images from Landsat-8 and Landsat-9.
As with Sentinel-2, we select Landsat images that were captured within a few months of the NAIP image.
We store the 15 m/pixel bands (i.e. B8) at 10 m/pixel, and the 30 m/pixel bands (all the others) at 20 m/pixel.
There are separate GeoTIFFs for the 10 m/pixel (`_8.tif`) and 20 m/pixel (`_16.tif`).
All pixel values are 16-bit.
The band order is as follows:
- _8.tif (64x64): B8
- _16.tif (32x32): B1, B2, B3, B4, B5, B6, B7, B9, B10, B11
The GeoTIFFs contain multiple images concatenated along the channel axis.
The CSV shows the original Landsat scene ID of each image.
|
rethinklab/Bench2Drive | rethinklab | "2024-08-14T08:21:30Z" | 3,912 | 10 | [
"license:apache-2.0",
"region:us"
] | null | "2024-05-01T14:49:07Z" | ---
license: apache-2.0
viewer: false
---
# **Bench2Drive**: Towards Multi-Ability Benchmarking of Closed-Loop End-To-End Autonomous Driving.
## Description
Bench2Drive is a benchmark designed for evaluating end-to-end autonomous driving algorithms in the closed-loop manner. It features:
- **Comprehensive Scenario Coverage**: Bench2Drive is designed to test AD systems across 44 interactive scenarios, ensuring a thorough evaluation of an AD system's capability to handle real-world driving challenges.
- **Granular Skill Assessment**: By structuring the evaluation across 220 short routes, each focusing on a specific driving scenario, Bench2Drive allows for detailed analysis and comparison of how different AD systems perform on individual tasks.
- **Closed-Loop Evaluation Protocol**: Bench2Drive evaluates AD systems in a closed-loop manner, where the AD system's actions directly influence the environment. This setup offers an accurate assessment of AD systems' driving performance.
- **Diverse Large-Scale Official Training Data**: Bench2Drive consists of a standardized training set of 10000 fully annotated clips under diverse scenarios, weathers, and towns, ensuring that all AD systems are trained under abundant yet similar conditions, which is crucial for fair algorithm-level comparisons.
**Each clip named by: ScenarioName_TownID_RouteID_WeatherID.tar.gz.**
For HD-map, please refer to https://huggingface.co/datasets/rethinklab/Bench2Drive-Map.
For full set, please refer to https://huggingface.co/datasets/rethinklab/Bench2Drive-Full.
For more information, please visit our GitHub repository: https://github.com/Thinklab-SJTU/Bench2Drive.
## License and Citation
All assets and code are under the Apache 2.0 license unless specified otherwise.
```bibtex
@article{jia2024bench,
title={Bench2Drive: Towards Multi-Ability Benchmarking of Closed-Loop End-To-End Autonomous Driving},
author={Xiaosong Jia and Zhenjie Yang and Qifeng Li and Zhiyuan Zhang and Jiazi Bu and Junchi Yan},
journal={\url{https://github.com/Thinklab-SJTU/Bench2Drive}},
year={2024}
}
``` |
skt/kobest_v1 | skt | "2024-03-28T08:22:52Z" | 3,908 | 43 | [
"annotations_creators:expert-generated",
"language_creators:expert-generated",
"multilinguality:monolingual",
"source_datasets:original",
"language:ko",
"license:cc-by-sa-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2204.04541",
"region:us"
] | null | "2022-04-07T13:54:23Z" | ---
pretty_name: KoBEST
annotations_creators:
- expert-generated
language_creators:
- expert-generated
language:
- ko
license:
- cc-by-sa-4.0
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
configs:
- config_name: boolq
data_files:
- split: train
path: "boolq/train.jsonl"
- split: test
path: "boolq/test.jsonl"
- split: validation
path: "boolq/validation.jsonl"
- config_name: copa
data_files:
- split: train
path: "copa/train.jsonl"
- split: test
path: "copa/test.jsonl"
- split: validation
path: "copa/validation.jsonl"
- config_name: hellaswag
data_files:
- split: train
path: "hellaswag/train.jsonl"
- split: test
path: "hellaswag/test.jsonl"
- split: validation
path: "hellaswag/validation.jsonl"
- config_name: sentineg
data_files:
- split: train
path: "sentineg/train.jsonl"
- split: test
path: "sentineg/test.jsonl"
- split: test_originated
path: "sentineg/test_originated.jsonl"
- split: validation
path: "sentineg/validation.jsonl"
- config_name: wic
data_files:
- split: train
path: "wic/train.jsonl"
- split: test
path: "wic/test.jsonl"
- split: validation
path: "wic/validation.jsonl"
---
# Dataset Card for KoBEST
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Repository:** https://github.com/SKT-LSL/KoBEST_datarepo
- **Paper:**
- **Point of Contact:** https://github.com/SKT-LSL/KoBEST_datarepo/issues
### Dataset Summary
KoBEST is a Korean benchmark suite consisting of 5 natural language understanding tasks that require advanced knowledge of Korean.
### Supported Tasks and Leaderboards
Boolean Question Answering, Choice of Plausible Alternatives, Words-in-Context, HellaSwag, Sentiment Negation Recognition
### Languages
`ko-KR`
## Dataset Structure
### Data Instances
#### KB-BoolQ
An example of a data point looks as follows.
```
{'paragraph': '두아 리파(Dua Lipa, 1995년 8월 22일 ~ )는 잉글랜드의 싱어송라이터, 모델이다. BBC 사운드 오브 2016 명단에 노미닛되었다. 싱글 "Be the One"가 영국 싱글 차트 9위까지 오르는 등 성과를 보여주었다.',
'question': '두아 리파는 영국인인가?',
'label': 1}
```
#### KB-COPA
An example of a data point looks as follows.
```
{'premise': '물을 오래 끓였다.',
'question': '결과',
'alternative_1': '물의 양이 늘어났다.',
'alternative_2': '물의 양이 줄어들었다.',
'label': 1}
```
#### KB-WiC
An example of a data point looks as follows.
```
{'word': '양분',
'context_1': '토양에 [양분]이 풍부하여 나무가 잘 자란다. ',
'context_2': '태아는 모체로부터 [양분]과 산소를 공급받게 된다.',
'label': 1}
```
#### KB-HellaSwag
An example of a data point looks as follows.
```
{'context': '모자를 쓴 투수가 타자에게 온 힘을 다해 공을 던진다. 공이 타자에게 빠른 속도로 다가온다. 타자가 공을 배트로 친다. 배트에서 깡 소리가 난다. 공이 하늘 위로 날아간다.',
'ending_1': '외야수가 떨어지는 공을 글러브로 잡는다.',
'ending_2': '외야수가 공이 떨어질 위치에 자리를 잡는다.',
'ending_3': '심판이 아웃을 외친다.',
'ending_4': '외야수가 공을 따라 뛰기 시작한다.',
'label': 3}
```
#### KB-SentiNeg
An example of a data point looks as follows.
```
{'sentence': '택배사 정말 마음에 듬',
'label': 1}
```
### Data Fields
### KB-BoolQ
+ `paragraph`: a `string` feature
+ `question`: a `string` feature
+ `label`: a classification label, with possible values `False`(0) and `True`(1)
### KB-COPA
+ `premise`: a `string` feature
+ `question`: a `string` feature
+ `alternative_1`: a `string` feature
+ `alternative_2`: a `string` feature
+ `label`: an answer candidate label, with possible values `alternative_1`(0) and `alternative_2`(1)
### KB-WiC
+ `target_word`: a `string` feature
+ `context_1`: a `string` feature
+ `context_2`: a `string` feature
+ `label`: a classification label, with possible values `False`(0) and `True`(1)
### KB-HellaSwag
+ `context`: a `string` feature
+ `ending_1`: a `string` feature
+ `ending_2`: a `string` feature
+ `ending_3`: a `string` feature
+ `ending_4`: a `string` feature
+ `label`: an answer candidate label, with possible values `ending_1`(0), `ending_2`(1), `ending_3`(2), and `ending_4`(3)
### KB-SentiNeg
+ `sentence`: a `string` feature
+ `label`: a classification label, with possible values `Negative`(0) and `Positive`(1)
### Data Splits
#### KB-BoolQ
+ train: 3,665
+ dev: 700
+ test: 1,404
#### KB-COPA
+ train: 3,076
+ dev: 1,000
+ test: 1,000
#### KB-WiC
+ train: 3,318
+ dev: 1,260
+ test: 1,260
#### KB-HellaSwag
+ train: 3,665
+ dev: 700
+ test: 1,404
#### KB-SentiNeg
+ train: 3,649
+ dev: 400
+ test: 397
+ test_originated: 397 (The corresponding training data from which the test set originated.)
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
```
@misc{https://doi.org/10.48550/arxiv.2204.04541,
doi = {10.48550/ARXIV.2204.04541},
url = {https://arxiv.org/abs/2204.04541},
author = {Kim, Dohyeong and Jang, Myeongjun and Kwon, Deuk Sin and Davis, Eric},
title = {KOBEST: Korean Balanced Evaluation of Significant Tasks},
publisher = {arXiv},
year = {2022},
}
```
[More Information Needed]
### Contributions
Thanks to [@MJ-Jang](https://github.com/MJ-Jang) for adding this dataset. |
mozilla-foundation/common_voice_13_0 | mozilla-foundation | "2023-06-26T15:23:12Z" | 3,906 | 169 | [
"task_categories:automatic-speech-recognition",
"annotations_creators:crowdsourced",
"language_creators:crowdsourced",
"multilinguality:multilingual",
"source_datasets:extended|common_voice",
"license:cc0-1.0",
"size_categories:1M<n<10M",
"modality:audio",
"modality:text",
"library:datasets",
"library:mlcroissant",
"arxiv:1912.06670",
"region:us"
] | [
"automatic-speech-recognition"
] | "2023-03-29T07:43:24Z" | ---
pretty_name: Common Voice Corpus 13.0
annotations_creators:
- crowdsourced
language_creators:
- crowdsourced
language_bcp47:
- ab
- ar
- as
- ast
- az
- ba
- bas
- be
- bg
- bn
- br
- ca
- ckb
- cnh
- cs
- cv
- cy
- da
- de
- dv
- dyu
- el
- en
- eo
- es
- et
- eu
- fa
- fi
- fr
- fy-NL
- ga-IE
- gl
- gn
- ha
- hi
- hsb
- hu
- hy-AM
- ia
- id
- ig
- is
- it
- ja
- ka
- kab
- kk
- kmr
- ko
- ky
- lg
- lo
- lt
- lv
- mdf
- mhr
- mk
- ml
- mn
- mr
- mrj
- mt
- myv
- nan-tw
- ne-NP
- nl
- nn-NO
- oc
- or
- pa-IN
- pl
- pt
- quy
- rm-sursilv
- rm-vallader
- ro
- ru
- rw
- sah
- sat
- sc
- sk
- skr
- sl
- sr
- sv-SE
- sw
- ta
- th
- ti
- tig
- tk
- tok
- tr
- tt
- tw
- ug
- uk
- ur
- uz
- vi
- vot
- yo
- yue
- zh-CN
- zh-HK
- zh-TW
license:
- cc0-1.0
multilinguality:
- multilingual
size_categories:
ab:
- 10K<n<100K
ar:
- 100K<n<1M
as:
- 1K<n<10K
ast:
- 1K<n<10K
az:
- n<1K
ba:
- 100K<n<1M
bas:
- 1K<n<10K
be:
- 1M<n<10M
bg:
- 10K<n<100K
bn:
- 1M<n<10M
br:
- 10K<n<100K
ca:
- 1M<n<10M
ckb:
- 100K<n<1M
cnh:
- 1K<n<10K
cs:
- 100K<n<1M
cv:
- 10K<n<100K
cy:
- 100K<n<1M
da:
- 10K<n<100K
de:
- 100K<n<1M
dv:
- 10K<n<100K
dyu:
- n<1K
el:
- 10K<n<100K
en:
- 1M<n<10M
eo:
- 1M<n<10M
es:
- 1M<n<10M
et:
- 10K<n<100K
eu:
- 100K<n<1M
fa:
- 100K<n<1M
fi:
- 10K<n<100K
fr:
- 100K<n<1M
fy-NL:
- 100K<n<1M
ga-IE:
- 10K<n<100K
gl:
- 10K<n<100K
gn:
- 1K<n<10K
ha:
- 10K<n<100K
hi:
- 10K<n<100K
hsb:
- 1K<n<10K
hu:
- 10K<n<100K
hy-AM:
- 1K<n<10K
ia:
- 10K<n<100K
id:
- 10K<n<100K
ig:
- 1K<n<10K
is:
- n<1K
it:
- 100K<n<1M
ja:
- 100K<n<1M
ka:
- 10K<n<100K
kab:
- 100K<n<1M
kk:
- 1K<n<10K
kmr:
- 10K<n<100K
ko:
- 1K<n<10K
ky:
- 10K<n<100K
lg:
- 100K<n<1M
lo:
- n<1K
lt:
- 10K<n<100K
lv:
- 10K<n<100K
mdf:
- n<1K
mhr:
- 100K<n<1M
mk:
- n<1K
ml:
- 1K<n<10K
mn:
- 10K<n<100K
mr:
- 10K<n<100K
mrj:
- 10K<n<100K
mt:
- 10K<n<100K
myv:
- 1K<n<10K
nan-tw:
- 10K<n<100K
ne-NP:
- n<1K
nl:
- 10K<n<100K
nn-NO:
- n<1K
oc:
- 1K<n<10K
or:
- 1K<n<10K
pa-IN:
- 1K<n<10K
pl:
- 100K<n<1M
pt:
- 100K<n<1M
quy:
- n<1K
rm-sursilv:
- 1K<n<10K
rm-vallader:
- 1K<n<10K
ro:
- 10K<n<100K
ru:
- 100K<n<1M
rw:
- 1M<n<10M
sah:
- 1K<n<10K
sat:
- n<1K
sc:
- 1K<n<10K
sk:
- 10K<n<100K
skr:
- 1K<n<10K
sl:
- 10K<n<100K
sr:
- 1K<n<10K
sv-SE:
- 10K<n<100K
sw:
- 100K<n<1M
ta:
- 100K<n<1M
th:
- 100K<n<1M
ti:
- n<1K
tig:
- n<1K
tk:
- 1K<n<10K
tok:
- 10K<n<100K
tr:
- 10K<n<100K
tt:
- 10K<n<100K
tw:
- n<1K
ug:
- 10K<n<100K
uk:
- 10K<n<100K
ur:
- 100K<n<1M
uz:
- 100K<n<1M
vi:
- 10K<n<100K
vot:
- n<1K
yo:
- 1K<n<10K
yue:
- 10K<n<100K
zh-CN:
- 100K<n<1M
zh-HK:
- 100K<n<1M
zh-TW:
- 100K<n<1M
source_datasets:
- extended|common_voice
task_categories:
- automatic-speech-recognition
paperswithcode_id: common-voice
extra_gated_prompt: "By clicking on “Access repository” below, you also agree to not attempt to determine the identity of speakers in the Common Voice dataset."
---
# Dataset Card for Common Voice Corpus 13.0
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [How to use](#how-to-use)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://commonvoice.mozilla.org/en/datasets
- **Repository:** https://github.com/common-voice/common-voice
- **Paper:** https://arxiv.org/abs/1912.06670
- **Leaderboard:** https://paperswithcode.com/dataset/common-voice
- **Point of Contact:** [Vaibhav Srivastav](mailto:[email protected])
### Dataset Summary
The Common Voice dataset consists of a unique MP3 and corresponding text file.
Many of the 27141 recorded hours in the dataset also include demographic metadata like age, sex, and accent
that can help improve the accuracy of speech recognition engines.
The dataset currently consists of 17689 validated hours in 108 languages, but more voices and languages are always added.
Take a look at the [Languages](https://commonvoice.mozilla.org/en/languages) page to request a language or start contributing.
### Supported Tasks and Leaderboards
The results for models trained on the Common Voice datasets are available via the
[🤗 Autoevaluate Leaderboard](https://huggingface.co/spaces/autoevaluate/leaderboards?dataset=mozilla-foundation%2Fcommon_voice_11_0&only_verified=0&task=automatic-speech-recognition&config=ar&split=test&metric=wer)
### Languages
```
Abkhaz, Arabic, Armenian, Assamese, Asturian, Azerbaijani, Basaa, Bashkir, Basque, Belarusian, Bengali, Breton, Bulgarian, Cantonese, Catalan, Central Kurdish, Chinese (China), Chinese (Hong Kong), Chinese (Taiwan), Chuvash, Czech, Danish, Dhivehi, Dioula, Dutch, English, Erzya, Esperanto, Estonian, Finnish, French, Frisian, Galician, Georgian, German, Greek, Guarani, Hakha Chin, Hausa, Hill Mari, Hindi, Hungarian, Icelandic, Igbo, Indonesian, Interlingua, Irish, Italian, Japanese, Kabyle, Kazakh, Kinyarwanda, Korean, Kurmanji Kurdish, Kyrgyz, Lao, Latvian, Lithuanian, Luganda, Macedonian, Malayalam, Maltese, Marathi, Meadow Mari, Moksha, Mongolian, Nepali, Norwegian Nynorsk, Occitan, Odia, Persian, Polish, Portuguese, Punjabi, Quechua Chanka, Romanian, Romansh Sursilvan, Romansh Vallader, Russian, Sakha, Santali (Ol Chiki), Saraiki, Sardinian, Serbian, Slovak, Slovenian, Sorbian, Upper, Spanish, Swahili, Swedish, Taiwanese (Minnan), Tamil, Tatar, Thai, Tigre, Tigrinya, Toki Pona, Turkish, Turkmen, Twi, Ukrainian, Urdu, Uyghur, Uzbek, Vietnamese, Votic, Welsh, Yoruba
```
## How to use
The `datasets` library allows you to load and pre-process your dataset in pure Python, at scale. The dataset can be downloaded and prepared in one call to your local drive by using the `load_dataset` function.
For example, to download the Hindi config, simply specify the corresponding language config name (i.e., "hi" for Hindi):
```python
from datasets import load_dataset
cv_13 = load_dataset("mozilla-foundation/common_voice_13_0", "hi", split="train")
```
Using the datasets library, you can also stream the dataset on-the-fly by adding a `streaming=True` argument to the `load_dataset` function call. Loading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire dataset to disk.
```python
from datasets import load_dataset
cv_13 = load_dataset("mozilla-foundation/common_voice_13_0", "hi", split="train", streaming=True)
print(next(iter(cv_13)))
```
*Bonus*: create a [PyTorch dataloader](https://huggingface.co/docs/datasets/use_with_pytorch) directly with your own datasets (local/streamed).
### Local
```python
from datasets import load_dataset
from torch.utils.data import DataLoader
from torch.utils.data.sampler import BatchSampler, RandomSampler
cv_13 = load_dataset("mozilla-foundation/common_voice_13_0", "hi", split="train")
batch_sampler = BatchSampler(RandomSampler(cv_13), batch_size=32, drop_last=False)
dataloader = DataLoader(cv_13, batch_sampler=batch_sampler)
```
### Streaming
```python
from datasets import load_dataset
from torch.utils.data import DataLoader
cv_13 = load_dataset("mozilla-foundation/common_voice_13_0", "hi", split="train", streaming=True)
dataloader = DataLoader(cv_13, batch_size=32)
```
To find out more about loading and preparing audio datasets, head over to [hf.co/blog/audio-datasets](https://huggingface.co/blog/audio-datasets).
### Example scripts
Train your own CTC or Seq2Seq Automatic Speech Recognition models on Common Voice 13 with `transformers` - [here](https://github.com/huggingface/transformers/tree/main/examples/pytorch/speech-recognition).
## Dataset Structure
### Data Instances
A typical data point comprises the `path` to the audio file and its `sentence`.
Additional fields include `accent`, `age`, `client_id`, `up_votes`, `down_votes`, `gender`, `locale` and `segment`.
```python
{
'client_id': 'd59478fbc1ee646a28a3c652a119379939123784d99131b865a89f8b21c81f69276c48bd574b81267d9d1a77b83b43e6d475a6cfc79c232ddbca946ae9c7afc5',
'path': 'et/clips/common_voice_et_18318995.mp3',
'audio': {
'path': 'et/clips/common_voice_et_18318995.mp3',
'array': array([-0.00048828, -0.00018311, -0.00137329, ..., 0.00079346, 0.00091553, 0.00085449], dtype=float32),
'sampling_rate': 48000
},
'sentence': 'Tasub kokku saada inimestega, keda tunned juba ammust ajast saati.',
'up_votes': 2,
'down_votes': 0,
'age': 'twenties',
'gender': 'male',
'accent': '',
'locale': 'et',
'segment': ''
}
```
### Data Fields
`client_id` (`string`): An id for which client (voice) made the recording
`path` (`string`): The path to the audio file
`audio` (`dict`): A dictionary containing the path to the downloaded audio file, the decoded audio array, and the sampling rate. Note that when accessing the audio column: `dataset[0]["audio"]` the audio file is automatically decoded and resampled to `dataset.features["audio"].sampling_rate`. Decoding and resampling of a large number of audio files might take a significant amount of time. Thus it is important to first query the sample index before the `"audio"` column, *i.e.* `dataset[0]["audio"]` should **always** be preferred over `dataset["audio"][0]`.
`sentence` (`string`): The sentence the user was prompted to speak
`up_votes` (`int64`): How many upvotes the audio file has received from reviewers
`down_votes` (`int64`): How many downvotes the audio file has received from reviewers
`age` (`string`): The age of the speaker (e.g. `teens`, `twenties`, `fifties`)
`gender` (`string`): The gender of the speaker
`accent` (`string`): Accent of the speaker
`locale` (`string`): The locale of the speaker
`segment` (`string`): Usually an empty field
### Data Splits
The speech material has been subdivided into portions for dev, train, test, validated, invalidated, reported and other.
The validated data is data that has been validated by reviewers and received upvotes indicating that the data is of high quality.
The invalidated data is data that has been invalidated by reviewers
and received downvotes indicating that the data is of low quality.
The reported data is data that has been reported, for different reasons.
The other data is data that has not yet been reviewed.
The dev, test, train are all data that has been reviewed, deemed of high quality and split into dev, test and train.
## Data Preprocessing Recommended by Hugging Face
The following are data preprocessing steps advised by the Hugging Face team. They are accompanied by an example code snippet that shows how to put them to practice.
Many examples in this dataset have trailing quotation marks, e.g _“the cat sat on the mat.“_. These trailing quotation marks do not change the actual meaning of the sentence, and it is near impossible to infer whether a sentence is a quotation or not a quotation from audio data alone. In these cases, it is advised to strip the quotation marks, leaving: _the cat sat on the mat_.
In addition, the majority of training sentences end in punctuation ( . or ? or ! ), whereas just a small proportion do not. In the dev set, **almost all** sentences end in punctuation. Thus, it is recommended to append a full-stop ( . ) to the end of the small number of training examples that do not end in punctuation.
```python
from datasets import load_dataset
ds = load_dataset("mozilla-foundation/common_voice_13_0", "en", use_auth_token=True)
def prepare_dataset(batch):
    """Preprocess one example in place for use with the `.map` method.

    Strips a surrounding pair of quotation marks from ``batch["sentence"]``
    and appends a full stop when the sentence does not already end in
    sentence-final punctuation ( . ? ! ).

    Args:
        batch: A dict-like example containing a ``"sentence"`` string.

    Returns:
        The same batch with its ``"sentence"`` field normalized.
    """
    transcription = batch["sentence"]
    if transcription.startswith('"') and transcription.endswith('"'):
        # we can remove the surrounding quotation marks as they do not affect the transcription
        transcription = transcription[1:-1]
    # guard against an empty string: indexing transcription[-1] would raise IndexError
    if transcription and transcription[-1] not in [".", "?", "!"]:
        # append a full-stop to sentences that do not end in punctuation
        transcription = transcription + "."
    batch["sentence"] = transcription
    return batch
ds = ds.map(prepare_dataset, desc="preprocess dataset")
```
## Dataset Creation
### Curation Rationale
[Needs More Information]
### Source Data
#### Initial Data Collection and Normalization
[Needs More Information]
#### Who are the source language producers?
[Needs More Information]
### Annotations
#### Annotation process
[Needs More Information]
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
The dataset consists of people who have donated their voice online. You agree to not attempt to determine the identity of speakers in the Common Voice dataset.
## Considerations for Using the Data
### Social Impact of Dataset
The dataset consists of people who have donated their voice online. You agree to not attempt to determine the identity of speakers in the Common Voice dataset.
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
Public Domain, [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/)
### Citation Information
```
@inproceedings{commonvoice:2020,
author = {Ardila, R. and Branson, M. and Davis, K. and Henretty, M. and Kohler, M. and Meyer, J. and Morais, R. and Saunders, L. and Tyers, F. M. and Weber, G.},
title = {Common Voice: A Massively-Multilingual Speech Corpus},
booktitle = {Proceedings of the 12th Conference on Language Resources and Evaluation (LREC 2020)},
pages = {4211--4215},
year = 2020
}
``` |
NbAiLab/NCC | NbAiLab | "2023-11-17T12:48:38Z" | 3,884 | 24 | [
"task_categories:text-generation",
"task_ids:language-modeling",
"annotations_creators:no-annotation",
"language_creators:found",
"multilinguality:multilingual",
"source_datasets:original",
"language:en",
"language:nb",
"language:no",
"language:nn",
"language:sv",
"language:da",
"language:is",
"language:fo",
"license:other",
"arxiv:2104.09617",
"region:us"
] | [
"text-generation"
] | "2022-03-02T23:29:22Z" | ---
YAML tags:
annotations_creators:
- no-annotation
language_creators:
- found
language:
- en
- nb
- no
- nn
- sv
- da
- is
- fo
license:
- other
multilinguality:
- multilingual
pretty_name: NCC
size_categories:
- 2G<n<1B
source_datasets:
- original
task_categories:
- text-generation
task_ids:
- language-modeling
extra_gated_prompt: "The Directive on Copyright in the Digital Single Market, which came into force on June 6 2019, amends the European Union copyright and database legislation and allows for Text and Data Mining (TDM) activities for research organizations and cultural heritage institutions. Under the terms of the aforementioned directive, by clicking on 'Access repository' you agree on using the text and data contained in this dataset for non-commercial scientific purposes only."
---
# Dataset Card for NbAiLab/NCC
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Data Fields](#data-fields)
- [Dataset Creation](#dataset-creation)
- [Statistics](#statistics)
- [Document Types](#document-types)
- [Languages](#languages)
- [Publish Periode](#publish-periode)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
## Dataset Description
- **Homepage:** https://github.com/NbAiLab/notram
- **Repository:** https://github.com/NbAiLab/notram
- **Paper:** https://arxiv.org/abs/2104.09617
- **Point of Contact:** [Freddy Wetjen](mailto:[email protected])
The Norwegian Colossal Corpus is a collection of multiple smaller Norwegian corpora suitable for training large language models. We have done extensive cleaning on the datasets, and have made them available in a common format. The total size of the NCC is currently 45GB.
## How to Use
```python
from datasets import load_dataset
data = load_dataset("NbAiLab/NCC", streaming=True)
```
## Download Data
If you do not want to use the HuggingFace Dataset-library for training, or if you want to do additional pre-processing, it is also possible to download the files locally.
```bash
# Clone the training set
git clone https://huggingface.co/datasets/NbAiLab/NCC
# Create one large training file of all shards without unpacking
cat NCC/data/train*.gz > onefile.json.gz
```
<details>
<summary>List of all the files.</summary>
* [train-shard-0001-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0001-of-0046.json.gz)
* [train-shard-0002-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0002-of-0046.json.gz)
* [train-shard-0003-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0003-of-0046.json.gz)
* [train-shard-0004-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0004-of-0046.json.gz)
* [train-shard-0005-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0005-of-0046.json.gz)
* [train-shard-0006-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0006-of-0046.json.gz)
* [train-shard-0007-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0007-of-0046.json.gz)
* [train-shard-0008-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0008-of-0046.json.gz)
* [train-shard-0009-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0009-of-0046.json.gz)
* [train-shard-0010-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0010-of-0046.json.gz)
* [train-shard-0011-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0011-of-0046.json.gz)
* [train-shard-0012-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0012-of-0046.json.gz)
* [train-shard-0013-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0013-of-0046.json.gz)
* [train-shard-0014-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0014-of-0046.json.gz)
* [train-shard-0015-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0015-of-0046.json.gz)
* [train-shard-0016-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0016-of-0046.json.gz)
* [train-shard-0017-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0017-of-0046.json.gz)
* [train-shard-0018-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0018-of-0046.json.gz)
* [train-shard-0019-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0019-of-0046.json.gz)
* [train-shard-0020-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0020-of-0046.json.gz)
* [train-shard-0021-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0021-of-0046.json.gz)
* [train-shard-0022-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0022-of-0046.json.gz)
* [train-shard-0023-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0023-of-0046.json.gz)
* [train-shard-0024-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0024-of-0046.json.gz)
* [train-shard-0025-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0025-of-0046.json.gz)
* [train-shard-0026-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0026-of-0046.json.gz)
* [train-shard-0027-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0027-of-0046.json.gz)
* [train-shard-0028-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0028-of-0046.json.gz)
* [train-shard-0029-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0029-of-0046.json.gz)
* [train-shard-0030-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0030-of-0046.json.gz)
* [train-shard-0031-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0031-of-0046.json.gz)
* [train-shard-0032-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0032-of-0046.json.gz)
* [train-shard-0033-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0033-of-0046.json.gz)
* [train-shard-0034-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0034-of-0046.json.gz)
* [train-shard-0035-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0035-of-0046.json.gz)
* [train-shard-0036-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0036-of-0046.json.gz)
* [train-shard-0037-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0037-of-0046.json.gz)
* [train-shard-0038-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0038-of-0046.json.gz)
* [train-shard-0039-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0039-of-0046.json.gz)
* [train-shard-0040-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0040-of-0046.json.gz)
* [train-shard-0041-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0041-of-0046.json.gz)
* [train-shard-0042-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0042-of-0046.json.gz)
* [train-shard-0043-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0043-of-0046.json.gz)
* [train-shard-0044-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0044-of-0046.json.gz)
* [train-shard-0045-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0045-of-0046.json.gz)
* [train-shard-0046-of-0046](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/train-shard-0046-of-0046.json.gz)
* [validation-shard-0001-of-0001](https://huggingface.co/datasets/NbAiLab/NCC/resolve/main/data/validation-shard-0001-of-0001.json.gz)
</details>
### Dataset Summary
The NCC dataset contains json lines with language training data. Here is an example json line:
```json
{
"id": "1006205",
"doc_type": "cc100",
"publish_year": 2021,
"lang_fasttext": "nn",
"lang_fasttext_conf": "0.641",
"text": "Eg har ein PLAN! KOS deg og ha ei fin helg"
}
```
## Data Fields
|**id:** | String with id to source of line and a unique identifier|
|:-----------|:------------|
|**doc_type** | String describing type of media text extracted from (I.e. book,newspaper etc)|
|**publish_year** | Integer. The year the text was published. When the year is undetermined, it is set to 2021.|
|**lang_fasttext** | String. Language of text identified by FastText|
|**lang_fasttext_conf** | String. Confidence calculated by FastText|
|**text** | String. The complete utf-8 document. If longer than 1M characters it is split.|
### Dataset Creation
We are providing a **train** and a **validation** split. The standard size of the validation is a single 1GB file, while train is sharded in 1GB chunks.
All files are gzipped.
Build date: 21012022
#### Initial Data Collection and Curation
The procedure for the dataset creation is described in detail in our paper.
### Summary
| Words | Documents | Words/Document |
|--------------:|------------:|-----------------:|
| 6,905,570,165 | 20,830,348 | 331 |
### Document Types
| Source | Words | Documents | Words/Document |
|--------------------------------------:|--------------:|------------:|-----------------:|
| newspaper_ocr | 1,974,452,883 | 9,872,470 | 199 |
| parliament | 1,273,353,169 | 9,321 | 136,611 |
| books | 842,936,050 | 23,708 | 35,554 |
| newspapers_online_nb | 487,189,627 | 3,446,348 | 141 |
| maalfrid_regjeringen | 360,349,242 | 919,902 | 391 |
| maalfrid_ssb | 279,732,847 | 851,982 | 328 |
| maalfrid_uio | 181,916,296 | 771,480 | 235 |
| government_nb | 134,127,104 | 3,476 | 38,586 |
| wikipedia_download_nbo | 110,845,615 | 523,593 | 211 |
| maalfrid_fylkesmannen | 102,849,898 | 463,021 | 222 |
| publicreports | 78,347,879 | 3,298 | 23,756 |
| maalfrid_nve | 66,656,315 | 301,966 | 220 |
| maalfrid_patentstyret | 64,985,154 | 213,991 | 303 |
| maalfrid_ntnu | 57,803,460 | 199,307 | 290 |
| newspapers_online_nn | 42,205,558 | 167,347 | 252 |
| lovdata_cd_odelsting_2005 | 36,370,948 | 1,933 | 18,815 |
| maalfrid_vegvesen | 33,431,887 | 166,203 | 201 |
| maalfrid_fhi | 32,784,098 | 144,363 | 227 |
| maalfrid_norad | 32,720,034 | 93,097 | 351 |
| maalfrid_skatteetaten | 32,567,691 | 82,589 | 394 |
| maalfrid_uib | 28,425,322 | 115,729 | 245 |
| wikipedia_download_nno | 27,061,858 | 143,265 | 188 |
| maalfrid_forskningsradet | 24,076,984 | 73,368 | 328 |
| maalfrid_nasjonalparkstyre | 21,309,995 | 93,871 | 227 |
| government_nn | 18,316,345 | 1,063 | 17,230 |
| maalfrid_nmbu | 18,082,476 | 69,719 | 259 |
| maalfrid_oslomet | 17,710,771 | 47,022 | 376 |
| maalfrid_domstol | 16,678,270 | 51,038 | 326 |
| maalfrid_banenor | 16,445,420 | 70,360 | 233 |
| maalfrid_nav | 16,272,635 | 74,101 | 219 |
| maalfrid_landbruksdirektoratet | 13,119,567 | 47,983 | 273 |
| maalfrid_helsedirektoratet | 13,008,787 | 49,344 | 263 |
| maalfrid_nokut | 10,101,424 | 38,552 | 262 |
| maalfrid_hi | 10,046,751 | 39,065 | 257 |
| maalfrid_norges-bank | 9,924,489 | 37,171 | 266 |
| maalfrid_udir | 9,868,345 | 38,736 | 254 |
| maalfrid_vkm | 9,824,529 | 32,230 | 304 |
| maalfrid_nbim | 9,629,725 | 18,131 | 531 |
| maalfrid_miljodirektoratet | 9,496,631 | 34,711 | 273 |
| maalfrid_distriktssenteret | 9,375,506 | 38,525 | 243 |
| maalfrid_ngu | 9,231,905 | 34,619 | 266 |
| maalfrid_ptil | 9,214,434 | 34,250 | 269 |
| maalfrid_nord | 8,992,352 | 44,800 | 200 |
| maalfrid_fiskeridir | 8,297,897 | 33,446 | 248 |
| maalfrid_hivolda | 7,820,709 | 26,473 | 295 |
| maalfrid_difi | 7,789,290 | 35,733 | 217 |
| maalfrid_mattilsynet | 7,492,831 | 27,002 | 277 |
| maalfrid_havarikommisjonen | 7,440,410 | 24,989 | 297 |
| maalfrid_kulturradet | 7,196,423 | 22,437 | 320 |
| maalfrid_ks | 6,915,503 | 27,439 | 252 |
| maalfrid_kystverket | 6,713,070 | 30,975 | 216 |
| maalfrid_udi | 6,433,540 | 19,134 | 336 |
| maalfrid_uia | 5,964,644 | 23,861 | 249 |
| maalfrid_hjelpemiddeldatabasen | 5,892,662 | 34,192 | 172 |
| maalfrid_khrono | 5,859,186 | 19,970 | 293 |
| maalfrid_helsetilsynet | 5,803,000 | 18,365 | 315 |
| maalfrid_moreforsk | 5,622,025 | 21,579 | 260 |
| maalfrid_jernbanedirektoratet | 5,461,268 | 21,666 | 252 |
| maalfrid_veiviseren | 5,316,521 | 18,026 | 294 |
| lovdata_cd_somb_rundskriv_2005 | 5,264,746 | 3,215 | 1,637 |
| maalfrid_dsb | 5,199,259 | 17,814 | 291 |
| lovdata_cd_sentrale_forskrifter_2005 | 5,037,694 | 11,467 | 439 |
| maalfrid_husbanken | 4,711,069 | 15,053 | 312 |
| maalfrid_legemiddelverket | 4,689,988 | 20,192 | 232 |
| maalfrid_vetinst | 4,674,951 | 14,492 | 322 |
| maalfrid_imdi | 4,636,355 | 15,290 | 303 |
| maalfrid_forsvarsbygg | 4,567,318 | 18,886 | 241 |
| maalfrid_sdir | 4,540,110 | 15,202 | 298 |
| maalfrid_konkurransetilsynet | 4,512,807 | 12,617 | 357 |
| maalfrid_dsa | 4,498,837 | 15,898 | 282 |
| maalfrid_arkivverket | 4,493,280 | 16,515 | 272 |
| maalfrid_hiof | 4,473,731 | 23,119 | 193 |
| maalfrid_ehelse | 4,379,984 | 22,553 | 194 |
| maalfrid_inn | 4,326,704 | 26,277 | 164 |
| maalfrid_klagenemndssekretariatet | 4,181,685 | 11,916 | 350 |
| maalfrid_sprakradet | 4,097,815 | 15,187 | 269 |
| maalfrid_dibk | 3,967,428 | 15,509 | 255 |
| maalfrid_nhh | 3,962,033 | 15,678 | 252 |
| maalfrid_kartverket | 3,732,184 | 18,710 | 199 |
| maalfrid_riksrevisjonen | 3,680,555 | 10,922 | 336 |
| maalfrid_toll | 3,510,061 | 13,777 | 254 |
| maalfrid_nibio | 3,456,026 | 17,104 | 202 |
| maalfrid_met | 3,446,762 | 18,282 | 188 |
| maalfrid_bufdir | 3,354,740 | 11,470 | 292 |
| maalfrid_artsdatabanken | 3,193,511 | 9,009 | 354 |
| maalfrid_politiet | 3,167,395 | 10,501 | 301 |
| maalfrid_nkom | 3,127,687 | 10,002 | 312 |
| maalfrid_vestlandfylke | 3,060,166 | 12,075 | 253 |
| maalfrid_uis | 2,924,821 | 9,838 | 297 |
| maalfrid_sykkelbynettverket | 2,820,702 | 11,818 | 238 |
| maalfrid_nlr | 2,646,014 | 15,851 | 166 |
| maalfrid_seniorporten | 2,616,054 | 8,111 | 322 |
| maalfrid_npd | 2,597,831 | 10,742 | 241 |
| maalfrid_aldringoghelse | 2,430,767 | 6,788 | 358 |
| maalfrid_custompublish | 2,430,747 | 9,184 | 264 |
| maalfrid_bioteknologiradet | 2,393,891 | 5,996 | 399 |
| maalfrid_arbeidstilsynet | 2,379,597 | 6,882 | 345 |
| maalfrid_nyemetoder | 2,376,468 | 10,771 | 220 |
| maalfrid_riksantikvaren | 2,257,491 | 8,756 | 257 |
| maalfrid_sjt | 2,238,168 | 11,189 | 200 |
| lovdata_cd_lokaleforskrifter_2005 | 2,176,221 | 22,274 | 97 |
| maalfrid_hvl | 2,149,292 | 9,395 | 228 |
| maalfrid_luftfartstilsynet | 2,101,272 | 9,866 | 212 |
| maalfrid_dfo | 2,073,203 | 9,165 | 226 |
| maalfrid_ldo | 2,047,969 | 7,299 | 280 |
| maalfrid_kompetansenorge | 1,952,035 | 10,245 | 190 |
| maalfrid_forbrukerradet | 1,945,089 | 7,330 | 265 |
| maalfrid_himolde | 1,913,699 | 9,975 | 191 |
| maalfrid_usn | 1,793,297 | 7,403 | 242 |
| lovdata_cd_norgeslover_2005 | 1,760,884 | 1,386 | 1,270 |
| maalfrid_naku | 1,754,510 | 5,239 | 334 |
| maalfrid_medietilsynet | 1,608,424 | 6,611 | 243 |
| maalfrid_matematikksenteret | 1,567,505 | 7,298 | 214 |
| maalfrid_forskningsetikk | 1,545,336 | 5,545 | 278 |
| maalfrid_diku | 1,542,929 | 6,241 | 247 |
| maalfrid_godeidrettsanlegg | 1,506,577 | 6,115 | 246 |
| maalfrid_dirmin | 1,467,255 | 5,303 | 276 |
| maalfrid_diskrimineringsnemnda | 1,463,291 | 4,168 | 351 |
| maalfrid_naturfag | 1,450,662 | 5,976 | 242 |
| maalfrid_arbeidsretten | 1,440,074 | 4,754 | 302 |
| lovdata_cd_rtv_rundskriv_2005 | 1,366,872 | 9,596 | 142 |
| maalfrid_fellesstudentsystem | 1,359,292 | 10,321 | 131 |
| maalfrid_nupi | 1,286,395 | 5,491 | 234 |
| maalfrid_kriminalitetsforebygging | 1,201,477 | 4,667 | 257 |
| maalfrid_anskaffelser | 1,187,544 | 5,479 | 216 |
| maalfrid_folketrygdfondet | 1,183,502 | 4,253 | 278 |
| maalfrid_miljopakken | 1,170,252 | 5,513 | 212 |
| maalfrid_nih | 1,116,791 | 5,271 | 211 |
| maalfrid_statsbygg | 1,103,635 | 4,439 | 248 |
| lovdata_cd_skatt_rundskriv_2005 | 1,102,142 | 398 | 2,769 |
| maalfrid_nb | 1,055,200 | 4,135 | 255 |
| maalfrid_npolar | 1,051,181 | 2,653 | 396 |
| maalfrid_unit | 1,049,621 | 6,329 | 165 |
| maalfrid_valgdirektoratet | 1,009,941 | 9,131 | 110 |
| maalfrid_barneombudet | 980,751 | 2,807 | 349 |
| maalfrid_datatilsynet | 974,679 | 2,965 | 328 |
| maalfrid_lottstift | 959,590 | 3,578 | 268 |
| maalfrid_aho | 953,568 | 4,528 | 210 |
| maalfrid_sykehuspartner | 939,625 | 4,579 | 205 |
| maalfrid_naturfagsenteret | 897,049 | 3,859 | 232 |
| maalfrid_khio | 849,973 | 3,377 | 251 |
| maalfrid_spesialenheten | 824,209 | 2,127 | 387 |
| maalfrid_xn--miljlftet-o8ab | 803,011 | 3,384 | 237 |
| maalfrid_samordnaopptak | 792,595 | 2,368 | 334 |
| maalfrid_helsenorge | 780,465 | 3,034 | 257 |
| maalfrid_skrivesenteret | 777,204 | 4,161 | 186 |
| maalfrid_mareano | 760,645 | 3,724 | 204 |
| maalfrid_fiskeridirektoratet | 755,997 | 2,444 | 309 |
| maalfrid_sykehusinnkjop | 738,720 | 4,340 | 170 |
| maalfrid_matportalen | 630,990 | 2,370 | 266 |
| maalfrid_spk | 613,180 | 2,152 | 284 |
| maalfrid_justervesenet | 595,014 | 1,904 | 312 |
| maalfrid_pasientsikkerhetsprogrammet | 594,399 | 4,684 | 126 |
| maalfrid_nhn | 579,713 | 3,581 | 161 |
| maalfrid_sshf | 572,570 | 1,897 | 301 |
| maalfrid_bibliotekutvikling | 560,126 | 3,216 | 174 |
| maalfrid_nysgjerrigper | 559,207 | 3,019 | 185 |
| maalfrid_nodnett | 538,021 | 2,689 | 200 |
| maalfrid_une | 513,586 | 1,255 | 409 |
| maalfrid_giek | 512,569 | 1,796 | 285 |
| maalfrid_samas | 501,177 | 2,548 | 196 |
| maalfrid_kriminalomsorgen | 496,062 | 1,951 | 254 |
| maalfrid_kjonnsforskning | 483,376 | 1,426 | 338 |
| maalfrid_kunstkultursenteret | 470,009 | 1,435 | 327 |
| lovdata_cd_rundskriv_lovavdeling_2005 | 469,295 | 405 | 1,158 |
| maalfrid_nynorsksenteret | 460,165 | 2,085 | 220 |
| maalfrid_ceres | 448,920 | 1,950 | 230 |
| maalfrid_stami | 445,031 | 1,160 | 383 |
| maalfrid_nsm | 442,110 | 1,536 | 287 |
| maalfrid_gjenopptakelse | 420,205 | 1,467 | 286 |
| maalfrid_nfi | 420,128 | 1,523 | 275 |
| maalfrid_nidsenter | 410,785 | 1,631 | 251 |
| maalfrid_nasjonalmuseet | 390,036 | 1,087 | 358 |
| maalfrid_forbrukertilsynet | 387,579 | 1,227 | 315 |
| maalfrid_natursekken | 378,442 | 3,563 | 106 |
| maalfrid_fordelingsutvalget | 355,121 | 1,385 | 256 |
| maalfrid_digdir | 349,548 | 2,105 | 166 |
| maalfrid_forsvaret | 331,183 | 1,215 | 272 |
| maalfrid_beccle | 329,568 | 1,517 | 217 |
| maalfrid_romsenter | 329,304 | 1,133 | 290 |
| maalfrid_geonorge | 301,869 | 1,622 | 186 |
| maalfrid_universell | 263,740 | 2,155 | 122 |
| maalfrid_ovf | 262,542 | 930 | 282 |
| maalfrid_forbrukereuropa | 259,420 | 1,018 | 254 |
| maalfrid_politihogskolen | 258,615 | 1,229 | 210 |
| maalfrid_vinmonopolet | 245,685 | 671 | 366 |
| maalfrid_energimerking | 237,243 | 1,033 | 229 |
| maalfrid_ombudsmann | 225,947 | 418 | 540 |
| maalfrid_vea-fs | 224,712 | 1,261 | 178 |
| maalfrid_traumebevisst | 224,297 | 2,417 | 92 |
| maalfrid_npe | 205,102 | 1,000 | 205 |
| maalfrid_pkh | 201,503 | 791 | 254 |
| maalfrid_helfo | 193,880 | 988 | 196 |
| maalfrid_opplaringslovutvalget | 193,590 | 549 | 352 |
| maalfrid_regionaleforskningsfond | 187,261 | 989 | 189 |
| maalfrid_nafkam | 177,295 | 571 | 310 |
| maalfrid_jernbanemagasinet | 174,152 | 412 | 422 |
| maalfrid_polarhistorie | 171,386 | 382 | 448 |
| maalfrid_aasentunet | 161,626 | 529 | 305 |
| maalfrid_riksteatret | 159,991 | 798 | 200 |
| maalfrid_realfagsloyper | 157,166 | 748 | 210 |
| maalfrid_koro | 153,304 | 574 | 267 |
| maalfrid_squarespace | 146,931 | 504 | 291 |
| maalfrid_politietssikkerhetstjeneste | 143,781 | 469 | 306 |
| maalfrid_unknown | 139,263 | 700 | 198 |
| maalfrid_whocc | 121,616 | 656 | 185 |
| maalfrid_konfliktraadet | 120,258 | 372 | 323 |
| maalfrid_okokrim | 115,842 | 372 | 311 |
| maalfrid_brreg | 112,787 | 571 | 197 |
| maalfrid_riksmekleren | 110,737 | 558 | 198 |
| maalfrid_sismo | 110,700 | 309 | 358 |
| maalfrid_radetfordyreetikk | 99,241 | 441 | 225 |
| maalfrid_akkreditert | 99,040 | 503 | 196 |
| maalfrid_sivilforsvaret | 97,679 | 514 | 190 |
| maalfrid_lanekassen | 95,286 | 301 | 316 |
| maalfrid_digidel | 95,140 | 607 | 156 |
| maalfrid_generaladvokaten | 91,385 | 294 | 310 |
| maalfrid_uit | 90,273 | 602 | 149 |
| maalfrid_nyinorge | 88,466 | 199 | 444 |
| maalfrid_lokforerskolen | 87,224 | 468 | 186 |
| maalfrid_varsom | 85,382 | 563 | 151 |
| maalfrid_ffi | 80,137 | 220 | 364 |
| maalfrid_kulturminnefondet | 79,767 | 411 | 194 |
| maalfrid_unesco | 76,951 | 382 | 201 |
| maalfrid_yrkesfisker | 74,807 | 501 | 149 |
| maalfrid_dekom | 72,148 | 1,307 | 55 |
| maalfrid_omsorgsforskning | 71,675 | 321 | 223 |
| maalfrid_lektor2 | 67,385 | 549 | 122 |
| maalfrid_openaccess | 63,554 | 192 | 331 |
| maalfrid_ssn | 63,036 | 302 | 208 |
| maalfrid_lokalhistorie | 59,854 | 241 | 248 |
| maalfrid_nlb | 57,872 | 200 | 289 |
| maalfrid_riksadvokaten | 57,563 | 155 | 371 |
| maalfrid_laudim | 57,500 | 393 | 146 |
| maalfrid_denkulturelleskolesekken | 46,018 | 243 | 189 |
| maalfrid_sivilrett | 44,062 | 142 | 310 |
| maalfrid_htu | 43,330 | 169 | 256 |
| maalfrid_yr | 40,646 | 562 | 72 |
| maalfrid_informasjonskompetanse | 40,351 | 330 | 122 |
| maalfrid_dep | 38,882 | 126 | 308 |
| maalfrid_finansportalen | 38,506 | 180 | 213 |
| maalfrid_feide | 36,715 | 267 | 137 |
| maalfrid_kulturped | 36,013 | 96 | 375 |
| maalfrid_fug | 34,158 | 120 | 284 |
| maalfrid_kulturoghelse | 33,424 | 184 | 181 |
| maalfrid_helseklage | 32,756 | 124 | 264 |
| maalfrid_nbsk | 30,674 | 211 | 145 |
| maalfrid_matogindustri | 29,922 | 194 | 154 |
| maalfrid_sinn | 27,541 | 150 | 183 |
| maalfrid_transport21 | 25,317 | 90 | 281 |
| maalfrid_konkursradet | 23,505 | 76 | 309 |
| maalfrid_vergemal | 23,271 | 77 | 302 |
| maalfrid_norec | 22,496 | 78 | 288 |
| maalfrid_pts | 20,459 | 78 | 262 |
| maalfrid_nasjonaleturistveger | 19,922 | 110 | 181 |
| maalfrid_iearth | 19,281 | 146 | 132 |
| maalfrid_hjelpelinjen | 19,209 | 85 | 225 |
| maalfrid_russamtalen | 17,999 | 65 | 276 |
| maalfrid_xn--kvinneligomskjring-1ub | 17,701 | 77 | 229 |
| maalfrid_nynorskbok | 17,600 | 96 | 183 |
| maalfrid_regjeringsadvokaten | 17,416 | 55 | 316 |
| maalfrid_memu | 17,311 | 98 | 176 |
| maalfrid_xn--tilbakefring-2jb | 15,814 | 49 | 322 |
| maalfrid_xn--forskerfr-t8a | 15,724 | 172 | 91 |
| maalfrid_ringerikefengsel | 15,669 | 28 | 559 |
| maalfrid_skeivtarkiv | 15,537 | 69 | 225 |
| maalfrid_samfunnskunnskap | 15,110 | 60 | 251 |
| maalfrid_fordelingsutvalet | 15,017 | 34 | 441 |
| maalfrid_skattefunn | 14,599 | 51 | 286 |
| maalfrid_shiprep | 14,165 | 142 | 99 |
| maalfrid_haldenfengsel | 13,625 | 37 | 368 |
| maalfrid_sevuppt | 13,332 | 52 | 256 |
| maalfrid_forbrukerklageutvalget | 12,698 | 49 | 259 |
| maalfrid_mhfa | 11,999 | 144 | 83 |
| maalfrid_ah | 11,787 | 36 | 327 |
| maalfrid_nettvett | 11,002 | 43 | 255 |
| maalfrid_uh-it | 10,828 | 273 | 39 |
| maalfrid_fishgen | 10,199 | 28 | 364 |
| maalfrid_designavgang | 10,164 | 75 | 135 |
| maalfrid_global | 9,051 | 41 | 220 |
| maalfrid_havmiljo | 8,607 | 68 | 126 |
| maalfrid_valg | 8,516 | 47 | 181 |
| maalfrid_miljoklagenemnda | 7,797 | 35 | 222 |
| maalfrid_altinn | 7,695 | 49 | 157 |
| maalfrid_spinn-inn | 7,674 | 47 | 163 |
| maalfrid_kantinekurset | 7,217 | 53 | 136 |
| maalfrid_bastoyfengsel | 7,142 | 56 | 127 |
| maalfrid_norskpetroleum | 6,083 | 119 | 51 |
| maalfrid_voldsoffererstatning | 5,827 | 26 | 224 |
| maalfrid_musikkbasertmiljobehandling | 5,186 | 39 | 132 |
| maalfrid_prosjektveiviseren | 5,019 | 14 | 358 |
| maalfrid_aldersvennlig | 4,919 | 32 | 153 |
| maalfrid_barentswatch | 4,829 | 32 | 150 |
| maalfrid_fmfiavo@fylkesmannen | 4,702 | 68 | 69 |
| maalfrid_kk-utvalget | 4,697 | 19 | 247 |
| maalfrid_agropub | 4,434 | 17 | 260 |
| maalfrid_utdanningiverden | 4,266 | 13 | 328 |
| maalfrid_overgangsbolig | 3,769 | 35 | 107 |
| maalfrid_forsvaretsmuseer | 3,706 | 34 | 109 |
| maalfrid_okopark | 3,282 | 12 | 273 |
| maalfrid_pst | 2,866 | 14 | 204 |
| maalfrid_sikkerhverdag | 2,697 | 18 | 149 |
| maalfrid_arkitektur | 2,436 | 15 | 162 |
| maalfrid_velgekte | 2,287 | 10 | 228 |
| maalfrid_addlab | 2,109 | 12 | 175 |
| maalfrid_romerikefengsel | 2,088 | 19 | 109 |
| maalfrid_utdanning | 2,009 | 12 | 167 |
| maalfrid_grunderskolen | 1,994 | 7 | 284 |
| maalfrid_umb | 1,934 | 8 | 241 |
| maalfrid_oslofengsel | 1,756 | 8 | 219 |
| maalfrid_hjorteviltregisteret | 1,600 | 5 | 320 |
| maalfrid_alleteller | 1,511 | 7 | 215 |
| maalfrid_webhuset | 1,409 | 5 | 281 |
| maalfrid_lykillinn | 1,349 | 4 | 337 |
| maalfrid_kulturfag | 1,215 | 6 | 202 |
| maalfrid_unimus | 940 | 4 | 235 |
| maalfrid_anleggsregisteret | 928 | 5 | 185 |
| maalfrid_mangfoldsprisen | 597 | 3 | 199 |
| maalfrid_algae2future | 456 | 8 | 57 |
| maalfrid_mammapresenterer | 447 | 2 | 223 |
| maalfrid_karriereveiledning | 391 | 27 | 14 |
| maalfrid_nodsms | 351 | 4 | 87 |
| maalfrid_kildekompasset | 302 | 1 | 302 |
| maalfrid_praksisfou | 297 | 1 | 297 |
| maalfrid_retttilaalese | 246 | 3 | 82 |
| maalfrid_indreostfoldfengsel | 215 | 3 | 71 |
| maalfrid_xn--kroppsvingsforskning-gcc | 205 | 2 | 102 |
| maalfrid_pahoyden | 154 | 1 | 154 |
| maalfrid_norren | 42 | 1 | 42 |
### Languages
| Language | Words | Documents | Words/Document |
|-----------:|--------------:|------------:|-----------------:|
| no | 5,050,752,505 | 17,177,223 | 294 |
| da | 940,216,574 | 574,211 | 1,637 |
| en | 474,855,361 | 1,526,795 | 311 |
| nn | 299,753,996 | 987,701 | 303 |
| fr | 49,409,701 | 108,071 | 457 |
| de | 27,159,878 | 85,230 | 318 |
| sv | 18,773,092 | 118,753 | 158 |
| es | 10,057,791 | 42,177 | 238 |
| fi | 8,104,322 | 46,710 | 173 |
| et | 3,309,661 | 24,183 | 136 |
| cs | 2,652,151 | 21,793 | 121 |
| pt | 2,550,218 | 16,407 | 155 |
| oc | 2,123,730 | 4,927 | 431 |
| nl | 1,984,501 | 11,813 | 167 |
| zh | 1,470,751 | 8,146 | 180 |
| uk | 1,459,484 | 5,096 | 286 |
| ca | 1,370,260 | 4,476 | 306 |
| it | 1,293,230 | 8,479 | 152 |
| la | 1,281,920 | 797 | 1,608 |
| ru | 1,231,482 | 6,796 | 181 |
| pl | 852,304 | 9,396 | 90 |
| eu | 831,276 | 3,195 | 260 |
| hu | 659,973 | 8,499 | 77 |
| fa | 494,551 | 2,047 | 241 |
| ja | 351,634 | 4,994 | 70 |
| is | 309,422 | 1,207 | 256 |
| id | 226,296 | 2,033 | 111 |
| ar | 205,632 | 1,173 | 175 |
| sl | 140,913 | 1,858 | 75 |
| vi | 139,122 | 982 | 141 |
| so | 128,303 | 592 | 216 |
| hr | 124,033 | 1,081 | 114 |
| el | 117,624 | 618 | 190 |
| lv | 106,626 | 123 | 866 |
| tr | 92,680 | 1,630 | 56 |
| ro | 80,804 | 635 | 127 |
| sr | 71,953 | 970 | 74 |
| lt | 70,148 | 869 | 80 |
| gl | 65,152 | 692 | 94 |
| war | 56,369 | 274 | 205 |
| ko | 56,057 | 1,006 | 55 |
| th | 54,067 | 367 | 147 |
| am | 44,818 | 317 | 141 |
| sk | 39,416 | 1,000 | 39 |
| ml | 35,575 | 156 | 228 |
| ceb | 35,337 | 331 | 106 |
| sq | 34,461 | 238 | 144 |
| tl | 30,839 | 177 | 174 |
| kk | 27,827 | 72 | 386 |
| eo | 24,187 | 859 | 28 |
| mn | 21,540 | 22 | 979 |
| sw | 18,670 | 72 | 259 |
| pnb | 18,403 | 80 | 230 |
| sh | 17,807 | 213 | 83 |
| gu | 16,973 | 13 | 1,305 |
| bg | 16,495 | 100 | 164 |
| ur | 15,650 | 169 | 92 |
| mk | 13,305 | 65 | 204 |
| ckb | 9,119 | 43 | 212 |
| ku | 9,071 | 57 | 159 |
| ast | 7,919 | 73 | 108 |
| az | 7,907 | 59 | 134 |
| ms | 7,051 | 483 | 14 |
| uz | 6,924 | 56 | 123 |
| ta | 4,180 | 60 | 69 |
| fy | 3,841 | 68 | 56 |
| ga | 3,761 | 174 | 21 |
| hy | 3,456 | 43 | 80 |
| pa | 3,299 | 17 | 194 |
| hi | 2,783 | 39 | 71 |
| be | 2,556 | 62 | 41 |
| bo | 2,551 | 1 | 2,551 |
| ht | 2,534 | 11 | 230 |
| jv | 2,341 | 91 | 25 |
| min | 2,206 | 18 | 122 |
| cy | 2,052 | 52 | 39 |
| bs | 2,047 | 66 | 31 |
| als | 1,918 | 66 | 29 |
| su | 1,888 | 29 | 65 |
| nds | 1,869 | 162 | 11 |
| ps | 1,832 | 15 | 122 |
| bn | 1,797 | 22 | 81 |
| qu | 1,498 | 14 | 107 |
| ilo | 1,126 | 25 | 45 |
| mt | 968 | 16 | 60 |
| si | 942 | 29 | 32 |
| te | 888 | 18 | 49 |
| my | 784 | 15 | 52 |
| af | 741 | 32 | 23 |
| io | 715 | 15 | 47 |
| tt | 684 | 22 | 31 |
| km | 674 | 11 | 61 |
| br | 645 | 40 | 16 |
| gn | 638 | 11 | 58 |
| jbo | 611 | 27 | 22 |
| as | 584 | 2 | 292 |
| ug | 581 | 6 | 96 |
| kv | 562 | 3 | 187 |
| kn | 544 | 22 | 24 |
| pam | 480 | 2 | 240 |
| kw | 475 | 19 | 25 |
| vep | 419 | 34 | 12 |
| he | 412 | 18 | 22 |
| ka | 351 | 20 | 17 |
| yo | 281 | 9 | 31 |
| wa | 268 | 38 | 7 |
| ky | 228 | 10 | 22 |
| azb | 216 | 1 | 216 |
| ba | 203 | 5 | 40 |
| gom | 174 | 12 | 14 |
| ia | 140 | 15 | 9 |
| mr | 138 | 10 | 13 |
| lmo | 134 | 27 | 4 |
| tg | 129 | 3 | 43 |
| lb | 115 | 26 | 4 |
| pms | 115 | 16 | 7 |
| vec | 67 | 3 | 22 |
| rue | 67 | 2 | 33 |
| sco | 61 | 6 | 10 |
| ie | 59 | 11 | 5 |
| hsb | 57 | 3 | 19 |
| ne | 56 | 6 | 9 |
| bar | 46 | 7 | 6 |
| cbk | 46 | 2 | 23 |
| or | 44 | 2 | 22 |
| mg | 38 | 8 | 4 |
| os | 36 | 3 | 12 |
| tk | 36 | 4 | 9 |
| arz | 31 | 1 | 31 |
| li | 29 | 6 | 4 |
| gd | 29 | 2 | 14 |
| eml | 24 | 5 | 4 |
| diq | 20 | 2 | 10 |
| lrc | 20 | 1 | 20 |
| dsb | 19 | 1 | 19 |
| yue | 19 | 1 | 19 |
| nap | 16 | 1 | 16 |
| nah | 14 | 2 | 7 |
| wuu | 14 | 1 | 14 |
| sd | 14 | 1 | 14 |
| frr | 13 | 3 | 4 |
| rm | 12 | 2 | 6 |
| cv | 12 | 1 | 12 |
| scn | 9 | 2 | 4 |
| bh | 8 | 1 | 8 |
| bcl | 8 | 1 | 8 |
| co | 7 | 1 | 7 |
| ce | 4 | 1 | 4 |
| new | 4 | 1 | 4 |
| vo | 3 | 2 | 1 |
| mzn | 3 | 1 | 3 |
| gv | 3 | 1 | 3 |
| lo | 2 | 1 | 2 |
### Publish Periode
| Decade | Words | Documents | Words/Document |
|---------:|--------------:|------------:|-----------------:|
| 2020 | 4,090,213,596 | 10,934,550 | 523 |
| 2010 | 355,391,417 | 2,415,563 | 1,511 |
| 2000 | 447,853,330 | 1,705,354 | 2,773 |
| 1990 | 767,392,364 | 2,513,364 | 3,051 |
| 1980 | 160,980,586 | 538,665 | 3,011 |
| 1970 | 186,113,674 | 829,646 | 2,222 |
| 1960 | 149,421,535 | 834,219 | 1,807 |
| 1950 | 97,863,608 | 478,628 | 2,041 |
| 1940 | 122,648,278 | 570,154 | 2,307 |
| 1930 | 35,635,053 | 697 | 508,420 |
| 1920 | 50,381,418 | 1,049 | 484,836 |
| 1910 | 62,599,984 | 1,221 | 504,678 |
| 1900 | 60,019,080 | 1,130 | 527,329 |
| 1890 | 86,781,861 | 1,777 | 485,878 |
| 1880 | 58,546,570 | 1,064 | 553,442 |
| 1870 | 26,492,662 | 632 | 407,191 |
| 1860 | 39,176,930 | 698 | 543,151 |
| 1850 | 53,801,490 | 846 | 634,038 |
| 1840 | 30,434,939 | 522 | 581,593 |
| 1830 | 18,189,838 | 368 | 481,719 |
| 1820 | 4,721,154 | 144 | 338,350 |
| 1810 | 910,798 | 57 | 124,880 |
## Considerations for Using the Data
This corpus contains data under copyright and is not allowed to be used outside the National Library of Norway. The dataset should not be distributed.
### Discussion of Biases
Please refer to our paper.
### Dataset Curators
[Freddy Wetjen](mailto:[email protected]) and [Per Egil Kummervold](mailto:[email protected])
## License
Various licences applies to different parts of the corpus. Every document in the corpus has a tag telling what **"doc_type"** it belongs to. If you are unable to accept any of the licenses, you should filter out the **"doc_type"** with a conflicting license.
| Doc_type | License |
| :-------- | :------------- |
| government_nb, government_nn, parliament, publicreports, lovdata_cd_\*, maalfrid_\* | [NLOD 2.0](https://data.norge.no/nlod/en/2.0/)|
| newspapers_ocr, newspapers_pdf, books| [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/)|
| newspapers_online_nb, newspapers_online_nn | [CC BY-NC 2.0](https://creativecommons.org/licenses/by-nc/2.0/)|
| opensubtitles, wikipedia | [CC BY-SA 3.0](https://creativecommons.org/licenses/by-sa/3.0/)
|
### Citation Information
We are preparing an article with detailed information about this corpus. Until it is published, please cite our paper discussing the first version of this corpus:
```
@inproceedings{kummervold-etal-2021-operationalizing,
    title = {Operationalizing a National Digital Library: The Case for a {N}orwegian Transformer Model},
    author = {Kummervold, Per E and
        De la Rosa, Javier and
        Wetjen, Freddy and
        Brygfjeld, Svein Arne},
    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa)},
    year = {2021},
    address = {Reykjavik, Iceland (Online)},
    publisher = {Link{\"o}ping University Electronic Press, Sweden},
    url = {https://aclanthology.org/2021.nodalida-main.3},
    pages = {20--29},
    abstract = {In this work, we show the process of building a large-scale training set from digital and digitized collections at a national library.
    The resulting Bidirectional Encoder Representations from Transformers (BERT)-based language model for Norwegian outperforms multilingual BERT (mBERT) models
    in several token and sequence classification tasks for both Norwegian Bokm{\aa}l and Norwegian Nynorsk. Our model also improves the mBERT performance for other
    languages present in the corpus such as English, Swedish, and Danish. For languages not included in the corpus, the weights degrade moderately while keeping strong multilingual properties. Therefore,
    we show that building high-quality models within a memory institution using somewhat noisy optical character recognition (OCR) content is feasible, and we hope to pave the way for other memory institutions to follow.},
}
```
|
Meranti/CLAP_freesound | Meranti | "2023-07-09T17:09:18Z" | 3,872 | 25 | [
"task_categories:audio-classification",
"language:en",
"size_categories:1M<n<10M",
"modality:audio",
"modality:text",
"region:us",
"audio",
"text",
"contrastive learning"
] | [
"audio-classification"
] | "2023-06-02T00:42:03Z" | ---
task_categories:
- audio-classification
language:
- en
tags:
- audio
- text
- contrastive learning
pretty_name: freesound
size_categories:
- 1M<n<10M
---
# LAION-Audio-630K Freesound Dataset
[LAION-Audio-630K](https://github.com/LAION-AI/audio-dataset/blob/main/laion-audio-630k/README.md) is the largest publicly available audio-text dataset, an order of magnitude larger than previous audio-text datasets (as of 2022-11-05). Notably, it combines eight distinct datasets, which include the Freesound dataset.
Specifically, this Hugging face repository contains two versions of Freesound dataset. Details of each dataset (e.g. how captions are made etc.) could be found in the "datacard" column of the table below.
- **Freesound (full)**: The complete Freesound dataset, available at `/freesound` folder.
- **Freesound (no overlap)**: Made based on Freesound(full), with samples from ESC50, FSD50K, Urbansound8K and Clotho removed. available at `/freesound_no_overlap` folder.
As of the structure and format of `freesound` and `freesound_no_overlap` folder, please refer to [this page](https://github.com/LAION-AI/audio-dataset/blob/main/data_preprocess/README.md).
| Name |Duration |Number of Samples |Data Type | Metadata | Data Card |
|--------------------------------------------------|-------------------------|--------------------|--------- |--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------- |
| Freesound (no overlap) |2817.31hrs | 460801 |1-2 captions per audio, audio | [website](https://freesound.org/) <br> [csv]()|[data card](/data_card/freesound.md)|
| Freesound (full) |3033.38hrs | 515581 |1-2 captions per audio, audio | [website](https://freesound.org/) <br> [csv]() |[data card](/data_card/freesound.md)|
## Metadata csv file
For each of the two datasets, we provide a metadata csv file including the following columns:
- **audio_filename**: The filename of the audio file in `.tar` files. `example: 2394.flac`
- **caption_i**: The i-th caption of the audio file.
- **freesound_id**: The Freesound id of the audio file.
- **username**: The Freesound username of the uploader of the audio file.
- **freesound_url**: The URL of the audio file on freesound.org.
- **license**: The license of the audio file. `example: http://creativecommons.org/licenses/by/3.0/`
## Credits & Licence
- **!!!TERM OF USE!!!**: **By downloading files in this repository, you agree that you will use them <u> for research purposes only </u>. If you want to use Freesound clips in LAION-Audio-630K for commercial purposes, please contact Frederic Font Corbera at [email protected].**
### Freesound Credit:
All audio clips from Freesound are released under Creative Commons (CC) licenses, while each clip has its own license as defined by the clip uploader in Freesound, some of them requiring attribution to their original authors and some forbidding further commercial reuse. Specifically, here is the statistics about licenses of audio clips involved in LAION-Audio-630K:
| License | Number of Samples |
| :--- | :--- |
| http://creativecommons.org/publicdomain/zero/1.0/ | 260134 |
| https://creativecommons.org/licenses/by/4.0/ | 97090 |
| http://creativecommons.org/licenses/by/3.0/ | 89337 |
| http://creativecommons.org/licenses/by-nc/3.0/ | 31680 |
| https://creativecommons.org/licenses/by-nc/4.0/ | 26736 |
| http://creativecommons.org/licenses/sampling+/1.0/ | 11116 |
## Acknowledgement
The whole collection process as well as all usage of the LAION-Audio-630K are conducted by Germany non-profit pure research organization [LAION](https://laion.ai/). All contributors and collectors of the dataset are considered as open source contributors affiliated to LAION. These community contributors (Discord ids) include but not limited to: @marianna13#7139, @Chr0my#0173, @PiEquals4#1909, @Yuchen Hui#8574, @Antoniooooo#4758, @IYWO#9072, krishna#1648, @dicknascarsixtynine#3885, and @turian#1607. We would like to appreciate all of them for their efforts on the LAION-Audio-630k dataset. |
AI4Math/MathVista | AI4Math | "2024-02-11T23:09:05Z" | 3,858 | 120 | [
"task_categories:multiple-choice",
"task_categories:question-answering",
"task_categories:visual-question-answering",
"task_categories:text-classification",
"task_ids:multiple-choice-qa",
"task_ids:closed-domain-qa",
"task_ids:open-domain-qa",
"task_ids:visual-question-answering",
"task_ids:multi-class-classification",
"annotations_creators:expert-generated",
"annotations_creators:found",
"language_creators:expert-generated",
"language_creators:found",
"multilinguality:monolingual",
"source_datasets:original",
"language:en",
"language:zh",
"language:fa",
"license:cc-by-sa-4.0",
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2310.02255",
"region:us",
"multi-modal-qa",
"math-qa",
"figure-qa",
"geometry-qa",
"math-word-problem",
"textbook-qa",
"vqa",
"arithmetic-reasoning",
"statistical-reasoning",
"algebraic-reasoning",
"geometry-reasoning",
"numeric-common-sense",
"scientific-reasoning",
"logical-reasoning",
"geometry-diagram",
"synthetic-scene",
"chart",
"plot",
"scientific-figure",
"table",
"function-plot",
"abstract-scene",
"puzzle-test",
"document-image",
"medical-image",
"mathematics",
"science",
"chemistry",
"biology",
"physics",
"engineering",
"natural-science"
] | [
"multiple-choice",
"question-answering",
"visual-question-answering",
"text-classification"
] | "2023-10-15T17:49:10Z" | ---
annotations_creators:
- expert-generated
- found
language_creators:
- expert-generated
- found
language:
- en
- zh
- fa
license: cc-by-sa-4.0
multilinguality:
- monolingual
size_categories:
- 1K<n<10K
source_datasets:
- original
task_categories:
- multiple-choice
- question-answering
- visual-question-answering
- text-classification
task_ids:
- multiple-choice-qa
- closed-domain-qa
- open-domain-qa
- visual-question-answering
- multi-class-classification
paperswithcode_id: mathvista
pretty_name: MathVista
tags:
- multi-modal-qa
- math-qa
- figure-qa
- geometry-qa
- math-word-problem
- textbook-qa
- vqa
- arithmetic-reasoning
- statistical-reasoning
- algebraic-reasoning
- geometry-reasoning
- numeric-common-sense
- scientific-reasoning
- logical-reasoning
- geometry-diagram
- synthetic-scene
- chart
- plot
- scientific-figure
- table
- function-plot
- abstract-scene
- puzzle-test
- document-image
- medical-image
- mathematics
- science
- chemistry
- biology
- physics
- engineering
- natural-science
configs:
- config_name: default
data_files:
- split: testmini
path: data/testmini-*
- split: test
path: data/test-*
dataset_info:
features:
- name: pid
dtype: string
- name: question
dtype: string
- name: image
dtype: string
- name: decoded_image
dtype: image
- name: choices
sequence: string
- name: unit
dtype: string
- name: precision
dtype: float64
- name: answer
dtype: string
- name: question_type
dtype: string
- name: answer_type
dtype: string
- name: metadata
struct:
- name: category
dtype: string
- name: context
dtype: string
- name: grade
dtype: string
- name: img_height
dtype: int64
- name: img_width
dtype: int64
- name: language
dtype: string
- name: skills
sequence: string
- name: source
dtype: string
- name: split
dtype: string
- name: task
dtype: string
- name: query
dtype: string
splits:
- name: testmini
num_bytes: 142635198.0
num_examples: 1000
- name: test
num_bytes: 648291350.22
num_examples: 5141
download_size: 885819490
dataset_size: 790926548.22
---
# Dataset Card for MathVista
- [Dataset Description](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#dataset-description)
- [Paper Information](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#paper-information)
- [Dataset Examples](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#dataset-examples)
- [Leaderboard](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#leaderboard)
- [Dataset Usage](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#dataset-usage)
- [Data Downloading](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#data-downloading)
- [Data Format](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#data-format)
- [Data Visualization](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#data-visualization)
- [Data Source](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#data-source)
- [Automatic Evaluation](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#automatic-evaluation)
- [License](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#license)
- [Citation](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/README.md#citation)
## Dataset Description
**MathVista** is a consolidated Mathematical reasoning benchmark within Visual contexts. It consists of **three newly created datasets, IQTest, FunctionQA, and PaperQA**, which address the missing visual domains and are tailored to evaluate logical reasoning on puzzle test figures, algebraic reasoning over functional plots, and scientific reasoning with academic paper figures, respectively. It also incorporates **9 MathQA datasets** and **19 VQA datasets** from the literature, which significantly enrich the diversity and complexity of visual perception and mathematical reasoning challenges within our benchmark. In total, **MathVista** includes **6,141 examples** collected from **31 different datasets**.
## Paper Information
- Paper: https://arxiv.org/abs/2310.02255
- Code: https://github.com/lupantech/MathVista
- Project: https://mathvista.github.io/
- Visualization: https://mathvista.github.io/#visualization
- Leaderboard: https://mathvista.github.io/#leaderboard
## Dataset Examples
Examples of our newly annotated datasets: IQTest, FunctionQA, and PaperQA:
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/our_new_3_datasets.png" style="zoom:40%;" />
<details>
<summary>🔍 Click to expand/collapse more examples</summary>
Examples of seven mathematical reasoning skills:
1. Arithmetic Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/ari.png" style="zoom:40%;" />
2. Statistical Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/sta.png" style="zoom:40%;" />
3. Algebraic Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/alg.png" style="zoom:40%;" />
4. Geometry Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/geo.png" style="zoom:40%;" />
5. Numeric common sense
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/num.png" style="zoom:40%;" />
6. Scientific Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/sci.png" style="zoom:40%;" />
7. Logical Reasoning
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/skills/log.png" style="zoom:40%;" />
</details>
## Leaderboard
🏆 The leaderboard for the *testmini* set (1,000 examples) is available [here](https://mathvista.github.io/#leaderboard).
🏆 The leaderboard for the *test* set (5,141 examples) and the automatic evaluation on [CodaLab](https://codalab.org/) are under construction.
## Dataset Usage
### Data Downloading
All the data examples were divided into two subsets: *testmini* and *test*.
- **testmini**: 1,000 examples used for model development, validation, or for those with limited computing resources.
- **test**: 5,141 examples for standard evaluation. Notably, the answer labels for test will NOT be publicly released.
You can download this dataset by the following command (make sure that you have installed [Huggingface Datasets](https://huggingface.co/docs/datasets/quickstart)):
```python
from datasets import load_dataset
dataset = load_dataset("AI4Math/MathVista")
```
Here are some examples of how to access the downloaded dataset:
```python
# print the first example on the testmini set
print(dataset["testmini"][0])
print(dataset["testmini"][0]['pid']) # print the problem id
print(dataset["testmini"][0]['question']) # print the question text
print(dataset["testmini"][0]['query']) # print the query text
print(dataset["testmini"][0]['image']) # print the image path
print(dataset["testmini"][0]['answer']) # print the answer
dataset["testmini"][0]['decoded_image'] # display the image
# print the first example on the test set
print(dataset["test"][0])
```
### Data Format
The dataset is provided in json format and contains the following attributes:
```json
{
"question": [string] The question text,
"image": [string] A file path pointing to the associated image,
"choices": [list] Choice options for multiple-choice problems. For free-form problems, this could be a 'none' value,
"unit": [string] The unit associated with the answer, e.g., "m^2", "years". If no unit is relevant, it can be a 'none' value,
"precision": [integer] The number of decimal places the answer should be rounded to,
"answer": [string] The correct answer for the problem,
"question_type": [string] The type of question: "multi_choice" or "free_form",
"answer_type": [string] The format of the answer: "text", "integer", "float", or "list",
"pid": [string] Problem ID, e.g., "1",
"metadata": {
"split": [string] Data split: "testmini" or "test",
"language": [string] Question language: "English", "Chinese", or "Persian",
"img_width": [integer] The width of the associated image in pixels,
"img_height": [integer] The height of the associated image in pixels,
"source": [string] The source dataset from which the problem was taken,
"category": [string] The category of the problem: "math-targeted-vqa" or "general-vqa",
"task": [string] The task of the problem, e.g., "geometry problem solving",
"context": [string] The visual context type of the associated image,
"grade": [string] The grade level of the problem, e.g., "high school",
"skills": [list] A list of mathematical reasoning skills that the problem tests
},
"query": [string] the query text used as input (prompt) for the evaluation model
}
```
### Data Visualization
🎰 You can explore the dataset in an interactive way [here](https://mathvista.github.io/#visualization).
<details>
<summary>Click to expand/collapse the visualization page screenshot.</summary>
<img src="https://raw.githubusercontent.com/lupantech/MathVista/main/assets/data_visualizer.png" style="zoom:40%;" />
</details>
### Data Source
The **MathVista** dataset is derived from three newly collected datasets: IQTest, FunctionQA, and Paper, as well as 28 other source datasets. Details can be found in the [source.json](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/source.json) file. All these source datasets have been preprocessed and labeled for evaluation purposes.
### Automatic Evaluation
🔔 To automatically evaluate a model on the dataset, please refer to our GitHub repository [here](https://github.com/lupantech/MathVista/tree/main).
## License
The new contributions to our dataset are distributed under the [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) license, including
- The creation of three datasets: IQTest, FunctionQA, and Paper;
- The filtering and cleaning of source datasets;
- The standard formalization of instances for evaluation purposes;
- The annotations of metadata.
The copyright of the images and the questions belongs to the original authors, and the source of every image and original question can be found in the `metadata` field and in the [source.json](https://huggingface.co/datasets/AI4Math/MathVista/blob/main/source.json) file. Alongside this license, the following conditions apply:
- **Purpose:** The dataset was primarily designed for use as a test set.
- **Commercial Use:** The dataset can be used commercially as a test set, but using it as a training set is prohibited. By accessing or using this dataset, you acknowledge and agree to abide by these terms in conjunction with the [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) license.
## Citation
If you use the **MathVista** dataset in your work, please kindly cite the paper using this BibTeX:
```
@inproceedings{lu2024mathvista,
author = {Lu, Pan and Bansal, Hritik and Xia, Tony and Liu, Jiacheng and Li, Chunyuan and Hajishirzi, Hannaneh and Cheng, Hao and Chang, Kai-Wei and Galley, Michel and Gao, Jianfeng},
title = {MathVista: Evaluating Mathematical Reasoning of Foundation Models in Visual Contexts},
booktitle = {International Conference on Learning Representations (ICLR)},
year = {2024}
}
``` |
li2017dailydialog/daily_dialog | li2017dailydialog | "2024-01-18T11:02:28Z" | 3,853 | 137 | [
"task_categories:text-classification",
"task_ids:multi-label-classification",
"annotations_creators:expert-generated",
"language_creators:found",
"multilinguality:monolingual",
"source_datasets:original",
"language:en",
"license:cc-by-nc-sa-4.0",
"size_categories:10K<n<100K",
"region:us",
"emotion-classification",
"dialog-act-classification"
] | [
"text-classification"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- expert-generated
language_creators:
- found
language:
- en
license:
- cc-by-nc-sa-4.0
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- text-classification
task_ids:
- multi-label-classification
paperswithcode_id: dailydialog
pretty_name: DailyDialog
tags:
- emotion-classification
- dialog-act-classification
dataset_info:
features:
- name: dialog
sequence: string
- name: act
sequence:
class_label:
names:
'0': __dummy__
'1': inform
'2': question
'3': directive
'4': commissive
- name: emotion
sequence:
class_label:
names:
'0': no emotion
'1': anger
'2': disgust
'3': fear
'4': happiness
'5': sadness
'6': surprise
splits:
- name: train
num_bytes: 7296715
num_examples: 11118
- name: test
num_bytes: 655844
num_examples: 1000
- name: validation
num_bytes: 673943
num_examples: 1000
download_size: 4475921
dataset_size: 8626502
---
# Dataset Card for "daily_dialog"
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [http://yanran.li/dailydialog](http://yanran.li/dailydialog)
- **Repository:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Paper:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Size of downloaded dataset files:** 4.48 MB
- **Size of the generated dataset:** 8.63 MB
- **Total amount of disk used:** 13.11 MB
### Dataset Summary
We develop a high-quality multi-turn dialog dataset, DailyDialog, which is intriguing in several aspects.
The language is human-written and less noisy. The dialogues in the dataset reflect our daily communication way
and cover various topics about our daily life. We also manually label the developed dataset with communication
intention and emotion information. Then, we evaluate existing approaches on the DailyDialog dataset and hope it
will benefit the research field of dialog systems.
### Supported Tasks and Leaderboards
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Languages
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Dataset Structure
### Data Instances
#### default
- **Size of downloaded dataset files:** 4.48 MB
- **Size of the generated dataset:** 8.63 MB
- **Total amount of disk used:** 13.11 MB
An example of 'validation' looks as follows.
```
This example was too long and was cropped:
{
"act": [2, 1, 1, 1, 1, 2, 3, 2, 3, 4],
"dialog": "[\"Good afternoon . This is Michelle Li speaking , calling on behalf of IBA . Is Mr Meng available at all ? \", \" This is Mr Meng ...",
"emotion": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
}
```
### Data Fields
The data fields are the same among all splits.
#### default
- `dialog`: a `list` of `string` features.
- `act`: a `list` of classification labels, with possible values including `__dummy__` (0), `inform` (1), `question` (2), `directive` (3) and `commissive` (4).
- `emotion`: a `list` of classification labels, with possible values including `no emotion` (0), `anger` (1), `disgust` (2), `fear` (3), `happiness` (4), `sadness` (5) and `surprise` (6).
### Data Splits
| name |train|validation|test|
|-------|----:|---------:|---:|
|default|11118| 1000|1000|
## Dataset Creation
### Curation Rationale
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the source language producers?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Annotations
#### Annotation process
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the annotators?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Personal and Sensitive Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
Dataset provided for research purposes only. Please check dataset license for additional information.
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
DailyDialog dataset is licensed under [CC BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/).
### Citation Information
```
@InProceedings{li2017dailydialog,
author = {Li, Yanran and Su, Hui and Shen, Xiaoyu and Li, Wenjie and Cao, Ziqiang and Niu, Shuzi},
title = {DailyDialog: A Manually Labelled Multi-turn Dialogue Dataset},
booktitle = {Proceedings of The 8th International Joint Conference on Natural Language Processing (IJCNLP 2017)},
year = {2017}
}
```
### Contributions
Thanks to [@thomwolf](https://github.com/thomwolf), [@julien-c](https://github.com/julien-c) for adding this dataset. |
open-source-metrics/diffusers-dependents | open-source-metrics | "2024-05-28T00:58:04Z" | 3,850 | 0 | [
"license:apache-2.0",
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"github-stars"
] | null | "2022-09-05T15:31:32Z" | ---
license: apache-2.0
pretty_name: diffusers metrics
tags:
- github-stars
dataset_info:
features:
- name: name
dtype: string
- name: stars
dtype: int64
- name: forks
dtype: int64
splits:
- name: package
num_bytes: 2680
num_examples: 62
- name: repository
num_bytes: 92837
num_examples: 1976
download_size: 55374
dataset_size: 95517
---
# diffusers metrics
This dataset contains metrics about the huggingface/diffusers package.
Number of repositories in the dataset: 1976

Number of packages in the dataset: 62
## Package dependents
This contains the data available in the [used-by](https://github.com/huggingface/diffusers/network/dependents)
tab on GitHub.
### Package & Repository star count
This section shows the package and repository star count, individually.
Package | Repository
:-------------------------:|:-------------------------:
![diffusers-dependent package star count](./diffusers-dependents/resolve/main/diffusers-dependent_package_star_count.png) | ![diffusers-dependent repository star count](./diffusers-dependents/resolve/main/diffusers-dependent_repository_star_count.png)
There are 0 packages that have more than 1000 stars.
There are 3 repositories that have more than 1000 stars.
The top 10 in each category are the following:
*Package*
[JoaoLages/diffusers-interpret](https://github.com/JoaoLages/diffusers-interpret): 121
[samedii/perceptor](https://github.com/samedii/perceptor): 1
*Repository*
[gradio-app/gradio](https://github.com/gradio-app/gradio): 9168
[divamgupta/diffusionbee-stable-diffusion-ui](https://github.com/divamgupta/diffusionbee-stable-diffusion-ui): 4264
[AUTOMATIC1111/stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui): 3527
[bes-dev/stable_diffusion.openvino](https://github.com/bes-dev/stable_diffusion.openvino): 925
[nateraw/stable-diffusion-videos](https://github.com/nateraw/stable-diffusion-videos): 899
[sharonzhou/long_stable_diffusion](https://github.com/sharonzhou/long_stable_diffusion): 360
[Eventual-Inc/Daft](https://github.com/Eventual-Inc/Daft): 251
[JoaoLages/diffusers-interpret](https://github.com/JoaoLages/diffusers-interpret): 121
[GT4SD/gt4sd-core](https://github.com/GT4SD/gt4sd-core): 113
[brycedrennan/imaginAIry](https://github.com/brycedrennan/imaginAIry): 104
### Package & Repository fork count
This section shows the package and repository fork count, individually.
Package | Repository
:-------------------------:|:-------------------------:
![diffusers-dependent package forks count](./diffusers-dependents/resolve/main/diffusers-dependent_package_forks_count.png) | ![diffusers-dependent repository forks count](./diffusers-dependents/resolve/main/diffusers-dependent_repository_forks_count.png)
There are 0 packages that have more than 200 forks.
There are 2 repositories that have more than 200 forks.
The top 10 in each category are the following:
*Package*
*Repository*
[gradio-app/gradio](https://github.com/gradio-app/gradio): 574
[AUTOMATIC1111/stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui): 377
[bes-dev/stable_diffusion.openvino](https://github.com/bes-dev/stable_diffusion.openvino): 108
[divamgupta/diffusionbee-stable-diffusion-ui](https://github.com/divamgupta/diffusionbee-stable-diffusion-ui): 96
[nateraw/stable-diffusion-videos](https://github.com/nateraw/stable-diffusion-videos): 73
[GT4SD/gt4sd-core](https://github.com/GT4SD/gt4sd-core): 34
[sharonzhou/long_stable_diffusion](https://github.com/sharonzhou/long_stable_diffusion): 29
[coreweave/kubernetes-cloud](https://github.com/coreweave/kubernetes-cloud): 20
[bananaml/serverless-template-stable-diffusion](https://github.com/bananaml/serverless-template-stable-diffusion): 15
[AmericanPresidentJimmyCarter/yasd-discord-bot](https://github.com/AmericanPresidentJimmyCarter/yasd-discord-bot): 9
[NickLucche/stable-diffusion-nvidia-docker](https://github.com/NickLucche/stable-diffusion-nvidia-docker): 9
[vopani/waveton](https://github.com/vopani/waveton): 9
[harubaru/discord-stable-diffusion](https://github.com/harubaru/discord-stable-diffusion): 9
|
MBZUAI/Bactrian-X | MBZUAI | "2023-05-27T12:54:05Z" | 3,816 | 115 | [
"task_categories:text-generation",
"language:af",
"language:ar",
"language:az",
"language:bn",
"language:cs",
"language:de",
"language:en",
"language:es",
"language:et",
"language:fi",
"language:fr",
"language:gl",
"language:gu",
"language:he",
"language:hi",
"language:hr",
"language:id",
"language:it",
"language:ja",
"language:ka",
"language:kk",
"language:km",
"language:ko",
"language:lt",
"language:lv",
"language:mk",
"language:ml",
"language:mn",
"language:mr",
"language:my",
"language:ne",
"language:nl",
"language:pl",
"language:ps",
"language:pt",
"language:ro",
"language:ru",
"language:si",
"language:sl",
"language:sv",
"language:sw",
"language:ta",
"language:te",
"language:th",
"language:tl",
"language:tr",
"language:uk",
"language:ur",
"language:vi",
"language:xh",
"language:zh",
"license:cc-by-nc-4.0",
"size_categories:1M<n<10M",
"modality:text",
"library:datasets",
"library:mlcroissant",
"arxiv:2008.00401",
"arxiv:2305.15011",
"region:us",
"instruction-finetuning",
"multilingual"
] | [
"text-generation"
] | "2023-04-22T12:42:39Z" | ---
license: cc-by-nc-4.0
task_categories:
- text-generation
language:
- af
- ar
- az
- bn
- cs
- de
- en
- es
- et
- fi
- fr
- gl
- gu
- he
- hi
- hr
- id
- it
- ja
- ka
- kk
- km
- ko
- lt
- lv
- mk
- ml
- mn
- mr
- my
- ne
- nl
- pl
- ps
- pt
- ro
- ru
- si
- sl
- sv
- sw
- ta
- te
- th
- tl
- tr
- uk
- ur
- vi
- xh
- zh
tags:
- instruction-finetuning
- multilingual
pretty_name: Bactrian-X
---
# Dataset Card for "Bactrian-X"
## Table of Contents
- [Dataset Description](#a-dataset-description)
- [Dataset Summary](#dataset-summary)
- [Languages](#languages)
- [Dataset Structure](#b-dataset-structure)
- [Data Fields](#data-fields)
- [Data Instances](#data-instances)
- [Data in 52 Languages](#data-in-52-languages)
- [Dataset Creation](#c-dataset-creation)
- [Considerations for Using the Data](#d-considerations-for-using-the-data)
- [Additional Information](#e-additional-information)
## A. Dataset Description
- **Homepage:** https://github.com/mbzuai-nlp/Bactrian-X
- **Repository:** https://huggingface.co/datasets/MBZUAI/Bactrian-X
- **Paper:** to-be-soon released
### Dataset Summary
<h3 align="center">
<img src="https://raw.githubusercontent.com/fajri91/eval_picts/master/BactrianX_dataset.jpg" width="950" align="center">
</h3>
The Bactrian-X dataset is a collection of 3.4M instruction-response pairs in 52 languages that are obtained by translating 67K English instructions ([alpaca-52k](https://github.com/tatsu-lab/stanford_alpaca) + [dolly-15k](https://github.com/databrickslabs/dolly/tree/master/data)) into 51 languages using Google Translate API. The translated instructions are then fed to ChatGPT (`gpt-3.5-turbo`) to obtain its natural responses, resulting in 3.4M instruction-response pairs in 52 languages (52 languages x 67k instances = 3.4M instances).
### Languages
We follow the 52 languages of [mBART-50](https://arxiv.org/abs/2008.00401); details can be found [below](#data-in-52-languages).
## B. Dataset Structure
### Data Fields
The data fields are as follows:
* `instruction`: describes the task the model should perform. Each of the 67K instructions is unique.
* `input`: optional context or input for the task. For example, when the instruction is "Summarize the following article", the input is the article. Around 40% of the examples have an input.
* `output`: the answer to the instruction as generated by `gpt-3.5-turbo`.
* `id`: the unique index for each data piece in a specific language. The same `id` across splits corresponds to the same original English data.
### Data Instances
An example in `en` looks as follows.
```json
{
"instruction": "What is the main idea of this text?",
"input": "The robotics industry has become one of the fastest growing sectors in today’s economy. Robotics technology is being applied in various industries and applications, such as manufacturing, healthcare, and defense.",
"output": "The main idea of the text is that the robotics industry is rapidly growing and its technology is being used in many different industries and applications.",
"id": "alpaca-107"
}
```
An example in `ar` looks as follows.
```json
{
"instruction": "قم بالوصول إلى إعدادات المتصفح الافتراضية على جهاز الكمبيوتر.",
"input": "",
"output": "لا يمكن لي كمساعد افتراضي القيام بهذه المهمة كونها تحتاج إلى الوصول إلى الجهاز المادي. يرجى الرجوع إلى دليل المستخدم للكمبيوتر الخاص بك أو البحث على الإنترنت لمعرفة كيفية الوصول إلى إعدادات المتصفح الافتراضية على جهاز الكمبيوتر الخاص بك.",
"id": "alpaca-41997"
}
```
An example in `id` looks as follows.
```json
{
"instruction": "Berapa banyak angka penting yang harus digunakan untuk nilai 0,000187?",
"input": "",
"output": "Ada tiga angka penting dalam nilai 0,000187.",
"id": "alpaca-40382"
}
```
An example in `de` looks as follows.
```json
{
"instruction": "Komponieren Sie ein Musikstück mit einem Titel, der Trauer bezeichnet.",
"input": "",
    "output": "\"Schmerzendes Herz\"\n\nDas Stück beginnt mit einer langsamen, traurigen Melodie, gespielt von einem Solo-Violoncello. Die Harmonien, die von einem Klavier begleitet werden, sind durcheinander und dissonant, um ein Gefühl von Schmerz und Trauer zu vermitteln.\n\nIm Laufe des Stücks treten andere Instrumente ein, wie Streicher, Flöte und Oboe, um die Melodie zu unterstützen und zu intensivieren. Der Rhythmus bleibt langsam und schwerfällig, um die Schwere des Schmerzes darzustellen.\n\nIn der Mitte des Stücks gibt es einen Höhepunkt, wo die Melodie und Harmonien sich verdichten und ein Gefühl von Überwältigung erzeugen. Der Solo-Violoncello spielt eine traurige und schnelle Phrase, die die Verzweiflung und Isolation des Protagonisten ausdrückt.\n\nDas Stück endet mit einer Wiederholung der ersten Melodie, gespielt von einem Solo-Klavier, während die anderen Instrumente auslaufen. Die Harmonien werden jedoch weicher und sanfter, um einen Hauch von Hoffnung zu vermitteln, dass der Schmerz langsam vergeht.",
"id": "alpaca-33619"
}
```
An example in `fr` looks as follows.
```json
{
"instruction": "Les Ceanothus ont-ils des racines peu profondes ?",
"input": "",
"output": "En tant qu'IA, je ne peux pas répondre avec une certitude absolue, mais voici des informations générales sur les racines de Ceanothus : \n\nLes Ceanothus ont tendance à avoir un système racinaire étendu et peu profond. Les racines peuvent s'étaler jusqu'à 4 à 6 fois la taille de la plante et s'étendre à environ 60 à 90 cm de profondeur. Cependant, cela peut varier en fonction du type de sol et de la variété de Ceanothus. Certaines variétés de Ceanothus pourraient avoir des racines plus profondes si les conditions s'y prêtent.",
"id": "dolly-10940"
}
```
### Data in 52 Languages
| No | Languages | Code | Train |
| ---|---------------- | ----- | ----- |
| 1 | [Afrikaans](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/af) | af_ZA | 67017 |
| 2 | [Arabic](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ar) | ar_AR | 67017 |
| 3 | [Azerbaijani](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/az) | az_AZ | 67017 |
| 4 | [Bengali](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/bn) | bn_IN | 67017 |
| 5 | [Czech](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/cs) | cs_CZ | 67017 |
| 6 | [German](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/de) | de_DE | 67017 |
| 7 | [English](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/en) | en_XX | 67017 |
| 8 | [Spanish](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/es) | es_XX | 67017 |
| 9 | [Estonian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/et) | et_EE | 67017 |
| 10 | [Persian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/fa) | fa_IR | 67017 |
| 11 | [Finnish](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/fi) | fi_FI | 67017 |
| 12 | [French](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/fr) | fr_XX | 67017 |
| 13 | [Galician](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/gl) | gl_ES | 67017 |
| 14 | [Gujarati](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/gu) | gu_IN | 67017 |
| 15 | [Hebrew](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/he) | he_IL | 67017 |
| 16 | [Hindi](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/hi) | hi_IN | 67017 |
| 17 | [Croatian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/hr) | hr_HR | 67017 |
| 18 | [Indonesian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/id) | id_ID | 67017 |
| 19 | [Italian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/it) | it_IT | 67017 |
| 20 | [Japanese](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ja) | ja_XX | 67017 |
| 21 | [Georgian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ka) | ka_GE | 67017 |
| 22 | [Kazakh](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/kk) | kk_KZ | 67017 |
| 23 | [Khmer](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/km) | km_KH | 67017 |
| 24 | [Korean](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ko) | ko_KR | 67017 |
| 25 | [Lithuanian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/lt) | lt_LT | 67017 |
| 26 | [Latvian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/lv) | lv_LV | 67017 |
| 27 | [Macedonian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/mk) | mk_MK | 67017 |
| 28 | [Malayalam](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ml) | ml_IN | 67017 |
| 29 | [Mongolian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/mn) | mn_MN | 67017 |
| 30 | [Marathi](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/mr) | mr_IN | 67017 |
| 31 | [Burmese](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/my) | my_MM | 67017 |
| 32 | [Nepali](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ne) | ne_NP | 67017 |
| 33 | [Dutch](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/nl) | nl_XX | 67017 |
| 34 | [Polish](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/pl) | pl_PL | 67017 |
| 35 | [Pashto](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ps) | ps_AF | 67017 |
| 36 | [Portuguese](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/pt) | pt_XX | 67017 |
| 37 | [Romanian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ro) | ro_RO | 67017 |
| 38 | [Russian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ru) | ru_RU | 67017 |
| 39 | [Sinhala](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/si) | si_LK | 67017 |
| 40 | [Slovene](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/sl) | sl_SI | 67017 |
| 41 | [Swedish](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/sv) | sv_SE | 67017 |
| 42 | [Swahili](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/sw) | sw_KE | 67017 |
| 43 | [Tamil](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ta) | ta_IN | 67017 |
| 44 | [Telugu](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/te) | te_IN | 67017 |
| 45 | [Thai](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/th) | th_TH | 67017 |
| 46 | [Tagalog](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/tl) | tl_XX | 67017 |
| 47 | [Turkish](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/tr) | tr_TR | 67017 |
| 48 | [Ukrainian](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/uk) | uk_UA | 67017 |
| 49 | [Urdu](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/ur) | ur_PK | 67017 |
| 50 | [Vietnamese](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/vi) | vi_VN | 67017 |
| 51 | [Xhosa](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/xh) | xh_ZA | 67017 |
| 52 | [Chinese](https://huggingface.co/datasets/MBZUAI/Bactrian-X/viewer/zh) | zh_CN | 67017 |
## C. Dataset Creation
1. English Instructions: The English instructions are obtained from [alpaca-52k](https://github.com/tatsu-lab/stanford_alpaca) and [dolly-15k](https://github.com/databrickslabs/dolly/tree/master/data).
2. Instruction Translation: The instructions (and inputs) are translated into 51 languages using Google Translation API (conducted on April 2023).
3. Output Generation: We generate output from `gpt-3.5-turbo` for each language (conducted on April 2023).
## D. Considerations for Using the Data
### Social Impact of Dataset
NLP for everyone: this dataset helps to democratize the cutting-edge instruction-following models in 52 languages. This dataset also allows the first experiment on the multilingual LoRA-based LLaMA model.
### Discussion of Biases
(1) Translation bias; (2) Potential English-culture bias in the translated dataset.
### Other Known Limitations
The `Bactrian-X` data is generated by a language model (`gpt-3.5-turbo`) and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections.
## E. Additional Information
### Dataset Curators
[Haonan Li](https://haonan-li.github.io/) and [Fajri Koto](http://www.fajrikoto.com)
### Licensing Information
The dataset is available under the [Creative Commons NonCommercial (CC BY-NC 4.0)](https://creativecommons.org/licenses/by-nc/4.0/legalcode).
### Citation Information
```
@misc{li2023bactrianx,
title={Bactrian-X : A Multilingual Replicable Instruction-Following Model with Low-Rank Adaptation},
author={Haonan Li and Fajri Koto and Minghao Wu and Alham Fikri Aji and Timothy Baldwin},
year={2023},
eprint={2305.15011},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
### Contributions
Thanks to [@haonan-li](https://github.com/haonan-li), [@fajri91](https://github.com/fajri91) for adding this dataset.
|
chujiezheng/wizard_of_wikipedia | chujiezheng | "2023-05-08T15:05:32Z" | 3,811 | 2 | [
"language:en",
"license:cc-by-nc-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2009.09378",
"region:us"
] | null | "2023-05-08T13:35:40Z" | ---
license: cc-by-nc-4.0
language:
- en
---
Wizard-of-Wikipedia data for the Findings of EMNLP 2020 paper "Difference-aware Knowledge Selection for Knowledge-grounded Conversation Generation"
[GitHub repo](https://github.com/chujiezheng/DiffKS). [Original paper](https://arxiv.org/abs/2009.09378).
```bib
@inproceedings{zheng-etal-2020-diffks,
title="{D}ifference-aware Knowledge Selection for Knowledge-grounded Conversation Generation",
author="Zheng, Chujie and
Cao, Yunbo and
Jiang, Daxin and
Huang, Minlie",
booktitle="Findings of EMNLP",
year="2020"
}
```
|
BAAI/IndustryCorpus2 | BAAI | "2024-12-17T02:14:57Z" | 3,796 | 46 | [
"language:en",
"language:zh",
"license:apache-2.0",
"size_categories:100M<n<1B",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"doi:10.57967/hf/3488",
"region:us"
] | null | "2024-09-15T00:12:49Z" | ---
license: apache-2.0
language:
- en
- zh
size_categories:
- n>1T
extra_gated_prompt: "You agree to not use the dataset to conduct experiments that cause harm to human subjects."
extra_gated_fields:
Company/Organization: text
Country: country
---
Industry models play a vital role in promoting the intelligent transformation and innovative development of enterprises. High-quality industry data is the key to improving the performance of large models and realizing the implementation of industry applications. However, the data sets currently used for industry model training generally have problems such as small data volume, low quality, and lack of professionalism.
In June, we released the [IndustryCorpus](https://huggingface.co/datasets/BAAI/IndustryCorpus) dataset. We have since further upgraded and iterated on that dataset; the changes in this iteration are as follows:
- Data source: Based on the original data, we introduced more high-quality data sources, such as pile, bigcode, open-web-math and other mathematical and code data
- Update the industry category system: In order to better fit the industry classification system, we combined the national economic industry classification system (20 categories) formulated by the National Bureau of Statistics and the world knowledge system to redesign the industry categories, setting up 31 industry categories, basically covering the current mainstream industries
- Data semantic quality screening: We decentralized the IndustryCorpus high-quality data production plan, and used the rule filtering + model filtering solution in the IndustryCorpus2.0 open source data, which greatly improved the overall data quality;
- Data quality stratification: In order to further integrate data quality at different levels, we stratify and organize the data based on the quality assessment score, dividing the data into three levels: high, middle, and low.
- Data size: 1TB for Chinese and 2.2TB for English
The data processing process is consistent with IndustryCorpus
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/qC0_qwtSJr5RuGLo_wXmm.png)
## Data Perspective
### Industry Data Distribution
The disk size of each industry data after full process processing is as follows
| Industry category | Data size (GB) | Industry category | Data size (GB) |
| :-------------------------------------------------: | :------------: | :-----------------------------------------------: | :------------: |
| Programming | 11.0 | News | 51.0 |
| Biomedicine | 61.7 | Petrochemical | 40.2 |
| Medical health-psychology and Chinese medicine | 271.7 | Aerospace | 38.6 |
| Tourism and geography | 64.0 | Mining | 8.9 |
| Law and justice | 238.5 | Finance and economics | 145.8 |
| Mathematics-statistics | 156.7 | Literature and emotions | 105.5 |
| Other information services_information security | 1.8 | Transportation | 40.5 |
| Fire safety_food safety | 4.3 | Science and technology_scientific research | 101.6 |
| Automobile | 39.3 | Water Conservancy_Ocean | 20.2 |
| Accommodation-catering-hotel | 29.6 | Computer-communication | 157.8 |
| Film and television entertainment | 209.4 | Subject education | 340.9 |
| Real estate-construction | 105.2 | Artificial intelligence-machine learning | 7.7 |
| Electric power and energy | 68.7 | Current affairs-government affairs-administration | 271.5 |
| Agriculture, forestry, animal husbandry and fishery | 111.9 | Sports | 262.5 |
| Games | 37.6 | Other manufacturing | 47.2 |
| Others | 188.6 | | |
| Total (GB) | 3276G | | |
The industry data distribution chart in the summary data set is as follows
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/d-QrW-uX8LkY6CLVyun55.png)
From the distribution chart, we can see that subject education, sports, current affairs, law, medical health, and film and television entertainment account for most of the overall data. Data for these industries are widely available on the Internet and in textbooks, so their high proportions are in line with expectations. It is worth mentioning that, since we supplemented the mathematics data, the proportion of mathematics data is also high — higher than the proportion of mathematics data typically found in Internet corpora.
### dataset repo series
All our data repos have a unified naming format, f"BAAI/IndustryCorpus2_{name}", where `name` corresponds to the English name of the industry. The list of industry names is shown below
```
{
"交通运输": "transportation",
"医学_健康_心理_中医": "medicine_health_psychology_traditional_chinese_medicine",
"数学_统计学": "mathematics_statistics",
"时政_政务_行政": "current_affairs_government_administration",
"消防安全_食品安全": "fire_safety_food_safety",
"石油化工": "petrochemical",
"计算机_通信": "computer_communication",
"人工智能_机器学习": "artificial_intelligence_machine_learning",
"其他信息服务_信息安全": "other_information_services_information_security",
"学科教育_教育": "subject_education_education",
"文学_情感": "literature_emotion",
"水利_海洋": "water_resources_ocean",
"游戏": "game",
"科技_科学研究": "technology_scientific_research",
"采矿": "mining",
"住宿_餐饮_酒店": "accommodation_catering_hotel",
"其他制造": "other_manufacturing",
"影视_娱乐": "film_entertainment",
"新闻传媒": "news_media",
"汽车": "automobile",
"生物医药": "biomedicine",
"航空航天": "aerospace",
"金融_经济": "finance_economics",
"体育": "sports",
"农林牧渔": "agriculture_forestry_animal_husbandry_fishery",
"房地产_建筑": "real_estate_construction",
"旅游_地理": "tourism_geography",
"法律_司法": "law_judiciary",
"电力能源": "electric_power_energy",
"计算机编程_代码": "computer_programming_code",
}
```
### Data quality stratification
We filter the entire dataset according to data quality, remove extremely low-quality data, and divide the available data into three independent groups — Low, Middle, and Hight — to facilitate data matching and combination during model training. The distribution of data of different qualities is shown below. It can be seen that the data quality distribution trends of Chinese and English are basically the same, with the largest amount of Middle data, followed by Hight data, and the least amount of Low data; in addition, it can be observed that the proportion of Hight data in English is higher than that in Chinese (with a larger slope), which is also in line with the current distribution trend across languages.
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/WuNoHB7Csh-4J-0q66el1.png)
## Industry Category Classification
In order to improve the coverage of industry classification in the data set to actual industries and align with the industry catalog defined in the national standard, we refer to the national economic industry classification system and the world knowledge system formulated by the National Bureau of Statistics, merge and integrate the categories, and design the final 31 industry categories covering Chinese and English. The category table names are as follows
- Data construction of industry classification model
- Data construction
Data source: pre-training corpus sampling and open source text classification data, of which pre-training corpus accounts for 90%. Through data sampling, the ratio of Chinese and English data is guaranteed to be 1:1
Label construction: Use the LLM model to make multiple classification judgments on the data, and select the data with consistent multiple judgments as training data
Data scale: 36K
The overall process of data construction is as follows:
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/IUEZ-cADYqCyM9FvdHXYd.png)
- Model training:
Parameter update: add classification head to pre-trained BERT model for text classification model training
Model selection: considering model performance and inference efficiency, we selected a 0.5B scale model. Through comparative experiments, we finally selected BGE-M3 and full parameter training as our base model
Training hyperparameters: full parameter training, max_length = 2048, lr = 1e-5, batch_size = 64, validation set evaluation acc: 86%
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/L3aKsDrYdWWNTkaAu7l-Z.png)
## Data quality assessment
- Why should we filter low-quality data?
Below are examples of low-quality data extracted from the dataset. It can be seen that this kind of data is harmful to model learning.
```
{"text": "\\_\\__\n\nTranslated from *Chinese Journal of Biochemistry and Molecular Biology*, 2007, 23(2): 154--159 \\[译自:中国生物化学与分子生物学报\\]\n"}
{"text": "#ifndef _IMGBMP_H_\n#define _IMGBMP_H_\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\nconst uint8_t bmp[]={\n\\/\\/-- 调入了一幅图像:D:\\我的文档\\My Pictures\\12864-555.bmp --*\\/\n\\/\\/-- 宽度x高度=128x64 --\n0x00,0x06,0x0A,0xFE,0x0A,0xC6,0x00,0xE0,0x00,0xF0,0x00,0xF8,0x00,0x00,0x00,0x00,\n0x00,0x00,0xFE,0x7D,0xBB,0xC7,0xEF,0xEF,0xEF,0xEF,0xEF,0xEF,0xEF,0xC7,0xBB,0x7D,\n0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x08,\n0x0C,0xFE,0xFE,0x0C,0x08,0x20,0x60,0xFE,0xFE,0x60,0x20,0x00,0x00,0x00,0x78,0x48,\n0xFE,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xFE,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFE,0xFF,\n0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,0xFF,0x00,0x00,0xFE,0xFF,0x03,\n0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,0xFE,0x00,0x00,0x00,0x00,0xC0,0xC0,\n0xC0,0x00,0x00,0x00,0x00,0xFE,0xFF,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,\n0xFF,0xFE,0x00,0x00,0xFE,0xFF,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03
,0x03,0xFF,\n0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0xFF,0xFF,0x0C,\n0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0xFF,0xFF,0x00,0x00,0x00,0x00,0xE1,0xE1,\n0xE1,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0xFF,0xFF,0x00,0x00,0xFF,0xFF,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0xFF,\n0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0F,0x1F,\n0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x0F,0x00,0x00,0x0F,0x1F,0x18,\n0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x0F,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x0F,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,\n0x1F,0x0F,0x00,0x00,0x0F,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,\n0x0F,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0xE2,0x92,0x8A,0x86,0x00,0x00,0x7C,0x82,0x82,0x82,0x7C,\n0x00,0xFE,0x00,0x82,0x92,0xAA,0xC6,0x00,0x00,0xC0,0xC0,0x00,0x7C,0x82,0x82,0x82,\n0x7C,0x00,0x00,0x02,0x02,0x02,0xFE,0x00,0x00,0xC0,0xC0,0x00,0x7C,0x82,0x82,0x82,\n0x7C,0x00,0x00,0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00
,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x24,0xA4,0x2E,0x24,0xE4,0x24,0x2E,0xA4,0x24,0x00,0x00,0x00,0xF8,0x4A,0x4C,\n0x48,0xF8,0x48,0x4C,0x4A,0xF8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0x20,0x10,0x10,\n0x10,0x10,0x20,0xC0,0x00,0x00,0xC0,0x20,0x10,0x10,0x10,0x10,0x20,0xC0,0x00,0x00,\n0x00,0x12,0x0A,0x07,0x02,0x7F,0x02,0x07,0x0A,0x12,0x00,0x00,0x00,0x0B,0x0A,0x0A,\n0x0A,0x7F,0x0A,0x0A,0x0A,0x0B,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1F,0x20,0x40,0x40,\n0x40,0x50,0x20,0x5F,0x80,0x00,0x1F,0x20,0x40,0x40,0x40,0x50,0x20,0x5F,0x80,0x00,\n}; \n\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif \\/\\/ _IMGBMP_H_ _SSD1306_16BIT_H_\n"}
```
- Data construction
Data source: Random sampling of the pre-training corpus
Label construction: Design data scoring rules, use an LLM to perform multiple rounds of scoring, and select data with a difference of less than 2 across the rounds of scoring
Data scale: 20k scoring data, Chinese and English ratio 1:1
Data scoring prompt
```
quality_prompt = """Below is an extract from a web page. Evaluate whether the page has a high natural language value and could be useful in a natural language task to train a good language model using the additive 5-point scoring system described below. Points are accumulated based on the satisfaction of each criterion:
- Zero score if the content contains only some meaningless content or private content, such as some random code, http url or copyright information, personally identifiable information, binary encoding of images.
- Add 1 point if the extract provides some basic information, even if it includes some useless contents like advertisements and promotional material.
- Add another point if the extract is written in good style, semantically fluent, and free of repetitive content and grammatical errors.
- Award a third point if the extract has relatively complete semantic content, and is written in a good and fluent style, the entire content expresses something related to the same topic, rather than a patchwork of several unrelated items.
- A fourth point is awarded if the extract has obvious educational or literary value, or provides a meaningful point or content, contributes to the learning of the topic, and is written in a clear and consistent style. It may be similar to a chapter in a textbook or tutorial, providing a lot of educational content, including exercises and solutions, with little to no superfluous information. The content is coherent and focused, which is valuable for structured learning.
- A fifth point is awarded if the extract has outstanding educational value or is of very high information density, provides very high value and meaningful content, does not contain useless information, and is well suited for teaching or knowledge transfer. It contains detailed reasoning, has an easy-to-follow writing style, and can provide deep and thorough insights.
The extract:
<{EXAMPLE}>.
After examining the extract:
- Briefly justify your total score, up to 50 words.
- Conclude with the score using the format: "Quality score: <total points>"
...
"""
```
- Model training
Model selection: Similar to the classification model, we also used a 0.5b scale model and compared bge-m3 and qwen-0.5b. The final experiment showed that bge-m3 had the best overall performance
Model hyperparameters: base bge-m3, full parameter training, lr=1e-5, batch_size=64, max_length = 2048
Model evaluation: On the validation set, the consistency rate of the model and GPT4 in sample quality judgment was 90%.
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/x6MCku0bfExuU7Cz15R5L.png)
- Training benefits from high-quality data
To verify whether high-quality data leads to more efficient training, we screened high-quality data out of the original 50B-token corpus — so the two datasets share roughly the same distribution — and performed autoregressive training on the same base model.
As can be seen from the curve, a model trained on 14B tokens of high-quality data reaches the performance of a model trained on 50B tokens of ordinary data. High-quality data can greatly improve training efficiency.
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/JKTU0-uLlAOZ9C8CQXvoU.png)
In addition, high-quality data can be added to the model as data in the pre-training annealing stage to further improve the model effect. To verify this conjecture, when training the industry model, we added pre-training data converted from high-quality data after screening and some instruction data to the annealing stage of the model. It can be seen that the performance of the model has been greatly improved.
![image/png](https://cdn-uploads.huggingface.co/production/uploads/642f6c64f945a8a5c9ee5b5d/oye_J2f3AO4JUG2qSPBsy.png)
Finally, high-quality pre-training corpora contain a wealth of high-value knowledge content, from which instruction data can be extracted to further improve the richness and knowledge of instruction data. This also gave rise to the [BAAI/IndustryInstruction](https://huggingface.co/datasets/BAAI/IndustryInstruction) project, which we will explain in detail there.
## Citation
If you find our work helpful, feel free to give us a cite.
```
@misc {beijing_academy_of_artificial_intelligence,
author= { Xiaofeng Shi and Lulu Zhao and Hua Zhou and Donglin Hao},
title = { IndustryCorpus2},
year = 2024,
url = { https://huggingface.co/datasets/BAAI/IndustryCorpus2 },
doi = { 10.57967/hf/3488 },
publisher = { Hugging Face }
}
``` |
Lichess/standard-chess-games | Lichess | "2025-01-10T21:26:12Z" | 3,791 | 37 | [
"license:cc0-1.0",
"size_categories:1B<n<10B",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us",
"chess",
"games",
"game",
"lichess"
] | null | "2024-09-24T08:58:09Z" | ---
license: cc0-1.0
pretty_name: Lichess Standard Rated Games
dataset_info:
features:
- name: Event
dtype: string
- name: Site
dtype: string
- name: White
dtype: string
- name: Black
dtype: string
- name: Result
dtype: string
- name: WhiteTitle
dtype: string
- name: BlackTitle
dtype: string
- name: WhiteElo
dtype: int16
- name: BlackElo
dtype: int16
- name: WhiteRatingDiff
dtype: int16
- name: BlackRatingDiff
dtype: int16
- name: UTCDate
dtype: date32
- name: UTCTime
dtype: time32[s]
- name: ECO
dtype: string
- name: Opening
dtype: string
- name: Termination
dtype: string
- name: TimeControl
dtype: string
- name: movetext
dtype: string
configs:
- config_name: default
data_files:
- split: train
path: data/*/*/*
tags:
- chess
- games
- game
- lichess
size_categories:
- 1B<n<10B
---
> [!CAUTION]
> This dataset is still a work in progress and some breaking changes might occur. In the meantime, please use https://database.lichess.org/#standard_games
>
# Dataset Card for the Lichess Rated Standard Chess Games Dataset
## Dataset Description
**6,298,645,464** standard rated games, played on [lichess.org](https://lichess.org), updated monthly from the [database dumps](https://database.lichess.org/#standard_games).
This version of the data is meant for data analysis. If you need PGN files you can find those [here](https://database.lichess.org/#standard_games). That said, once you have a subset of interest, it is trivial to convert it back to PGN as shown in the [Dataset Usage](#dataset-usage) section.
This dataset is hive-partitioned into multiple parquet files on two keys: `year` and `month`:
```bash
.
├── data
│ └── year=2015
│ ├── month=01
│ │ ├── train-00000-of-00003.parquet
│ │ ├── train-00001-of-00003.parquet
│ │ └── train-00002-of-00003.parquet
│ ├── month=02
│ │ ├── train-00000-of-00003.parquet
│ │ ├── train-00001-of-00003.parquet
│ │ └── train-00002-of-00003.parquet
│ ├── ...
```
### Dataset Usage
<!-- Using the `datasets` library:
```python
from datasets import load_dataset
dset = load_dataset("Lichess/standard-chess-games", split="train")
```
Using the `polars` library:
Using DuckDB:
Using `python-chess`: -->
## Dataset Details
### Dataset Sample
<!-- One row of the dataset looks like this:
```python
{
"Event":,
"Site":,
}
``` -->
### Dataset Fields
<!-- Every row of the dataset contains the following fields:
- **`Event`**: `string`,
- **`Site`**: `string`, -->
### Notes
- About 6% of the games include Stockfish analysis evaluations: [%eval 2.35] (235 centipawn advantage), [%eval #-4] (getting mated in 4), always from White's point of view.
- The WhiteElo and BlackElo tags contain Glicko2 ratings.
- The `movetext` column contains clock information as PGN %clk comments since April 2017.
- The schema doesn't include the `Date` header, typically part of the [Seven Tag Roster](https://en.wikipedia.org/wiki/Portable_Game_Notation#Seven_Tag_Roster) as we deemed the `UTCDate` field to be enough.
- A future version of the data will add a `UCI` column containing the corresponding moves in [UCI format](https://en.wikipedia.org/wiki/Universal_Chess_Interface). |
Graphcore/wikipedia-bert-128 | Graphcore | "2022-09-07T14:42:32Z" | 3,777 | 1 | [
"language:en",
"license:cc-by-sa-3.0",
"size_categories:10M<n<100M",
"format:parquet",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-03-02T23:29:22Z" | ---
language:
- en
license:
- cc-by-sa-3.0
--- |
deepghs/subsplease_animes | deepghs | "2025-01-21T07:59:46Z" | 3,762 | 2 | [
"source_datasets:myanimelist",
"source_datasets:nyaasi",
"source_datasets:subsplease",
"language:en",
"license:other",
"size_categories:n<1K",
"format:text",
"modality:image",
"modality:text",
"library:datasets",
"library:mlcroissant",
"region:us",
"anime"
] | null | "2024-07-15T13:57:37Z" | ---
license: other
language:
- en
tags:
- anime
size_categories:
- n<1K
source_datasets:
- myanimelist
- nyaasi
- subsplease
---
This is an integrated database combining SubsPlease, MyAnimeList and Nyaa.si. It shows which anime are currently the most popular, and which of them have well-seeded magnet links.
This database is refreshed daily.
## Current Animes
858 animes, 11282 episodes in total, Last updated on: `2025-01-21 07:59:42 UTC`.
| ID | Post | Bangumi | Type | Episodes | Status | Score | Nyaasi | Magnets | Seeds | Downloads | Updated At |
|------:|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------|:-----------|:--------------------|:--------|:-----------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------|------------:|:-----------------|
| 57334 | [![57334__dandadan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57334__dandadan.jpg)](https://myanimelist.net/anime/57334/Dandadan) | [Dandadan](https://subsplease.org/shows/dandadan) | TV | 12 / 12 | **Finished Airing** | 8.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dandadan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57334__dandadan.txt) | **908** | 50977 | 2024-12-19 16:01 |
| 57592 | [![57592__dr_stone_science_future](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57592__dr_stone_science_future.jpg)](https://myanimelist.net/anime/57592/Dr_Stone__Science_Future) | [Dr. Stone S4](https://subsplease.org/shows/dr-stone-s4) | TV | 2 / 12 | Currently Airing | 8.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dr+Stone+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57592__dr_stone_science_future.txt) | **802** | 14308 | 2025-01-16 15:01 |
| 59514 | [![59514__sentai_red_isekai_de_boukensha_ni_naru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59514__sentai_red_isekai_de_boukensha_ni_naru.jpg)](https://myanimelist.net/anime/59514/Sentai_Red_Isekai_de_Boukensha_ni_Naru) | [Sentai Red Isekai de Boukensha ni Naru](https://subsplease.org/shows/sentai-red-isekai-de-boukensha-ni-naru) | TV | 2 / 12 | Currently Airing | 6.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sentai+Red+Isekai+de+Boukensha+ni+Naru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59514__sentai_red_isekai_de_boukensha_ni_naru.txt) | **606** | 10948 | 2025-01-19 16:02 |
| 58502 | [![58502__zenshuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58502__zenshuu.jpg)](https://myanimelist.net/anime/58502/Zenshuu) | [Zenshuu](https://subsplease.org/shows/zenshuu) | TV | 3 / 12 | Currently Airing | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Zenshuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58502__zenshuu.txt) | **594** | 12996 | 2025-01-19 17:17 |
| 57719 | [![57719__akuyaku_reijou_tensei_ojisan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57719__akuyaku_reijou_tensei_ojisan.jpg)](https://myanimelist.net/anime/57719/Akuyaku_Reijou_Tensei_Ojisan) | [Akuyaku Reijou Tensei Ojisan](https://subsplease.org/shows/akuyaku-reijou-tensei-ojisan) | TV | 2 / 12 | Currently Airing | 7.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akuyaku+Reijou+Tensei+Ojisan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57719__akuyaku_reijou_tensei_ojisan.txt) | **559** | 12515 | 2025-01-16 16:04 |
| 59730 | [![59730__a_rank_party_wo_ridatsu_shita_ore_wa_moto_oshiego_tachi_to_meikyuu_shinbu_wo_mezasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59730__a_rank_party_wo_ridatsu_shita_ore_wa_moto_oshiego_tachi_to_meikyuu_shinbu_wo_mezasu.jpg)](https://myanimelist.net/anime/59730/A-Rank_Party_wo_Ridatsu_shita_Ore_wa_Moto_Oshiego-tachi_to_Meikyuu_Shinbu_wo_Mezasu) | [Aparida](https://subsplease.org/shows/aparida) | TV | 2 / 24 | Currently Airing | 6.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Aparida+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59730__a_rank_party_wo_ridatsu_shita_ore_wa_moto_oshiego_tachi_to_meikyuu_shinbu_wo_mezasu.txt) | **522** | 11092 | 2025-01-18 17:47 |
| 57648 | [![57648__nihon_e_youkoso_elf_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57648__nihon_e_youkoso_elf_san.jpg)](https://myanimelist.net/anime/57648/Nihon_e_Youkoso_Elf-san) | [Nihon e Youkoso Elf-san](https://subsplease.org/shows/nihon-e-youkoso-elf-san) | TV | 2 / 12 | Currently Airing | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nihon+e+Youkoso+Elf+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57648__nihon_e_youkoso_elf_san.txt) | **502** | 10760 | 2025-01-17 15:02 |
| 58600 | [![58600__ameku_takao_no_suiri_karte](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58600__ameku_takao_no_suiri_karte.jpg)](https://myanimelist.net/anime/58600/Ameku_Takao_no_Suiri_Karte) | [Ameku Takao no Suiri Karte](https://subsplease.org/shows/ameku-takao-no-suiri-karte) | TV | 3 / 12 | Currently Airing | 7.4 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ameku+Takao+no+Suiri+Karte+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58600__ameku_takao_no_suiri_karte.txt) | **477** | 12174 | 2025-01-08 18:02 |
| 59144 | [![59144__fuguushoku_kanteishi_ga_jitsu_wa_saikyou_datta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59144__fuguushoku_kanteishi_ga_jitsu_wa_saikyou_datta.jpg)](https://myanimelist.net/anime/59144/Fuguushoku_Kanteishi_ga_Jitsu_wa_Saikyou_Datta) | [Fugukan](https://subsplease.org/shows/fugukan) | TV | 2 / 12 | Currently Airing | 6.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fugukan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59144__fuguushoku_kanteishi_ga_jitsu_wa_saikyou_datta.txt) | **470** | 11246 | 2025-01-16 15:47 |
| 58822 | [![58822__izure_saikyou_no_renkinjutsushi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58822__izure_saikyou_no_renkinjutsushi.jpg)](https://myanimelist.net/anime/58822/Izure_Saikyou_no_Renkinjutsushi) | [Izure Saikyou no Renkinjutsushi](https://subsplease.org/shows/izure-saikyou-no-renkinjutsushi) | TV | 3 / 12 | Currently Airing | 6.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Izure+Saikyou+no+Renkinjutsushi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58822__izure_saikyou_no_renkinjutsushi.txt) | **469** | 13336 | 2025-01-15 16:02 |
| 59349 | [![59349__salaryman_ga_isekai_ni_ittara_shitennou_ni_natta_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59349__salaryman_ga_isekai_ni_ittara_shitennou_ni_natta_hanashi.jpg)](https://myanimelist.net/anime/59349/Salaryman_ga_Isekai_ni_Ittara_Shitennou_ni_Natta_Hanashi) | [Salaryman ga Isekai ni Ittara Shitennou ni Natta Hanashi](https://subsplease.org/shows/salaryman-ga-isekai-ni-ittara-shitennou-ni-natta-hanashi) | TV | 4 / 12 | Currently Airing | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Salaryman+ga+Isekai+ni+Ittara+Shitennou+ni+Natta+Hanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59349__salaryman_ga_isekai_ni_ittara_shitennou_ni_natta_hanashi.txt) | **462** | 12053 | 2025-01-20 15:47 |
| 59561 | [![59561__around_40_otoko_no_isekai_tsuuhan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59561__around_40_otoko_no_isekai_tsuuhan.jpg)](https://myanimelist.net/anime/59561/Around_40_Otoko_no_Isekai_Tsuuhan) | [Around 40 Otoko no Isekai Tsuuhan](https://subsplease.org/shows/around-40-otoko-no-isekai-tsuuhan) | TV | 2 / 13 | Currently Airing | 6.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Around+40+Otoko+no+Isekai+Tsuuhan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59561__around_40_otoko_no_isekai_tsuuhan.txt) | **443** | 11410 | 2025-01-16 13:48 |
| 59135 | [![59135__class_no_daikirai_na_joshi_to_kekkon_suru_koto_ni_natta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59135__class_no_daikirai_na_joshi_to_kekkon_suru_koto_ni_natta.jpg)](https://myanimelist.net/anime/59135/Class_no_Daikirai_na_Joshi_to_Kekkon_suru_Koto_ni_Natta) | [Class no Daikirai na Joshi to Kekkon suru Koto ni Natta](https://subsplease.org/shows/class-no-daikirai-na-joshi-to-kekkon-suru-koto-ni-natta) | TV | 3 / 12 | Currently Airing | 7.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Class+no+Daikirai+na+Joshi+to+Kekkon+suru+Koto+ni+Natta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59135__class_no_daikirai_na_joshi_to_kekkon_suru_koto_ni_natta.txt) | **439** | 10820 | 2025-01-17 17:02 |
| 59265 | [![59265__magic_maker_isekai_mahou_no_tsukurikata](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59265__magic_maker_isekai_mahou_no_tsukurikata.jpg)](https://myanimelist.net/anime/59265/Magic_Maker__Isekai_Mahou_no_Tsukurikata) | [Magic Maker - Isekai Mahou no Tsukurikata](https://subsplease.org/shows/magic-maker-isekai-mahou-no-tsukurikata) | TV | 2 / 12 | Currently Airing | 6.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Magic+Maker+Isekai+Mahou+no+Tsukurikata+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59265__magic_maker_isekai_mahou_no_tsukurikata.txt) | **439** | 10860 | 2025-01-15 17:01 |
| 59002 | [![59002__hazure_skill_kinomi_master_skill_no_mi_tabetara_shinu_wo_mugen_ni_taberareru_you_ni_natta_ken_ni_tsuite](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59002__hazure_skill_kinomi_master_skill_no_mi_tabetara_shinu_wo_mugen_ni_taberareru_you_ni_natta_ken_ni_tsuite.jpg)](https://myanimelist.net/anime/59002/Hazure_Skill_Kinomi_Master__Skill_no_Mi_Tabetara_Shinu_wo_Mugen_ni_Taberareru_You_ni_Natta_Ken_ni_Tsuite) | [Kinomi Master](https://subsplease.org/shows/kinomi-master) | TV | 3 / 12 | Currently Airing | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kinomi+Master+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59002__hazure_skill_kinomi_master_skill_no_mi_tabetara_shinu_wo_mugen_ni_taberareru_you_ni_natta_ken_ni_tsuite.txt) | **437** | 13258 | 2025-01-14 16:16 |
| 58437 | [![58437__botsuraku_yotei_no_kizoku_dakedo_hima_datta_kara_mahou_wo_kiwametemita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58437__botsuraku_yotei_no_kizoku_dakedo_hima_datta_kara_mahou_wo_kiwametemita.jpg)](https://myanimelist.net/anime/58437/Botsuraku_Yotei_no_Kizoku_dakedo_Hima_Datta_kara_Mahou_wo_Kiwametemita) | [Botsuraku Yotei no Kizoku dakedo, Hima Datta kara Mahou wo Kiwametemita](https://subsplease.org/shows/botsuraku-yotei-no-kizoku-dakedo-hima-datta-kara-mahou-wo-kiwametemita) | TV | 4 / ? | Currently Airing | 6.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Botsuraku+Yotei+no+Kizoku+dakedo+Hima+Datta+kara+Mahou+wo+Kiwametemita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58437__botsuraku_yotei_no_kizoku_dakedo_hima_datta_kara_mahou_wo_kiwametemita.txt) | **420** | 11110 | 2025-01-20 18:17 |
| 58473 | [![58473__s_rank_monster_no_behemoth_dakedo_neko_to_machigawarete_elf_musume_no_pet_toshite_kurashitemasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58473__s_rank_monster_no_behemoth_dakedo_neko_to_machigawarete_elf_musume_no_pet_toshite_kurashitemasu.jpg)](https://myanimelist.net/anime/58473/S-Rank_Monster_no_Behemoth_dakedo_Neko_to_Machigawarete_Elf_Musume_no_Pet_toshite_Kurashitemasu) | [Beheneko](https://subsplease.org/shows/beheneko) | TV | 4 / 12 | Currently Airing | 6.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Beheneko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58473__s_rank_monster_no_behemoth_dakedo_neko_to_machigawarete_elf_musume_no_pet_toshite_kurashitemasu.txt) | **414** | 11978 | 2025-01-18 14:02 |
| 55830 | [![55830__fate_strange_fake](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55830__fate_strange_fake.jpg)](https://myanimelist.net/anime/55830/Fate_strange_Fake) | [Fate Strange Fake](https://subsplease.org/shows/fate-strange-fake) | TV | 1 / ? | **Not yet aired** | N/A | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fate+Strange+Fake+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55830__fate_strange_fake.txt) | **399** | 14836 | 2024-12-31 14:02 |
| 58853 | [![58853__kuroiwa_medaka_ni_watashi_no_kawaii_ga_tsuujinai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58853__kuroiwa_medaka_ni_watashi_no_kawaii_ga_tsuujinai.jpg)](https://myanimelist.net/anime/58853/Kuroiwa_Medaka_ni_Watashi_no_Kawaii_ga_Tsuujinai) | [Kuroiwa Medaka ni Watashi no Kawaii ga Tsuujinai](https://subsplease.org/shows/kuroiwa-medaka-ni-watashi-no-kawaii-ga-tsuujinai) | TV | 3 / 12 | Currently Airing | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kuroiwa+Medaka+ni+Watashi+no+Kawaii+ga+Tsuujinai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58853__kuroiwa_medaka_ni_watashi_no_kawaii_ga_tsuujinai.txt) | **380** | 7997 | 2025-01-20 17:02 |
| 57066 | [![57066__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_v_houjou_no_megami_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57066__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_v_houjou_no_megami_hen.jpg)](https://myanimelist.net/anime/57066/Dungeon_ni_Deai_wo_Motomeru_no_wa_Machigatteiru_Darou_ka_V__Houjou_no_Megami-hen) | [Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka S5](https://subsplease.org/shows/dungeon-ni-deai-wo-motomeru-no-wa-machigatteiru-darou-ka-s5) | TV | 11 / 15 | Currently Airing | 8.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dungeon+ni+Deai+wo+Motomeru+no+wa+Machigatteiru+Darou+ka+S5+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57066__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_v_houjou_no_megami_hen.txt) | **366** | 20556 | 2024-12-19 13:02 |
| 58082 | [![58082__neet_kunoichi_to_nazeka_dousei_hajimemashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58082__neet_kunoichi_to_nazeka_dousei_hajimemashita.jpg)](https://myanimelist.net/anime/58082/NEET_Kunoichi_to_Nazeka_Dousei_Hajimemashita) | [NEET Kunoichi to Nazeka Dousei Hajimemashita](https://subsplease.org/shows/neet-kunoichi-to-nazeka-dousei-hajimemashita) | TV | 3 / 24 | Currently Airing | 6.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+NEET+Kunoichi+to+Nazeka+Dousei+Hajimemashita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58082__neet_kunoichi_to_nazeka_dousei_hajimemashita.txt) | **350** | 8984 | 2025-01-18 16:32 |
| 59226 | [![59226__ao_no_exorcist_yosuga_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59226__ao_no_exorcist_yosuga_hen.jpg)](https://myanimelist.net/anime/59226/Ao_no_Exorcist__Yosuga-hen) | [Ao no Exorcist - Yosuga-hen](https://subsplease.org/shows/ao-no-exorcist-yosuga-hen) | TV | 3 / 12 | Currently Airing | 7.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ao+no+Exorcist+Yosuga+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59226__ao_no_exorcist_yosuga_hen.txt) | **343** | 6986 | 2025-01-18 18:02 |
| 55842 | [![55842__okinawa_de_suki_ni_natta_ko_ga_hougen_sugite_tsurasugiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55842__okinawa_de_suki_ni_natta_ko_ga_hougen_sugite_tsurasugiru.jpg)](https://myanimelist.net/anime/55842/Okinawa_de_Suki_ni_Natta_Ko_ga_Hougen_Sugite_Tsurasugiru) | [Okitsura](https://subsplease.org/shows/okitsura) | TV | 3 / 12 | Currently Airing | 6.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Okitsura+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55842__okinawa_de_suki_ni_natta_ko_ga_hougen_sugite_tsurasugiru.txt) | **337** | 7830 | 2025-01-18 18:17 |
| 58271 | [![58271__honey_lemon_soda](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58271__honey_lemon_soda.jpg)](https://myanimelist.net/anime/58271/Honey_Lemon_Soda) | [Honey Lemon Soda](https://subsplease.org/shows/honey-lemon-soda) | TV | 2 / 12 | Currently Airing | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Honey+Lemon+Soda+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58271__honey_lemon_soda.txt) | **327** | 7172 | 2025-01-15 18:26 |
| 59055 | [![59055__hana_wa_saku_shura_no_gotoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59055__hana_wa_saku_shura_no_gotoku.jpg)](https://myanimelist.net/anime/59055/Hana_wa_Saku_Shura_no_Gotoku) | [Hana wa Saku, Shura no Gotoku](https://subsplease.org/shows/hana-wa-saku-shura-no-gotoku) | TV | 2 / 12 | Currently Airing | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hana+wa+Saku+Shura+no+Gotoku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59055__hana_wa_saku_shura_no_gotoku.txt) | **304** | 7285 | 2025-01-14 17:36 |
| 52991 | [![52991__sousou_no_frieren](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52991__sousou_no_frieren.jpg)](https://myanimelist.net/anime/52991/Sousou_no_Frieren) | [Sousou no Frieren](https://subsplease.org/shows/sousou-no-frieren) | TV | 28 / 28 | **Finished Airing** | 9.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sousou+no+Frieren+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52991__sousou_no_frieren.txt) | **276** | 66223 | 2024-03-22 15:32 |
| 52995 | [![52995__arifureta_shokugyou_de_sekai_saikyou_season_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52995__arifureta_shokugyou_de_sekai_saikyou_season_3.jpg)](https://myanimelist.net/anime/52995/Arifureta_Shokugyou_de_Sekai_Saikyou_Season_3) | [Arifureta Shokugyou de Sekai Saikyou S3](https://subsplease.org/shows/arifureta-shokugyou-de-sekai-saikyou-s3) | TV | 12 / 16 | Currently Airing | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Arifureta+Shokugyou+de+Sekai+Saikyou+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52995__arifureta_shokugyou_de_sekai_saikyou_season_3.txt) | **264** | 12908 | 2025-01-20 16:01 |
| 55701 | [![55701__kimetsu_no_yaiba_hashira_geiko_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55701__kimetsu_no_yaiba_hashira_geiko_hen.jpg)](https://myanimelist.net/anime/55701/Kimetsu_no_Yaiba__Hashira_Geiko-hen) | [Kimetsu no Yaiba - Hashira Geiko-hen](https://subsplease.org/shows/kimetsu-no-yaiba-hashira-geiko-hen) | TV | 8 / 8 | **Finished Airing** | 8.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimetsu+no+Yaiba+Hashira+Geiko+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55701__kimetsu_no_yaiba_hashira_geiko_hen.txt) | **259** | 46555 | 2024-06-30 18:52 |
| 57152 | [![57152__mahoutsukai_no_yakusoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57152__mahoutsukai_no_yakusoku.jpg)](https://myanimelist.net/anime/57152/Mahoutsukai_no_Yakusoku) | [Mahoutsukai no Yakusoku](https://subsplease.org/shows/mahoutsukai-no-yakusoku) | TV | 3 / 12 | Currently Airing | 5.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+no+Yakusoku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57152__mahoutsukai_no_yakusoku.txt) | **256** | 5610 | 2025-01-20 15:32 |
| 58426 | [![58426__shikanoko_nokonoko_koshitantan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58426__shikanoko_nokonoko_koshitantan.jpg)](https://myanimelist.net/anime/58426/Shikanoko_Nokonoko_Koshitantan) | [Shikanoko Nokonoko Koshitantan](https://subsplease.org/shows/shikanoko-nokonoko-koshitantan) | TV | 12 / 12 | **Finished Airing** | 7.02 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shikanoko+Nokonoko+Koshitantan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58426__shikanoko_nokonoko_koshitantan.txt) | **250** | 15044 | 2024-09-18 14:31 |
| 52299 | [![52299__ore_dake_level_up_na_ken](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52299__ore_dake_level_up_na_ken.jpg)](https://myanimelist.net/anime/52299/Ore_dake_Level_Up_na_Ken) | [Solo Leveling](https://subsplease.org/shows/solo-leveling) | TV | 16 / 12 | **Finished Airing** | 8.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Solo+Leveling+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52299__ore_dake_level_up_na_ken.txt) | **248** | 53657 | 2025-01-18 17:32 |
| 55994 | [![55994__sword_art_online_alternative_gun_gale_online_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55994__sword_art_online_alternative_gun_gale_online_ii.jpg)](https://myanimelist.net/anime/55994/Sword_Art_Online_Alternative__Gun_Gale_Online_II) | [Sword Art Online Alternative - Gun Gale Online S2](https://subsplease.org/shows/sword-art-online-alternative-gun-gale-online-s2) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sword+Art+Online+Alternative+Gun+Gale+Online+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55994__sword_art_online_alternative_gun_gale_online_ii.txt) | **239** | 13319 | 2024-12-20 17:32 |
| 49458 | [![49458__kono_subarashii_sekai_ni_shukufuku_wo_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49458__kono_subarashii_sekai_ni_shukufuku_wo_3.jpg)](https://myanimelist.net/anime/49458/Kono_Subarashii_Sekai_ni_Shukufuku_wo_3) | [Kono Subarashii Sekai ni Shukufuku wo! S3](https://subsplease.org/shows/kono-subarashii-sekai-ni-shukufuku-wo-s3) | TV | 11 / 11 | **Finished Airing** | 8.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kono+Subarashii+Sekai+ni+Shukufuku+wo+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49458__kono_subarashii_sekai_ni_shukufuku_wo_3.txt) | **238** | 31432 | 2024-06-19 15:01 |
| 58739 | [![58739__momentary_lily](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58739__momentary_lily.jpg)](https://myanimelist.net/anime/58739/Momentary_Lily) | [Momentary Lily](https://subsplease.org/shows/momentary-lily) | TV | 3 / 13 | Currently Airing | 5.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Momentary+Lily+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58739__momentary_lily.txt) | **237** | 6450 | 2025-01-16 17:38 |
| 54744 | [![54744__tokidoki_bosotto_russia_go_de_dereru_tonari_no_alya_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54744__tokidoki_bosotto_russia_go_de_dereru_tonari_no_alya_san.jpg)](https://myanimelist.net/anime/54744/Tokidoki_Bosotto_Russia-go_de_Dereru_Tonari_no_Alya-san) | [Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san](https://subsplease.org/shows/tokidoki-bosotto-russia-go-de-dereru-tonari-no-alya-san) | TV | 12 / 12 | **Finished Airing** | 7.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tokidoki+Bosotto+Russia+go+de+Dereru+Tonari+no+Alya+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54744__tokidoki_bosotto_russia_go_de_dereru_tonari_no_alya_san.txt) | **230** | 24690 | 2024-09-18 15:02 |
| 51119 | [![51119__grisaia_phantom_trigger](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51119__grisaia_phantom_trigger.jpg)](https://myanimelist.net/anime/51119/Grisaia__Phantom_Trigger) | [Grisaia - Phantom Trigger](https://subsplease.org/shows/grisaia-phantom-trigger) | TV | 3 / 13 | Currently Airing | 6.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Grisaia+Phantom+Trigger+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51119__grisaia_phantom_trigger.txt) | **230** | 6486 | 2025-01-15 16:32 |
| 59113 | [![59113__farmagia](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59113__farmagia.jpg)](https://myanimelist.net/anime/59113/Farmagia) | [Farmagia](https://subsplease.org/shows/farmagia) | TV | 2 / 12 | Currently Airing | 5.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Farmagia+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59113__farmagia.txt) | **224** | 5774 | 2025-01-17 15:32 |
| 59989 | [![59989__kami_no_tou_koubou_sen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59989__kami_no_tou_koubou_sen.jpg)](https://myanimelist.net/anime/59989/Kami_no_Tou__Koubou-sen) | [Tower of God S2](https://subsplease.org/shows/tower-of-god-s2) | TV | 26 / 13 | **Finished Airing** | 6.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tower+of+God+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59989__kami_no_tou_koubou_sen.txt) | **224** | 16162 | 2024-12-29 15:02 |
| 53924 | [![53924__jibaku_shounen_hanako_kun_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53924__jibaku_shounen_hanako_kun_2.jpg)](https://myanimelist.net/anime/53924/Jibaku_Shounen_Hanako-kun_2) | [Jibaku Shounen Hanako-kun S2](https://subsplease.org/shows/jibaku-shounen-hanako-kun-s2) | TV | 2 / 12 | Currently Airing | 7.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jibaku+Shounen+Hanako+kun+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53924__jibaku_shounen_hanako_kun_2.txt) | **220** | 3289 | 2025-01-19 10:02 |
| 57524 | [![57524__make_heroine_ga_oosugiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57524__make_heroine_ga_oosugiru.jpg)](https://myanimelist.net/anime/57524/Make_Heroine_ga_Oosugiru) | [Make Heroine ga Oosugiru!](https://subsplease.org/shows/make-heroine-ga-oosugiru) | TV | 12 / 12 | **Finished Airing** | 8.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Make+Heroine+ga+Oosugiru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57524__make_heroine_ga_oosugiru.txt) | **218** | 20272 | 2024-09-28 17:02 |
| 56894 | [![56894__dragon_ball_daima](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56894__dragon_ball_daima.jpg)](https://myanimelist.net/anime/56894/Dragon_Ball_Daima) | [Dragon Ball Daima](https://subsplease.org/shows/dragon-ball-daima) | TV | 14 / ? | Currently Airing | 7.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dragon+Ball+Daima+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56894__dragon_ball_daima.txt) | **210** | 14437 | 2025-01-17 16:46 |
| 54853 | [![54853__maou_2099](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54853__maou_2099.jpg)](https://myanimelist.net/anime/54853/Maou_2099) | [Maou 2099](https://subsplease.org/shows/maou-2099) | TV | 12 / 12 | **Finished Airing** | 7.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maou+2099+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54853__maou_2099.txt) | **210** | 13346 | 2024-12-28 17:31 |
| 57050 | [![57050__kisaki_kyouiku_kara_nigetai_watashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57050__kisaki_kyouiku_kara_nigetai_watashi.jpg)](https://myanimelist.net/anime/57050/Kisaki_Kyouiku_kara_Nigetai_Watashi) | [Kisaki Kyouiku kara Nigetai Watashi](https://subsplease.org/shows/kisaki-kyouiku-kara-nigetai-watashi) | TV | 3 / 12 | Currently Airing | 6.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kisaki+Kyouiku+kara+Nigetai+Watashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57050__kisaki_kyouiku_kara_nigetai_watashi.txt) | **206** | 5177 | 2025-01-19 14:17 |
| 51122 | [![51122__ookami_to_koushinryou_merchant_meets_the_wise_wolf](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51122__ookami_to_koushinryou_merchant_meets_the_wise_wolf.jpg)](https://myanimelist.net/anime/51122/Ookami_to_Koushinryou__Merchant_Meets_the_Wise_Wolf) | [Spice and Wolf (2024)](https://subsplease.org/shows/spice-and-wolf-2024) | TV | 25 / 25 | **Finished Airing** | 8.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Spice+and+Wolf+2024+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51122__ookami_to_koushinryou_merchant_meets_the_wise_wolf.txt) | **206** | 19466 | 2024-09-23 18:03 |
| 60022 | [![60022__one_piece_fan_letter](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/60022__one_piece_fan_letter.jpg)](https://myanimelist.net/anime/60022/One_Piece_Fan_Letter) | [One Piece Fan Letter](https://subsplease.org/shows/one-piece-fan-letter) | TV Special | 1 / 1 | **Finished Airing** | 9.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+One+Piece+Fan+Letter+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/60022__one_piece_fan_letter.txt) | **200** | 14512 | 2024-10-20 17:06 |
| 58059 | [![58059__tsue_to_tsurugi_no_wistoria](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58059__tsue_to_tsurugi_no_wistoria.jpg)](https://myanimelist.net/anime/58059/Tsue_to_Tsurugi_no_Wistoria) | [Tsue to Tsurugi no Wistoria](https://subsplease.org/shows/tsue-to-tsurugi-no-wistoria) | TV | 12 / 12 | **Finished Airing** | 7.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsue+to+Tsurugi+no+Wistoria+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58059__tsue_to_tsurugi_no_wistoria.txt) | **194** | 22157 | 2024-09-29 09:32 |
| 56653 | [![56653__bang_dream_ave_mujica](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56653__bang_dream_ave_mujica.jpg)](https://myanimelist.net/anime/56653/BanG_Dream_Ave_Mujica) | [BanG Dream! Ave Mujica](https://subsplease.org/shows/bang-dream-ave-mujica) | TV | 3 / 13 | Currently Airing | 8.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+BanG+Dream+Ave+Mujica+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56653__bang_dream_ave_mujica.txt) | **188** | 4279 | 2025-01-16 16:01 |
| 58172 | [![58172__nageki_no_bourei_wa_intai_shitai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58172__nageki_no_bourei_wa_intai_shitai.jpg)](https://myanimelist.net/anime/58172/Nageki_no_Bourei_wa_Intai_shitai) | [Nageki no Bourei wa Intai shitai](https://subsplease.org/shows/nageki-no-bourei-wa-intai-shitai) | TV | 13 / 13 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nageki+no+Bourei+wa+Intai+shitai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58172__nageki_no_bourei_wa_intai_shitai.txt) | **187** | 13089 | 2024-12-22 16:32 |
| 57891 | [![57891__hitoribocchi_no_isekai_kouryaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57891__hitoribocchi_no_isekai_kouryaku.jpg)](https://myanimelist.net/anime/57891/Hitoribocchi_no_Isekai_Kouryaku) | [Hitoribocchi no Isekai Kouryaku](https://subsplease.org/shows/hitoribocchi-no-isekai-kouryaku) | TV | 12 / 12 | **Finished Airing** | 6.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hitoribocchi+no+Isekai+Kouryaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57891__hitoribocchi_no_isekai_kouryaku.txt) | **180** | 13586 | 2024-12-12 15:03 |
| 53888 | [![53888__spy_x_family_movie_code_white](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53888__spy_x_family_movie_code_white.jpg)](https://myanimelist.net/anime/53888/Spy_x_Family_Movie__Code__White) | [Spy x Family - Code White](https://subsplease.org/shows/spy-x-family-code-white) | Movie | 1 / 1 | **Finished Airing** | 8.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Spy+x+Family+Code+White+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53888__spy_x_family_movie_code_white.txt) | **177** | 12266 | 2024-09-07 05:51 |
| 55150 | [![55150__yarinaoshi_reijou_wa_ryuutei_heika_wo_kouryakuchuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55150__yarinaoshi_reijou_wa_ryuutei_heika_wo_kouryakuchuu.jpg)](https://myanimelist.net/anime/55150/Yarinaoshi_Reijou_wa_Ryuutei_Heika_wo_Kouryakuchuu) | [Yarinaoshi Reijou wa Ryuutei Heika wo Kouryakuchuu](https://subsplease.org/shows/yarinaoshi-reijou-wa-ryuutei-heika-wo-kouryakuchuu) | TV | 12 / 12 | **Finished Airing** | 7.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yarinaoshi+Reijou+wa+Ryuutei+Heika+wo+Kouryakuchuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55150__yarinaoshi_reijou_wa_ryuutei_heika_wo_kouryakuchuu.txt) | **170** | 9806 | 2024-12-25 14:32 |
| 50306 | [![50306__seirei_gensouki_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50306__seirei_gensouki_2.jpg)](https://myanimelist.net/anime/50306/Seirei_Gensouki_2) | [Seirei Gensouki S2](https://subsplease.org/shows/seirei-gensouki-s2) | TV | 12 / 12 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seirei+Gensouki+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50306__seirei_gensouki_2.txt) | **170** | 10374 | 2024-12-23 18:32 |
| 58066 | [![58066__sorairo_utility_tv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58066__sorairo_utility_tv.jpg)](https://myanimelist.net/anime/58066/Sorairo_Utility_TV) | [Sorairo Utility](https://subsplease.org/shows/sorairo-utility) | TV | 4 / 12 | Currently Airing | 6.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sorairo+Utility+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58066__sorairo_utility_tv.txt) | **169** | 4508 | 2025-01-17 17:02 |
| 57944 | [![57944__party_kara_tsuihou_sareta_sono_chiyushi_jitsu_wa_saikyou_ni_tsuki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57944__party_kara_tsuihou_sareta_sono_chiyushi_jitsu_wa_saikyou_ni_tsuki.jpg)](https://myanimelist.net/anime/57944/Party_kara_Tsuihou_sareta_Sono_Chiyushi_Jitsu_wa_Saikyou_ni_Tsuki) | [Party kara Tsuihou sareta Sono Chiyushi, Jitsu wa Saikyou ni Tsuki](https://subsplease.org/shows/party-kara-tsuihou-sareta-sono-chiyushi-jitsu-wa-saikyou-ni-tsuki) | TV | 12 / 12 | **Finished Airing** | 5.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Party+kara+Tsuihou+sareta+Sono+Chiyushi+Jitsu+wa+Saikyou+ni+Tsuki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57944__party_kara_tsuihou_sareta_sono_chiyushi_jitsu_wa_saikyou_ni_tsuki.txt) | **168** | 11443 | 2024-12-21 19:32 |
| 57611 | [![57611__kimi_wa_meido_sama](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57611__kimi_wa_meido_sama.jpg)](https://myanimelist.net/anime/57611/Kimi_wa_Meido-sama) | [Kimi wa Meido-sama](https://subsplease.org/shows/kimi-wa-meido-sama) | TV | 12 / 12 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+wa+Meido+sama+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57611__kimi_wa_meido_sama.txt) | **160** | 9901 | 2024-12-21 19:47 |
| 57864 | [![57864__monogatari_series_off_monster_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57864__monogatari_series_off_monster_season.jpg)](https://myanimelist.net/anime/57864/Monogatari_Series__Off___Monster_Season) | [Monogatari Series - Off & Monster Season](https://subsplease.org/shows/monogatari-series-off-monster-season) | ONA | 15 / 14 | **Finished Airing** | 8.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Monogatari+Series+Off+Monster+Season+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57864__monogatari_series_off_monster_season.txt) | **156** | 12783 | 2024-10-19 14:32 |
| 56228 | [![56228__rekishi_ni_nokoru_akujo_ni_naru_zo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56228__rekishi_ni_nokoru_akujo_ni_naru_zo.jpg)](https://myanimelist.net/anime/56228/Rekishi_ni_Nokoru_Akujo_ni_Naru_zo) | [Rekishi ni Nokoru Akujo ni Naru zo](https://subsplease.org/shows/rekishi-ni-nokoru-akujo-ni-naru-zo) | TV | 13 / 13 | **Finished Airing** | 7.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rekishi+ni+Nokoru+Akujo+ni+Naru+zo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56228__rekishi_ni_nokoru_akujo_ni_naru_zo.txt) | **156** | 9925 | 2024-12-24 16:02 |
| 52588 | [![52588__kaijuu_8_gou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52588__kaijuu_8_gou.jpg)](https://myanimelist.net/anime/52588/Kaijuu_8-gou) | [Kaijuu 8-gou](https://subsplease.org/shows/kaijuu-8-gou) | TV | 12 / 12 | **Finished Airing** | 8.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaijuu+8+gou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52588__kaijuu_8_gou.txt) | **154** | 35930 | 2024-06-29 14:31 |
| 52034 | [![52034__oshi_no_ko](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52034__oshi_no_ko.jpg)](https://myanimelist.net/anime/52034/Oshi_no_Ko) | [Oshi no Ko](https://subsplease.org/shows/oshi-no-ko) | TV | 25 / 11 | **Finished Airing** | 8.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Oshi+no+Ko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52034__oshi_no_ko.txt) | **153** | 38562 | 2024-10-06 11:02 |
| 52367 | [![52367__isekai_shikkaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52367__isekai_shikkaku.jpg)](https://myanimelist.net/anime/52367/Isekai_Shikkaku) | [Isekai Shikkaku](https://subsplease.org/shows/isekai-shikkaku) | TV | 12 / 12 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Shikkaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52367__isekai_shikkaku.txt) | **152** | 16367 | 2024-09-24 15:03 |
| 58714 | [![58714__saikyou_no_shienshoku_wajutsushi_de_aru_ore_wa_sekai_saikyou_clan_wo_shitagaeru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58714__saikyou_no_shienshoku_wajutsushi_de_aru_ore_wa_sekai_saikyou_clan_wo_shitagaeru.jpg)](https://myanimelist.net/anime/58714/Saikyou_no_Shienshoku_Wajutsushi_de_Aru_Ore_wa_Sekai_Saikyou_Clan_wo_Shitagaeru) | [Wajutsushi](https://subsplease.org/shows/wajutsushi) | TV | 12 / 12 | **Finished Airing** | 7.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Wajutsushi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58714__saikyou_no_shienshoku_wajutsushi_de_aru_ore_wa_sekai_saikyou_clan_wo_shitagaeru.txt) | **152** | 10824 | 2024-12-16 17:02 |
| 54724 | [![54724__nige_jouzu_no_wakagimi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54724__nige_jouzu_no_wakagimi.jpg)](https://myanimelist.net/anime/54724/Nige_Jouzu_no_Wakagimi) | [Nige Jouzu no Wakagimi](https://subsplease.org/shows/nige-jouzu-no-wakagimi) | TV | 13 / 12 | **Finished Airing** | 7.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nige+Jouzu+no+Wakagimi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54724__nige_jouzu_no_wakagimi.txt) | **148** | 13549 | 2024-09-28 16:02 |
| 56752 | [![56752__shiguang_dailiren_yingdu_pian](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56752__shiguang_dailiren_yingdu_pian.jpg)](https://myanimelist.net/anime/56752/Shiguang_Dailiren__Yingdu_Pian) | [Link Click - Bridon Arc](https://subsplease.org/shows/link-click-bridon-arc) | ONA | 4 / 6 | Currently Airing | 8.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Link+Click+Bridon+Arc+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56752__shiguang_dailiren_yingdu_pian.txt) | **146** | 3544 | 2025-01-17 05:02 |
| 58445 | [![58445__sayounara_ryuusei_konnichiwa_jinsei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58445__sayounara_ryuusei_konnichiwa_jinsei.jpg)](https://myanimelist.net/anime/58445/Sayounara_Ryuusei_Konnichiwa_Jinsei) | [Sayounara Ryuusei, Konnichiwa Jinsei](https://subsplease.org/shows/sayounara-ryuusei-konnichiwa-jinsei) | TV | 12 / 12 | **Finished Airing** | 6.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sayounara+Ryuusei+Konnichiwa+Jinsei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58445__sayounara_ryuusei_konnichiwa_jinsei.txt) | **146** | 10466 | 2024-12-19 18:17 |
| 57058 | [![57058__ore_wa_subete_wo_parry_suru_gyaku_kanchigai_no_sekai_saikyou_wa_boukensha_ni_naritai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57058__ore_wa_subete_wo_parry_suru_gyaku_kanchigai_no_sekai_saikyou_wa_boukensha_ni_naritai.jpg)](https://myanimelist.net/anime/57058/Ore_wa_Subete_wo_Parry_suru__Gyaku_Kanchigai_no_Sekai_Saikyou_wa_Boukensha_ni_Naritai) | [Ore wa Subete wo Parry suru](https://subsplease.org/shows/ore-wa-subete-wo-parry-suru) | TV | 12 / 12 | **Finished Airing** | 6.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ore+wa+Subete+wo+Parry+suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57058__ore_wa_subete_wo_parry_suru_gyaku_kanchigai_no_sekai_saikyou_wa_boukensha_ni_naritai.txt) | **143** | 20480 | 2024-09-19 15:01 |
| 58259 | [![58259__douse_koishite_shimaunda](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58259__douse_koishite_shimaunda.jpg)](https://myanimelist.net/anime/58259/Douse_Koishite_Shimaunda) | [Douse, Koishite Shimaunda](https://subsplease.org/shows/douse-koishite-shimaunda) | TV | 2 / 12 | Currently Airing | 6.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Douse+Koishite+Shimaunda+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58259__douse_koishite_shimaunda.txt) | **138** | 3652 | 2025-01-16 18:23 |
| 55071 | [![55071__amagami_san_chi_no_enmusubi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55071__amagami_san_chi_no_enmusubi.jpg)](https://myanimelist.net/anime/55071/Amagami-san_Chi_no_Enmusubi) | [Amagami-san Chi no Enmusubi](https://subsplease.org/shows/amagami-san-chi-no-enmusubi) | TV | 14 / 24 | Currently Airing | 7.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Amagami+san+Chi+no+Enmusubi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55071__amagami_san_chi_no_enmusubi.txt) | **137** | 6728 | 2025-01-14 16:31 |
| 55888 | [![55888__mushoku_tensei_ii_isekai_ittara_honki_dasu_part_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55888__mushoku_tensei_ii_isekai_ittara_honki_dasu_part_2.jpg)](https://myanimelist.net/anime/55888/Mushoku_Tensei_II__Isekai_Ittara_Honki_Dasu_Part_2) | [Mushoku Tensei S2](https://subsplease.org/shows/mushoku-tensei-s2) | TV | 25 / 12 | **Finished Airing** | 8.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mushoku+Tensei+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55888__mushoku_tensei_ii_isekai_ittara_honki_dasu_part_2.txt) | **130** | 40056 | 2024-06-30 15:32 |
| 56964 | [![56964__raise_wa_tanin_ga_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56964__raise_wa_tanin_ga_ii.jpg)](https://myanimelist.net/anime/56964/Raise_wa_Tanin_ga_Ii) | [Raise wa Tanin ga Ii](https://subsplease.org/shows/raise-wa-tanin-ga-ii) | TV | 12 / 12 | **Finished Airing** | 7.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Raise+wa+Tanin+ga+Ii+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56964__raise_wa_tanin_ga_ii.txt) | **128** | 6360 | 2024-12-23 15:57 |
| 54968 | [![54968__giji_harem](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54968__giji_harem.jpg)](https://myanimelist.net/anime/54968/Giji_Harem) | [Giji Harem](https://subsplease.org/shows/giji-harem) | TV | 12 / 12 | **Finished Airing** | 7.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Giji+Harem+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54968__giji_harem.txt) | **128** | 10545 | 2024-09-19 16:32 |
| 55887 | [![55887__kekkon_suru_tte_hontou_desu_ka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55887__kekkon_suru_tte_hontou_desu_ka.jpg)](https://myanimelist.net/anime/55887/Kekkon_suru_tte_Hontou_desu_ka) | [Kekkon suru tte, Hontou desu ka](https://subsplease.org/shows/kekkon-suru-tte-hontou-desu-ka) | TV | 12 / 12 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kekkon+suru+tte+Hontou+desu+ka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55887__kekkon_suru_tte_hontou_desu_ka.txt) | **126** | 7543 | 2024-12-19 16:32 |
| 53802 | [![53802__2_5_jigen_no_ririsa](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53802__2_5_jigen_no_ririsa.jpg)](https://myanimelist.net/anime/53802/25-jigen_no_Ririsa) | [2.5-jigen no Ririsa](https://subsplease.org/shows/2-5-jigen-no-ririsa) | TV | 24 / 24 | **Finished Airing** | 7.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+2+5+jigen+no+Ririsa+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53802__2_5_jigen_no_ririsa.txt) | **120** | 9724 | 2024-12-13 13:32 |
| 55265 | [![55265__tensei_kizoku_kantei_skill_de_nariagaru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55265__tensei_kizoku_kantei_skill_de_nariagaru.jpg)](https://myanimelist.net/anime/55265/Tensei_Kizoku_Kantei_Skill_de_Nariagaru) | [Tensei Kizoku, Kantei Skill de Nariagaru](https://subsplease.org/shows/tensei-kizoku-kantei-skill-de-nariagaru) | TV | 24 / 12 | **Finished Airing** | 7.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Kizoku+Kantei+Skill+de+Nariagaru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55265__tensei_kizoku_kantei_skill_de_nariagaru.txt) | **119** | 14160 | 2024-12-22 16:17 |
| 52481 | [![52481__gimai_seikatsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52481__gimai_seikatsu.jpg)](https://myanimelist.net/anime/52481/Gimai_Seikatsu) | [Gimai Seikatsu](https://subsplease.org/shows/gimai-seikatsu) | TV | 12 / 12 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gimai+Seikatsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52481__gimai_seikatsu.txt) | **117** | 11828 | 2024-09-19 12:32 |
| 58516 | [![58516__ao_no_exorcist_yuki_no_hate_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58516__ao_no_exorcist_yuki_no_hate_hen.jpg)](https://myanimelist.net/anime/58516/Ao_no_Exorcist__Yuki_no_Hate-hen) | [Ao no Exorcist - Yuki no Hate-hen](https://subsplease.org/shows/ao-no-exorcist-yuki-no-hate-hen) | TV | 12 / 12 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ao+no+Exorcist+Yuki+no+Hate+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58516__ao_no_exorcist_yuki_no_hate_hen.txt) | **117** | 7419 | 2024-12-21 18:02 |
| 54722 | [![54722__mahou_shoujo_ni_akogarete](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54722__mahou_shoujo_ni_akogarete.jpg)](https://myanimelist.net/anime/54722/Mahou_Shoujo_ni_Akogarete) | [Mahou Shoujo ni Akogarete](https://subsplease.org/shows/mahou-shoujo-ni-akogarete) | TV | 13 / 13 | **Finished Airing** | 7.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahou+Shoujo+ni+Akogarete+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54722__mahou_shoujo_ni_akogarete.txt) | **115** | 21299 | 2024-03-27 16:03 |
| 54492 | [![54492__kusuriya_no_hitorigoto](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54492__kusuriya_no_hitorigoto.jpg)](https://myanimelist.net/anime/54492/Kusuriya_no_Hitorigoto) | [Kusuriya no Hitorigoto](https://subsplease.org/shows/kusuriya-no-hitorigoto) | TV | 26 / 24 | **Finished Airing** | 8.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kusuriya+no+Hitorigoto+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54492__kusuriya_no_hitorigoto.txt) | **114** | 26761 | 2025-01-17 17:17 |
| 54913 | [![54913__shinmai_ossan_boukensha_saikyou_party_ni_shinu_hodo_kitaerarete_muteki_ni_naru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54913__shinmai_ossan_boukensha_saikyou_party_ni_shinu_hodo_kitaerarete_muteki_ni_naru.jpg)](https://myanimelist.net/anime/54913/Shinmai_Ossan_Boukensha_Saikyou_Party_ni_Shinu_hodo_Kitaerarete_Muteki_ni_Naru) | [Shinmai Ossan Boukensha](https://subsplease.org/shows/shinmai-ossan-boukensha) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinmai+Ossan+Boukensha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54913__shinmai_ossan_boukensha_saikyou_party_ni_shinu_hodo_kitaerarete_muteki_ni_naru.txt) | **114** | 15496 | 2024-09-23 17:31 |
| 54595 | [![54595__kage_no_jitsuryokusha_ni_naritakute_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54595__kage_no_jitsuryokusha_ni_naritakute_2nd_season.jpg)](https://myanimelist.net/anime/54595/Kage_no_Jitsuryokusha_ni_Naritakute_2nd_Season) | [Kage no Jitsuryokusha ni Naritakute! S2](https://subsplease.org/shows/kage-no-jitsuryokusha-ni-naritakute-s2) | TV | 12 / 12 | **Finished Airing** | 8.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kage+no+Jitsuryokusha+ni+Naritakute+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54595__kage_no_jitsuryokusha_ni_naritakute_2nd_season.txt) | **112** | 37333 | 2023-12-20 14:31 |
| 54769 | [![54769__sousei_no_aquarion_myth_of_emotions](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54769__sousei_no_aquarion_myth_of_emotions.jpg)](https://myanimelist.net/anime/54769/Sousei_no_Aquarion__Myth_of_Emotions) | [Sousei no Aquarion - Myth of Emotions](https://subsplease.org/shows/sousei-no-aquarion-myth-of-emotions) | TV | 2 / 12 | Currently Airing | 5.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sousei+no+Aquarion+Myth+of+Emotions+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54769__sousei_no_aquarion_myth_of_emotions.txt) | **111** | 3268 | 2025-01-16 21:50 |
| 57892 | [![57892__hazurewaku_no_joutai_ijou_skill_de_saikyou_ni_natta_ore_ga_subete_wo_juurin_suru_made](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57892__hazurewaku_no_joutai_ijou_skill_de_saikyou_ni_natta_ore_ga_subete_wo_juurin_suru_made.jpg)](https://myanimelist.net/anime/57892/Hazurewaku_no_Joutai_Ijou_Skill_de_Saikyou_ni_Natta_Ore_ga_Subete_wo_Juurin_suru_made) | [Hazurewaku](https://subsplease.org/shows/hazurewaku) | TV | 12 / 12 | **Finished Airing** | 6.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hazurewaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57892__hazurewaku_no_joutai_ijou_skill_de_saikyou_ni_natta_ore_ga_subete_wo_juurin_suru_made.txt) | **109** | 16326 | 2024-09-26 18:08 |
| 49889 | [![49889__tsuki_ga_michibiku_isekai_douchuu_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49889__tsuki_ga_michibiku_isekai_douchuu_2nd_season.jpg)](https://myanimelist.net/anime/49889/Tsuki_ga_Michibiku_Isekai_Douchuu_2nd_Season) | [Tsuki ga Michibiku Isekai Douchuu S2](https://subsplease.org/shows/tsuki-ga-michibiku-isekai-douchuu-s2) | TV | 25 / 25 | **Finished Airing** | 7.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsuki+ga+Michibiku+Isekai+Douchuu+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49889__tsuki_ga_michibiku_isekai_douchuu_2nd_season.txt) | **104** | 20204 | 2024-06-24 15:02 |
| 54726 | [![54726__tsuma_shougakusei_ni_naru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54726__tsuma_shougakusei_ni_naru.jpg)](https://myanimelist.net/anime/54726/Tsuma_Shougakusei_ni_Naru) | [Tsuma, Shougakusei ni Naru](https://subsplease.org/shows/tsuma-shougakusei-ni-naru) | TV | 12 / 12 | **Finished Airing** | 7.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsuma+Shougakusei+ni+Naru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54726__tsuma_shougakusei_ni_naru.txt) | **104** | 5908 | 2024-12-15 14:47 |
| 53033 | [![53033__mecha_ude_tv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53033__mecha_ude_tv.jpg)](https://myanimelist.net/anime/53033/Mecha-ude_TV) | [Mecha-ude](https://subsplease.org/shows/mecha-ude) | TV | 12 / 12 | **Finished Airing** | 6.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mecha+ude+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53033__mecha_ude_tv.txt) | **104** | 7052 | 2024-12-19 17:32 |
| 59425 | [![59425__negaposi_angler](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59425__negaposi_angler.jpg)](https://myanimelist.net/anime/59425/NegaPosi_Angler) | [NegaPosi Angler](https://subsplease.org/shows/negaposi-angler) | TV | 12 / 12 | **Finished Airing** | 7.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+NegaPosi+Angler+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59425__negaposi_angler.txt) | **103** | 5555 | 2024-12-19 14:32 |
| 54284 | [![54284__vtuber_nandaga_haishin_kiri_wasuretara_densetsu_ni_natteta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54284__vtuber_nandaga_haishin_kiri_wasuretara_densetsu_ni_natteta.jpg)](https://myanimelist.net/anime/54284/VTuber_Nandaga_Haishin_Kiri_Wasuretara_Densetsu_ni_Natteta) | [VTuber Nandaga Haishin Kiri Wasuretara Densetsu ni Natteta](https://subsplease.org/shows/vtuber-nandaga-haishin-kiri-wasuretara-densetsu-ni-natteta) | TV | 12 / 12 | **Finished Airing** | 7.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+VTuber+Nandaga+Haishin+Kiri+Wasuretara+Densetsu+ni+Natteta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54284__vtuber_nandaga_haishin_kiri_wasuretara_densetsu_ni_natteta.txt) | **103** | 7842 | 2024-09-22 15:32 |
| 57810 | [![57810__shoushimin_series](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57810__shoushimin_series.jpg)](https://myanimelist.net/anime/57810/Shoushimin_Series) | [Shoushimin Series](https://subsplease.org/shows/shoushimin-series) | TV | 10 / 10 | **Finished Airing** | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shoushimin+Series+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57810__shoushimin_series.txt) | **102** | 9220 | 2024-09-14 18:02 |
| 52347 | [![52347__shangri_la_frontier_kusoge_hunter_kamige_ni_idoman_to_su](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52347__shangri_la_frontier_kusoge_hunter_kamige_ni_idoman_to_su.jpg)](https://myanimelist.net/anime/52347/Shangri-La_Frontier__Kusoge_Hunter_Kamige_ni_Idoman_to_su) | [Shangri-La Frontier](https://subsplease.org/shows/shangri-la-frontier) | TV | 41 / 25 | **Finished Airing** | 8.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shangri+La+Frontier+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52347__shangri_la_frontier_kusoge_hunter_kamige_ni_idoman_to_su.txt) | **102** | 22539 | 2025-01-19 10:32 |
| 57876 | [![57876__maougun_saikyou_no_majutsushi_wa_ningen_datta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57876__maougun_saikyou_no_majutsushi_wa_ningen_datta.jpg)](https://myanimelist.net/anime/57876/Maougun_Saikyou_no_Majutsushi_wa_Ningen_datta) | [Maougun Saikyou no Majutsushi wa Ningen datta](https://subsplease.org/shows/maougun-saikyou-no-majutsushi-wa-ningen-datta) | TV | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maougun+Saikyou+no+Majutsushi+wa+Ningen+datta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57876__maougun_saikyou_no_majutsushi_wa_ningen_datta.txt) | **101** | 12654 | 2024-09-11 14:02 |
| 50713 | [![50713__mahouka_koukou_no_rettousei_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50713__mahouka_koukou_no_rettousei_3rd_season.jpg)](https://myanimelist.net/anime/50713/Mahouka_Koukou_no_Rettousei_3rd_Season) | [Mahouka Koukou no Rettousei S3](https://subsplease.org/shows/mahouka-koukou-no-rettousei-s3) | TV | 13 / 13 | **Finished Airing** | 7.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahouka+Koukou+no+Rettousei+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50713__mahouka_koukou_no_rettousei_3rd_season.txt) | **101** | 14810 | 2024-06-28 16:02 |
| 56400 | [![56400__maou_sama_retry_r](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56400__maou_sama_retry_r.jpg)](https://myanimelist.net/anime/56400/Maou-sama_Retry_R) | [Maou-sama, Retry! R](https://subsplease.org/shows/maou-sama-retry-r) | TV | 12 / 12 | **Finished Airing** | 5.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maou+sama+Retry+R+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56400__maou_sama_retry_r.txt) | **101** | 7334 | 2024-12-14 15:17 |
| 54839 | [![54839__yoru_no_kurage_wa_oyogenai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54839__yoru_no_kurage_wa_oyogenai.jpg)](https://myanimelist.net/anime/54839/Yoru_no_Kurage_wa_Oyogenai) | [Yoru no Kurage wa Oyogenai](https://subsplease.org/shows/yoru-no-kurage-wa-oyogenai) | TV | 12 / 12 | **Finished Airing** | 7.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yoru+no+Kurage+wa+Oyogenai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54839__yoru_no_kurage_wa_oyogenai.txt) | **101** | 12574 | 2024-06-22 16:32 |
| 54837 | [![54837__akuyaku_reijou_level_99_watashi_wa_ura_boss_desu_ga_maou_dewa_arimasen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54837__akuyaku_reijou_level_99_watashi_wa_ura_boss_desu_ga_maou_dewa_arimasen.jpg)](https://myanimelist.net/anime/54837/Akuyaku_Reijou_Level_99__Watashi_wa_Ura-Boss_desu_ga_Maou_dewa_Arimasen) | [Akuyaku Reijou Level 99](https://subsplease.org/shows/akuyaku-reijou-level-99) | TV | 12 / 12 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akuyaku+Reijou+Level+99+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54837__akuyaku_reijou_level_99_watashi_wa_ura_boss_desu_ga_maou_dewa_arimasen.txt) | **99** | 16912 | 2024-03-26 15:31 |
| 47160 | [![47160__goblin_slayer_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47160__goblin_slayer_ii.jpg)](https://myanimelist.net/anime/47160/Goblin_Slayer_II) | [Goblin Slayer S2](https://subsplease.org/shows/goblin-slayer-s2) | TV | 12 / 12 | **Finished Airing** | 7.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Goblin+Slayer+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47160__goblin_slayer_ii.txt) | **98** | 28553 | 2023-12-22 14:31 |
| 53410 | [![53410__yuru_camp_season_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53410__yuru_camp_season_3.jpg)](https://myanimelist.net/anime/53410/Yuru_Camp△_Season_3) | [Yuru Camp S3](https://subsplease.org/shows/yuru-camp-s3) | TV | 15 / 12 | **Finished Airing** | 8.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuru+Camp+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53410__yuru_camp_season_3.txt) | **98** | 9672 | 2024-10-26 23:39 |
| 53835 | [![53835__unnamed_memory](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53835__unnamed_memory.jpg)](https://myanimelist.net/anime/53835/Unnamed_Memory) | [Unnamed Memory](https://subsplease.org/shows/unnamed-memory) | TV | 14 / 12 | **Finished Airing** | 6.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Unnamed+Memory+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53835__unnamed_memory.txt) | **96** | 11493 | 2025-01-14 16:04 |
| 60108 | [![60108__one_piece_gyojin_tou_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/60108__one_piece_gyojin_tou_hen.jpg)](https://myanimelist.net/anime/60108/One_Piece__Gyojin_Tou-hen) | [One Piece Log - Fish-Man Island Saga](https://subsplease.org/shows/one-piece-log-fish-man-island-saga) | TV | 12 / 21 | Currently Airing | 8.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+One+Piece+Log+Fish+Man+Island+Saga+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/60108__one_piece_gyojin_tou_hen.txt) | **96** | 5718 | 2025-01-19 02:01 |
| 49613 | [![49613__chiyu_mahou_no_machigatta_tsukaikata](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49613__chiyu_mahou_no_machigatta_tsukaikata.jpg)](https://myanimelist.net/anime/49613/Chiyu_Mahou_no_Machigatta_Tsukaikata) | [Chiyu Mahou no Machigatta Tsukaikata](https://subsplease.org/shows/chiyu-mahou-no-machigatta-tsukaikata) | TV | 13 / 13 | **Finished Airing** | 7.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Chiyu+Mahou+no+Machigatta+Tsukaikata+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49613__chiyu_mahou_no_machigatta_tsukaikata.txt) | **95** | 20023 | 2024-03-29 16:01 |
| 54717 | [![54717__mahoutsukai_precure_mirai_days](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54717__mahoutsukai_precure_mirai_days.jpg)](https://myanimelist.net/anime/54717/Mahoutsukai_Precure_Mirai_Days) | [Mahoutsukai Precure!! Mirai Days](https://subsplease.org/shows/mahoutsukai-precure-mirai-days) | TV | 2 / 12 | Currently Airing | 7.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+Precure+Mirai+Days+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54717__mahoutsukai_precure_mirai_days.txt) | **94** | 1311 | 2025-01-19 08:28 |
| 56062 | [![56062__naze_boku_no_sekai_wo_daremo_oboeteinai_no_ka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56062__naze_boku_no_sekai_wo_daremo_oboeteinai_no_ka.jpg)](https://myanimelist.net/anime/56062/Naze_Boku_no_Sekai_wo_Daremo_Oboeteinai_no_ka) | [Naze Boku no Sekai wo Daremo Oboeteinai no ka](https://subsplease.org/shows/naze-boku-no-sekai-wo-daremo-oboeteinai-no-ka) | TV | 12 / 12 | **Finished Airing** | 6.24 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Naze+Boku+no+Sekai+wo+Daremo+Oboeteinai+no+ka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56062__naze_boku_no_sekai_wo_daremo_oboeteinai_no_ka.txt) | **93** | 10494 | 2024-09-28 15:17 |
| 56923 | [![56923__lv2_kara_cheat_datta_motoyuusha_kouho_no_mattari_isekai_life](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56923__lv2_kara_cheat_datta_motoyuusha_kouho_no_mattari_isekai_life.jpg)](https://myanimelist.net/anime/56923/Lv2_kara_Cheat_datta_Motoyuusha_Kouho_no_Mattari_Isekai_Life) | [Lv2 kara Cheat datta Motoyuusha Kouho no Mattari Isekai Life](https://subsplease.org/shows/lv2-kara-cheat-datta-motoyuusha-kouho-no-mattari-isekai-life) | TV | 12 / 12 | **Finished Airing** | 6.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lv2+kara+Cheat+datta+Motoyuusha+Kouho+no+Mattari+Isekai+Life+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56923__lv2_kara_cheat_datta_motoyuusha_kouho_no_mattari_isekai_life.txt) | **92** | 16449 | 2024-06-24 14:32 |
| 52742 | [![52742__haikyuu_movie_gomisuteba_no_kessen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52742__haikyuu_movie_gomisuteba_no_kessen.jpg)](https://myanimelist.net/anime/52742/Haikyuu_Movie__Gomisuteba_no_Kessen) | [Haikyuu!! Movie - Gomisuteba no Kessen](https://subsplease.org/shows/haikyuu-movie-gomisuteba-no-kessen) | Movie | 1 / 1 | **Finished Airing** | 8.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Haikyuu+Movie+Gomisuteba+no+Kessen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52742__haikyuu_movie_gomisuteba_no_kessen.txt) | **91** | 7138 | 2024-11-03 00:13 |
| 51105 | [![51105__nier_automata_ver1_1a](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51105__nier_automata_ver1_1a.jpg)](https://myanimelist.net/anime/51105/NieR_Automata_Ver11a) | [NieR Automata Ver1.1a](https://subsplease.org/shows/nier-automata-ver1-1a) | TV | 25 / 12 | **Finished Airing** | 7.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+NieR+Automata+Ver1+1a+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51105__nier_automata_ver1_1a.txt) | **90** | 14917 | 2024-09-27 16:01 |
| 53516 | [![53516__tensei_shitara_dainana_ouji_datta_node_kimama_ni_majutsu_wo_kiwamemasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53516__tensei_shitara_dainana_ouji_datta_node_kimama_ni_majutsu_wo_kiwamemasu.jpg)](https://myanimelist.net/anime/53516/Tensei_shitara_Dainana_Ouji_Datta_node_Kimama_ni_Majutsu_wo_Kiwamemasu) | [Dainanaoji](https://subsplease.org/shows/dainanaoji) | TV | 12 / 12 | **Finished Airing** | 7.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dainanaoji+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53516__tensei_shitara_dainana_ouji_datta_node_kimama_ni_majutsu_wo_kiwamemasu.txt) | **89** | 15362 | 2024-06-17 16:32 |
| 54855 | [![54855__senpai_wa_otokonoko](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54855__senpai_wa_otokonoko.jpg)](https://myanimelist.net/anime/54855/Senpai_wa_Otokonoko) | [Senpai wa Otokonoko](https://subsplease.org/shows/senpai-wa-otokonoko) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Senpai+wa+Otokonoko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54855__senpai_wa_otokonoko.txt) | **88** | 5804 | 2024-09-26 18:32 |
| 56352 | [![56352__loop_7_kaime_no_akuyaku_reijou_wa_moto_tekikoku_de_jiyuu_kimama_na_hanayome_seikatsu_wo_mankitsu_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56352__loop_7_kaime_no_akuyaku_reijou_wa_moto_tekikoku_de_jiyuu_kimama_na_hanayome_seikatsu_wo_mankitsu_suru.jpg)](https://myanimelist.net/anime/56352/Loop_7-kaime_no_Akuyaku_Reijou_wa_Moto_Tekikoku_de_Jiyuu_Kimama_na_Hanayome_Seikatsu_wo_Mankitsu_suru) | [7th Time Loop](https://subsplease.org/shows/7th-time-loop) | TV | 12 / 12 | **Finished Airing** | 7.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+7th+Time+Loop+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56352__loop_7_kaime_no_akuyaku_reijou_wa_moto_tekikoku_de_jiyuu_kimama_na_hanayome_seikatsu_wo_mankitsu_suru.txt) | **87** | 14182 | 2024-03-24 14:16 |
| 51019 | [![51019__kimetsu_no_yaiba_katanakaji_no_sato_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51019__kimetsu_no_yaiba_katanakaji_no_sato_hen.jpg)](https://myanimelist.net/anime/51019/Kimetsu_no_Yaiba__Katanakaji_no_Sato-hen) | [Kimetsu no Yaiba - Katanakaji no Sato-hen](https://subsplease.org/shows/kimetsu-no-yaiba-katanakaji-no-sato-hen) | TV | 11 / 11 | **Finished Airing** | 8.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimetsu+no+Yaiba+Katanakaji+no+Sato+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51019__kimetsu_no_yaiba_katanakaji_no_sato_hen.txt) | **83** | 45582 | 2023-06-19 03:10 |
| 50265 | [![50265__spy_x_family](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50265__spy_x_family.jpg)](https://myanimelist.net/anime/50265/Spy_x_Family) | [Spy x Family](https://subsplease.org/shows/spy-x-family) | TV | 37 / 12 | **Finished Airing** | 8.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Spy+x+Family+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50265__spy_x_family.txt) | **83** | 30638 | 2023-12-23 16:34 |
| 57646 | [![57646__mob_kara_hajimaru_tansaku_eiyuutan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57646__mob_kara_hajimaru_tansaku_eiyuutan.jpg)](https://myanimelist.net/anime/57646/Mob_kara_Hajimaru_Tansaku_Eiyuutan) | [Mob kara Hajimaru Tansaku Eiyuutan](https://subsplease.org/shows/mob-kara-hajimaru-tansaku-eiyuutan) | TV | 12 / 12 | **Finished Airing** | 5.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mob+kara+Hajimaru+Tansaku+Eiyuutan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57646__mob_kara_hajimaru_tansaku_eiyuutan.txt) | **83** | 9665 | 2024-09-14 14:02 |
| 57362 | [![57362__hoshifuru_oukoku_no_nina](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57362__hoshifuru_oukoku_no_nina.jpg)](https://myanimelist.net/anime/57362/Hoshifuru_Oukoku_no_Nina) | [Hoshifuru Oukoku no Nina](https://subsplease.org/shows/hoshifuru-oukoku-no-nina) | TV | 12 / 12 | **Finished Airing** | 7.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hoshifuru+Oukoku+no+Nina+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57362__hoshifuru_oukoku_no_nina.txt) | **83** | 4825 | 2024-12-23 15:02 |
| 57217 | [![57217__katsute_mahou_shoujo_to_aku_wa_tekitai_shiteita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57217__katsute_mahou_shoujo_to_aku_wa_tekitai_shiteita.jpg)](https://myanimelist.net/anime/57217/Katsute_Mahou_Shoujo_to_Aku_wa_Tekitai_shiteita) | [Katsute Mahou Shoujo to Aku wa Tekitai shiteita](https://subsplease.org/shows/katsute-mahou-shoujo-to-aku-wa-tekitai-shiteita) | TV | 12 / 12 | **Finished Airing** | 7.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Katsute+Mahou+Shoujo+to+Aku+wa+Tekitai+shiteita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57217__katsute_mahou_shoujo_to_aku_wa_tekitai_shiteita.txt) | **82** | 7723 | 2024-09-24 14:47 |
| 58883 | [![58883__dead_dead_demons_dededede_destruction_ova](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58883__dead_dead_demons_dededede_destruction_ova.jpg)](https://myanimelist.net/anime/58883/Dead_Dead_Demons_Dededede_Destruction_OVA) | [Dead Dead Demons Dededede Destruction](https://subsplease.org/shows/dead-dead-demons-dededede-destruction) | OVA | 18 / 17 | **Finished Airing** | 7.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dead+Dead+Demons+Dededede+Destruction+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58883__dead_dead_demons_dededede_destruction_ova.txt) | **81** | 6923 | 2024-09-20 03:22 |
| 53127 | [![53127__fate_strange_fake_whispers_of_dawn](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53127__fate_strange_fake_whispers_of_dawn.jpg)](https://myanimelist.net/anime/53127/Fate_strange_Fake__Whispers_of_Dawn) | [Fate strange Fake - Whispers of Dawn](https://subsplease.org/shows/fate-strange-fake-whispers-of-dawn) | TV Special | 1 / 1 | **Finished Airing** | 8.19 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fate+strange+Fake+Whispers+of+Dawn+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53127__fate_strange_fake_whispers_of_dawn.txt) | **81** | 15005 | 2023-07-02 17:05 |
| 53128 | [![53128__atri_my_dear_moments](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53128__atri_my_dear_moments.jpg)](https://myanimelist.net/anime/53128/Atri__My_Dear_Moments) | [Atri - My Dear Moments](https://subsplease.org/shows/atri-my-dear-moments) | TV | 13 / 13 | **Finished Airing** | 7.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Atri+My+Dear+Moments+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53128__atri_my_dear_moments.txt) | **80** | 6933 | 2024-10-05 16:32 |
| 51180 | [![51180__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51180__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_3rd_season.jpg)](https://myanimelist.net/anime/51180/Youkoso_Jitsuryoku_Shijou_Shugi_no_Kyoushitsu_e_3rd_Season) | [Youkoso Jitsuryoku Shijou Shugi no Kyoushitsu e S3](https://subsplease.org/shows/youkoso-jitsuryoku-shijou-shugi-no-kyoushitsu-e-s3) | TV | 13 / 13 | **Finished Airing** | 7.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youkoso+Jitsuryoku+Shijou+Shugi+no+Kyoushitsu+e+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51180__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_3rd_season.txt) | **80** | 15992 | 2024-03-27 14:01 |
| 49073 | [![49073__kidou_senshi_gundam_seed_freedom](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49073__kidou_senshi_gundam_seed_freedom.jpg)](https://myanimelist.net/anime/49073/Kidou_Senshi_Gundam_SEED_Freedom) | [Mobile Suit Gundam SEED Freedom](https://subsplease.org/shows/mobile-suit-gundam-seed-freedom) | Movie | 1 / 1 | **Finished Airing** | 7.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mobile+Suit+Gundam+SEED+Freedom+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49073__kidou_senshi_gundam_seed_freedom.txt) | **80** | 6615 | 2024-11-25 04:35 |
| 53723 | [![53723__acro_trip](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53723__acro_trip.jpg)](https://myanimelist.net/anime/53723/Acro_Trip) | [Acro Trip](https://subsplease.org/shows/acro-trip) | TV | 12 / 12 | **Finished Airing** | 6.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Acro+Trip+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53723__acro_trip.txt) | **80** | 4369 | 2024-12-11 14:17 |
| 53356 | [![53356__shuumatsu_train_doko_e_iku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53356__shuumatsu_train_doko_e_iku.jpg)](https://myanimelist.net/anime/53356/Shuumatsu_Train_Doko_e_Iku) | [Shuumatsu Train Doko e Iku](https://subsplease.org/shows/shuumatsu-train-doko-e-iku) | TV | 13 / 12 | **Finished Airing** | 7.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shuumatsu+Train+Doko+e+Iku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53356__shuumatsu_train_doko_e_iku.txt) | 79 | 9596 | 2024-06-24 12:32 |
| 54900 | [![54900__wind_breaker](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54900__wind_breaker.jpg)](https://myanimelist.net/anime/54900/Wind_Breaker) | [Wind Breaker](https://subsplease.org/shows/wind-breaker) | TV | 13 / 13 | **Finished Airing** | 7.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Wind+Breaker+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54900__wind_breaker.txt) | 78 | 14817 | 2024-06-27 17:32 |
| 59571 | [![59571__shingeki_no_kyojin_movie_kanketsu_hen_the_last_attack](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59571__shingeki_no_kyojin_movie_kanketsu_hen_the_last_attack.jpg)](https://myanimelist.net/anime/59571/Shingeki_no_Kyojin_Movie__Kanketsu-hen_-_The_Last_Attack) | [Shingeki no Kyojin - The Final Season Part 3](https://subsplease.org/shows/shingeki-no-kyojin-the-final-season-part-3) | Movie | 2 / 1 | **Finished Airing** | 8.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shingeki+no+Kyojin+The+Final+Season+Part+3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59571__shingeki_no_kyojin_movie_kanketsu_hen_the_last_attack.txt) | 78 | 23702 | 2023-11-05 07:26 |
| 51958 | [![51958__kono_subarashii_sekai_ni_bakuen_wo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51958__kono_subarashii_sekai_ni_bakuen_wo.jpg)](https://myanimelist.net/anime/51958/Kono_Subarashii_Sekai_ni_Bakuen_wo) | [Kono Subarashii Sekai ni Bakuen wo!](https://subsplease.org/shows/kono-subarashii-sekai-ni-bakuen-wo) | TV | 12 / 12 | **Finished Airing** | 7.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kono+Subarashii+Sekai+ni+Bakuen+wo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51958__kono_subarashii_sekai_ni_bakuen_wo.txt) | 78 | 20615 | 2023-06-21 16:01 |
| 50392 | [![50392__mato_seihei_no_slave](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50392__mato_seihei_no_slave.jpg)](https://myanimelist.net/anime/50392/Mato_Seihei_no_Slave) | [Mato Seihei no Slave](https://subsplease.org/shows/mato-seihei-no-slave) | TV | 12 / 12 | **Finished Airing** | 6.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mato+Seihei+no+Slave+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50392__mato_seihei_no_slave.txt) | 78 | 16828 | 2024-03-21 15:10 |
| 49981 | [![49981__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen_season_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49981__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen_season_ii.jpg)](https://myanimelist.net/anime/49981/Kimi_to_Boku_no_Saigo_no_Senjou_Aruiwa_Sekai_ga_Hajimaru_Seisen_Season_II) | [Kimi to Boku no Saigo no Senjou, Arui wa Sekai ga Hajimaru Seisen S2](https://subsplease.org/shows/kimi-to-boku-no-saigo-no-senjou-arui-wa-sekai-ga-hajimaru-seisen-s2) | TV | 4 / 12 | Currently Airing | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+to+Boku+no+Saigo+no+Senjou+Arui+wa+Sekai+ga+Hajimaru+Seisen+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49981__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen_season_ii.txt) | 78 | 7302 | 2024-07-31 14:32 |
| 56662 | [![56662__trillion_game](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56662__trillion_game.jpg)](https://myanimelist.net/anime/56662/Trillion_Game) | [Trillion Game](https://subsplease.org/shows/trillion-game) | TV | 15 / 26 | Currently Airing | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Trillion+Game+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56662__trillion_game.txt) | 76 | 4307 | 2025-01-16 18:53 |
| 57947 | [![57947__mayonaka_punch](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57947__mayonaka_punch.jpg)](https://myanimelist.net/anime/57947/Mayonaka_Punch) | [Mayonaka Punch](https://subsplease.org/shows/mayonaka-punch) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mayonaka+Punch+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57947__mayonaka_punch.txt) | 76 | 6745 | 2024-09-23 14:02 |
| 44511 | [![44511__chainsaw_man](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44511__chainsaw_man.jpg)](https://myanimelist.net/anime/44511/Chainsaw_Man) | [Chainsaw Man](https://subsplease.org/shows/chainsaw-man) | TV | 12 / 12 | **Finished Airing** | 8.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Chainsaw+Man+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44511__chainsaw_man.txt) | 76 | 32075 | 2022-12-27 17:02 |
| 56348 | [![56348__dungeon_no_naka_no_hito](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56348__dungeon_no_naka_no_hito.jpg)](https://myanimelist.net/anime/56348/Dungeon_no_Naka_no_Hito) | [Dungeon no Naka no Hito](https://subsplease.org/shows/dungeon-no-naka-no-hito) | TV | 12 / 12 | **Finished Airing** | 7.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dungeon+no+Naka+no+Hito+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56348__dungeon_no_naka_no_hito.txt) | 76 | 8381 | 2024-09-27 17:33 |
| 40357 | [![40357__tate_no_yuusha_no_nariagari_season_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40357__tate_no_yuusha_no_nariagari_season_3.jpg)](https://myanimelist.net/anime/40357/Tate_no_Yuusha_no_Nariagari_Season_3) | [Tate no Yuusha no Nariagari S3](https://subsplease.org/shows/tate-no-yuusha-no-nariagari-s3) | TV | 12 / 12 | **Finished Airing** | 7.1 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tate+no+Yuusha+no+Nariagari+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40357__tate_no_yuusha_no_nariagari_season_3.txt) | 75 | 20497 | 2023-12-22 13:36 |
| 21 | [![21__one_piece](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/21__one_piece.jpg)](https://myanimelist.net/anime/21/One_Piece) | [One Piece](https://subsplease.org/shows/one-piece) | TV | 52 / ? | Currently Airing | 8.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+One+Piece+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/21__one_piece.txt) | 74 | 18233 | 2024-10-13 02:01 |
| 58173 | [![58173__mahoutsukai_ni_narenakatta_onnanoko_no_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58173__mahoutsukai_ni_narenakatta_onnanoko_no_hanashi.jpg)](https://myanimelist.net/anime/58173/Mahoutsukai_ni_Narenakatta_Onnanoko_no_Hanashi) | [Mahoutsukai ni Narenakatta Onnanoko no Hanashi](https://subsplease.org/shows/mahoutsukai-ni-narenakatta-onnanoko-no-hanashi) | TV | 12 / 12 | **Finished Airing** | 6.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+ni+Narenakatta+Onnanoko+no+Hanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58173__mahoutsukai_ni_narenakatta_onnanoko_no_hanashi.txt) | 74 | 4106 | 2024-12-20 19:32 |
| 56449 | [![56449__madougushi_dahliya_wa_utsumukanai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56449__madougushi_dahliya_wa_utsumukanai.jpg)](https://myanimelist.net/anime/56449/Madougushi_Dahliya_wa_Utsumukanai) | [Madougushi Dahliya wa Utsumukanai](https://subsplease.org/shows/madougushi-dahliya-wa-utsumukanai) | TV | 12 / 12 | **Finished Airing** | 6.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Madougushi+Dahliya+wa+Utsumukanai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56449__madougushi_dahliya_wa_utsumukanai.txt) | 74 | 8020 | 2024-09-21 12:32 |
| 52211 | [![52211__mashle](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52211__mashle.jpg)](https://myanimelist.net/anime/52211/Mashle) | [Mashle](https://subsplease.org/shows/mashle) | TV | 25 / 12 | **Finished Airing** | 7.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mashle+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52211__mashle.txt) | 73 | 26317 | 2024-03-30 16:01 |
| 57100 | [![57100__the_new_gate](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57100__the_new_gate.jpg)](https://myanimelist.net/anime/57100/The_New_Gate) | [The New Gate](https://subsplease.org/shows/the-new-gate) | TV | 12 / 12 | **Finished Airing** | 6.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+New+Gate+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57100__the_new_gate.txt) | 71 | 14245 | 2024-06-29 17:31 |
| 54866 | [![54866__blue_lock_episode_nagi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54866__blue_lock_episode_nagi.jpg)](https://myanimelist.net/anime/54866/Blue_Lock__Episode_Nagi) | [Blue lock - Episode Nagi](https://subsplease.org/shows/blue-lock-episode-nagi) | Movie | 1 / 1 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Blue+lock+Episode+Nagi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54866__blue_lock_episode_nagi.txt) | 71 | 4944 | 2024-10-20 17:24 |
| 56690 | [![56690__re_monster](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56690__re_monster.jpg)](https://myanimelist.net/anime/56690/Re_Monster) | [Re Monster](https://subsplease.org/shows/re-monster) | TV | 12 / 12 | **Finished Airing** | 6.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Re+Monster+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56690__re_monster.txt) | 71 | 14394 | 2024-06-17 15:33 |
| 48418 | [![48418__maou_gakuin_no_futekigousha_ii_shijou_saikyou_no_maou_no_shiso_tensei_shite_shison_tachi_no_gakkou_e_kayou_part_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48418__maou_gakuin_no_futekigousha_ii_shijou_saikyou_no_maou_no_shiso_tensei_shite_shison_tachi_no_gakkou_e_kayou_part_2.jpg)](https://myanimelist.net/anime/48418/Maou_Gakuin_no_Futekigousha_II__Shijou_Saikyou_no_Maou_no_Shiso_Tensei_shite_Shison-tachi_no_Gakkou_e_Kayou_Part_2) | [Maou Gakuin no Futekigousha S2](https://subsplease.org/shows/maou-gakuin-no-futekigousha-s2) | TV | 24 / 12 | **Finished Airing** | 6.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maou+Gakuin+no+Futekigousha+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48418__maou_gakuin_no_futekigousha_ii_shijou_saikyou_no_maou_no_shiso_tensei_shite_shison_tachi_no_gakkou_e_kayou_part_2.txt) | 68 | 12380 | 2024-07-24 18:34 |
| 55877 | [![55877__henjin_no_salad_bowl](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55877__henjin_no_salad_bowl.jpg)](https://myanimelist.net/anime/55877/Henjin_no_Salad_Bowl) | [Henjin no Salad Bowl](https://subsplease.org/shows/henjin-no-salad-bowl) | TV | 12 / 12 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Henjin+no+Salad+Bowl+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55877__henjin_no_salad_bowl.txt) | 68 | 8479 | 2024-06-20 18:47 |
| 39894 | [![39894__hibike_euphonium_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39894__hibike_euphonium_3.jpg)](https://myanimelist.net/anime/39894/Hibike_Euphonium_3) | [Hibike! Euphonium S3](https://subsplease.org/shows/hibike-euphonium-s3) | TV | 13 / 13 | **Finished Airing** | 8.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hibike+Euphonium+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39894__hibike_euphonium_3.txt) | 67 | 9492 | 2024-06-30 10:32 |
| 58272 | [![58272__boku_no_tsuma_wa_kanjou_ga_nai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58272__boku_no_tsuma_wa_kanjou_ga_nai.jpg)](https://myanimelist.net/anime/58272/Boku_no_Tsuma_wa_Kanjou_ga_Nai) | [Boku no Tsuma wa Kanjou ga Nai](https://subsplease.org/shows/boku-no-tsuma-wa-kanjou-ga-nai) | TV | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boku+no+Tsuma+wa+Kanjou+ga+Nai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58272__boku_no_tsuma_wa_kanjou_ga_nai.txt) | 66 | 6188 | 2024-09-14 15:02 |
| 47917 | [![47917__bocchi_the_rock](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47917__bocchi_the_rock.jpg)](https://myanimelist.net/anime/47917/Bocchi_the_Rock) | [Bocchi the Rock!](https://subsplease.org/shows/bocchi-the-rock) | TV | 12 / 12 | **Finished Airing** | 8.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bocchi+the+Rock+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47917__bocchi_the_rock.txt) | 66 | 17828 | 2022-12-24 17:31 |
| 53434 | [![53434__maou_no_ore_ga_dorei_elf_wo_yome_ni_shitanda_ga_dou_medereba_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53434__maou_no_ore_ga_dorei_elf_wo_yome_ni_shitanda_ga_dou_medereba_ii.jpg)](https://myanimelist.net/anime/53434/Maou_no_Ore_ga_Dorei_Elf_wo_Yome_ni_Shitanda_ga_Dou_Medereba_Ii) | [Madome](https://subsplease.org/shows/madome) | TV | 12 / 12 | **Finished Airing** | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Madome+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53434__maou_no_ore_ga_dorei_elf_wo_yome_ni_shitanda_ga_dou_medereba_ii.txt) | 66 | 12673 | 2024-06-13 16:04 |
| 46569 | [![46569__jigokuraku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46569__jigokuraku.jpg)](https://myanimelist.net/anime/46569/Jigokuraku) | [Jigokuraku](https://subsplease.org/shows/jigokuraku) | TV | 13 / 13 | **Finished Airing** | 8.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jigokuraku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46569__jigokuraku.txt) | 65 | 29823 | 2023-07-01 15:31 |
| 48549 | [![48549__dr_stone_new_world](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48549__dr_stone_new_world.jpg)](https://myanimelist.net/anime/48549/Dr_Stone__New_World) | [Dr. Stone S3](https://subsplease.org/shows/dr-stone-s3) | TV | 22 / 11 | **Finished Airing** | 8.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dr+Stone+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48549__dr_stone_new_world.txt) | 64 | 20134 | 2023-12-21 15:35 |
| 54794 | [![54794__metallic_rouge](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54794__metallic_rouge.jpg)](https://myanimelist.net/anime/54794/Metallic_Rouge) | [Metallic Rouge](https://subsplease.org/shows/metallic-rouge) | TV | 13 / 13 | **Finished Airing** | 6.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Metallic+Rouge+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54794__metallic_rouge.txt) | 64 | 11992 | 2024-04-03 17:26 |
| 55996 | [![55996__koi_wa_futago_de_warikirenai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55996__koi_wa_futago_de_warikirenai.jpg)](https://myanimelist.net/anime/55996/Koi_wa_Futago_de_Warikirenai) | [Koi wa Futago de Warikirenai](https://subsplease.org/shows/koi-wa-futago-de-warikirenai) | TV | 12 / 12 | **Finished Airing** | 6.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koi+wa+Futago+de+Warikirenai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55996__koi_wa_futago_de_warikirenai.txt) | 63 | 5682 | 2024-09-25 15:17 |
| 55823 | [![55823__natsume_yuujinchou_shichi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55823__natsume_yuujinchou_shichi.jpg)](https://myanimelist.net/anime/55823/Natsume_Yuujinchou_Shichi) | [Natsume Yuujinchou S7](https://subsplease.org/shows/natsume-yuujinchou-s7) | TV | 12 / 12 | **Finished Airing** | 8.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Natsume+Yuujinchou+S7+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55823__natsume_yuujinchou_shichi.txt) | 62 | 4523 | 2024-12-23 18:22 |
| 56647 | [![56647__ao_no_miburo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56647__ao_no_miburo.jpg)](https://myanimelist.net/anime/56647/Ao_no_Miburo) | [Ao no Miburo](https://subsplease.org/shows/ao-no-miburo) | TV | 14 / 24 | Currently Airing | 6.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ao+no+Miburo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56647__ao_no_miburo.txt) | 62 | 3196 | 2025-01-18 11:02 |
| 55690 | [![55690__boku_no_kokoro_no_yabai_yatsu_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55690__boku_no_kokoro_no_yabai_yatsu_2nd_season.jpg)](https://myanimelist.net/anime/55690/Boku_no_Kokoro_no_Yabai_Yatsu_2nd_Season) | [Boku no Kokoro no Yabai Yatsu](https://subsplease.org/shows/boku-no-kokoro-no-yabai-yatsu) | TV | 26 / 13 | **Finished Airing** | 8.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boku+no+Kokoro+no+Yabai+Yatsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55690__boku_no_kokoro_no_yabai_yatsu_2nd_season.txt) | 62 | 15386 | 2024-03-30 18:03 |
| 55528 | [![55528__yuuki_bakuhatsu_bang_bravern](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55528__yuuki_bakuhatsu_bang_bravern.jpg)](https://myanimelist.net/anime/55528/Yuuki_Bakuhatsu_Bang_Bravern) | [Yuuki Bakuhatsu Bang Bravern](https://subsplease.org/shows/yuuki-bakuhatsu-bang-bravern) | TV | 12 / 12 | **Finished Airing** | 7.57 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuuki+Bakuhatsu+Bang+Bravern+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55528__yuuki_bakuhatsu_bang_bravern.txt) | 62 | 8337 | 2024-03-28 16:31 |
| 54233 | [![54233__sasayaku_you_ni_koi_wo_utau](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54233__sasayaku_you_ni_koi_wo_utau.jpg)](https://myanimelist.net/anime/54233/Sasayaku_You_ni_Koi_wo_Utau) | [Sasayaku You ni Koi wo Utau](https://subsplease.org/shows/sasayaku-you-ni-koi-wo-utau) | TV | 12 / 12 | **Finished Airing** | 6.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sasayaku+You+ni+Koi+wo+Utau+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54233__sasayaku_you_ni_koi_wo_utau.txt) | 62 | 6291 | 2024-12-29 11:04 |
| 48316 | [![48316__kage_no_jitsuryokusha_ni_naritakute](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48316__kage_no_jitsuryokusha_ni_naritakute.jpg)](https://myanimelist.net/anime/48316/Kage_no_Jitsuryokusha_ni_Naritakute) | [Kage no Jitsuryokusha ni Naritakute!](https://subsplease.org/shows/kage-no-jitsuryokusha-ni-naritakute) | TV | 20 / 20 | **Finished Airing** | 8.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kage+no+Jitsuryokusha+ni+Naritakute+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48316__kage_no_jitsuryokusha_ni_naritakute.txt) | 61 | 29188 | 2023-02-15 14:32 |
| 57380 | [![57380__isekai_yururi_kikou_kosodateshinagara_boukensha_shimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57380__isekai_yururi_kikou_kosodateshinagara_boukensha_shimasu.jpg)](https://myanimelist.net/anime/57380/Isekai_Yururi_Kikou__Kosodateshinagara_Boukensha_Shimasu) | [Isekai Yururi Kikou](https://subsplease.org/shows/isekai-yururi-kikou) | TV | 12 / 12 | **Finished Airing** | 6.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Yururi+Kikou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57380__isekai_yururi_kikou_kosodateshinagara_boukensha_shimasu.txt) | 61 | 7976 | 2024-09-15 17:32 |
| 55866 | [![55866__yubisaki_to_renren](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55866__yubisaki_to_renren.jpg)](https://myanimelist.net/anime/55866/Yubisaki_to_Renren) | [Yubisaki to Renren](https://subsplease.org/shows/yubisaki-to-renren) | TV | 12 / 12 | **Finished Airing** | 8.22 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yubisaki+to+Renren+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55866__yubisaki_to_renren.txt) | 60 | 9796 | 2024-03-23 14:01 |
| 50664 | [![50664__saihate_no_paladin_tetsusabi_no_yama_no_ou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50664__saihate_no_paladin_tetsusabi_no_yama_no_ou.jpg)](https://myanimelist.net/anime/50664/Saihate_no_Paladin__Tetsusabi_no_Yama_no_Ou) | [Saihate no Paladin S2](https://subsplease.org/shows/saihate-no-paladin-s2) | TV | 12 / 12 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saihate+no+Paladin+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50664__saihate_no_paladin_tetsusabi_no_yama_no_ou.txt) | 60 | 11257 | 2023-12-23 14:34 |
| 54714 | [![54714__kimi_no_koto_ga_daidaidaidaidaisuki_na_100_nin_no_kanojo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54714__kimi_no_koto_ga_daidaidaidaidaisuki_na_100_nin_no_kanojo.jpg)](https://myanimelist.net/anime/54714/Kimi_no_Koto_ga_Daidaidaidaidaisuki_na_100-nin_no_Kanojo) | [Hyakkano](https://subsplease.org/shows/hyakkano) | TV | 14 / 12 | **Finished Airing** | 7.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hyakkano+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54714__kimi_no_koto_ga_daidaidaidaidaisuki_na_100_nin_no_kanojo.txt) | 60 | 11406 | 2025-01-19 15:01 |
| 50695 | [![50695__mf_ghost](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50695__mf_ghost.jpg)](https://myanimelist.net/anime/50695/MF_Ghost) | [MF Ghost](https://subsplease.org/shows/mf-ghost) | TV | 24 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+MF+Ghost+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50695__mf_ghost.txt) | 60 | 10012 | 2024-12-22 17:02 |
| 50593 | [![50593__natsu_e_no_tunnel_sayonara_no_deguchi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50593__natsu_e_no_tunnel_sayonara_no_deguchi.jpg)](https://myanimelist.net/anime/50593/Natsu_e_no_Tunnel_Sayonara_no_Deguchi) | [Natsu e no Tunnel, Sayonara no Deguchi](https://subsplease.org/shows/natsu-e-no-tunnel-sayonara-no-deguchi) | Movie | 1 / 1 | **Finished Airing** | 7.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Natsu+e+no+Tunnel+Sayonara+no+Deguchi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50593__natsu_e_no_tunnel_sayonara_no_deguchi.txt) | 60 | 7902 | 2024-01-03 18:39 |
| 57517 | [![57517__puniru_wa_kawaii_slime](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57517__puniru_wa_kawaii_slime.jpg)](https://myanimelist.net/anime/57517/Puniru_wa_Kawaii_Slime) | [Puniru wa Kawaii Slime](https://subsplease.org/shows/puniru-wa-kawaii-slime) | TV | 12 / 12 | **Finished Airing** | 6.98 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Puniru+wa+Kawaii+Slime+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57517__puniru_wa_kawaii_slime.txt) | 60 | 2973 | 2024-12-22 16:47 |
| 51648 | [![51648__nozomanu_fushi_no_boukensha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51648__nozomanu_fushi_no_boukensha.jpg)](https://myanimelist.net/anime/51648/Nozomanu_Fushi_no_Boukensha) | [Nozomanu Fushi no Boukensha](https://subsplease.org/shows/nozomanu-fushi-no-boukensha) | TV | 12 / 12 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nozomanu+Fushi+no+Boukensha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51648__nozomanu_fushi_no_boukensha.txt) | 60 | 16813 | 2024-03-22 13:31 |
| 57845 | [![57845__elf_san_wa_yaserarenai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57845__elf_san_wa_yaserarenai.jpg)](https://myanimelist.net/anime/57845/Elf-san_wa_Yaserarenai) | [Elf-san wa Yaserarenai](https://subsplease.org/shows/elf-san-wa-yaserarenai) | TV | 14 / 12 | **Finished Airing** | 5.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Elf+san+wa+Yaserarenai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57845__elf_san_wa_yaserarenai.txt) | 59 | 6306 | 2024-10-20 17:01 |
| 49785 | [![49785__fairy_tail_100_nen_quest](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49785__fairy_tail_100_nen_quest.jpg)](https://myanimelist.net/anime/49785/Fairy_Tail__100-nen_Quest) | [Fairy Tail - 100 Years Quest](https://subsplease.org/shows/fairy-tail-100-years-quest) | TV | 26 / 25 | **Finished Airing** | 7.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fairy+Tail+100+Years+Quest+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49785__fairy_tail_100_nen_quest.txt) | 59 | 7255 | 2025-01-05 10:01 |
| 53626 | [![53626__bye_bye_earth](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53626__bye_bye_earth.jpg)](https://myanimelist.net/anime/53626/Bye_Bye_Earth) | [Bye Bye, Earth](https://subsplease.org/shows/bye-bye-earth) | TV | 10 / 10 | **Finished Airing** | 6.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bye+Bye+Earth+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53626__bye_bye_earth.txt) | 58 | 7788 | 2024-09-13 15:02 |
| 38475 | [![38475__yuru_camp_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38475__yuru_camp_movie.jpg)](https://myanimelist.net/anime/38475/Yuru_Camp△_Movie) | [Yuru Camp Movie](https://subsplease.org/shows/yuru-camp-movie) | Movie | 1 / 1 | **Finished Airing** | 8.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuru+Camp+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38475__yuru_camp_movie.txt) | 57 | 5761 | 2022-11-28 17:03 |
| 53488 | [![53488__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita_2nd](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53488__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita_2nd.jpg)](https://myanimelist.net/anime/53488/Shin_no_Nakama_ja_Nai_to_Yuusha_no_Party_wo_Oidasareta_node_Henkyou_de_Slow_Life_suru_Koto_ni_Shimashita_2nd) | [Shin no Nakama S2](https://subsplease.org/shows/shin-no-nakama-s2) | TV | 12 / 12 | **Finished Airing** | 6.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shin+no+Nakama+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53488__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita_2nd.txt) | 57 | 11622 | 2024-03-24 14:31 |
| 52196 | [![52196__date_a_live_v](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52196__date_a_live_v.jpg)](https://myanimelist.net/anime/52196/Date_A_Live_V) | [Date a Live V](https://subsplease.org/shows/date-a-live-v) | TV | 12 / 12 | **Finished Airing** | 7.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Date+a+Live+V+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52196__date_a_live_v.txt) | 57 | 7234 | 2024-06-26 14:32 |
| 56843 | [![56843__goukon_ni_ittara_onna_ga_inakatta_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56843__goukon_ni_ittara_onna_ga_inakatta_hanashi.jpg)](https://myanimelist.net/anime/56843/Goukon_ni_Ittara_Onna_ga_Inakatta_Hanashi) | [Goukon ni Ittara Onna ga Inakatta Hanashi](https://subsplease.org/shows/goukon-ni-ittara-onna-ga-inakatta-hanashi) | TV | 12 / 12 | **Finished Airing** | 7.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Goukon+ni+Ittara+Onna+ga+Inakatta+Hanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56843__goukon_ni_ittara_onna_ga_inakatta_hanashi.txt) | 56 | 3762 | 2024-12-20 18:01 |
| 53590 | [![53590__saijaku_tamer_wa_gomi_hiroi_no_tabi_wo_hajimemashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53590__saijaku_tamer_wa_gomi_hiroi_no_tabi_wo_hajimemashita.jpg)](https://myanimelist.net/anime/53590/Saijaku_Tamer_wa_Gomi_Hiroi_no_Tabi_wo_Hajimemashita) | [Saijaku Tamer wa Gomi Hiroi no Tabi wo Hajimemashita](https://subsplease.org/shows/saijaku-tamer-wa-gomi-hiroi-no-tabi-wo-hajimemashita) | TV | 12 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saijaku+Tamer+wa+Gomi+Hiroi+no+Tabi+wo+Hajimemashita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53590__saijaku_tamer_wa_gomi_hiroi_no_tabi_wo_hajimemashita.txt) | 55 | 11902 | 2024-03-29 14:31 |
| 50172 | [![50172__mob_psycho_100_iii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50172__mob_psycho_100_iii.jpg)](https://myanimelist.net/anime/50172/Mob_Psycho_100_III) | [Mob Psycho 100 S3](https://subsplease.org/shows/mob-psycho-100-s3) | TV | 12 / 12 | **Finished Airing** | 8.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mob+Psycho+100+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50172__mob_psycho_100_iii.txt) | 55 | 16712 | 2022-12-21 17:01 |
| 56135 | [![56135__uniteup_uni_birth](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56135__uniteup_uni_birth.jpg)](https://myanimelist.net/anime/56135/UniteUp_Uni_Birth) | [UniteUp! S2](https://subsplease.org/shows/uniteup-s2) | TV | 2 / 12 | Currently Airing | 6.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+UniteUp+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56135__uniteup_uni_birth.txt) | 54 | 1250 | 2025-01-18 17:02 |
| 53912 | [![53912__seiyuu_radio_no_uraomote](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53912__seiyuu_radio_no_uraomote.jpg)](https://myanimelist.net/anime/53912/Seiyuu_Radio_no_Uraomote) | [Seiyuu Radio no Uraomote](https://subsplease.org/shows/seiyuu-radio-no-uraomote) | TV | 12 / 12 | **Finished Airing** | 6.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seiyuu+Radio+no+Uraomote+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53912__seiyuu_radio_no_uraomote.txt) | 54 | 5841 | 2024-06-26 12:33 |
| 49877 | [![49877__tensei_shitara_slime_datta_ken_movie_guren_no_kizuna_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49877__tensei_shitara_slime_datta_ken_movie_guren_no_kizuna_hen.jpg)](https://myanimelist.net/anime/49877/Tensei_shitara_Slime_Datta_Ken_Movie__Guren_no_Kizuna-hen) | [Tensei shitara Slime Datta Ken Movie - Guren no Kizuna-hen](https://subsplease.org/shows/tensei-shitara-slime-datta-ken-movie-guren-no-kizuna-hen) | Movie | 1 / 1 | **Finished Airing** | 7.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+shitara+Slime+Datta+Ken+Movie+Guren+no+Kizuna+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49877__tensei_shitara_slime_datta_ken_movie_guren_no_kizuna_hen.txt) | 54 | 12730 | 2023-04-21 05:54 |
| 57533 | [![57533__youkai_gakkou_no_sensei_hajimemashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57533__youkai_gakkou_no_sensei_hajimemashita.jpg)](https://myanimelist.net/anime/57533/Youkai_Gakkou_no_Sensei_Hajimemashita) | [Youkai Gakkou no Sensei Hajimemashita](https://subsplease.org/shows/youkai-gakkou-no-sensei-hajimemashita) | TV | 14 / 24 | Currently Airing | 6.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youkai+Gakkou+no+Sensei+Hajimemashita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57533__youkai_gakkou_no_sensei_hajimemashita.txt) | 52 | 3230 | 2025-01-14 15:32 |
| 57099 | [![57099__na_nare_hana_nare](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57099__na_nare_hana_nare.jpg)](https://myanimelist.net/anime/57099/Na_Nare_Hana_Nare) | [Na Nare Hana Nare](https://subsplease.org/shows/na-nare-hana-nare) | TV | 12 / 12 | **Finished Airing** | 6.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Na+Nare+Hana+Nare+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57099__na_nare_hana_nare.txt) | 52 | 4327 | 2024-09-22 16:32 |
| 56242 | [![56242__sengoku_youko_yonaoshi_kyoudai_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56242__sengoku_youko_yonaoshi_kyoudai_hen.jpg)](https://myanimelist.net/anime/56242/Sengoku_Youko__Yonaoshi_Kyoudai-hen) | [Sengoku Youko](https://subsplease.org/shows/sengoku-youko) | TV | 37 / 13 | **Finished Airing** | 6.99 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sengoku+Youko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56242__sengoku_youko_yonaoshi_kyoudai_hen.txt) | 52 | 5838 | 2024-12-25 16:48 |
| 54835 | [![54835__kono_sekai_wa_fukanzen_sugiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54835__kono_sekai_wa_fukanzen_sugiru.jpg)](https://myanimelist.net/anime/54835/Kono_Sekai_wa_Fukanzen_Sugiru) | [Kono Sekai wa Fukanzen Sugiru](https://subsplease.org/shows/kono-sekai-wa-fukanzen-sugiru) | TV | 13 / 13 | **Finished Airing** | 6.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kono+Sekai+wa+Fukanzen+Sugiru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54835__kono_sekai_wa_fukanzen_sugiru.txt) | 52 | 6180 | 2024-09-27 18:32 |
| 48561 | [![48561__jujutsu_kaisen_0_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48561__jujutsu_kaisen_0_movie.jpg)](https://myanimelist.net/anime/48561/Jujutsu_Kaisen_0_Movie) | [Jujutsu Kaisen 0](https://subsplease.org/shows/jujutsu-kaisen-0) | Movie | 1 / 1 | **Finished Airing** | 8.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jujutsu+Kaisen+0+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48561__jujutsu_kaisen_0_movie.txt) | 52 | 15346 | 2022-09-22 00:23 |
| 54112 | [![54112__zom_100_zombie_ni_naru_made_ni_shitai_100_no_koto](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54112__zom_100_zombie_ni_naru_made_ni_shitai_100_no_koto.jpg)](https://myanimelist.net/anime/54112/Zom_100__Zombie_ni_Naru_made_ni_Shitai_100_no_Koto) | [Zom 100 - Zombie ni Naru made ni Shitai 100 no Koto](https://subsplease.org/shows/zom-100-zombie-ni-naru-made-ni-shitai-100-no-koto) | TV | 12 / 12 | **Finished Airing** | 7.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Zom+100+Zombie+ni+Naru+made+ni+Shitai+100+no+Koto+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54112__zom_100_zombie_ni_naru_made_ni_shitai_100_no_koto.txt) | 51 | 24530 | 2023-12-27 14:14 |
| 54103 | [![54103__hikikomari_kyuuketsuki_no_monmon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54103__hikikomari_kyuuketsuki_no_monmon.jpg)](https://myanimelist.net/anime/54103/Hikikomari_Kyuuketsuki_no_Monmon) | [Hikikomari Kyuuketsuki no Monmon](https://subsplease.org/shows/hikikomari-kyuuketsuki-no-monmon) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hikikomari+Kyuuketsuki+no+Monmon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54103__hikikomari_kyuuketsuki_no_monmon.txt) | 51 | 11822 | 2023-12-30 14:01 |
| 50739 | [![50739__otonari_no_tenshi_sama_ni_itsunomanika_dame_ningen_ni_sareteita_ken](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50739__otonari_no_tenshi_sama_ni_itsunomanika_dame_ningen_ni_sareteita_ken.jpg)](https://myanimelist.net/anime/50739/Otonari_no_Tenshi-sama_ni_Itsunomanika_Dame_Ningen_ni_Sareteita_Ken) | [Otonari no Tenshi-sama ni Itsunomanika Dame Ningen ni Sareteita Ken](https://subsplease.org/shows/otonari-no-tenshi-sama-ni-itsunomanika-dame-ningen-ni-sareteita-ken) | TV | 12 / 12 | **Finished Airing** | 7.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Otonari+no+Tenshi+sama+ni+Itsunomanika+Dame+Ningen+ni+Sareteita+Ken+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50739__otonari_no_tenshi_sama_ni_itsunomanika_dame_ningen_ni_sareteita_ken.txt) | 51 | 9278 | 2023-03-25 15:02 |
| 49828 | [![49828__kidou_senshi_gundam_suisei_no_majo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49828__kidou_senshi_gundam_suisei_no_majo.jpg)](https://myanimelist.net/anime/49828/Kidou_Senshi_Gundam__Suisei_no_Majo) | [Mobile Suit Gundam - The Witch from Mercury](https://subsplease.org/shows/mobile-suit-gundam-the-witch-from-mercury) | TV | 25 / 12 | **Finished Airing** | 7.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mobile+Suit+Gundam+The+Witch+from+Mercury+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49828__kidou_senshi_gundam_suisei_no_majo.txt) | 51 | 19450 | 2023-07-02 09:31 |
| 52736 | [![52736__tensei_oujo_to_tensai_reijou_no_mahou_kakumei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52736__tensei_oujo_to_tensai_reijou_no_mahou_kakumei.jpg)](https://myanimelist.net/anime/52736/Tensei_Oujo_to_Tensai_Reijou_no_Mahou_Kakumei) | [Tensei Oujo to Tensai Reijou no Mahou Kakumei](https://subsplease.org/shows/tensei-oujo-to-tensai-reijou-no-mahou-kakumei) | TV | 12 / 12 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Oujo+to+Tensai+Reijou+no+Mahou+Kakumei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52736__tensei_oujo_to_tensai_reijou_no_mahou_kakumei.txt) | 51 | 11623 | 2023-03-22 13:01 |
| 52619 | [![52619__jidou_hanbaiki_ni_umarekawatta_ore_wa_meikyuu_wo_samayou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52619__jidou_hanbaiki_ni_umarekawatta_ore_wa_meikyuu_wo_samayou.jpg)](https://myanimelist.net/anime/52619/Jidou_Hanbaiki_ni_Umarekawatta_Ore_wa_Meikyuu_wo_Samayou) | [Jidou Hanbaiki ni Umarekawatta Ore wa Meikyuu wo Samayou](https://subsplease.org/shows/jidou-hanbaiki-ni-umarekawatta-ore-wa-meikyuu-wo-samayou) | TV | 12 / 12 | **Finished Airing** | 6.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jidou+Hanbaiki+ni+Umarekawatta+Ore+wa+Meikyuu+wo+Samayou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52619__jidou_hanbaiki_ni_umarekawatta_ore_wa_meikyuu_wo_samayou.txt) | 50 | 11708 | 2023-09-20 14:01 |
| 53407 | [![53407__bartender_kami_no_glass](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53407__bartender_kami_no_glass.jpg)](https://myanimelist.net/anime/53407/Bartender__Kami_no_Glass) | [Bartender - Kami no Glass](https://subsplease.org/shows/bartender-kami-no-glass) | TV | 12 / 12 | **Finished Airing** | 7.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bartender+Kami+no+Glass+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53407__bartender_kami_no_glass.txt) | 50 | 8605 | 2024-06-19 16:32 |
| 50709 | [![50709__lycoris_recoil](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50709__lycoris_recoil.jpg)](https://myanimelist.net/anime/50709/Lycoris_Recoil) | [Lycoris Recoil](https://subsplease.org/shows/lycoris-recoil) | TV | 13 / 13 | **Finished Airing** | 8.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lycoris+Recoil+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50709__lycoris_recoil.txt) | 48 | 15394 | 2022-09-24 16:03 |
| 52482 | [![52482__sasaki_to_pii_chan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52482__sasaki_to_pii_chan.jpg)](https://myanimelist.net/anime/52482/Sasaki_to_Pii-chan) | [Sasaki to Pii-chan](https://subsplease.org/shows/sasaki-to-pii-chan) | TV | 12 / 12 | **Finished Airing** | 6.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sasaki+to+Pii+chan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52482__sasaki_to_pii_chan.txt) | 48 | 12953 | 2024-03-22 12:31 |
| 53730 | [![53730__sokushi_cheat_ga_saikyou_sugite_isekai_no_yatsura_ga_marude_aite_ni_naranai_n_desu_ga](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53730__sokushi_cheat_ga_saikyou_sugite_isekai_no_yatsura_ga_marude_aite_ni_naranai_n_desu_ga.jpg)](https://myanimelist.net/anime/53730/Sokushi_Cheat_ga_Saikyou_sugite_Isekai_no_Yatsura_ga_Marude_Aite_ni_Naranai_n_desu_ga) | [Sokushi Cheat](https://subsplease.org/shows/sokushi-cheat) | TV | 12 / 12 | **Finished Airing** | 6.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sokushi+Cheat+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53730__sokushi_cheat_ga_saikyou_sugite_isekai_no_yatsura_ga_marude_aite_ni_naranai_n_desu_ga.txt) | 48 | 13892 | 2024-03-21 16:31 |
| 52969 | [![52969__jitsu_wa_ore_saikyou_deshita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52969__jitsu_wa_ore_saikyou_deshita.jpg)](https://myanimelist.net/anime/52969/Jitsu_wa_Ore_Saikyou_deshita) | [Jitsu wa Ore, Saikyou deshita](https://subsplease.org/shows/jitsu-wa-ore-saikyou-deshita) | TV | 12 / 12 | **Finished Airing** | 6.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jitsu+wa+Ore+Saikyou+deshita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52969__jitsu_wa_ore_saikyou_deshita.txt) | 47 | 13326 | 2023-09-30 18:46 |
| 56838 | [![56838__one_room_hiatari_futsuu_tenshi_tsuki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56838__one_room_hiatari_futsuu_tenshi_tsuki.jpg)](https://myanimelist.net/anime/56838/One_Room_Hiatari_Futsuu_Tenshi-tsuki) | [One Room, Hiatari Futsuu, Tenshi-tsuki](https://subsplease.org/shows/one-room-hiatari-futsuu-tenshi-tsuki) | TV | 12 / 12 | **Finished Airing** | 7.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+One+Room+Hiatari+Futsuu+Tenshi+tsuki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56838__one_room_hiatari_futsuu_tenshi_tsuki.txt) | 46 | 6791 | 2024-06-15 14:04 |
| 55717 | [![55717__dekisokonai_to_yobareta_motoeiyuu_wa_jikka_kara_tsuihou_sareta_node_sukikatte_ni_ikiru_koto_ni_shita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55717__dekisokonai_to_yobareta_motoeiyuu_wa_jikka_kara_tsuihou_sareta_node_sukikatte_ni_ikiru_koto_ni_shita.jpg)](https://myanimelist.net/anime/55717/Dekisokonai_to_Yobareta_Motoeiyuu_wa_Jikka_kara_Tsuihou_sareta_node_Sukikatte_ni_Ikiru_Koto_ni_Shita) | [Dekisoko](https://subsplease.org/shows/dekisoko) | TV | 12 / 12 | **Finished Airing** | 5.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dekisoko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55717__dekisokonai_to_yobareta_motoeiyuu_wa_jikka_kara_tsuihou_sareta_node_sukikatte_ni_ikiru_koto_ni_shita.txt) | 46 | 8833 | 2024-06-10 18:33 |
| 54856 | [![54856__horimiya_piece](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54856__horimiya_piece.jpg)](https://myanimelist.net/anime/54856/Horimiya__Piece) | [Horimiya - Piece](https://subsplease.org/shows/horimiya-piece) | TV | 13 / 13 | **Finished Airing** | 8.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Horimiya+Piece+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54856__horimiya_piece.txt) | 46 | 10904 | 2023-09-23 16:01 |
| 53833 | [![53833__watashi_no_oshi_wa_akuyaku_reijou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53833__watashi_no_oshi_wa_akuyaku_reijou.jpg)](https://myanimelist.net/anime/53833/Watashi_no_Oshi_wa_Akuyaku_Reijou) | [Watashi no Oshi wa Akuyaku Reijou](https://subsplease.org/shows/watashi-no-oshi-wa-akuyaku-reijou) | TV | 12 / 12 | **Finished Airing** | 7.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Watashi+no+Oshi+wa+Akuyaku+Reijou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53833__watashi_no_oshi_wa_akuyaku_reijou.txt) | 45 | 9899 | 2023-12-18 18:37 |
| 53421 | [![53421__dosanko_gal_wa_namara_menkoi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53421__dosanko_gal_wa_namara_menkoi.jpg)](https://myanimelist.net/anime/53421/Dosanko_Gal_wa_Namara_Menkoi) | [Dosanko Gal wa Namara Menkoi](https://subsplease.org/shows/dosanko-gal-wa-namara-menkoi) | TV | 12 / 12 | **Finished Airing** | 7.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dosanko+Gal+wa+Namara+Menkoi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53421__dosanko_gal_wa_namara_menkoi.txt) | 45 | 11172 | 2024-03-25 16:46 |
| 52747 | [![52747__psycho_pass_movie_providence](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52747__psycho_pass_movie_providence.jpg)](https://myanimelist.net/anime/52747/Psycho-Pass_Movie__Providence) | [Psycho-Pass Movie - Providence](https://subsplease.org/shows/psycho-pass-movie-providence) | Movie | 1 / 1 | **Finished Airing** | 7.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Psycho+Pass+Movie+Providence+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52747__psycho_pass_movie_providence.txt) | 45 | 11255 | 2023-12-19 01:41 |
| 53287 | [![53287__love_live_superstar_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53287__love_live_superstar_3rd_season.jpg)](https://myanimelist.net/anime/53287/Love_Live_Superstar_3rd_Season) | [Love Live! Superstar!! S3](https://subsplease.org/shows/love-live-superstar-s3) | TV | 12 / 12 | **Finished Airing** | 7.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Superstar+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53287__love_live_superstar_3rd_season.txt) | 45 | 2547 | 2024-12-24 02:02 |
| 52962 | [![52962__tearmoon_teikoku_monogatari_dantoudai_kara_hajimaru_hime_no_tensei_gyakuten_story](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52962__tearmoon_teikoku_monogatari_dantoudai_kara_hajimaru_hime_no_tensei_gyakuten_story.jpg)](https://myanimelist.net/anime/52962/Tearmoon_Teikoku_Monogatari__Dantoudai_kara_Hajimaru_Hime_no_Tensei_Gyakuten_Story) | [Tearmoon Teikoku Monogatari](https://subsplease.org/shows/tearmoon-teikoku-monogatari) | TV | 12 / 12 | **Finished Airing** | 7.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tearmoon+Teikoku+Monogatari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52962__tearmoon_teikoku_monogatari_dantoudai_kara_hajimaru_hime_no_tensei_gyakuten_story.txt) | 44 | 9413 | 2023-12-23 17:20 |
| 50587 | [![50587__gridman_universe](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50587__gridman_universe.jpg)](https://myanimelist.net/anime/50587/Gridman_Universe) | [Gridman Universe](https://subsplease.org/shows/gridman-universe) | Movie | 1 / 1 | **Finished Airing** | 8.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gridman+Universe+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50587__gridman_universe.txt) | 44 | 3516 | 2024-10-25 04:41 |
| 49387 | [![49387__vinland_saga_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49387__vinland_saga_season_2.jpg)](https://myanimelist.net/anime/49387/Vinland_Saga_Season_2) | [Vinland Saga S2](https://subsplease.org/shows/vinland-saga-s2) | TV | 24 / 24 | **Finished Airing** | 8.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vinland+Saga+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49387__vinland_saga_season_2.txt) | 44 | 17379 | 2023-06-19 16:32 |
| 55129 | [![55129__oroka_na_tenshi_wa_akuma_to_odoru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55129__oroka_na_tenshi_wa_akuma_to_odoru.jpg)](https://myanimelist.net/anime/55129/Oroka_na_Tenshi_wa_Akuma_to_Odoru) | [Oroka na Tenshi wa Akuma to Odoru](https://subsplease.org/shows/oroka-na-tenshi-wa-akuma-to-odoru) | TV | 12 / 12 | **Finished Airing** | 6.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Oroka+na+Tenshi+wa+Akuma+to+Odoru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55129__oroka_na_tenshi_wa_akuma_to_odoru.txt) | 44 | 7876 | 2024-03-25 17:01 |
| 54362 | [![54362__hametsu_no_oukoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54362__hametsu_no_oukoku.jpg)](https://myanimelist.net/anime/54362/Hametsu_no_Oukoku) | [Hametsu no Oukoku](https://subsplease.org/shows/hametsu-no-oukoku) | TV | 12 / 12 | **Finished Airing** | 6.22 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hametsu+no+Oukoku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54362__hametsu_no_oukoku.txt) | 44 | 14239 | 2023-12-22 18:35 |
| 52816 | [![52816__majo_to_yajuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52816__majo_to_yajuu.jpg)](https://myanimelist.net/anime/52816/Majo_to_Yajuu) | [Majo to Yajuu](https://subsplease.org/shows/majo-to-yajuu) | TV | 12 / 12 | **Finished Airing** | 7.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Majo+to+Yajuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52816__majo_to_yajuu.txt) | 44 | 11718 | 2024-04-04 18:16 |
| 41514 | [![41514__itai_no_wa_iya_nanode_bougyoryoku_ni_kyokufuri_shitai_to_omoimasu_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41514__itai_no_wa_iya_nanode_bougyoryoku_ni_kyokufuri_shitai_to_omoimasu_2.jpg)](https://myanimelist.net/anime/41514/Itai_no_wa_Iya_nanode_Bougyoryoku_ni_Kyokufuri_Shitai_to_Omoimasu_2) | [Bofuri S2](https://subsplease.org/shows/bofuri-s2) | TV | 12 / 12 | **Finished Airing** | 7.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bofuri+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41514__itai_no_wa_iya_nanode_bougyoryoku_ni_kyokufuri_shitai_to_omoimasu_2.txt) | 44 | 10414 | 2023-04-19 16:18 |
| 60410 | [![60410__yami_shibai_14](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/60410__yami_shibai_14.jpg)](https://myanimelist.net/anime/60410/Yami_Shibai_14) | [Yami Shibai 14](https://subsplease.org/shows/yami-shibai-14) | TV | 3 / ? | Currently Airing | 6.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+14+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/60410__yami_shibai_14.txt) | 44 | 1153 | 2025-01-19 20:45 |
| 54265 | [![54265__kekkon_yubiwa_monogatari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54265__kekkon_yubiwa_monogatari.jpg)](https://myanimelist.net/anime/54265/Kekkon_Yubiwa_Monogatari) | [Kekkon Yubiwa Monogatari](https://subsplease.org/shows/kekkon-yubiwa-monogatari) | TV | 12 / 12 | **Finished Airing** | 6.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kekkon+Yubiwa+Monogatari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54265__kekkon_yubiwa_monogatari.txt) | 43 | 9345 | 2024-03-23 13:01 |
| 48895 | [![48895__overlord_iv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48895__overlord_iv.jpg)](https://myanimelist.net/anime/48895/Overlord_IV) | [Overlord IV](https://subsplease.org/shows/overlord-iv) | TV | 13 / 13 | **Finished Airing** | 8.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Overlord+IV+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48895__overlord_iv.txt) | 43 | 17402 | 2022-09-27 14:01 |
| 52955 | [![52955__mahoutsukai_no_yome_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52955__mahoutsukai_no_yome_season_2.jpg)](https://myanimelist.net/anime/52955/Mahoutsukai_no_Yome_Season_2) | [Mahoutsukai no Yome S2](https://subsplease.org/shows/mahoutsukai-no-yome-s2) | TV | 24 / 12 | **Finished Airing** | 7.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+no+Yome+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52955__mahoutsukai_no_yome_season_2.txt) | 43 | 11500 | 2023-12-21 16:06 |
| 51297 | [![51297__ragna_crimson](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51297__ragna_crimson.jpg)](https://myanimelist.net/anime/51297/Ragna_Crimson) | [Ragna Crimson](https://subsplease.org/shows/ragna-crimson) | TV | 24 / 24 | **Finished Airing** | 7.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ragna+Crimson+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51297__ragna_crimson.txt) | 43 | 15085 | 2024-03-30 16:02 |
| 50613 | [![50613__rurouni_kenshin_meiji_kenkaku_romantan_2023](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50613__rurouni_kenshin_meiji_kenkaku_romantan_2023.jpg)](https://myanimelist.net/anime/50613/Rurouni_Kenshin__Meiji_Kenkaku_Romantan_2023) | [Rurouni Kenshin (2023)](https://subsplease.org/shows/rurouni-kenshin-2023) | TV | 38 / 24 | **Finished Airing** | 7.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rurouni+Kenshin+2023+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50613__rurouni_kenshin_meiji_kenkaku_romantan_2023.txt) | 42 | 12636 | 2025-01-16 19:01 |
| 54199 | [![54199__kaii_to_otome_to_kamikakushi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54199__kaii_to_otome_to_kamikakushi.jpg)](https://myanimelist.net/anime/54199/Kaii_to_Otome_to_Kamikakushi) | [Kaii to Otome to Kamikakushi](https://subsplease.org/shows/kaii-to-otome-to-kamikakushi) | TV | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaii+to+Otome+to+Kamikakushi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54199__kaii_to_otome_to_kamikakushi.txt) | 42 | 8177 | 2024-06-26 14:02 |
| 53439 | [![53439__boushoku_no_berserk](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53439__boushoku_no_berserk.jpg)](https://myanimelist.net/anime/53439/Boushoku_no_Berserk) | [Boushoku no Berserk](https://subsplease.org/shows/boushoku-no-berserk) | TV | 12 / 12 | **Finished Airing** | 6.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boushoku+no+Berserk+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53439__boushoku_no_berserk.txt) | 42 | 15234 | 2023-12-17 16:35 |
| 50205 | [![50205__arknights_reimei_zensou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50205__arknights_reimei_zensou.jpg)](https://myanimelist.net/anime/50205/Arknights__Reimei_Zensou) | [Arknights - Fuyukomori Kaerimichi](https://subsplease.org/shows/arknights-reimei-zensou) | TV | 8 / 8 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Arknights+Fuyukomori+Kaerimichi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50205__arknights_reimei_zensou.txt) | 42 | 6667 | 2023-11-24 18:01 |
| 56845 | [![56845__saikyou_tank_no_meikyuu_kouryaku_tairyoku_9999_no_rare_skill_mochi_tank_yuusha_party_wo_tsuihou_sareru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56845__saikyou_tank_no_meikyuu_kouryaku_tairyoku_9999_no_rare_skill_mochi_tank_yuusha_party_wo_tsuihou_sareru.jpg)](https://myanimelist.net/anime/56845/Saikyou_Tank_no_Meikyuu_Kouryaku__Tairyoku_9999_no_Rare_Skill-mochi_Tank_Yuusha_Party_wo_Tsuihou_sareru) | [Saikyou Tank no Meikyuu Kouryaku](https://subsplease.org/shows/saikyou-tank-no-meikyuu-kouryaku) | TV | 12 / 12 | **Finished Airing** | 6.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saikyou+Tank+no+Meikyuu+Kouryaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56845__saikyou_tank_no_meikyuu_kouryaku_tairyoku_9999_no_rare_skill_mochi_tank_yuusha_party_wo_tsuihou_sareru.txt) | 42 | 11189 | 2024-03-23 18:01 |
| 41084 | [![41084__made_in_abyss_retsujitsu_no_ougonkyou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41084__made_in_abyss_retsujitsu_no_ougonkyou.jpg)](https://myanimelist.net/anime/41084/Made_in_Abyss__Retsujitsu_no_Ougonkyou) | [Made in Abyss - Retsujitsu no Ougonkyou](https://subsplease.org/shows/made-in-abyss-retsujitsu-no-ougonkyou) | TV | 12 / 12 | **Finished Airing** | 8.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Made+in+Abyss+Retsujitsu+no+Ougonkyou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41084__made_in_abyss_retsujitsu_no_ougonkyou.txt) | 41 | 17900 | 2022-09-28 14:32 |
| 53126 | [![53126__yamada_kun_to_lv999_no_koi_wo_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53126__yamada_kun_to_lv999_no_koi_wo_suru.jpg)](https://myanimelist.net/anime/53126/Yamada-kun_to_Lv999_no_Koi_wo_Suru) | [Yamada-kun to Lv999 no Koi wo Suru](https://subsplease.org/shows/yamada-kun-to-lv999-no-koi-wo-suru) | TV | 13 / 13 | **Finished Airing** | 7.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yamada+kun+to+Lv999+no+Koi+wo+Suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53126__yamada_kun_to_lv999_no_koi_wo_suru.txt) | 41 | 15580 | 2023-06-24 17:01 |
| 50612 | [![50612__dr_stone_ryuusui](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50612__dr_stone_ryuusui.jpg)](https://myanimelist.net/anime/50612/Dr_Stone__Ryuusui) | [Dr. Stone - Ryuusui](https://subsplease.org/shows/dr-stone-ryuusui) | TV Special | 1 / 1 | **Finished Airing** | 8.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dr+Stone+Ryuusui+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50612__dr_stone_ryuusui.txt) | 41 | 9486 | 2022-07-10 18:25 |
| 58779 | [![58779__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58779__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming_2nd_season.jpg)](https://myanimelist.net/anime/58779/Shi_Cao_Lao_Long_Bei_Guan_Yi_E_Long_Zhi_Ming_2nd_Season) | [A Herbivorous Dragon of 5000 Years Gets Unfairly Villainized S2](https://subsplease.org/shows/a-herbivorous-dragon-of-5000-years-gets-unfairly-villainized-s2) | ONA | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+A+Herbivorous+Dragon+of+5000+Years+Gets+Unfairly+Villainized+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58779__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming_2nd_season.txt) | 41 | 3013 | 2024-12-18 04:02 |
| 54852 | [![54852__kikansha_no_mahou_wa_tokubetsu_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54852__kikansha_no_mahou_wa_tokubetsu_desu.jpg)](https://myanimelist.net/anime/54852/Kikansha_no_Mahou_wa_Tokubetsu_desu) | [Kikansha no Mahou wa Tokubetsu desu](https://subsplease.org/shows/kikansha-no-mahou-wa-tokubetsu-desu) | TV | 12 / 12 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kikansha+no+Mahou+wa+Tokubetsu+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54852__kikansha_no_mahou_wa_tokubetsu_desu.txt) | 41 | 11900 | 2023-12-23 17:37 |
| 56230 | [![56230__jiisan_baasan_wakagaeru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56230__jiisan_baasan_wakagaeru.jpg)](https://myanimelist.net/anime/56230/Jiisan_Baasan_Wakagaeru) | [Jiisan Baasan Wakagaeru](https://subsplease.org/shows/jiisan-baasan-wakagaeru) | TV | 11 / 11 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jiisan+Baasan+Wakagaeru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56230__jiisan_baasan_wakagaeru.txt) | 40 | 8236 | 2024-06-16 15:02 |
| 54790 | [![54790__undead_girl_murder_farce](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54790__undead_girl_murder_farce.jpg)](https://myanimelist.net/anime/54790/Undead_Girl_Murder_Farce) | [Undead Girl Murder Farce](https://subsplease.org/shows/undead-girl-murder-farce) | TV | 13 / 13 | **Finished Airing** | 7.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Undead+Girl+Murder+Farce+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54790__undead_girl_murder_farce.txt) | 40 | 13533 | 2023-09-27 17:26 |
| 54041 | [![54041__16bit_sensation_another_layer](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54041__16bit_sensation_another_layer.jpg)](https://myanimelist.net/anime/54041/16bit_Sensation__Another_Layer) | [16bit Sensation - Another Layer](https://subsplease.org/shows/16bit-sensation-another-layer) | TV | 13 / 13 | **Finished Airing** | 6.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+16bit+Sensation+Another+Layer+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54041__16bit_sensation_another_layer.txt) | 40 | 8726 | 2023-12-27 18:03 |
| 50796 | [![50796__kimi_wa_houkago_insomnia](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50796__kimi_wa_houkago_insomnia.jpg)](https://myanimelist.net/anime/50796/Kimi_wa_Houkago_Insomnia) | [Kimi wa Houkago Insomnia](https://subsplease.org/shows/kimi-wa-houkago-insomnia) | TV | 13 / 13 | **Finished Airing** | 8.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+wa+Houkago+Insomnia+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50796__kimi_wa_houkago_insomnia.txt) | 40 | 12116 | 2023-07-03 16:00 |
| 50582 | [![50582__nanatsu_no_maken_ga_shihai_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50582__nanatsu_no_maken_ga_shihai_suru.jpg)](https://myanimelist.net/anime/50582/Nanatsu_no_Maken_ga_Shihai_suru) | [Nanatsu no Maken ga Shihai suru](https://subsplease.org/shows/nanatsu-no-maken-ga-shihai-suru) | TV | 15 / 15 | **Finished Airing** | 6.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nanatsu+no+Maken+ga+Shihai+suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50582__nanatsu_no_maken_ga_shihai_suru.txt) | 40 | 11834 | 2023-10-13 16:32 |
| 56179 | [![56179__delico_s_nursery](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56179__delico_s_nursery.jpg)](https://myanimelist.net/anime/56179/Delicos_Nursery) | [Delico's Nursery](https://subsplease.org/shows/delicos-nursery) | TV | 14 / 13 | **Finished Airing** | 6.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Delico+s+Nursery+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56179__delico_s_nursery.txt) | 39 | 3170 | 2024-11-27 18:03 |
| 51020 | [![51020__helck](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51020__helck.jpg)](https://myanimelist.net/anime/51020/Helck) | [Helck](https://subsplease.org/shows/helck) | TV | 24 / 24 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Helck+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51020__helck.txt) | 39 | 15428 | 2023-12-19 18:31 |
| 50346 | [![50346__yofukashi_no_uta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50346__yofukashi_no_uta.jpg)](https://myanimelist.net/anime/50346/Yofukashi_no_Uta) | [Yofukashi no Uta](https://subsplease.org/shows/yofukashi-no-uta) | TV | 13 / 13 | **Finished Airing** | 7.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yofukashi+no+Uta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50346__yofukashi_no_uta.txt) | 39 | 18093 | 2022-09-29 17:31 |
| 49834 | [![49834__boku_ga_aishita_subete_no_kimi_e](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49834__boku_ga_aishita_subete_no_kimi_e.jpg)](https://myanimelist.net/anime/49834/Boku_ga_Aishita_Subete_no_Kimi_e) | [Boku ga Aishita Subete no Kimi e](https://subsplease.org/shows/boku-ga-aishita-subete-no-kimi-e) | Movie | 1 / 1 | **Finished Airing** | 7.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boku+ga+Aishita+Subete+no+Kimi+e+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49834__boku_ga_aishita_subete_no_kimi_e.txt) | 39 | 5731 | 2023-04-21 06:02 |
| 53889 | [![53889__ao_no_exorcist_shimane_illuminati_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53889__ao_no_exorcist_shimane_illuminati_hen.jpg)](https://myanimelist.net/anime/53889/Ao_no_Exorcist__Shimane_Illuminati-hen) | [Ao no Exorcist - Shimane Illuminati-hen](https://subsplease.org/shows/ao-no-exorcist-shimane-illuminati-hen) | TV | 12 / 12 | **Finished Airing** | 7.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ao+no+Exorcist+Shimane+Illuminati+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53889__ao_no_exorcist_shimane_illuminati_hen.txt) | 39 | 10171 | 2024-03-23 17:01 |
| 52359 | [![52359__isekai_de_mofumofu_nadenade_suru_tame_ni_ganbattemasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52359__isekai_de_mofumofu_nadenade_suru_tame_ni_ganbattemasu.jpg)](https://myanimelist.net/anime/52359/Isekai_de_Mofumofu_Nadenade_suru_Tame_ni_Ganbattemasu) | [Isekai de Mofumofu Nadenade suru Tame ni Ganbattemasu](https://subsplease.org/shows/isekai-de-mofumofu-nadenade-suru-tame-ni-ganbattemasu) | TV | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+de+Mofumofu+Nadenade+suru+Tame+ni+Ganbattemasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52359__isekai_de_mofumofu_nadenade_suru_tame_ni_ganbattemasu.txt) | 38 | 9107 | 2024-03-17 14:01 |
| 58357 | [![58357__tensui_no_sakuna_hime](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58357__tensui_no_sakuna_hime.jpg)](https://myanimelist.net/anime/58357/Tensui_no_Sakuna-hime) | [Tensui no Sakuna-hime](https://subsplease.org/shows/tensui-no-sakuna-hime) | TV | 13 / 13 | **Finished Airing** | 6.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensui+no+Sakuna+hime+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58357__tensui_no_sakuna_hime.txt) | 38 | 4964 | 2024-09-28 15:32 |
| 53111 | [![53111__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iv_shin_shou_yakusai_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53111__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iv_shin_shou_yakusai_hen.jpg)](https://myanimelist.net/anime/53111/Dungeon_ni_Deai_wo_Motomeru_no_wa_Machigatteiru_Darou_ka_IV__Shin_Shou_-_Yakusai-hen) | [Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka S4](https://subsplease.org/shows/dungeon-ni-deai-wo-motomeru-no-wa-machigatteiru-darou-ka-s4) | TV | 23 / 11 | **Finished Airing** | 8.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dungeon+ni+Deai+wo+Motomeru+no+wa+Machigatteiru+Darou+ka+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53111__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iv_shin_shou_yakusai_hen.txt) | 38 | 13876 | 2023-03-16 14:01 |
| 50869 | [![50869__kami_wa_game_ni_ueteiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50869__kami_wa_game_ni_ueteiru.jpg)](https://myanimelist.net/anime/50869/Kami_wa_Game_ni_Ueteiru) | [Kami wa Game ni Ueteiru](https://subsplease.org/shows/kami-wa-game-ni-ueteiru) | TV | 13 / 13 | **Finished Airing** | 6.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kami+wa+Game+ni+Ueteiru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50869__kami_wa_game_ni_ueteiru.txt) | 38 | 6308 | 2024-06-24 13:32 |
| 50184 | [![50184__seiken_gakuin_no_makentsukai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50184__seiken_gakuin_no_makentsukai.jpg)](https://myanimelist.net/anime/50184/Seiken_Gakuin_no_Makentsukai) | [Seiken Gakuin no Makentsukai](https://subsplease.org/shows/seiken-gakuin-no-makentsukai) | TV | 12 / 12 | **Finished Airing** | 6.19 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seiken+Gakuin+no+Makentsukai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50184__seiken_gakuin_no_makentsukai.txt) | 38 | 12213 | 2023-12-18 17:02 |
| 56980 | [![56980__karasu_wa_aruji_wo_erabanai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56980__karasu_wa_aruji_wo_erabanai.jpg)](https://myanimelist.net/anime/56980/Karasu_wa_Aruji_wo_Erabanai) | [Karasu wa Aruji wo Erabanai](https://subsplease.org/shows/karasu-wa-aruji-wo-erabanai) | TV | 20 / 20 | **Finished Airing** | 8.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Karasu+wa+Aruji+wo+Erabanai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56980__karasu_wa_aruji_wo_erabanai.txt) | 38 | 4742 | 2024-09-21 18:30 |
| 51815 | [![51815__kubo_san_wa_mob_wo_yurusanai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51815__kubo_san_wa_mob_wo_yurusanai.jpg)](https://myanimelist.net/anime/51815/Kubo-san_wa_Mob_wo_Yurusanai) | [Kubo-san wa Mob wo Yurusanai](https://subsplease.org/shows/kubo-san-wa-mob-wo-yurusanai) | TV | 12 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kubo+san+wa+Mob+wo+Yurusanai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51815__kubo_san_wa_mob_wo_yurusanai.txt) | 38 | 9737 | 2023-06-20 15:31 |
| 50197 | [![50197__ijiranaide_nagatoro_san_2nd_attack](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50197__ijiranaide_nagatoro_san_2nd_attack.jpg)](https://myanimelist.net/anime/50197/Ijiranaide_Nagatoro-san_2nd_Attack) | [Ijiranaide, Nagatoro-san S2](https://subsplease.org/shows/ijiranaide-nagatoro-san-s2) | TV | 12 / 12 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ijiranaide+Nagatoro+san+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50197__ijiranaide_nagatoro_san_2nd_attack.txt) | 38 | 7737 | 2023-03-18 17:31 |
| 40028 | [![40028__shingeki_no_kyojin_the_final_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40028__shingeki_no_kyojin_the_final_season.jpg)](https://myanimelist.net/anime/40028/Shingeki_no_Kyojin__The_Final_Season) | [Shingeki no Kyojin (The Final Season)](https://subsplease.org/shows/shingeki-no-kyojin) | TV | 28 / 16 | **Finished Airing** | 8.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shingeki+no+Kyojin+The+Final+Season+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40028__shingeki_no_kyojin_the_final_season.txt) | 37 | 16087 | 2022-04-03 20:46 |
| 53262 | [![53262__hoshikuzu_telepath](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53262__hoshikuzu_telepath.jpg)](https://myanimelist.net/anime/53262/Hoshikuzu_Telepath) | [Hoshikuzu Telepath](https://subsplease.org/shows/hoshikuzu-telepath) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hoshikuzu+Telepath+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53262__hoshikuzu_telepath.txt) | 37 | 5399 | 2023-12-25 13:32 |
| 54632 | [![54632__gekai_elise](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54632__gekai_elise.jpg)](https://myanimelist.net/anime/54632/Gekai_Elise) | [Gekai Elise](https://subsplease.org/shows/gekai-elise) | TV | 12 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gekai+Elise+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54632__gekai_elise.txt) | 36 | 7697 | 2024-03-27 13:31 |
| 55774 | [![55774__himesama_goumon_no_jikan_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55774__himesama_goumon_no_jikan_desu.jpg)](https://myanimelist.net/anime/55774/Himesama_Goumon_no_Jikan_desu) | [Hime-sama Goumon no Jikan desu](https://subsplease.org/shows/hime-sama-goumon-no-jikan-desu) | TV | 12 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hime+sama+Goumon+no+Jikan+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55774__himesama_goumon_no_jikan_desu.txt) | 36 | 8165 | 2024-03-25 16:31 |
| 53446 | [![53446__tondemo_skill_de_isekai_hourou_meshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53446__tondemo_skill_de_isekai_hourou_meshi.jpg)](https://myanimelist.net/anime/53446/Tondemo_Skill_de_Isekai_Hourou_Meshi) | [Tondemo Skill de Isekai Hourou Meshi](https://subsplease.org/shows/tondemo-skill-de-isekai-hourou-meshi) | TV | 12 / 12 | **Finished Airing** | 7.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tondemo+Skill+de+Isekai+Hourou+Meshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53446__tondemo_skill_de_isekai_hourou_meshi.txt) | 36 | 11039 | 2023-03-28 16:31 |
| 52305 | [![52305__tomo_chan_wa_onnanoko](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52305__tomo_chan_wa_onnanoko.jpg)](https://myanimelist.net/anime/52305/Tomo-chan_wa_Onnanoko) | [Tomo-chan wa Onnanoko!](https://subsplease.org/shows/tomo-chan-wa-onnanoko) | TV | 13 / 13 | **Finished Airing** | 7.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tomo+chan+wa+Onnanoko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52305__tomo_chan_wa_onnanoko.txt) | 36 | 11590 | 2023-03-29 17:01 |
| 51764 | [![51764__level_1_dakedo_unique_skill_de_saikyou_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51764__level_1_dakedo_unique_skill_de_saikyou_desu.jpg)](https://myanimelist.net/anime/51764/Level_1_dakedo_Unique_Skill_de_Saikyou_desu) | [Level 1 dakedo Unique Skill de Saikyou desu](https://subsplease.org/shows/level-1-dakedo-unique-skill-de-saikyou-desu) | TV | 12 / 12 | **Finished Airing** | 6.22 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Level+1+dakedo+Unique+Skill+de+Saikyou+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51764__level_1_dakedo_unique_skill_de_saikyou_desu.txt) | 36 | 10251 | 2023-09-23 14:06 |
| 54789 | [![54789__boku_no_hero_academia_7th_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54789__boku_no_hero_academia_7th_season.jpg)](https://myanimelist.net/anime/54789/Boku_no_Hero_Academia_7th_Season) | [Boku no Hero Academia](https://subsplease.org/shows/boku-no-hero-academia) | TV | 52 / 21 | **Finished Airing** | 8.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boku+no+Hero+Academia+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54789__boku_no_hero_academia_7th_season.txt) | 36 | 16555 | 2024-10-12 09:32 |
| 51215 | [![51215__seijo_no_maryoku_wa_bannou_desu_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51215__seijo_no_maryoku_wa_bannou_desu_season_2.jpg)](https://myanimelist.net/anime/51215/Seijo_no_Maryoku_wa_Bannou_desu_Season_2) | [Seijo no Maryoku wa Bannou Desu S2](https://subsplease.org/shows/seijo-no-maryoku-wa-bannou-desu-s2) | TV | 12 / 12 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seijo+no+Maryoku+wa+Bannou+Desu+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51215__seijo_no_maryoku_wa_bannou_desu_season_2.txt) | 36 | 9425 | 2023-12-19 16:41 |
| 48736 | [![48736__sono_bisque_doll_wa_koi_wo_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48736__sono_bisque_doll_wa_koi_wo_suru.jpg)](https://myanimelist.net/anime/48736/Sono_Bisque_Doll_wa_Koi_wo_Suru) | [Sono Bisque Doll wa Koi wo Suru](https://subsplease.org/shows/sono-bisque-doll-wa-koi-wo-suru) | TV | 12 / 12 | **Finished Airing** | 8.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sono+Bisque+Doll+wa+Koi+wo+Suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48736__sono_bisque_doll_wa_koi_wo_suru.txt) | 36 | 17044 | 2022-03-26 16:31 |
| 53879 | [![53879__kamonohashi_ron_no_kindan_suiri](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53879__kamonohashi_ron_no_kindan_suiri.jpg)](https://myanimelist.net/anime/53879/Kamonohashi_Ron_no_Kindan_Suiri) | [Kamonohashi Ron no Kindan Suiri](https://subsplease.org/shows/kamonohashi-ron-no-kindan-suiri) | TV | 26 / 13 | **Finished Airing** | 7.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kamonohashi+Ron+no+Kindan+Suiri+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53879__kamonohashi_ron_no_kindan_suiri.txt) | 35 | 5032 | 2024-12-30 15:32 |
| 51693 | [![51693__kaminaki_sekai_no_kamisama_katsudou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51693__kaminaki_sekai_no_kamisama_katsudou.jpg)](https://myanimelist.net/anime/51693/Kaminaki_Sekai_no_Kamisama_Katsudou) | [Kaminaki Sekai no Kamisama Katsudou](https://subsplease.org/shows/kaminaki-sekai-no-kamisama-katsudou) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaminaki+Sekai+no+Kamisama+Katsudou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51693__kaminaki_sekai_no_kamisama_katsudou.txt) | 35 | 13377 | 2023-07-05 16:31 |
| 49596 | [![49596__blue_lock](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49596__blue_lock.jpg)](https://myanimelist.net/anime/49596/Blue_Lock) | [Blue Lock](https://subsplease.org/shows/blue-lock) | TV | 38 / 24 | **Finished Airing** | 8.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Blue+Lock+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49596__blue_lock.txt) | 35 | 11178 | 2024-12-28 17:16 |
| 54431 | [![54431__toaru_ossan_no_vrmmo_katsudouki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54431__toaru_ossan_no_vrmmo_katsudouki.jpg)](https://myanimelist.net/anime/54431/Toaru_Ossan_no_VRMMO_Katsudouki) | [Toaru Ossan no VRMMO Katsudouki](https://subsplease.org/shows/toaru-ossan-no-vrmmo-katsudouki) | TV | 12 / 12 | **Finished Airing** | 6.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Toaru+Ossan+no+VRMMO+Katsudouki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54431__toaru_ossan_no_vrmmo_katsudouki.txt) | 34 | 9360 | 2023-12-18 18:05 |
| 53237 | [![53237__shy](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53237__shy.jpg)](https://myanimelist.net/anime/53237/Shy) | [SHY](https://subsplease.org/shows/shy) | TV | 24 / 12 | **Finished Airing** | 6.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+SHY+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53237__shy.txt) | 34 | 6114 | 2024-09-23 16:02 |
| 53450 | [![53450__xian_wang_de_richang_shenghuo_4](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53450__xian_wang_de_richang_shenghuo_4.jpg)](https://myanimelist.net/anime/53450/Xian_Wang_de_Richang_Shenghuo_4) | [The Daily Life of the Immortal King S4](https://subsplease.org/shows/the-daily-life-of-the-immortal-king-s4) | ONA | 12 / 12 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Daily+Life+of+the+Immortal+King+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53450__xian_wang_de_richang_shenghuo_4.txt) | 34 | 4908 | 2024-02-25 04:01 |
| 51461 | [![51461__tonari_no_youkai_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51461__tonari_no_youkai_san.jpg)](https://myanimelist.net/anime/51461/Tonari_no_Youkai-san) | [Tonari no Youkai-san](https://subsplease.org/shows/tonari-no-youkai-san) | TV | 13 / 13 | **Finished Airing** | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tonari+no+Youkai+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51461__tonari_no_youkai_san.txt) | 34 | 3985 | 2024-06-29 18:47 |
| 50710 | [![50710__urusei_yatsura_2022](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50710__urusei_yatsura_2022.jpg)](https://myanimelist.net/anime/50710/Urusei_Yatsura_2022) | [Urusei Yatsura (2022)](https://subsplease.org/shows/urusei-yatsura-2022) | TV | 46 / 23 | **Finished Airing** | 7.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Urusei+Yatsura+2022+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50710__urusei_yatsura_2022.txt) | 34 | 7051 | 2024-06-21 15:34 |
| 50416 | [![50416__skip_to_loafer](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50416__skip_to_loafer.jpg)](https://myanimelist.net/anime/50416/Skip_to_Loafer) | [Skip to Loafer](https://subsplease.org/shows/skip-to-loafer) | TV | 12 / 12 | **Finished Airing** | 8.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Skip+to+Loafer+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50416__skip_to_loafer.txt) | 34 | 11555 | 2023-06-20 15:31 |
| 50307 | [![50307__tonikaku_kawaii_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50307__tonikaku_kawaii_2nd_season.jpg)](https://myanimelist.net/anime/50307/Tonikaku_Kawaii_2nd_Season) | [Tonikaku Kawaii S2](https://subsplease.org/shows/tonikaku-kawaii-s2) | TV | 12 / 12 | **Finished Airing** | 7.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tonikaku+Kawaii+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50307__tonikaku_kawaii_2nd_season.txt) | 34 | 7719 | 2023-06-23 17:16 |
| 49835 | [![49835__kimi_wo_aishita_hitori_no_boku_e](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49835__kimi_wo_aishita_hitori_no_boku_e.jpg)](https://myanimelist.net/anime/49835/Kimi_wo_Aishita_Hitori_no_Boku_e) | [Kimi wo Aishita Hitori no Boku e](https://subsplease.org/shows/kimi-wo-aishita-hitori-no-boku-e) | Movie | 1 / 1 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+wo+Aishita+Hitori+no+Boku+e+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49835__kimi_wo_aishita_hitori_no_boku_e.txt) | 34 | 5448 | 2023-04-21 06:03 |
| 50481 | [![50481__eiyuuou_bu_wo_kiwameru_tame_tenseisu_soshite_sekai_saikyou_no_minarai_kishi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50481__eiyuuou_bu_wo_kiwameru_tame_tenseisu_soshite_sekai_saikyou_no_minarai_kishi.jpg)](https://myanimelist.net/anime/50481/Eiyuuou_Bu_wo_Kiwameru_Tame_Tenseisu__Soshite_Sekai_Saikyou_no_Minarai_Kishi♀) | [Eiyuuou, Bu wo Kiwameru Tame Tenseisu](https://subsplease.org/shows/eiyuuou-bu-wo-kiwameru-tame-tenseisu) | TV | 12 / 12 | **Finished Airing** | 6.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Eiyuuou+Bu+wo+Kiwameru+Tame+Tenseisu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50481__eiyuuou_bu_wo_kiwameru_tame_tenseisu_soshite_sekai_saikyou_no_minarai_kishi.txt) | 34 | 8505 | 2023-03-27 18:16 |
| 51252 | [![51252__spy_kyoushitsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51252__spy_kyoushitsu.jpg)](https://myanimelist.net/anime/51252/Spy_Kyoushitsu) | [Spy Kyoushitsu](https://subsplease.org/shows/spy-kyoushitsu) | TV | 24 / 12 | **Finished Airing** | 6.4 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Spy+Kyoushitsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51252__spy_kyoushitsu.txt) | 33 | 7314 | 2023-09-28 17:32 |
| 50854 | [![50854__benriya_saitou_san_isekai_ni_iku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50854__benriya_saitou_san_isekai_ni_iku.jpg)](https://myanimelist.net/anime/50854/Benriya_Saitou-san_Isekai_ni_Iku) | [Benriya Saitou-san, Isekai ni Iku](https://subsplease.org/shows/benriya-saitou-san-isekai-ni-iku) | TV | 12 / 12 | **Finished Airing** | 7.4 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Benriya+Saitou+san+Isekai+ni+Iku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50854__benriya_saitou_san_isekai_ni_iku.txt) | 33 | 9683 | 2023-03-26 14:32 |
| 50583 | [![50583__buta_no_liver_wa_kanetsu_shiro](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50583__buta_no_liver_wa_kanetsu_shiro.jpg)](https://myanimelist.net/anime/50583/Buta_no_Liver_wa_Kanetsu_Shiro) | [Buta no Liver wa Kanetsu Shiro](https://subsplease.org/shows/buta-no-liver-wa-kanetsu-shiro) | TV | 12 / 12 | **Finished Airing** | 6.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Buta+no+Liver+wa+Kanetsu+Shiro+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50583__buta_no_liver_wa_kanetsu_shiro.txt) | 33 | 6316 | 2024-02-06 05:24 |
| 49891 | [![49891__tensei_shitara_ken_deshita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49891__tensei_shitara_ken_deshita.jpg)](https://myanimelist.net/anime/49891/Tensei_shitara_Ken_deshita) | [Tensei Shitara Ken Deshita](https://subsplease.org/shows/tensei-shitara-ken-deshita) | TV | 12 / 12 | **Finished Airing** | 7.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Shitara+Ken+Deshita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49891__tensei_shitara_ken_deshita.txt) | 33 | 14002 | 2022-12-21 14:51 |
| 49709 | [![49709__fumetsu_no_anata_e_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49709__fumetsu_no_anata_e_season_2.jpg)](https://myanimelist.net/anime/49709/Fumetsu_no_Anata_e_Season_2) | [Fumetsu no Anata e S2](https://subsplease.org/shows/fumetsu-no-anata-e-s2) | TV | 20 / 20 | **Finished Airing** | 8.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fumetsu+no+Anata+e+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49709__fumetsu_no_anata_e_season_2.txt) | 33 | 7265 | 2023-03-12 12:31 |
| 57325 | [![57325__ramen_akaneko](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57325__ramen_akaneko.jpg)](https://myanimelist.net/anime/57325/Ramen_Akaneko) | [Ramen Akaneko](https://subsplease.org/shows/ramen-akaneko) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ramen+Akaneko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57325__ramen_akaneko.txt) | 32 | 3463 | 2024-09-19 16:01 |
| 54743 | [![54743__dead_mount_death_play_part_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54743__dead_mount_death_play_part_2.jpg)](https://myanimelist.net/anime/54743/Dead_Mount_Death_Play_Part_2) | [Dead Mount Death Play](https://subsplease.org/shows/dead-mount-death-play) | TV | 24 / 12 | **Finished Airing** | 7.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dead+Mount+Death+Play+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54743__dead_mount_death_play_part_2.txt) | 32 | 13135 | 2023-12-25 16:31 |
| 54301 | [![54301__overtake](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54301__overtake.jpg)](https://myanimelist.net/anime/54301/Overtake) | [Overtake!](https://subsplease.org/shows/overtake) | TV | 12 / 12 | **Finished Airing** | 7.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Overtake+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54301__overtake.txt) | 32 | 6668 | 2023-12-17 15:37 |
| 53494 | [![53494__boukensha_ni_naritai_to_miyako_ni_deteitta_musume_ga_s_rank_ni_natteta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53494__boukensha_ni_naritai_to_miyako_ni_deteitta_musume_ga_s_rank_ni_natteta.jpg)](https://myanimelist.net/anime/53494/Boukensha_ni_Naritai_to_Miyako_ni_Deteitta_Musume_ga_S-Rank_ni_Natteta) | [S-Rank Musume](https://subsplease.org/shows/s-rank-musume) | TV | 13 / 13 | **Finished Airing** | 6.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+S+Rank+Musume+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53494__boukensha_ni_naritai_to_miyako_ni_deteitta_musume_ga_s_rank_ni_natteta.txt) | 32 | 10546 | 2023-12-21 15:05 |
| 50803 | [![50803__jaku_chara_tomozaki_kun_2nd_stage](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50803__jaku_chara_tomozaki_kun_2nd_stage.jpg)](https://myanimelist.net/anime/50803/Jaku-Chara_Tomozaki-kun_2nd_Stage) | [Jaku-Chara Tomozaki-kun S2](https://subsplease.org/shows/jaku-chara-tomozaki-kun-s2) | TV | 13 / 13 | **Finished Airing** | 7.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jaku+Chara+Tomozaki+kun+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50803__jaku_chara_tomozaki_kun_2nd_stage.txt) | 32 | 6767 | 2024-03-27 12:32 |
| 48633 | [![48633__liar_liar](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48633__liar_liar.jpg)](https://myanimelist.net/anime/48633/Liar_Liar) | [Liar Liar](https://subsplease.org/shows/liar-liar) | TV | 12 / 12 | **Finished Airing** | 6.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Liar+Liar+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48633__liar_liar.txt) | 32 | 8920 | 2023-09-16 15:01 |
| 53040 | [![53040__kanojo_mo_kanojo_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53040__kanojo_mo_kanojo_season_2.jpg)](https://myanimelist.net/anime/53040/Kanojo_mo_Kanojo_Season_2) | [Kanojo mo Kanojo S2](https://subsplease.org/shows/kanojo-mo-kanojo-s2) | TV | 12 / 12 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kanojo+mo+Kanojo+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53040__kanojo_mo_kanojo_season_2.txt) | 32 | 7303 | 2023-12-22 20:01 |
| 51678 | [![51678__oniichan_wa_oshimai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51678__oniichan_wa_oshimai.jpg)](https://myanimelist.net/anime/51678/Oniichan_wa_Oshimai) | [Oniichan wa Oshimai!](https://subsplease.org/shows/oniichan-wa-oshimai) | TV | 12 / 12 | **Finished Airing** | 7.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Oniichan+wa+Oshimai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51678__oniichan_wa_oshimai.txt) | 32 | 5753 | 2023-03-23 15:31 |
| 51462 | [![51462__isekai_nonbiri_nouka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51462__isekai_nonbiri_nouka.jpg)](https://myanimelist.net/anime/51462/Isekai_Nonbiri_Nouka) | [Isekai Nonbiri Nouka](https://subsplease.org/shows/isekai-nonbiri-nouka) | TV | 12 / 12 | **Finished Airing** | 7.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Nonbiri+Nouka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51462__isekai_nonbiri_nouka.txt) | 32 | 13546 | 2023-03-24 13:02 |
| 58302 | [![58302__the_idolm_ster_shiny_colors_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58302__the_idolm_ster_shiny_colors_2nd_season.jpg)](https://myanimelist.net/anime/58302/The_iDOLMSTER_Shiny_Colors_2nd_Season) | [The iDOLM@STER Shiny Colors S2](https://subsplease.org/shows/the-idolmster-shiny-colors-s2) | TV | 12 / 12 | **Finished Airing** | 6.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+iDOLM+STER+Shiny+Colors+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58302__the_idolm_ster_shiny_colors_2nd_season.txt) | 31 | 1814 | 2024-12-20 18:32 |
| 57184 | [![57184__great_pretender_razbliuto](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57184__great_pretender_razbliuto.jpg)](https://myanimelist.net/anime/57184/Great_Pretender__Razbliuto) | [Great Pretender - Razbliuto](https://subsplease.org/shows/great-pretender-razbliuto) | ONA | 1 / 4 | **Finished Airing** | 6.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Great+Pretender+Razbliuto+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57184__great_pretender_razbliuto.txt) | 31 | 5128 | 2024-02-24 05:07 |
| 54234 | [![54234__suki_na_ko_ga_megane_wo_wasureta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54234__suki_na_ko_ga_megane_wo_wasureta.jpg)](https://myanimelist.net/anime/54234/Suki_na_Ko_ga_Megane_wo_Wasureta) | [Suki na Ko ga Megane wo Wasureta](https://subsplease.org/shows/suki-na-ko-ga-megane-wo-wasureta) | TV | 13 / 13 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Suki+na+Ko+ga+Megane+wo+Wasureta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54234__suki_na_ko_ga_megane_wo_wasureta.txt) | 31 | 8948 | 2023-09-26 15:11 |
| 52193 | [![52193__akiba_meido_sensou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52193__akiba_meido_sensou.jpg)](https://myanimelist.net/anime/52193/Akiba_Meido_Sensou) | [Akiba Maid Sensou](https://subsplease.org/shows/akiba-maid-sensou) | TV | 12 / 12 | **Finished Airing** | 7.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akiba+Maid+Sensou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52193__akiba_meido_sensou.txt) | 31 | 7264 | 2022-12-22 17:02 |
| 41567 | [![41567__isekai_quartet_movie_another_world](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41567__isekai_quartet_movie_another_world.jpg)](https://myanimelist.net/anime/41567/Isekai_Quartet_Movie__Another_World) | [Isekai Quartet Movie - Another World](https://subsplease.org/shows/isekai-quartet-movie-another-world) | Movie | 1 / 1 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Quartet+Movie+Another+World+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41567__isekai_quartet_movie_another_world.txt) | 31 | 3502 | 2023-01-04 05:29 |
| 57519 | [![57519__boku_no_hero_academia_memories](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57519__boku_no_hero_academia_memories.jpg)](https://myanimelist.net/anime/57519/Boku_no_Hero_Academia__Memories) | [Boku No Hero Academia Memories](https://subsplease.org/shows/boku-no-hero-academia-memories) | TV Special | 4 / 4 | **Finished Airing** | 6.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boku+No+Hero+Academia+Memories+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57519__boku_no_hero_academia_memories.txt) | 30 | 6716 | 2024-04-27 09:32 |
| 52093 | [![52093__trigun_stampede](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52093__trigun_stampede.jpg)](https://myanimelist.net/anime/52093/Trigun_Stampede) | [Trigun Stampede](https://subsplease.org/shows/trigun-stampede) | TV | 12 / 12 | **Finished Airing** | 7.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Trigun+Stampede+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52093__trigun_stampede.txt) | 30 | 10461 | 2023-03-25 15:32 |
| 41497 | [![41497__fate_grand_order_shuukyoku_tokuiten_kani_jikan_shinden_solomon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41497__fate_grand_order_shuukyoku_tokuiten_kani_jikan_shinden_solomon.jpg)](https://myanimelist.net/anime/41497/Fate_Grand_Order__Shuukyoku_Tokuiten_-_Kani_Jikan_Shinden_Solomon) | [Fate Grand Order - Final Singularity - The Grand Temple of Time Solomon](https://subsplease.org/shows/fate-grand-order-final-singularity-the-grand-temple-of-time-solomon-2) | Movie | 1 / 1 | **Finished Airing** | 7.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fate+Grand+Order+Final+Singularity+The+Grand+Temple+of+Time+Solomon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41497__fate_grand_order_shuukyoku_tokuiten_kani_jikan_shinden_solomon.txt) | 30 | 5106 | 2022-02-18 21:46 |
| 54760 | [![54760__ryza_no_atelier_tokoyami_no_joou_to_himitsu_no_kakurega](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54760__ryza_no_atelier_tokoyami_no_joou_to_himitsu_no_kakurega.jpg)](https://myanimelist.net/anime/54760/Ryza_no_Atelier__Tokoyami_no_Joou_to_Himitsu_no_Kakurega) | [Ryza no Atelier](https://subsplease.org/shows/ryza-no-atelier) | TV | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ryza+no+Atelier+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54760__ryza_no_atelier_tokoyami_no_joou_to_himitsu_no_kakurega.txt) | 29 | 8375 | 2023-09-16 16:31 |
| 52505 | [![52505__dark_gathering](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52505__dark_gathering.jpg)](https://myanimelist.net/anime/52505/Dark_Gathering) | [Dark Gathering](https://subsplease.org/shows/dark-gathering) | TV | 25 / 25 | **Finished Airing** | 7.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dark+Gathering+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52505__dark_gathering.txt) | 29 | 10525 | 2023-12-24 17:00 |
| 51916 | [![51916__dekiru_neko_wa_kyou_mo_yuuutsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51916__dekiru_neko_wa_kyou_mo_yuuutsu.jpg)](https://myanimelist.net/anime/51916/Dekiru_Neko_wa_Kyou_mo_Yuuutsu) | [Dekiru Neko wa Kyou mo Yuuutsu](https://subsplease.org/shows/dekiru-neko-wa-kyou-mo-yuuutsu) | TV | 13 / 13 | **Finished Airing** | 7.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dekiru+Neko+wa+Kyou+mo+Yuuutsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51916__dekiru_neko_wa_kyou_mo_yuuutsu.txt) | 29 | 8177 | 2023-09-29 18:46 |
| 43608 | [![43608__kaguya_sama_wa_kokurasetai_ultra_romantic](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43608__kaguya_sama_wa_kokurasetai_ultra_romantic.jpg)](https://myanimelist.net/anime/43608/Kaguya-sama_wa_Kokurasetai__Ultra_Romantic) | [Kaguya-sama wa Kokurasetai S3](https://subsplease.org/shows/kaguya-sama-wa-kokurasetai-s3) | TV | 13 / 13 | **Finished Airing** | 8.99 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaguya+sama+wa+Kokurasetai+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43608__kaguya_sama_wa_kokurasetai_ultra_romantic.txt) | 29 | 11041 | 2022-06-26 02:10 |
| 35678 | [![35678__hibike_euphonium_movie_3_chikai_no_finale](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/35678__hibike_euphonium_movie_3_chikai_no_finale.jpg)](https://myanimelist.net/anime/35678/Hibike_Euphonium_Movie_3__Chikai_no_Finale) | [Hibike! Euphonium - Chikai no Finale](https://subsplease.org/shows/hibike-euphonium-chikai-no-finale) | Movie | 1 / 1 | **Finished Airing** | 7.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hibike+Euphonium+Chikai+no+Finale+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/35678__hibike_euphonium_movie_3_chikai_no_finale.txt) | 29 | 3584 | 2024-04-17 03:03 |
| 52608 | [![52608__tensei_kizoku_no_isekai_boukenroku_jichou_wo_shiranai_kamigami_no_shito](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52608__tensei_kizoku_no_isekai_boukenroku_jichou_wo_shiranai_kamigami_no_shito.jpg)](https://myanimelist.net/anime/52608/Tensei_Kizoku_no_Isekai_Boukenroku__Jichou_wo_Shiranai_Kamigami_no_Shito) | [Tensei Kizoku no Isekai Boukenroku](https://subsplease.org/shows/tensei-kizoku-no-isekai-boukenroku) | TV | 12 / 12 | **Finished Airing** | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Kizoku+no+Isekai+Boukenroku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52608__tensei_kizoku_no_isekai_boukenroku_jichou_wo_shiranai_kamigami_no_shito.txt) | 29 | 10582 | 2023-06-18 13:01 |
| 52081 | [![52081__edomae_elf](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52081__edomae_elf.jpg)](https://myanimelist.net/anime/52081/Edomae_Elf) | [Edomae Elf](https://subsplease.org/shows/edomae-elf) | TV | 12 / 12 | **Finished Airing** | 7.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Edomae+Elf+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52081__edomae_elf.txt) | 29 | 8609 | 2023-06-23 18:26 |
| 49766 | [![49766__under_ninja](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49766__under_ninja.jpg)](https://myanimelist.net/anime/49766/Under_Ninja) | [Under Ninja](https://subsplease.org/shows/under-ninja) | TV | 12 / 12 | **Finished Airing** | 6.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Under+Ninja+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49766__under_ninja.txt) | 29 | 8120 | 2023-12-21 18:51 |
| 52308 | [![52308__kanojo_ga_koushaku_tei_ni_itta_riyuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52308__kanojo_ga_koushaku_tei_ni_itta_riyuu.jpg)](https://myanimelist.net/anime/52308/Kanojo_ga_Koushaku-tei_ni_Itta_Riyuu) | [Kanojo ga Koushaku-tei ni Itta Riyuu](https://subsplease.org/shows/kanojo-ga-koushaku-tei-ni-itta-riyuu) | TV | 12 / 12 | **Finished Airing** | 7.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kanojo+ga+Koushaku+tei+ni+Itta+Riyuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52308__kanojo_ga_koushaku_tei_ni_itta_riyuu.txt) | 28 | 5401 | 2023-06-26 13:01 |
| 53632 | [![53632__yumemiru_danshi_wa_genjitsushugisha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53632__yumemiru_danshi_wa_genjitsushugisha.jpg)](https://myanimelist.net/anime/53632/Yumemiru_Danshi_wa_Genjitsushugisha) | [Yumemiru Danshi wa Genjitsushugisha](https://subsplease.org/shows/yumemiru-danshi-wa-genjitsushugisha) | TV | 12 / 12 | **Finished Airing** | 6.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yumemiru+Danshi+wa+Genjitsushugisha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53632__yumemiru_danshi_wa_genjitsushugisha.txt) | 28 | 8799 | 2023-09-18 18:01 |
| 52082 | [![52082__shiro_seijo_to_kuro_bokushi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52082__shiro_seijo_to_kuro_bokushi.jpg)](https://myanimelist.net/anime/52082/Shiro_Seijo_to_Kuro_Bokushi) | [Shiro Seijo to Kuro Bokushi](https://subsplease.org/shows/shiro-seijo-to-kuro-bokushi) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shiro+Seijo+to+Kuro+Bokushi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52082__shiro_seijo_to_kuro_bokushi.txt) | 28 | 6139 | 2023-09-27 17:02 |
| 49413 | [![49413__shiguang_dailiren_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49413__shiguang_dailiren_ii.jpg)](https://myanimelist.net/anime/49413/Shiguang_Dailiren_II) | [Link Click S2](https://subsplease.org/shows/link-click-s2) | ONA | 12 / 12 | **Finished Airing** | 8.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Link+Click+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49413__shiguang_dailiren_ii.txt) | 28 | 5578 | 2023-09-22 04:01 |
| 52985 | [![52985__dekoboko_majo_no_oyako_jijou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52985__dekoboko_majo_no_oyako_jijou.jpg)](https://myanimelist.net/anime/52985/Dekoboko_Majo_no_Oyako_Jijou) | [Dekoboko Majo no Oyako Jijou](https://subsplease.org/shows/dekoboko-majo-no-oyako-jijou) | TV | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dekoboko+Majo+no+Oyako+Jijou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52985__dekoboko_majo_no_oyako_jijou.txt) | 28 | 6854 | 2023-12-10 15:21 |
| 52934 | [![52934__konyaku_haki_sareta_reijou_wo_hirotta_ore_ga_ikenai_koto_wo_oshiekomu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52934__konyaku_haki_sareta_reijou_wo_hirotta_ore_ga_ikenai_koto_wo_oshiekomu.jpg)](https://myanimelist.net/anime/52934/Konyaku_Haki_sareta_Reijou_wo_Hirotta_Ore_ga_Ikenai_Koto_wo_Oshiekomu) | [Ikenaikyo](https://subsplease.org/shows/ikenaikyo) | TV | 12 / 12 | **Finished Airing** | 7.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ikenaikyo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52934__konyaku_haki_sareta_reijou_wo_hirotta_ore_ga_ikenai_koto_wo_oshiekomu.txt) | 28 | 7713 | 2023-12-20 14:25 |
| 50220 | [![50220__isekai_shoukan_wa_nidome_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50220__isekai_shoukan_wa_nidome_desu.jpg)](https://myanimelist.net/anime/50220/Isekai_Shoukan_wa_Nidome_desu) | [Isekai Shoukan wa Nidome desu](https://subsplease.org/shows/isekai-shoukan-wa-nidome-desu) | TV | 12 / 12 | **Finished Airing** | 5.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Shoukan+wa+Nidome+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50220__isekai_shoukan_wa_nidome_desu.txt) | 28 | 10339 | 2023-06-24 18:46 |
| 55651 | [![55651__tonikaku_kawaii_joshikou_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55651__tonikaku_kawaii_joshikou_hen.jpg)](https://myanimelist.net/anime/55651/Tonikaku_Kawaii__Joshikou-hen) | [Tonikaku Kawaii - Joshikou-hen](https://subsplease.org/shows/tonikaku-kawaii-joshikou-hen) | ONA | 4 / 4 | **Finished Airing** | 7.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tonikaku+Kawaii+Joshikou+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55651__tonikaku_kawaii_joshikou_hen.txt) | 27 | 5180 | 2023-08-23 03:31 |
| 53526 | [![53526__uma_musume_pretty_derby_season_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53526__uma_musume_pretty_derby_season_3.jpg)](https://myanimelist.net/anime/53526/Uma_Musume__Pretty_Derby_Season_3) | [Uma Musume - Pretty Derby S3](https://subsplease.org/shows/uma-musume-pretty-derby-s3) | TV | 13 / 13 | **Finished Airing** | 7.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uma+Musume+Pretty+Derby+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53526__uma_musume_pretty_derby_season_3.txt) | 27 | 5149 | 2023-12-27 17:01 |
| 53263 | [![53263__seija_musou_salaryman_isekai_de_ikinokoru_tame_ni_ayumu_michi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53263__seija_musou_salaryman_isekai_de_ikinokoru_tame_ni_ayumu_michi.jpg)](https://myanimelist.net/anime/53263/Seija_Musou__Salaryman_Isekai_de_Ikinokoru_Tame_ni_Ayumu_Michi) | [Seija Musou](https://subsplease.org/shows/seija-musou) | TV | 12 / 12 | **Finished Airing** | 7.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seija+Musou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53263__seija_musou_salaryman_isekai_de_ikinokoru_tame_ni_ayumu_michi.txt) | 27 | 9861 | 2023-09-21 17:58 |
| 51706 | [![51706__yuusha_ga_shinda](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51706__yuusha_ga_shinda.jpg)](https://myanimelist.net/anime/51706/Yuusha_ga_Shinda) | [Yuusha ga Shinda!](https://subsplease.org/shows/yuusha-ga-shinda) | TV | 12 / 12 | **Finished Airing** | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuusha+ga+Shinda+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51706__yuusha_ga_shinda.txt) | 27 | 9399 | 2023-06-22 16:31 |
| 49894 | [![49894__eiyuu_kyoushitsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49894__eiyuu_kyoushitsu.jpg)](https://myanimelist.net/anime/49894/Eiyuu_Kyoushitsu) | [Eiyuu Kyoushitsu](https://subsplease.org/shows/eiyuu-kyoushitsu) | TV | 12 / 12 | **Finished Airing** | 6.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Eiyuu+Kyoushitsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49894__eiyuu_kyoushitsu.txt) | 27 | 8942 | 2023-09-24 14:31 |
| 45486 | [![45486__kuma_kuma_kuma_bear_punch](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45486__kuma_kuma_kuma_bear_punch.jpg)](https://myanimelist.net/anime/45486/Kuma_Kuma_Kuma_Bear_Punch) | [Kuma Kuma Kuma Bear S2](https://subsplease.org/shows/kuma-kuma-kuma-bear-s2) | TV | 12 / 12 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kuma+Kuma+Kuma+Bear+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45486__kuma_kuma_kuma_bear_punch.txt) | 27 | 6116 | 2023-06-19 14:01 |
| 50205 | [![50205__arknights_reimei_zensou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50205__arknights_reimei_zensou.jpg)](https://myanimelist.net/anime/50205/Arknights__Reimei_Zensou) | [Arknights - Reimei Zensou](https://subsplease.org/shows/arknights-reimei-zensou) | TV | 8 / 8 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Arknights+Reimei+Zensou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50205__arknights_reimei_zensou.txt) | 27 | 5774 | 2023-11-24 18:01 |
| 49612 | [![49612__ningen_fushin_no_boukensha_tachi_ga_sekai_wo_sukuu_you_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49612__ningen_fushin_no_boukensha_tachi_ga_sekai_wo_sukuu_you_desu.jpg)](https://myanimelist.net/anime/49612/Ningen_Fushin_no_Boukensha-tachi_ga_Sekai_wo_Sukuu_you_desu) | [Ningen Fushin](https://subsplease.org/shows/ningen-fushin) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ningen+Fushin+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49612__ningen_fushin_no_boukensha_tachi_ga_sekai_wo_sukuu_you_desu.txt) | 27 | 7813 | 2023-03-21 15:01 |
| 47778 | [![47778__kimetsu_no_yaiba_yuukaku_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47778__kimetsu_no_yaiba_yuukaku_hen.jpg)](https://myanimelist.net/anime/47778/Kimetsu_no_Yaiba__Yuukaku-hen) | [Kimetsu no Yaiba - Yuukaku-hen](https://subsplease.org/shows/kimetsu-no-yaiba-yuukaku-hen) | TV | 11 / 11 | **Finished Airing** | 8.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimetsu+no+Yaiba+Yuukaku+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47778__kimetsu_no_yaiba_yuukaku_hen.txt) | 26 | 22521 | 2022-02-13 16:02 |
| 55855 | [![55855__kuroshitsuji_kishuku_gakkou_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55855__kuroshitsuji_kishuku_gakkou_hen.jpg)](https://myanimelist.net/anime/55855/Kuroshitsuji__Kishuku_Gakkou-hen) | [Kuroshitsuji - Kishuku Gakkou-hen](https://subsplease.org/shows/kuroshitsuji-kishuku-gakkou-hen) | TV | 11 / 11 | **Finished Airing** | 7.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kuroshitsuji+Kishuku+Gakkou+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55855__kuroshitsuji_kishuku_gakkou_hen.txt) | 26 | 4880 | 2024-06-22 16:02 |
| 54857 | [![54857__re_zero_kara_hajimeru_isekai_seikatsu_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54857__re_zero_kara_hajimeru_isekai_seikatsu_3rd_season.jpg)](https://myanimelist.net/anime/54857/Re_Zero_kara_Hajimeru_Isekai_Seikatsu_3rd_Season) | [Re Zero kara Hajimeru Isekai Seikatsu](https://subsplease.org/shows/re-zero-kara-hajimeru-isekai-seikatsu) | TV | 20 / 16 | Currently Airing | 8.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Re+Zero+kara+Hajimeru+Isekai+Seikatsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54857__re_zero_kara_hajimeru_isekai_seikatsu_3rd_season.txt) | 26 | 17652 | 2024-11-20 15:30 |
| 54616 | [![54616__potion_danomi_de_ikinobimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54616__potion_danomi_de_ikinobimasu.jpg)](https://myanimelist.net/anime/54616/Potion-danomi_de_Ikinobimasu) | [Potion-danomi de Ikinobimasu!](https://subsplease.org/shows/potion-danomi-de-ikinobimasu) | TV | 12 / 12 | **Finished Airing** | 6.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Potion+danomi+de+Ikinobimasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54616__potion_danomi_de_ikinobimasu.txt) | 26 | 7069 | 2023-12-24 02:24 |
| 53200 | [![53200__hataraku_maou_sama_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53200__hataraku_maou_sama_2nd_season.jpg)](https://myanimelist.net/anime/53200/Hataraku_Maou-sama_2nd_Season) | [Hataraku Maou-sama S2](https://subsplease.org/shows/hataraku-maou-sama-s2) | TV | 25 / 12 | **Finished Airing** | 6.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hataraku+Maou+sama+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53200__hataraku_maou_sama_2nd_season.txt) | 26 | 8686 | 2023-09-28 14:31 |
| 52611 | [![52611__okashi_na_tensei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52611__okashi_na_tensei.jpg)](https://myanimelist.net/anime/52611/Okashi_na_Tensei) | [Okashi na Tensei](https://subsplease.org/shows/okashi-na-tensei) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Okashi+na+Tensei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52611__okashi_na_tensei.txt) | 26 | 8214 | 2023-09-11 18:31 |
| 51705 | [![51705__otonari_ni_ginga](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51705__otonari_ni_ginga.jpg)](https://myanimelist.net/anime/51705/Otonari_ni_Ginga) | [Otonari ni Ginga](https://subsplease.org/shows/otonari-ni-ginga) | TV | 12 / 12 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Otonari+ni+Ginga+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51705__otonari_ni_ginga.txt) | 26 | 7169 | 2023-06-24 18:01 |
| 48542 | [![48542__do_it_yourself](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48542__do_it_yourself.jpg)](https://myanimelist.net/anime/48542/Do_It_Yourself) | [Do It Yourself!!](https://subsplease.org/shows/do-it-yourself) | TV | 12 / 12 | **Finished Airing** | 7.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Do+It+Yourself+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48542__do_it_yourself.txt) | 26 | 5255 | 2022-12-21 18:01 |
| 44141 | [![44141__watashi_ni_tenshi_ga_maiorita_precious_friends](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44141__watashi_ni_tenshi_ga_maiorita_precious_friends.jpg)](https://myanimelist.net/anime/44141/Watashi_ni_Tenshi_ga_Maiorita_Precious_Friends) | [Watashi ni Tenshi ga Maiorita! - Precious Friends](https://subsplease.org/shows/watashi-ni-tenshi-ga-maiorita-precious-friends) | Movie | 1 / 1 | **Finished Airing** | 7.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Watashi+ni+Tenshi+ga+Maiorita+Precious+Friends+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44141__watashi_ni_tenshi_ga_maiorita_precious_friends.txt) | 26 | 2169 | 2023-04-16 22:44 |
| 58080 | [![58080__kenka_dokugaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58080__kenka_dokugaku.jpg)](https://myanimelist.net/anime/58080/Kenka_Dokugaku) | [Kenka Dokugaku](https://subsplease.org/shows/kenka-dokugaku) | TV | 12 / 12 | **Finished Airing** | 7.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kenka+Dokugaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58080__kenka_dokugaku.txt) | 26 | 5428 | 2024-06-26 17:27 |
| 54617 | [![54617__kyuujitsu_no_warumono_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54617__kyuujitsu_no_warumono_san.jpg)](https://myanimelist.net/anime/54617/Kyuujitsu_no_Warumono-san) | [Kyuujitsu no Warumono-san](https://subsplease.org/shows/kyuujitsu-no-warumono-san) | TV | 12 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kyuujitsu+no+Warumono+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54617__kyuujitsu_no_warumono_san.txt) | 26 | 5100 | 2024-03-24 17:46 |
| 49979 | [![49979__akuyaku_reijou_nanode_last_boss_wo_kattemimashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49979__akuyaku_reijou_nanode_last_boss_wo_kattemimashita.jpg)](https://myanimelist.net/anime/49979/Akuyaku_Reijou_nanode_Last_Boss_wo_Kattemimashita) | [Akuyaku Reijou nanode Last Boss wo Kattemimashita](https://subsplease.org/shows/akuyaku-reijou-nanode-last-boss-wo-kattemimashita) | TV | 12 / 12 | **Finished Airing** | 7.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akuyaku+Reijou+nanode+Last+Boss+wo+Kattemimashita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49979__akuyaku_reijou_nanode_last_boss_wo_kattemimashita.txt) | 26 | 5416 | 2022-12-10 15:01 |
| 48981 | [![48981__mahou_shoujo_magical_destroyers](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48981__mahou_shoujo_magical_destroyers.jpg)](https://myanimelist.net/anime/48981/Mahou_Shoujo_Magical_Destroyers) | [Mahou Shoujo Magical Destroyers](https://subsplease.org/shows/mahou-shoujo-magical-destroyers) | TV | 12 / 12 | **Finished Airing** | 6.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahou+Shoujo+Magical+Destroyers+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48981__mahou_shoujo_magical_destroyers.txt) | 26 | 6936 | 2023-06-23 18:46 |
| 53438 | [![53438__higeki_no_genkyou_to_naru_saikyou_gedou_last_boss_joou_wa_tami_no_tame_ni_tsukushimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53438__higeki_no_genkyou_to_naru_saikyou_gedou_last_boss_joou_wa_tami_no_tame_ni_tsukushimasu.jpg)](https://myanimelist.net/anime/53438/Higeki_no_Genkyou_to_Naru_Saikyou_Gedou_Last_Boss_Joou_wa_Tami_no_Tame_ni_Tsukushimasu) | [LasTame](https://subsplease.org/shows/lastame) | TV | 12 / 12 | **Finished Airing** | 7.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+LasTame+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53438__higeki_no_genkyou_to_naru_saikyou_gedou_last_boss_joou_wa_tami_no_tame_ni_tsukushimasu.txt) | 26 | 8950 | 2023-09-21 15:31 |
| 53379 | [![53379__uchi_no_kaisha_no_chiisai_senpai_no_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53379__uchi_no_kaisha_no_chiisai_senpai_no_hanashi.jpg)](https://myanimelist.net/anime/53379/Uchi_no_Kaisha_no_Chiisai_Senpai_no_Hanashi) | [Uchi no Kaisha no Chiisai Senpai no Hanashi](https://subsplease.org/shows/uchi-no-kaisha-no-chiisai-senpai-no-hanashi) | TV | 12 / 12 | **Finished Airing** | 6.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uchi+no+Kaisha+no+Chiisai+Senpai+no+Hanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53379__uchi_no_kaisha_no_chiisai_senpai_no_hanashi.txt) | 25 | 6933 | 2023-09-30 18:16 |
| 50932 | [![50932__saikyou_onmyouji_no_isekai_tenseiki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50932__saikyou_onmyouji_no_isekai_tenseiki.jpg)](https://myanimelist.net/anime/50932/Saikyou_Onmyouji_no_Isekai_Tenseiki) | [Saikyou Onmyouji no Isekai Tenseiki](https://subsplease.org/shows/saikyou-onmyouji-no-isekai-tenseiki) | TV | 13 / 13 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saikyou+Onmyouji+no+Isekai+Tenseiki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50932__saikyou_onmyouji_no_isekai_tenseiki.txt) | 25 | 9777 | 2023-04-01 17:16 |
| 48926 | [![48926__komi_san_wa_comyushou_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48926__komi_san_wa_comyushou_desu.jpg)](https://myanimelist.net/anime/48926/Komi-san_wa_Comyushou_desu) | [Komi-san wa, Comyushou desu.](https://subsplease.org/shows/komi-san-wa-comyushou-desu) | TV | 12 / 12 | **Finished Airing** | 7.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Komi+san+wa+Comyushou+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48926__komi_san_wa_comyushou_desu.txt) | 25 | 9945 | 2022-01-06 23:43 |
| 52446 | [![52446__kaiko_sareta_ankoku_heishi_30_dai_no_slow_na_second_life](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52446__kaiko_sareta_ankoku_heishi_30_dai_no_slow_na_second_life.jpg)](https://myanimelist.net/anime/52446/Kaiko_sareta_Ankoku_Heishi_30-dai_no_Slow_na_Second_Life) | [Kaiko sareta Ankoku Heishi (30-dai) no Slow na Second Life](https://subsplease.org/shows/kaiko-sareta-ankoku-heishi-30-dai-no-slow-na-second-life) | TV | 12 / 12 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaiko+sareta+Ankoku+Heishi+30+dai+no+Slow+na+Second+Life+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52446__kaiko_sareta_ankoku_heishi_30_dai_no_slow_na_second_life.txt) | 25 | 7738 | 2023-03-25 14:01 |
| 52405 | [![52405__highspeed_etoile](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52405__highspeed_etoile.jpg)](https://myanimelist.net/anime/52405/Highspeed_Etoile) | [Highspeed Etoile](https://subsplease.org/shows/highspeed-etoile) | TV | 12 / 12 | **Finished Airing** | 6.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Highspeed+Etoile+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52405__highspeed_etoile.txt) | 25 | 3855 | 2024-06-21 18:01 |
| 51632 | [![51632__isekai_wa_smartphone_to_tomo_ni_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51632__isekai_wa_smartphone_to_tomo_ni_2.jpg)](https://myanimelist.net/anime/51632/Isekai_wa_Smartphone_to_Tomo_ni_2) | [Isekai wa Smartphone to Tomo ni S2](https://subsplease.org/shows/isekai-wa-smartphone-to-tomo-ni-s2) | TV | 12 / 12 | **Finished Airing** | 6.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+wa+Smartphone+to+Tomo+ni+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51632__isekai_wa_smartphone_to_tomo_ni_2.txt) | 25 | 7218 | 2023-06-19 16:01 |
| 57945 | [![57945__tasuuketsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57945__tasuuketsu.jpg)](https://myanimelist.net/anime/57945/Tasuuketsu) | [Tasuuketsu](https://subsplease.org/shows/tasuuketsu) | TV | 24 / 24 | **Finished Airing** | 5.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tasuuketsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57945__tasuuketsu.txt) | 24 | 2890 | 2024-12-24 20:02 |
| 52830 | [![52830__isekai_de_cheat_skill_wo_te_ni_shita_ore_wa_genjitsu_sekai_wo_mo_musou_suru_level_up_wa_jinsei_wo_kaeta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52830__isekai_de_cheat_skill_wo_te_ni_shita_ore_wa_genjitsu_sekai_wo_mo_musou_suru_level_up_wa_jinsei_wo_kaeta.jpg)](https://myanimelist.net/anime/52830/Isekai_de_Cheat_Skill_wo_Te_ni_Shita_Ore_wa_Genjitsu_Sekai_wo_mo_Musou_Suru__Level_Up_wa_Jinsei_wo_Kaeta) | [Iseleve](https://subsplease.org/shows/iseleve) | TV | 13 / 13 | **Finished Airing** | 6.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Iseleve+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52830__isekai_de_cheat_skill_wo_te_ni_shita_ore_wa_genjitsu_sekai_wo_mo_musou_suru_level_up_wa_jinsei_wo_kaeta.txt) | 24 | 15233 | 2023-06-28 17:05 |
| 49109 | [![49109__kami_tachi_ni_hirowareta_otoko_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49109__kami_tachi_ni_hirowareta_otoko_2nd_season.jpg)](https://myanimelist.net/anime/49109/Kami-tachi_ni_Hirowareta_Otoko_2nd_Season) | [Kami-tachi ni Hirowareta Otoko S2](https://subsplease.org/shows/kami-tachi-ni-hirowareta-otoko-s2) | TV | 12 / 12 | **Finished Airing** | 6.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kami+tachi+ni+Hirowareta+Otoko+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49109__kami_tachi_ni_hirowareta_otoko_2nd_season.txt) | 24 | 5145 | 2023-03-26 13:04 |
| 44204 | [![44204__kyokou_suiri_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44204__kyokou_suiri_season_2.jpg)](https://myanimelist.net/anime/44204/Kyokou_Suiri_Season_2) | [Kyokou Suiri](https://subsplease.org/shows/kyokou-suiri) | TV | 12 / 12 | **Finished Airing** | 7.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kyokou+Suiri+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44204__kyokou_suiri_season_2.txt) | 24 | 4680 | 2023-03-26 15:32 |
| 52657 | [![52657__ousama_ranking_yuuki_no_takarabako](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52657__ousama_ranking_yuuki_no_takarabako.jpg)](https://myanimelist.net/anime/52657/Ousama_Ranking__Yuuki_no_Takarabako) | [Ousama Ranking - Yuuki no Takarabako](https://subsplease.org/shows/ousama-ranking-yuuki-no-takarabako) | TV | 10 / 10 | **Finished Airing** | 7.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ousama+Ranking+Yuuki+no+Takarabako+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52657__ousama_ranking_yuuki_no_takarabako.txt) | 23 | 7732 | 2023-06-15 18:02 |
| 57391 | [![57391__astro_note](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57391__astro_note.jpg)](https://myanimelist.net/anime/57391/Astro_Note) | [Astro Note](https://subsplease.org/shows/astro-note) | TV | 12 / 12 | **Finished Airing** | 6.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Astro+Note+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57391__astro_note.txt) | 23 | 4812 | 2024-06-21 14:02 |
| 53050 | [![53050__kanojo_okarishimasu_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53050__kanojo_okarishimasu_3rd_season.jpg)](https://myanimelist.net/anime/53050/Kanojo_Okarishimasu_3rd_Season) | [Kanojo, Okarishimasu](https://subsplease.org/shows/kanojo-okarishimasu) | TV | 24 / 12 | **Finished Airing** | 7.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kanojo+Okarishimasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53050__kanojo_okarishimasu_3rd_season.txt) | 23 | 6381 | 2023-09-29 18:31 |
| 52990 | [![52990__keikenzumi_na_kimi_to_keiken_zero_na_ore_ga_otsukiai_suru_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52990__keikenzumi_na_kimi_to_keiken_zero_na_ore_ga_otsukiai_suru_hanashi.jpg)](https://myanimelist.net/anime/52990/Keikenzumi_na_Kimi_to_Keiken_Zero_na_Ore_ga_Otsukiai_suru_Hanashi) | [Kimizero](https://subsplease.org/shows/kimizero) | TV | 12 / 12 | **Finished Airing** | 6.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimizero+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52990__keikenzumi_na_kimi_to_keiken_zero_na_ore_ga_otsukiai_suru_hanashi.txt) | 23 | 7678 | 2023-12-22 15:35 |
| 51498 | [![51498__masamune_kun_no_revenge_r](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51498__masamune_kun_no_revenge_r.jpg)](https://myanimelist.net/anime/51498/Masamune-kun_no_Revenge_R) | [Masamune-kun no Revenge S2](https://subsplease.org/shows/masamune-kun-no-revenge-s2) | TV | 12 / 12 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Masamune+kun+no+Revenge+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51498__masamune_kun_no_revenge_r.txt) | 23 | 7187 | 2023-09-18 13:01 |
| 51219 | [![51219__isekai_one_turn_kill_neesan_ane_douhan_no_isekai_seikatsu_hajimemashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51219__isekai_one_turn_kill_neesan_ane_douhan_no_isekai_seikatsu_hajimemashita.jpg)](https://myanimelist.net/anime/51219/Isekai_One_Turn_Kill_Neesan__Ane_Douhan_no_Isekai_Seikatsu_Hajimemashita) | [Isekai One Turn Kill Neesan](https://subsplease.org/shows/isekai-one-turn-kill-neesan) | TV | 12 / 12 | **Finished Airing** | 6.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+One+Turn+Kill+Neesan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51219__isekai_one_turn_kill_neesan_ane_douhan_no_isekai_seikatsu_hajimemashita.txt) | 23 | 7983 | 2023-06-23 14:31 |
| 41457 | [![41457__86](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41457__86.jpg)](https://myanimelist.net/anime/41457/86) | [86 - Eighty Six](https://subsplease.org/shows/86-eighty-six) | TV | 27 / 11 | **Finished Airing** | 8.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+86+Eighty+Six+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41457__86.txt) | 23 | 12549 | 2022-03-19 16:31 |
| 58854 | [![58854__kinoko_inu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/58854__kinoko_inu.jpg)](https://myanimelist.net/anime/58854/Kinoko_Inu) | [Kinoko Inu](https://subsplease.org/shows/kinoko-inu) | TV | 12 / 12 | **Finished Airing** | 6.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kinoko+Inu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/58854__kinoko_inu.txt) | 23 | 2055 | 2024-12-19 14:02 |
| 56425 | [![56425__houkago_shounen_hanako_kun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56425__houkago_shounen_hanako_kun.jpg)](https://myanimelist.net/anime/56425/Houkago_Shounen_Hanako-kun) | [Houkago Shounen Hanako-kun](https://subsplease.org/shows/houkago-shounen-hanako-kun) | TV | 8 / 4 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Houkago+Shounen+Hanako+kun+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56425__houkago_shounen_hanako_kun.txt) | 23 | 3334 | 2024-10-28 18:01 |
| 55597 | [![55597__hananoi_kun_to_koi_no_yamai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55597__hananoi_kun_to_koi_no_yamai.jpg)](https://myanimelist.net/anime/55597/Hananoi-kun_to_Koi_no_Yamai) | [Hananoi-kun to Koi no Yamai](https://subsplease.org/shows/hananoi-kun-to-koi-no-yamai) | TV | 12 / 12 | **Finished Airing** | 6.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hananoi+kun+to+Koi+no+Yamai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55597__hananoi_kun_to_koi_no_yamai.txt) | 23 | 4630 | 2024-06-20 17:02 |
| 51213 | [![51213__kinsou_no_vermeil_gakeppuchi_majutsushi_wa_saikyou_no_yakusai_to_mahou_sekai_wo_tsukisusumu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51213__kinsou_no_vermeil_gakeppuchi_majutsushi_wa_saikyou_no_yakusai_to_mahou_sekai_wo_tsukisusumu.jpg)](https://myanimelist.net/anime/51213/Kinsou_no_Vermeil__Gakeppuchi_Majutsushi_wa_Saikyou_no_Yakusai_to_Mahou_Sekai_wo_Tsukisusumu) | [Kinsou no Vermeil](https://subsplease.org/shows/kinsou-no-vermeil) | TV | 12 / 12 | **Finished Airing** | 6.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kinsou+no+Vermeil+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51213__kinsou_no_vermeil_gakeppuchi_majutsushi_wa_saikyou_no_yakusai_to_mahou_sekai_wo_tsukisusumu.txt) | 23 | 9461 | 2022-09-20 14:02 |
| 55358 | [![55358__bucchigiri](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55358__bucchigiri.jpg)](https://myanimelist.net/anime/55358/Bucchigiri) | [Bucchigiri](https://subsplease.org/shows/bucchigiri) | TV | 13 / 12 | **Finished Airing** | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bucchigiri+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55358__bucchigiri.txt) | 22 | 5929 | 2024-04-06 15:31 |
| 52973 | [![52973__megami_no_café_terrace](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52973__megami_no_caf%C3%A9_terrace.jpg)](https://myanimelist.net/anime/52973/Megami_no_Café_Terrace) | [Megami no Cafe Terrace](https://subsplease.org/shows/megami-no-cafe-terrace) | TV | 24 / 12 | **Finished Airing** | 7.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Megami+no+Cafe+Terrace+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52973__megami_no_caf%C3%A9_terrace.txt) | 22 | 6878 | 2024-09-19 17:32 |
| 52461 | [![52461__rougo_ni_sonaete_isekai_de_8_manmai_no_kinka_wo_tamemasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52461__rougo_ni_sonaete_isekai_de_8_manmai_no_kinka_wo_tamemasu.jpg)](https://myanimelist.net/anime/52461/Rougo_ni_Sonaete_Isekai_de_8-manmai_no_Kinka_wo_Tamemasu) | [Rougo ni Sonaete Isekai de 8-manmai no Kinka wo Tamemasu](https://subsplease.org/shows/rougo-ni-sonaete-isekai-de-8-manmai-no-kinka-wo-tamemasu) | TV | 12 / 12 | **Finished Airing** | 6.94 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rougo+ni+Sonaete+Isekai+de+8+manmai+no+Kinka+wo+Tamemasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52461__rougo_ni_sonaete_isekai_de_8_manmai_no_kinka_wo_tamemasu.txt) | 22 | 6294 | 2023-03-25 18:47 |
| 51817 | [![51817__watashi_no_yuri_wa_oshigoto_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51817__watashi_no_yuri_wa_oshigoto_desu.jpg)](https://myanimelist.net/anime/51817/Watashi_no_Yuri_wa_Oshigoto_desu) | [Watashi no Yuri wa Oshigoto desu!](https://subsplease.org/shows/watashi-no-yuri-wa-oshigoto-desu) | TV | 12 / 12 | **Finished Airing** | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Watashi+no+Yuri+wa+Oshigoto+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51817__watashi_no_yuri_wa_oshigoto_desu.txt) | 22 | 4331 | 2023-06-22 14:01 |
| 49154 | [![49154__high_card](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49154__high_card.jpg)](https://myanimelist.net/anime/49154/High_Card) | [High Card](https://subsplease.org/shows/high-card) | TV | 25 / 12 | **Finished Airing** | 7.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+High+Card+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49154__high_card.txt) | 22 | 4857 | 2024-11-06 06:12 |
| 46422 | [![46422__niehime_to_kemono_no_ou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46422__niehime_to_kemono_no_ou.jpg)](https://myanimelist.net/anime/46422/Niehime_to_Kemono_no_Ou) | [Niehime to Kemono no Ou](https://subsplease.org/shows/niehime-to-kemono-no-ou) | TV | 24 / 24 | **Finished Airing** | 7.94 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Niehime+to+Kemono+no+Ou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46422__niehime_to_kemono_no_ou.txt) | 22 | 4886 | 2023-09-27 15:32 |
| 44408 | [![44408__long_zu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44408__long_zu.jpg)](https://myanimelist.net/anime/44408/Long_Zu) | [Dragon Raja](https://subsplease.org/shows/dragon-raja) | ONA | 17 / 16 | **Finished Airing** | 7.24 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dragon+Raja+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44408__long_zu.txt) | 22 | 4814 | 2024-06-29 16:32 |
| 40356 | [![40356__tate_no_yuusha_no_nariagari_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40356__tate_no_yuusha_no_nariagari_season_2.jpg)](https://myanimelist.net/anime/40356/Tate_no_Yuusha_no_Nariagari_Season_2) | [Tate no Yuusha no Nariagari S2](https://subsplease.org/shows/tate-no-yuusha-no-nariagari-s2) | TV | 13 / 13 | **Finished Airing** | 6.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tate+no+Yuusha+no+Nariagari+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40356__tate_no_yuusha_no_nariagari_season_2.txt) | 22 | 11272 | 2022-06-29 13:01 |
| 57031 | [![57031__vampire_dormitory](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57031__vampire_dormitory.jpg)](https://myanimelist.net/anime/57031/Vampire_Dormitory) | [Vampire Dormitory](https://subsplease.org/shows/vampire-dormitory) | TV | 12 / 12 | **Finished Airing** | 6.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vampire+Dormitory+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57031__vampire_dormitory.txt) | 22 | 2985 | 2024-06-23 14:47 |
| 55844 | [![55844__tasogare_out_focus](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55844__tasogare_out_focus.jpg)](https://myanimelist.net/anime/55844/Tasogare_Out_Focus) | [Tasogare Out Focus](https://subsplease.org/shows/tasogare-out-focus) | TV | 12 / 12 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tasogare+Out+Focus+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55844__tasogare_out_focus.txt) | 22 | 2178 | 2024-09-19 15:02 |
| 51495 | [![51495__shin_shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51495__shin_shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei.jpg)](https://myanimelist.net/anime/51495/Shin_Shinka_no_Mi__Shiranai_Uchi_ni_Kachigumi_Jinsei) | [Shinka no Mi S2](https://subsplease.org/shows/shinka-no-mi-s2) | TV | 12 / 12 | **Finished Airing** | 5.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinka+no+Mi+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51495__shin_shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei.txt) | 22 | 4557 | 2023-03-31 18:01 |
| 53580 | [![53580__tensei_shitara_slime_datta_ken_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53580__tensei_shitara_slime_datta_ken_3rd_season.jpg)](https://myanimelist.net/anime/53580/Tensei_shitara_Slime_Datta_Ken_3rd_Season) | [Tensei Shitara Slime Datta Ken](https://subsplease.org/shows/tensei-shitara-slime-datta-ken) | TV | 51 / 24 | **Finished Airing** | 7.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Shitara+Slime+Datta+Ken+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53580__tensei_shitara_slime_datta_ken_3rd_season.txt) | 22 | 20044 | 2024-09-27 15:32 |
| 49926 | [![49926__kimetsu_no_yaiba_mugen_ressha_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49926__kimetsu_no_yaiba_mugen_ressha_hen.jpg)](https://myanimelist.net/anime/49926/Kimetsu_no_Yaiba__Mugen_Ressha-hen) | [Kimetsu no Yaiba - Mugen Ressha-hen](https://subsplease.org/shows/kimetsu-no-yaiba-mugen-ressha-hen) | TV | 7 / 7 | **Finished Airing** | 8.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimetsu+no+Yaiba+Mugen+Ressha+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49926__kimetsu_no_yaiba_mugen_ressha_hen.txt) | 22 | 13509 | 2021-11-28 15:48 |
| 55570 | [![55570__shin_tennis_no_oujisama_u_17_world_cup_semifinal](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55570__shin_tennis_no_oujisama_u_17_world_cup_semifinal.jpg)](https://myanimelist.net/anime/55570/Shin_Tennis_no_Oujisama__U-17_World_Cup_Semifinal) | [The Prince of Tennis II - U-17 World Cup Semifinal](https://subsplease.org/shows/the-prince-of-tennis-ii-u-17-world-cup-semifinal) | TV | 13 / 13 | **Finished Airing** | 6.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Prince+of+Tennis+II+U+17+World+Cup+Semifinal+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55570__shin_tennis_no_oujisama_u_17_world_cup_semifinal.txt) | 21 | 1917 | 2024-12-25 17:02 |
| 52214 | [![52214__genjitsu_no_yohane_sunshine_in_the_mirror](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52214__genjitsu_no_yohane_sunshine_in_the_mirror.jpg)](https://myanimelist.net/anime/52214/Genjitsu_no_Yohane__Sunshine_in_the_Mirror) | [Genjitsu no Yohane - Sunshine in the Mirror](https://subsplease.org/shows/genjitsu-no-yohane-sunshine-in-the-mirror) | TV | 13 / 13 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Genjitsu+no+Yohane+Sunshine+in+the+Mirror+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52214__genjitsu_no_yohane_sunshine_in_the_mirror.txt) | 21 | 5412 | 2023-09-17 14:46 |
| 51711 | [![51711__hyouken_no_majutsushi_ga_sekai_wo_suberu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51711__hyouken_no_majutsushi_ga_sekai_wo_suberu.jpg)](https://myanimelist.net/anime/51711/Hyouken_no_Majutsushi_ga_Sekai_wo_Suberu) | [Hyouken no Majutsushi ga Sekai wo Suberu](https://subsplease.org/shows/hyouken-no-majutsushi-ga-sekai-wo-suberu) | TV | 12 / 12 | **Finished Airing** | 6.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hyouken+no+Majutsushi+ga+Sekai+wo+Suberu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51711__hyouken_no_majutsushi_ga_sekai_wo_suberu.txt) | 21 | 7137 | 2023-03-23 18:01 |
| 51096 | [![51096__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51096__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_2nd_season.jpg)](https://myanimelist.net/anime/51096/Youkoso_Jitsuryoku_Shijou_Shugi_no_Kyoushitsu_e_2nd_Season) | [Youkoso Jitsuryoku Shijou Shugi no Kyoushitsu e S2](https://subsplease.org/shows/youkoso-jitsuryoku-shijou-shugi-no-kyoushitsu-e-s2) | TV | 13 / 13 | **Finished Airing** | 8.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youkoso+Jitsuryoku+Shijou+Shugi+no+Kyoushitsu+e+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51096__youkoso_jitsuryoku_shijou_shugi_no_kyoushitsu_e_2nd_season.txt) | 21 | 7979 | 2022-09-26 13:31 |
| 49827 | [![49827__kidou_senshi_gundam_cucuruz_doan_no_shima](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49827__kidou_senshi_gundam_cucuruz_doan_no_shima.jpg)](https://myanimelist.net/anime/49827/Kidou_Senshi_Gundam__Cucuruz_Doan_no_Shima) | [Mobile Suit Gundam - Cucuruz Doan's Island](https://subsplease.org/shows/mobile-suit-gundam-cucuruz-doans-island) | Movie | 1 / 1 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mobile+Suit+Gundam+Cucuruz+Doan+s+Island+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49827__kidou_senshi_gundam_cucuruz_doan_no_shima.txt) | 21 | 3842 | 2023-04-29 20:32 |
| 54758 | [![54758__the_idolm_ster_shiny_colors](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54758__the_idolm_ster_shiny_colors.jpg)](https://myanimelist.net/anime/54758/The_iDOLMSTER_Shiny_Colors) | [The iDOLM@STER Shiny Colors](https://subsplease.org/shows/the-idolmster-shiny-colors) | TV | 12 / 12 | **Finished Airing** | 6.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+iDOLM+STER+Shiny+Colors+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54758__the_idolm_ster_shiny_colors.txt) | 21 | 2414 | 2024-06-21 17:32 |
| 50652 | [![50652__tsundere_akuyaku_reijou_liselotte_to_jikkyou_no_endou_kun_to_kaisetsu_no_kobayashi_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50652__tsundere_akuyaku_reijou_liselotte_to_jikkyou_no_endou_kun_to_kaisetsu_no_kobayashi_san.jpg)](https://myanimelist.net/anime/50652/Tsundere_Akuyaku_Reijou_Liselotte_to_Jikkyou_no_Endou-kun_to_Kaisetsu_no_Kobayashi-san) | [Tsunlise](https://subsplease.org/shows/tsunlise) | TV | 12 / 12 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsunlise+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50652__tsundere_akuyaku_reijou_liselotte_to_jikkyou_no_endou_kun_to_kaisetsu_no_kobayashi_san.txt) | 21 | 4470 | 2023-03-24 17:56 |
| 51458 | [![51458__lv1_maou_to_one_room_yuusha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51458__lv1_maou_to_one_room_yuusha.jpg)](https://myanimelist.net/anime/51458/Lv1_Maou_to_One_Room_Yuusha) | [Lv1 Maou to One Room Yuusha](https://subsplease.org/shows/lv1-maou-to-one-room-yuusha) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lv1+Maou+to+One+Room+Yuusha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51458__lv1_maou_to_one_room_yuusha.txt) | 20 | 9518 | 2023-09-18 13:31 |
| 42745 | [![42745__machikado_mazoku_2_choume](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42745__machikado_mazoku_2_choume.jpg)](https://myanimelist.net/anime/42745/Machikado_Mazoku__2-choume) | [Machikado Mazoku S2](https://subsplease.org/shows/machikado-mazoku-s2) | TV | 12 / 12 | **Finished Airing** | 7.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Machikado+Mazoku+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42745__machikado_mazoku_2_choume.txt) | 20 | 5640 | 2022-06-30 17:29 |
| 50380 | [![50380__paripi_koumei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50380__paripi_koumei.jpg)](https://myanimelist.net/anime/50380/Paripi_Koumei) | [Paripi Koumei](https://subsplease.org/shows/paripi-koumei) | TV | 12 / 12 | **Finished Airing** | 8.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Paripi+Koumei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50380__paripi_koumei.txt) | 20 | 13431 | 2022-06-16 14:01 |
| 40507 | [![40507__arifureta_shokugyou_de_sekai_saikyou_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40507__arifureta_shokugyou_de_sekai_saikyou_2nd_season.jpg)](https://myanimelist.net/anime/40507/Arifureta_Shokugyou_de_Sekai_Saikyou_2nd_Season) | [Arifureta Shokugyou de Sekai Saikyou S2](https://subsplease.org/shows/arifureta-shokugyou-de-sekai-saikyou-s2) | TV | 15 / 12 | **Finished Airing** | 7.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Arifureta+Shokugyou+de+Sekai+Saikyou+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40507__arifureta_shokugyou_de_sekai_saikyou_2nd_season.txt) | 20 | 7210 | 2022-09-25 17:29 |
| 49470 | [![49470__mamahaha_no_tsurego_ga_motokano_datta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49470__mamahaha_no_tsurego_ga_motokano_datta.jpg)](https://myanimelist.net/anime/49470/Mamahaha_no_Tsurego_ga_Motokano_datta) | [Mamahaha no Tsurego ga Motokano datta](https://subsplease.org/shows/mamahaha-no-tsurego-ga-motokano-datta) | TV | 12 / 12 | **Finished Airing** | 6.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mamahaha+no+Tsurego+ga+Motokano+datta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49470__mamahaha_no_tsurego_ga_motokano_datta.txt) | 20 | 5490 | 2022-09-21 15:01 |
| 45613 | [![45613__kawaii_dake_ja_nai_shikimori_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45613__kawaii_dake_ja_nai_shikimori_san.jpg)](https://myanimelist.net/anime/45613/Kawaii_dake_ja_Nai_Shikimori-san) | [Kawaii dake ja Nai Shikimori-san](https://subsplease.org/shows/kawaii-dake-ja-nai-shikimori-san) | TV | 14 / 12 | **Finished Airing** | 6.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kawaii+dake+ja+Nai+Shikimori+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45613__kawaii_dake_ja_nai_shikimori_san.txt) | 19 | 6262 | 2022-07-09 18:16 |
| 54898 | [![54898__bungou_stray_dogs_5th_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54898__bungou_stray_dogs_5th_season.jpg)](https://myanimelist.net/anime/54898/Bungou_Stray_Dogs_5th_Season) | [Bungou Stray Dogs](https://subsplease.org/shows/bungou-stray-dogs) | TV | 24 / 11 | **Finished Airing** | 8.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bungou+Stray+Dogs+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54898__bungou_stray_dogs_5th_season.txt) | 19 | 6604 | 2023-09-20 14:31 |
| 49722 | [![49722__karakai_jouzu_no_takagi_san_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49722__karakai_jouzu_no_takagi_san_movie.jpg)](https://myanimelist.net/anime/49722/Karakai_Jouzu_no_Takagi-san_Movie) | [Karakai Jouzu no Takagi-san Movie](https://subsplease.org/shows/karakai-jouzu-no-takagi-san-movie) | Movie | 1 / 1 | **Finished Airing** | 8.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Karakai+Jouzu+no+Takagi+san+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49722__karakai_jouzu_no_takagi_san_movie.txt) | 19 | 3268 | 2023-06-19 04:28 |
| 41468 | [![41468__burn_the_witch](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41468__burn_the_witch.jpg)](https://myanimelist.net/anime/41468/Burn_the_Witch) | [Burn the Witch](https://subsplease.org/shows/burn-the-witch) | ONA | 4 / 3 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Burn+the+Witch+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41468__burn_the_witch.txt) | 19 | 5265 | 2024-01-01 15:33 |
| 39576 | [![39576__goblin_slayer_goblin_s_crown](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39576__goblin_slayer_goblin_s_crown.jpg)](https://myanimelist.net/anime/39576/Goblin_Slayer__Goblins_Crown) | [Goblin Slayer - Goblin's Crown](https://subsplease.org/shows/goblin-slayer-goblins-crown) | Movie | 1 / 1 | **Finished Airing** | 7.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Goblin+Slayer+Goblin+s+Crown+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39576__goblin_slayer_goblin_s_crown.txt) | 19 | 4495 | 2020-11-10 18:58 |
| 57390 | [![57390__wonderful_precure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57390__wonderful_precure.jpg)](https://myanimelist.net/anime/57390/Wonderful_Precure) | [Wonderful Precure!](https://subsplease.org/shows/wonderful-precure) | TV | 49 / 50 | Currently Airing | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Wonderful+Precure+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57390__wonderful_precure.txt) | 18 | 1508 | 2025-01-19 01:32 |
| 57192 | [![57192__yeosin_gangnim](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57192__yeosin_gangnim.jpg)](https://myanimelist.net/anime/57192/Yeosin_Gangnim) | [True Beauty](https://subsplease.org/shows/true-beauty) | ONA | 13 / 13 | **Finished Airing** | 6.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+True+Beauty+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57192__yeosin_gangnim.txt) | 18 | 2072 | 2024-10-30 16:32 |
| 56768 | [![56768__tadaima_okaeri](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56768__tadaima_okaeri.jpg)](https://myanimelist.net/anime/56768/Tadaima_Okaeri) | [Tadaima, Okaeri](https://subsplease.org/shows/tadaima-okaeri) | TV | 12 / 12 | **Finished Airing** | 7.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tadaima+Okaeri+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56768__tadaima_okaeri.txt) | 18 | 2314 | 2024-06-24 16:02 |
| 53411 | [![53411__buddy_daddies](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53411__buddy_daddies.jpg)](https://myanimelist.net/anime/53411/Buddy_Daddies) | [Buddy Daddies](https://subsplease.org/shows/buddy-daddies) | TV | 13 / 12 | **Finished Airing** | 8.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Buddy+Daddies+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53411__buddy_daddies.txt) | 18 | 6864 | 2023-03-31 16:31 |
| 53163 | [![53163__kawaisugi_crisis](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53163__kawaisugi_crisis.jpg)](https://myanimelist.net/anime/53163/Kawaisugi_Crisis) | [Kawaisugi Crisis](https://subsplease.org/shows/kawaisugi-crisis) | TV | 12 / 12 | **Finished Airing** | 6.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kawaisugi+Crisis+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53163__kawaisugi_crisis.txt) | 18 | 5180 | 2023-06-23 15:01 |
| 49520 | [![49520__aharen_san_wa_hakarenai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49520__aharen_san_wa_hakarenai.jpg)](https://myanimelist.net/anime/49520/Aharen-san_wa_Hakarenai) | [Aharen-san wa Hakarenai](https://subsplease.org/shows/aharen-san-wa-hakarenai) | TV | 12 / 12 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Aharen+san+wa+Hakarenai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49520__aharen_san_wa_hakarenai.txt) | 18 | 6063 | 2022-06-17 18:16 |
| 49849 | [![49849__shinmai_renkinjutsushi_no_tenpo_keiei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49849__shinmai_renkinjutsushi_no_tenpo_keiei.jpg)](https://myanimelist.net/anime/49849/Shinmai_Renkinjutsushi_no_Tenpo_Keiei) | [Shinmai Renkinjutsushi no Tenpo Keiei](https://subsplease.org/shows/shinmai-renkinjutsushi-no-tenpo-keiei) | TV | 12 / 12 | **Finished Airing** | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinmai+Renkinjutsushi+no+Tenpo+Keiei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49849__shinmai_renkinjutsushi_no_tenpo_keiei.txt) | 18 | 5930 | 2022-12-19 13:00 |
| 47163 | [![47163__tensei_kenja_no_isekai_life_dai_2_no_shokugyou_wo_ete_sekai_saikyou_ni_narimashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47163__tensei_kenja_no_isekai_life_dai_2_no_shokugyou_wo_ete_sekai_saikyou_ni_narimashita.jpg)](https://myanimelist.net/anime/47163/Tensei_Kenja_no_Isekai_Life__Dai-2_no_Shokugyou_wo_Ete_Sekai_Saikyou_ni_Narimashita) | [Tensei Kenja no Isekai Life](https://subsplease.org/shows/tensei-kenja-no-isekai-life) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensei+Kenja+no+Isekai+Life+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47163__tensei_kenja_no_isekai_life_dai_2_no_shokugyou_wo_ete_sekai_saikyou_ni_narimashita.txt) | 18 | 10359 | 2022-09-12 12:01 |
| 55237 | [![55237__jashin_chan_dropkick_seikimatsu_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55237__jashin_chan_dropkick_seikimatsu_hen.jpg)](https://myanimelist.net/anime/55237/Jashin-chan_Dropkick_Seikimatsu-hen) | [Jashin-chan Dropkick - Seikimatsu-hen](https://subsplease.org/shows/jashin-chan-dropkick-seikimatsu-hen) | TV Special | 1 / 1 | **Finished Airing** | 6.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jashin+chan+Dropkick+Seikimatsu+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55237__jashin_chan_dropkick_seikimatsu_hen.txt) | 17 | 3641 | 2023-12-28 03:58 |
| 54275 | [![54275__temple](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54275__temple.jpg)](https://myanimelist.net/anime/54275/Temple) | [TenPuru](https://subsplease.org/shows/tenpuru) | TV | 13 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+TenPuru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54275__temple.txt) | 17 | 6422 | 2023-11-22 09:56 |
| 53621 | [![53621__jijou_wo_shiranai_tenkousei_ga_guigui_kuru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53621__jijou_wo_shiranai_tenkousei_ga_guigui_kuru.jpg)](https://myanimelist.net/anime/53621/Jijou_wo_Shiranai_Tenkousei_ga_Guigui_Kuru) | [Jijou wo Shiranai Tenkousei ga Guigui Kuru](https://subsplease.org/shows/jijou-wo-shiranai-tenkousei-ga-guigui-kuru) | TV | 13 / 13 | **Finished Airing** | 7.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jijou+wo+Shiranai+Tenkousei+ga+Guigui+Kuru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53621__jijou_wo_shiranai_tenkousei_ga_guigui_kuru.txt) | 17 | 4258 | 2023-06-25 13:31 |
| 50586 | [![50586__migi_to_dali](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50586__migi_to_dali.jpg)](https://myanimelist.net/anime/50586/Migi_to_Dali) | [Migi to Dali](https://subsplease.org/shows/migi-to-dali) | TV | 13 / 13 | **Finished Airing** | 7.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Migi+to+Dali+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50586__migi_to_dali.txt) | 17 | 4443 | 2023-12-25 14:31 |
| 49784 | [![49784__mairimashita_iruma_kun_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49784__mairimashita_iruma_kun_3rd_season.jpg)](https://myanimelist.net/anime/49784/Mairimashita_Iruma-kun_3rd_Season) | [Mairimashita! Iruma-kun S3](https://subsplease.org/shows/mairimashita-iruma-kun-s3) | TV | 21 / 21 | **Finished Airing** | 7.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mairimashita+Iruma+kun+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49784__mairimashita_iruma_kun_3rd_season.txt) | 17 | 4940 | 2023-03-04 13:01 |
| 47162 | [![47162__shokei_shoujo_no_virgin_road](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47162__shokei_shoujo_no_virgin_road.jpg)](https://myanimelist.net/anime/47162/Shokei_Shoujo_no_Virgin_Road) | [Shokei Shoujo no Virgin Road](https://subsplease.org/shows/shokei-shoujo-no-virgin-road) | TV | 12 / 12 | **Finished Airing** | 6.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shokei+Shoujo+no+Virgin+Road+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47162__shokei_shoujo_no_virgin_road.txt) | 17 | 8796 | 2022-06-17 16:31 |
| 56165 | [![56165__boukyaku_battery_tv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56165__boukyaku_battery_tv.jpg)](https://myanimelist.net/anime/56165/Boukyaku_Battery_TV) | [Boukyaku Battery](https://subsplease.org/shows/boukyaku-battery) | TV | 12 / 12 | **Finished Airing** | 7.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boukyaku+Battery+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56165__boukyaku_battery_tv.txt) | 17 | 3864 | 2024-07-02 18:08 |
| 52173 | [![52173__koori_zokusei_danshi_to_cool_na_douryou_joshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52173__koori_zokusei_danshi_to_cool_na_douryou_joshi.jpg)](https://myanimelist.net/anime/52173/Koori_Zokusei_Danshi_to_Cool_na_Douryou_Joshi) | [Koori Zokusei Danshi to Cool na Douryou Joshi](https://subsplease.org/shows/koori-zokusei-danshi-to-cool-na-douryou-joshi) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koori+Zokusei+Danshi+to+Cool+na+Douryou+Joshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52173__koori_zokusei_danshi_to_cool_na_douryou_joshi.txt) | 17 | 4552 | 2023-03-21 14:31 |
| 54798 | [![54798__kamierabi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54798__kamierabi.jpg)](https://myanimelist.net/anime/54798/Kamierabi) | [KamiErabi GOD.app](https://subsplease.org/shows/kamierabi-god-app) | TV | 24 / 12 | **Finished Airing** | 5.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+KamiErabi+GOD+app+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54798__kamierabi.txt) | 16 | 3224 | 2024-12-18 18:27 |
| 52046 | [![52046__yuusha_party_wo_tsuihou_sareta_beast_tamer_saikyoushu_no_nekomimi_shoujo_to_deau](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52046__yuusha_party_wo_tsuihou_sareta_beast_tamer_saikyoushu_no_nekomimi_shoujo_to_deau.jpg)](https://myanimelist.net/anime/52046/Yuusha_Party_wo_Tsuihou_sareta_Beast_Tamer_Saikyoushu_no_Nekomimi_Shoujo_to_Deau) | [Beast Tamer](https://subsplease.org/shows/beast-tamer) | TV | 13 / 13 | **Finished Airing** | 6.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Beast+Tamer+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52046__yuusha_party_wo_tsuihou_sareta_beast_tamer_saikyoushu_no_nekomimi_shoujo_to_deau.txt) | 16 | 6639 | 2022-12-24 16:01 |
| 48548 | [![48548__5_toubun_no_hanayome_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48548__5_toubun_no_hanayome_movie.jpg)](https://myanimelist.net/anime/48548/5-toubun_no_Hanayome_Movie) | [Gotoubun no Hanayome Movie](https://subsplease.org/shows/gotoubun-no-hanayome-movie) | Movie | 1 / 1 | **Finished Airing** | 7.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gotoubun+no+Hanayome+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48548__5_toubun_no_hanayome_movie.txt) | 16 | 3080 | 2023-04-28 20:06 |
| 42962 | [![42962__uzaki_chan_wa_asobitai_double](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42962__uzaki_chan_wa_asobitai_double.jpg)](https://myanimelist.net/anime/42962/Uzaki-chan_wa_Asobitai_Double) | [Uzaki-chan wa Asobitai! S2](https://subsplease.org/shows/uzaki-chan-wa-asobitai-s2) | TV | 13 / 13 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uzaki+chan+wa+Asobitai+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42962__uzaki_chan_wa_asobitai_double.txt) | 16 | 4933 | 2022-12-24 15:31 |
| 40211 | [![40211__luo_xiao_hei_zhan_ji_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40211__luo_xiao_hei_zhan_ji_movie.jpg)](https://myanimelist.net/anime/40211/Luo_Xiao_Hei_Zhan_Ji_Movie) | [The Legend of Hei](https://subsplease.org/shows/the-legend-of-hei) | Movie | 1 / 1 | **Finished Airing** | 8.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Legend+of+Hei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40211__luo_xiao_hei_zhan_ji_movie.txt) | 16 | 2332 | 2023-06-25 05:56 |
| 39535 | [![39535__mushoku_tensei_isekai_ittara_honki_dasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39535__mushoku_tensei_isekai_ittara_honki_dasu.jpg)](https://myanimelist.net/anime/39535/Mushoku_Tensei__Isekai_Ittara_Honki_Dasu) | [Mushoku Tensei](https://subsplease.org/shows/mushoku-tensei) | TV | 24 / 11 | **Finished Airing** | 8.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mushoku+Tensei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39535__mushoku_tensei_isekai_ittara_honki_dasu.txt) | 16 | 18654 | 2022-03-16 02:03 |
| 38474 | [![38474__yuru_camp_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38474__yuru_camp_season_2.jpg)](https://myanimelist.net/anime/38474/Yuru_Camp△_Season_2) | [Yuru Camp S2](https://subsplease.org/shows/yuru-camp-s2) | TV | 13 / 13 | **Finished Airing** | 8.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuru+Camp+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38474__yuru_camp_season_2.txt) | 16 | 5453 | 2021-04-01 15:03 |
| 51064 | [![51064__kuro_no_shoukanshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51064__kuro_no_shoukanshi.jpg)](https://myanimelist.net/anime/51064/Kuro_no_Shoukanshi) | [Kuro no Shoukanshi](https://subsplease.org/shows/kuro-no-shoukanshi) | TV | 12 / 12 | **Finished Airing** | 7.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kuro+no+Shoukanshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51064__kuro_no_shoukanshi.txt) | 16 | 6431 | 2022-09-24 14:31 |
| 53179 | [![53179__ars_no_kyojuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53179__ars_no_kyojuu.jpg)](https://myanimelist.net/anime/53179/Ars_no_Kyojuu) | [Ars no Kyojuu](https://subsplease.org/shows/ars-no-kyojuu) | TV | 12 / 12 | **Finished Airing** | 6.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ars+no+Kyojuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53179__ars_no_kyojuu.txt) | 15 | 5603 | 2023-03-24 17:01 |
| 52092 | [![52092__my_home_hero](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52092__my_home_hero.jpg)](https://myanimelist.net/anime/52092/My_Home_Hero) | [My Home Hero](https://subsplease.org/shows/my-home-hero) | TV | 12 / 12 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+My+Home+Hero+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52092__my_home_hero.txt) | 15 | 6687 | 2023-06-18 15:01 |
| 50663 | [![50663__poputepipikku_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50663__poputepipikku_2nd_season.jpg)](https://myanimelist.net/anime/50663/Poputepipikku_2nd_Season) | [Pop Team Epic S2](https://subsplease.org/shows/pop-team-epic-s2) | TV | 12 / 11 | **Finished Airing** | 7.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Pop+Team+Epic+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50663__poputepipikku_2nd_season.txt) | 15 | 2972 | 2022-12-17 18:31 |
| 50175 | [![50175__yuusha_yamemasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50175__yuusha_yamemasu.jpg)](https://myanimelist.net/anime/50175/Yuusha_Yamemasu) | [Yuusha, Yamemasu](https://subsplease.org/shows/yuusha-yamemasu) | TV | 14 / 12 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuusha+Yamemasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50175__yuusha_yamemasu.txt) | 15 | 10738 | 2022-08-25 04:17 |
| 49980 | [![49980__sugar_apple_fairy_tale](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49980__sugar_apple_fairy_tale.jpg)](https://myanimelist.net/anime/49980/Sugar_Apple_Fairy_Tale) | [Sugar Apple Fairy Tale](https://subsplease.org/shows/sugar-apple-fairy-tale) | TV | 24 / 12 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sugar+Apple+Fairy+Tale+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49980__sugar_apple_fairy_tale.txt) | 15 | 4365 | 2023-09-22 13:01 |
| 49776 | [![49776__kumichou_musume_to_sewagakari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49776__kumichou_musume_to_sewagakari.jpg)](https://myanimelist.net/anime/49776/Kumichou_Musume_to_Sewagakari) | [Kumichou Musume to Sewagakari](https://subsplease.org/shows/kumichou-musume-to-sewagakari) | TV | 12 / 12 | **Finished Airing** | 7.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kumichou+Musume+to+Sewagakari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49776__kumichou_musume_to_sewagakari.txt) | 15 | 4044 | 2022-09-22 15:31 |
| 42385 | [![42385__the_idolm_ster_million_live](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42385__the_idolm_ster_million_live.jpg)](https://myanimelist.net/anime/42385/The_iDOLMSTER_Million_Live) | [The iDOLM@STER Million Live!](https://subsplease.org/shows/the-idolmster-million-live) | TV | 12 / 12 | **Finished Airing** | 6.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+iDOLM+STER+Million+Live+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42385__the_idolm_ster_million_live.txt) | 15 | 3051 | 2023-12-24 02:46 |
| 40938 | [![40938__hige_wo_soru_soshite_joshikousei_wo_hirou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40938__hige_wo_soru_soshite_joshikousei_wo_hirou.jpg)](https://myanimelist.net/anime/40938/Hige_wo_Soru_Soshite_Joshikousei_wo_Hirou) | [Hige wo Soru. Soshite Joshikousei wo Hirou.](https://subsplease.org/shows/hige-wo-soru-soshite-joshikousei-wo-hirou) | TV | 13 / 13 | **Finished Airing** | 7.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hige+wo+Soru+Soshite+Joshikousei+wo+Hirou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40938__hige_wo_soru_soshite_joshikousei_wo_hirou.txt) | 15 | 6514 | 2021-06-28 15:02 |
| 40787 | [![40787__josee_to_tora_to_sakana_tachi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40787__josee_to_tora_to_sakana_tachi.jpg)](https://myanimelist.net/anime/40787/Josee_to_Tora_to_Sakana-tachi) | [Josee to Tora to Sakana-tachi](https://subsplease.org/shows/josee-to-tora-to-sakana-tachi) | Movie | 1 / 1 | **Finished Airing** | 8.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Josee+to+Tora+to+Sakana+tachi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40787__josee_to_tora_to_sakana_tachi.txt) | 15 | 2929 | 2022-08-13 06:37 |
| 39247 | [![39247__kobayashi_san_chi_no_maid_dragon_s](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39247__kobayashi_san_chi_no_maid_dragon_s.jpg)](https://myanimelist.net/anime/39247/Kobayashi-san_Chi_no_Maid_Dragon_S) | [Kobayashi-san Chi no Maid Dragon S2](https://subsplease.org/shows/kobayashi-san-chi-no-maid-dragon-s2) | TV | 13 / 12 | **Finished Airing** | 8.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kobayashi+san+Chi+no+Maid+Dragon+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39247__kobayashi_san_chi_no_maid_dragon_s.txt) | 15 | 9781 | 2022-04-27 16:13 |
| 235 | [![235__meitantei_conan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/235__meitantei_conan.jpg)](https://myanimelist.net/anime/235/Meitantei_Conan) | [Detective Conan](https://subsplease.org/shows/detective-conan) | TV | 52 / ? | Currently Airing | 8.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Detective+Conan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/235__meitantei_conan.txt) | 15 | 2006 | 2025-01-18 12:31 |
| 53428 | [![53428__ayaka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53428__ayaka.jpg)](https://myanimelist.net/anime/53428/Ayaka) | [Ayaka](https://subsplease.org/shows/ayaka) | TV | 12 / 12 | **Finished Airing** | 6.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ayaka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53428__ayaka.txt) | 15 | 4210 | 2023-09-16 17:31 |
| 59499 | [![59499__asatir_2_mirai_no_mukashi_banashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59499__asatir_2_mirai_no_mukashi_banashi.jpg)](https://myanimelist.net/anime/59499/Asatir_2__Mirai_no_Mukashi_Banashi) | [Asatir 2 - Mirai no Mukashi Banashi](https://subsplease.org/shows/asatir-2-mirai-no-mukashi-banashi) | TV | 11 / 13 | Currently Airing | N/A | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Asatir+2+Mirai+no+Mukashi+Banashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59499__asatir_2_mirai_no_mukashi_banashi.txt) | ~14~ | 941 | 2025-01-20 15:01 |
| 54959 | [![54959__bang_dream_it_s_mygo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54959__bang_dream_it_s_mygo.jpg)](https://myanimelist.net/anime/54959/BanG_Dream_Its_MyGO) | [BanG Dream! It's MyGO!!!!!](https://subsplease.org/shows/bang-dream-its-mygo) | TV | 13 / 13 | **Finished Airing** | 8.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+BanG+Dream+It+s+MyGO+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54959__bang_dream_it_s_mygo.txt) | ~14~ | 3054 | 2023-09-14 14:01 |
| 51837 | [![51837__saikin_yatotta_maid_ga_ayashii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51837__saikin_yatotta_maid_ga_ayashii.jpg)](https://myanimelist.net/anime/51837/Saikin_Yatotta_Maid_ga_Ayashii) | [Saikin Yatotta Maid ga Ayashii](https://subsplease.org/shows/saikin-yatotta-maid-ga-ayashii) | TV | 11 / 11 | **Finished Airing** | 6.57 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saikin+Yatotta+Maid+ga+Ayashii+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51837__saikin_yatotta_maid_ga_ayashii.txt) | ~14~ | 3500 | 2022-10-08 18:46 |
| 50606 | [![50606__ayakashi_triangle](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50606__ayakashi_triangle.jpg)](https://myanimelist.net/anime/50606/Ayakashi_Triangle) | [Ayakashi Triangle](https://subsplease.org/shows/ayakashi-triangle) | TV | 13 / 12 | **Finished Airing** | 6.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ayakashi+Triangle+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50606__ayakashi_triangle.txt) | ~14~ | 4154 | 2023-09-25 17:37 |
| 50590 | [![50590__koukyuu_no_karasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50590__koukyuu_no_karasu.jpg)](https://myanimelist.net/anime/50590/Koukyuu_no_Karasu) | [Koukyuu no Karasu](https://subsplease.org/shows/koukyuu-no-karasu) | TV | 13 / 13 | **Finished Airing** | 7.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koukyuu+no+Karasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50590__koukyuu_no_karasu.txt) | ~14~ | 2894 | 2022-12-24 17:01 |
| 48842 | [![48842__mahoutsukai_reimeiki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48842__mahoutsukai_reimeiki.jpg)](https://myanimelist.net/anime/48842/Mahoutsukai_Reimeiki) | [Mahoutsukai Reimeiki](https://subsplease.org/shows/mahoutsukai-reimeiki) | TV | 12 / 12 | **Finished Airing** | 6.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+Reimeiki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48842__mahoutsukai_reimeiki.txt) | ~14~ | 5163 | 2022-06-30 18:46 |
| 48760 | [![48760__gaikotsu_kishi_sama_tadaima_isekai_e_odekakechuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48760__gaikotsu_kishi_sama_tadaima_isekai_e_odekakechuu.jpg)](https://myanimelist.net/anime/48760/Gaikotsu_Kishi-sama_Tadaima_Isekai_e_Odekakechuu) | [Gaikotsu Kishi-sama, Tadaima Isekai e Odekakechuu](https://subsplease.org/shows/gaikotsu-kishi-sama-tadaima-isekai-e-odekakechuu) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gaikotsu+Kishi+sama+Tadaima+Isekai+e+Odekakechuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48760__gaikotsu_kishi_sama_tadaima_isekai_e_odekakechuu.txt) | ~14~ | 8359 | 2022-06-23 14:31 |
| 48624 | [![48624__re_cycle_of_the_penguindrum](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48624__re_cycle_of_the_penguindrum.jpg)](https://myanimelist.net/anime/48624/Re_cycle_of_the_Penguindrum) | [Re-cycle of the Penguindrum](https://subsplease.org/shows/re-cycle-of-the-penguindrum) | Movie | 2 / 2 | **Finished Airing** | 7.19 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Re+cycle+of+the+Penguindrum+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48624__re_cycle_of_the_penguindrum.txt) | ~14~ | 2294 | 2023-04-16 22:13 |
| 48415 | [![48415__shijou_saikyou_no_daimaou_murabito_a_ni_tensei_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48415__shijou_saikyou_no_daimaou_murabito_a_ni_tensei_suru.jpg)](https://myanimelist.net/anime/48415/Shijou_Saikyou_no_Daimaou_Murabito_A_ni_Tensei_suru) | [Shijou Saikyou no Daimaou, Murabito A ni Tensei suru](https://subsplease.org/shows/shijou-saikyou-no-daimaou-murabito-a-ni-tensei-suru) | TV | 12 / 12 | **Finished Airing** | 6.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shijou+Saikyou+no+Daimaou+Murabito+A+ni+Tensei+suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48415__shijou_saikyou_no_daimaou_murabito_a_ni_tensei_suru.txt) | ~14~ | 6663 | 2022-06-22 12:01 |
| 47159 | [![47159__tensai_ouji_no_akaji_kokka_saisei_jutsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47159__tensai_ouji_no_akaji_kokka_saisei_jutsu.jpg)](https://myanimelist.net/anime/47159/Tensai_Ouji_no_Akaji_Kokka_Saisei_Jutsu) | [Tensai Ouji no Akaji Kokka Saisei Jutsu](https://subsplease.org/shows/tensai-ouji-no-akaji-kokka-saisei-jutsu) | TV | 12 / 12 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensai+Ouji+no+Akaji+Kokka+Saisei+Jutsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47159__tensai_ouji_no_akaji_kokka_saisei_jutsu.txt) | ~14~ | 7534 | 2022-03-29 14:33 |
| 43760 | [![43760__hikari_no_ou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43760__hikari_no_ou.jpg)](https://myanimelist.net/anime/43760/Hikari_no_Ou) | [Hikari no Ou](https://subsplease.org/shows/hikari-no-ou) | TV | 20 / 10 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hikari+no+Ou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43760__hikari_no_ou.txt) | ~14~ | 3845 | 2024-03-17 15:31 |
| 42361 | [![42361__ijiranaide_nagatoro_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42361__ijiranaide_nagatoro_san.jpg)](https://myanimelist.net/anime/42361/Ijiranaide_Nagatoro-san) | [Ijiranaide, Nagatoro-san](https://subsplease.org/shows/ijiranaide-nagatoro-san) | TV | 12 / 12 | **Finished Airing** | 7.22 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ijiranaide+Nagatoro+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42361__ijiranaide_nagatoro_san.txt) | ~14~ | 8040 | 2021-06-26 16:02 |
| 50399 | [![50399__tian_guan_cifu_er](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50399__tian_guan_cifu_er.jpg)](https://myanimelist.net/anime/50399/Tian_Guan_Cifu_Er) | [Heaven Official's Blessing S2](https://subsplease.org/shows/heaven-officials-blessing-s2) | ONA | 12 / 12 | **Finished Airing** | 8.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heaven+Official+s+Blessing+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50399__tian_guan_cifu_er.txt) | ~14~ | 2988 | 2024-01-17 13:01 |
| 49438 | [![49438__isekai_yakkyoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49438__isekai_yakkyoku.jpg)](https://myanimelist.net/anime/49438/Isekai_Yakkyoku) | [Isekai Yakkyoku](https://subsplease.org/shows/isekai-yakkyoku) | TV | 12 / 12 | **Finished Airing** | 7.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Yakkyoku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49438__isekai_yakkyoku.txt) | ~14~ | 6635 | 2022-09-25 13:33 |
| 48483 | [![48483__mieruko_chan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48483__mieruko_chan.jpg)](https://myanimelist.net/anime/48483/Mieruko-chan) | [Mieruko-chan](https://subsplease.org/shows/mieruko-chan) | TV | 12 / 12 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mieruko+chan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48483__mieruko_chan.txt) | ~14~ | 8981 | 2021-12-19 14:02 |
| 42429 | [![42429__honzuki_no_gekokujou_shisho_ni_naru_tame_ni_wa_shudan_wo_erandeiraremasen_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42429__honzuki_no_gekokujou_shisho_ni_naru_tame_ni_wa_shudan_wo_erandeiraremasen_3rd_season.jpg)](https://myanimelist.net/anime/42429/Honzuki_no_Gekokujou__Shisho_ni_Naru_Tame_ni_wa_Shudan_wo_Erandeiraremasen_3rd_Season) | [Honzuki no Gekokujou](https://subsplease.org/shows/honzuki-no-gekokujou) | TV | 12 / 10 | **Finished Airing** | 8.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Honzuki+no+Gekokujou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42429__honzuki_no_gekokujou_shisho_ni_naru_tame_ni_wa_shudan_wo_erandeiraremasen_3rd_season.txt) | ~14~ | 4255 | 2022-06-13 18:32 |
| 55998 | [![55998__momochi_san_chi_no_ayakashi_ouji](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55998__momochi_san_chi_no_ayakashi_ouji.jpg)](https://myanimelist.net/anime/55998/Momochi-san_Chi_no_Ayakashi_Ouji) | [Momochi-san Chi no Ayakashi Ouji](https://subsplease.org/shows/momochi-san-chi-no-ayakashi-ouji) | TV | 12 / 12 | **Finished Airing** | 6.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Momochi+san+Chi+no+Ayakashi+Ouji+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55998__momochi_san_chi_no_ayakashi_ouji.txt) | ~13~ | 3564 | 2024-03-22 16:31 |
| 55973 | [![55973__30_sai_made_doutei_dato_mahoutsukai_ni_nareru_rashii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55973__30_sai_made_doutei_dato_mahoutsukai_ni_nareru_rashii.jpg)](https://myanimelist.net/anime/55973/30-sai_made_Doutei_dato_Mahoutsukai_ni_Nareru_Rashii) | [30-sai made Doutei dato Mahoutsukai ni Nareru Rashii](https://subsplease.org/shows/30-sai-made-doutei-dato-mahoutsukai-ni-nareru-rashii) | TV | 12 / 12 | **Finished Airing** | 7.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+30+sai+made+Doutei+dato+Mahoutsukai+ni+Nareru+Rashii+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55973__30_sai_made_doutei_dato_mahoutsukai_ni_nareru_rashii.txt) | ~13~ | 3404 | 2024-03-27 16:31 |
| 51586 | [![51586__d4dj_all_mix](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51586__d4dj_all_mix.jpg)](https://myanimelist.net/anime/51586/D4DJ_All_Mix) | [D4DJ Double Mix](https://subsplease.org/shows/d4dj-all-mix) | TV | 1 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+D4DJ+Double+Mix+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51586__d4dj_all_mix.txt) | ~13~ | 1401 | 2023-03-26 16:31 |
| 51536 | [![51536__the_idolm_ster_cinderella_girls_u149](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51536__the_idolm_ster_cinderella_girls_u149.jpg)](https://myanimelist.net/anime/51536/The_iDOLMSTER_Cinderella_Girls__U149) | [The IDOLM@STER Cinderella Girls - U149](https://subsplease.org/shows/the-idolmster-cinderella-girls-u149) | TV | 12 / 12 | **Finished Airing** | 7.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+IDOLM+STER+Cinderella+Girls+U149+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51536__the_idolm_ster_cinderella_girls_u149.txt) | ~13~ | 3364 | 2023-06-29 10:02 |
| 51265 | [![51265__inu_ni_nattara_suki_na_hito_ni_hirowareta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51265__inu_ni_nattara_suki_na_hito_ni_hirowareta.jpg)](https://myanimelist.net/anime/51265/Inu_ni_Nattara_Suki_na_Hito_ni_Hirowareta) | [Inu ni Nattara Suki na Hito ni Hirowareta](https://subsplease.org/shows/inu-ni-nattara-suki-na-hito-ni-hirowareta) | TV | 14 / 12 | **Finished Airing** | 5.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Inu+ni+Nattara+Suki+na+Hito+ni+Hirowareta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51265__inu_ni_nattara_suki_na_hito_ni_hirowareta.txt) | ~13~ | 3964 | 2023-04-26 18:50 |
| 49757 | [![49757__ji_yao_lu_qicheng_pian](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49757__ji_yao_lu_qicheng_pian.jpg)](https://myanimelist.net/anime/49757/Ji_Yao_Lu__Qicheng_Pian) | [Another Journey to the West](https://subsplease.org/shows/another-journey-to-the-west) | ONA | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Another+Journey+to+the+West+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49757__ji_yao_lu_qicheng_pian.txt) | ~13~ | 1826 | 2024-11-15 14:33 |
| 49053 | [![49053__given_uragawa_no_sonzai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49053__given_uragawa_no_sonzai.jpg)](https://myanimelist.net/anime/49053/Given__Uragawa_no_Sonzai) | [Given](https://subsplease.org/shows/given) | OVA | 1 / 1 | **Finished Airing** | 8.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Given+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49053__given_uragawa_no_sonzai.txt) | ~13~ | 1771 | 2022-06-15 05:15 |
| 48375 | [![48375__mahouka_koukou_no_rettousei_tsuioku_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48375__mahouka_koukou_no_rettousei_tsuioku_hen.jpg)](https://myanimelist.net/anime/48375/Mahouka_Koukou_no_Rettousei__Tsuioku-hen) | [Mahouka Koukou no Rettousei - Tsuioku-hen](https://subsplease.org/shows/mahouka-koukou-no-rettousei-tsuioku-hen) | TV Special | 1 / 1 | **Finished Airing** | 7.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahouka+Koukou+no+Rettousei+Tsuioku+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48375__mahouka_koukou_no_rettousei_tsuioku_hen.txt) | ~13~ | 4391 | 2021-12-31 18:33 |
| 46095 | [![46095__vivy_fluorite_eye_s_song](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46095__vivy_fluorite_eye_s_song.jpg)](https://myanimelist.net/anime/46095/Vivy__Fluorite_Eyes_Song) | [Vivy - Fluorite Eye's Song](https://subsplease.org/shows/vivy-fluorite-eyes-song) | TV | 14 / 13 | **Finished Airing** | 8.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vivy+Fluorite+Eye+s+Song+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46095__vivy_fluorite_eye_s_song.txt) | ~13~ | 10840 | 2021-06-26 17:24 |
| 41461 | [![41461__date_a_live_iv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41461__date_a_live_iv.jpg)](https://myanimelist.net/anime/41461/Date_A_Live_IV) | [Date a Live IV](https://subsplease.org/shows/date-a-live-iv) | TV | 12 / 12 | **Finished Airing** | 7.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Date+a+Live+IV+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41461__date_a_live_iv.txt) | ~13~ | 3534 | 2022-06-24 13:33 |
| 55310 | [![55310__atarashii_joushi_wa_do_tennen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55310__atarashii_joushi_wa_do_tennen.jpg)](https://myanimelist.net/anime/55310/Atarashii_Joushi_wa_Do_Tennen) | [Atarashii Joushi wa Do Tennen](https://subsplease.org/shows/atarashii-joushi-wa-do-tennen) | TV | 12 / 12 | **Finished Airing** | 7.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Atarashii+Joushi+wa+Do+Tennen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55310__atarashii_joushi_wa_do_tennen.txt) | ~13~ | 3295 | 2023-12-23 17:05 |
| 51128 | [![51128__noumin_kanren_no_skill_bakka_agetetara_nazeka_tsuyoku_natta](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51128__noumin_kanren_no_skill_bakka_agetetara_nazeka_tsuyoku_natta.jpg)](https://myanimelist.net/anime/51128/Noumin_Kanren_no_Skill_bakka_Agetetara_Nazeka_Tsuyoku_Natta) | [Noumin Kanren no Skill bakka Agetetara Nazeka Tsuyoku Natta](https://subsplease.org/shows/noumin-kanren-no-skill-bakka-agetetara-nazeka-tsuyoku-natta) | TV | 12 / 12 | **Finished Airing** | 5.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Noumin+Kanren+no+Skill+bakka+Agetetara+Nazeka+Tsuyoku+Natta+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51128__noumin_kanren_no_skill_bakka_agetetara_nazeka_tsuyoku_natta.txt) | ~13~ | 6910 | 2022-12-17 13:30 |
| 49782 | [![49782__shadows_house_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49782__shadows_house_2nd_season.jpg)](https://myanimelist.net/anime/49782/Shadows_House_2nd_Season) | [Shadows House S2](https://subsplease.org/shows/shadows-house-s2) | TV | 12 / 12 | **Finished Airing** | 8.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shadows+House+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49782__shadows_house_2nd_season.txt) | ~13~ | 3741 | 2022-09-23 16:33 |
| 48438 | [![48438__mahoutsukai_no_yome_nishi_no_shounen_to_seiran_no_kishi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48438__mahoutsukai_no_yome_nishi_no_shounen_to_seiran_no_kishi.jpg)](https://myanimelist.net/anime/48438/Mahoutsukai_no_Yome__Nishi_no_Shounen_to_Seiran_no_Kishi) | [Mahoutsukai no Yome - Nishi no Shounen to Seiran no Kishi](https://subsplease.org/shows/mahoutsukai-no-yome-nishi-no-shounen-to-seiran-no-kishi) | OVA | 3 / 3 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahoutsukai+no+Yome+Nishi+no+Shounen+to+Seiran+no+Kishi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48438__mahoutsukai_no_yome_nishi_no_shounen_to_seiran_no_kishi.txt) | ~12~ | 4210 | 2022-12-14 07:38 |
| 57502 | [![57502__meiji_gekken_1874](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57502__meiji_gekken_1874.jpg)](https://myanimelist.net/anime/57502/Meiji_Gekken__1874) | [Meiji Gekken 1874](https://subsplease.org/shows/meiji-gekken-1874) | TV | 10 / 10 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Meiji+Gekken+1874+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57502__meiji_gekken_1874.txt) | ~12~ | 3470 | 2024-03-17 15:02 |
| 53848 | [![53848__megumi_no_daigo_kyuukoku_no_orange](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53848__megumi_no_daigo_kyuukoku_no_orange.jpg)](https://myanimelist.net/anime/53848/Megumi_no_Daigo__Kyuukoku_no_Orange) | [Megumi no Daigo - Kyuukoku no Orange](https://subsplease.org/shows/megumi-no-daigo-kyuukoku-no-orange) | TV | 25 / 23 | **Finished Airing** | 6.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Megumi+no+Daigo+Kyuukoku+no+Orange+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53848__megumi_no_daigo_kyuukoku_no_orange.txt) | ~12~ | 3181 | 2024-03-23 09:01 |
| 53223 | [![53223__kingdom_5th_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53223__kingdom_5th_season.jpg)](https://myanimelist.net/anime/53223/Kingdom_5th_Season) | [Kingdom S5](https://subsplease.org/shows/kingdom-s5) | TV | 13 / 13 | **Finished Airing** | 8.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kingdom+S5+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53223__kingdom_5th_season.txt) | ~12~ | 6111 | 2024-03-31 00:07 |
| 50461 | [![50461__otome_game_sekai_wa_mob_ni_kibishii_sekai_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50461__otome_game_sekai_wa_mob_ni_kibishii_sekai_desu.jpg)](https://myanimelist.net/anime/50461/Otome_Game_Sekai_wa_Mob_ni_Kibishii_Sekai_desu) | [Otome Game Sekai wa Mob ni Kibishii Sekai desu](https://subsplease.org/shows/otome-game-sekai-wa-mob-ni-kibishii-sekai-desu) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Otome+Game+Sekai+wa+Mob+ni+Kibishii+Sekai+desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50461__otome_game_sekai_wa_mob_ni_kibishii_sekai_desu.txt) | ~12~ | 6134 | 2022-06-19 13:33 |
| 50384 | [![50384__mononogatari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50384__mononogatari.jpg)](https://myanimelist.net/anime/50384/Mononogatari) | [Mononogatari](https://subsplease.org/shows/mononogatari) | TV | 24 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mononogatari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50384__mononogatari.txt) | ~12~ | 3918 | 2023-09-18 17:01 |
| 49236 | [![49236__youjo_senki_sabaku_no_pasta_daisakusen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49236__youjo_senki_sabaku_no_pasta_daisakusen.jpg)](https://myanimelist.net/anime/49236/Youjo_Senki__Sabaku_no_Pasta_Daisakusen) | [Youjo Senki](https://subsplease.org/shows/youjo-senki) | ONA | 1 / 1 | **Finished Airing** | 7.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youjo+Senki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49236__youjo_senki_sabaku_no_pasta_daisakusen.txt) | ~12~ | 4028 | 2021-06-19 17:53 |
| 49236 | [![49236__youjo_senki_sabaku_no_pasta_daisakusen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49236__youjo_senki_sabaku_no_pasta_daisakusen.jpg)](https://myanimelist.net/anime/49236/Youjo_Senki__Sabaku_no_Pasta_Daisakusen) | [Youjo Senki - Sabaku no Pasta Dai Sakusen](https://subsplease.org/shows/youjo-senki) | ONA | 1 / 1 | **Finished Airing** | 7.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youjo+Senki+Sabaku+no+Pasta+Dai+Sakusen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49236__youjo_senki_sabaku_no_pasta_daisakusen.txt) | ~12~ | 4028 | 2021-06-19 17:53 |
| 46102 | [![46102__odd_taxi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46102__odd_taxi.jpg)](https://myanimelist.net/anime/46102/Odd_Taxi) | [Odd Taxi](https://subsplease.org/shows/odd-taxi) | TV | 14 / 13 | **Finished Airing** | 8.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Odd+Taxi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46102__odd_taxi.txt) | ~12~ | 3980 | 2022-09-10 08:46 |
| 42282 | [![42282__otome_game_no_hametsu_flag_shika_nai_akuyaku_reijou_ni_tensei_shiteshimatta_x](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42282__otome_game_no_hametsu_flag_shika_nai_akuyaku_reijou_ni_tensei_shiteshimatta_x.jpg)](https://myanimelist.net/anime/42282/Otome_Game_no_Hametsu_Flag_shika_Nai_Akuyaku_Reijou_ni_Tensei_shiteshimatta_X) | [Hamefura S2](https://subsplease.org/shows/hamefura-s2) | TV | 13 / 12 | **Finished Airing** | 7.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hamefura+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42282__otome_game_no_hametsu_flag_shika_nai_akuyaku_reijou_ni_tensei_shiteshimatta_x.txt) | ~12~ | 4126 | 2022-01-01 01:24 |
| 53787 | [![53787__ai_no_idenshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53787__ai_no_idenshi.jpg)](https://myanimelist.net/anime/53787/AI_no_Idenshi) | [AI no Idenshi](https://subsplease.org/shows/ai-no-idenshi) | TV | 12 / 12 | **Finished Airing** | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+AI+no+Idenshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53787__ai_no_idenshi.txt) | ~12~ | 5330 | 2023-09-29 19:01 |
| 51859 | [![51859__touken_ranbu_kai_kyoden_moyuru_honnouji](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51859__touken_ranbu_kai_kyoden_moyuru_honnouji.jpg)](https://myanimelist.net/anime/51859/Touken_Ranbu_Kai__Kyoden_Moyuru_Honnouji) | [Touken Ranbu Kai - Kyoden Moyuru Honnouji](https://subsplease.org/shows/touken-ranbu-kai-kyoden-moyuru-honnouji) | TV | 8 / 8 | **Finished Airing** | 6.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Touken+Ranbu+Kai+Kyoden+Moyuru+Honnouji+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51859__touken_ranbu_kai_kyoden_moyuru_honnouji.txt) | ~12~ | 2264 | 2024-05-21 15:31 |
| 40586 | [![40586__slime_taoshite_300_nen_shiranai_uchi_ni_level_max_ni_nattemashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40586__slime_taoshite_300_nen_shiranai_uchi_ni_level_max_ni_nattemashita.jpg)](https://myanimelist.net/anime/40586/Slime_Taoshite_300-nen_Shiranai_Uchi_ni_Level_Max_ni_Nattemashita) | [Slime Taoshite 300-nen, Shiranai Uchi ni Level Max ni Nattemashita](https://subsplease.org/shows/slime-taoshite-300-nen-shiranai-uchi-ni-level-max-ni-nattemashita) | TV | 12 / 12 | **Finished Airing** | 6.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Slime+Taoshite+300+nen+Shiranai+Uchi+ni+Level+Max+ni+Nattemashita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40586__slime_taoshite_300_nen_shiranai_uchi_ni_level_max_ni_nattemashita.txt) | ~12~ | 6353 | 2021-06-26 13:31 |
| 40834 | [![40834__ousama_ranking](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40834__ousama_ranking.jpg)](https://myanimelist.net/anime/40834/Ousama_Ranking) | [Ousama Ranking](https://subsplease.org/shows/ousama-ranking) | TV | 23 / 23 | **Finished Airing** | 8.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ousama+Ranking+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40834__ousama_ranking.txt) | ~12~ | 11832 | 2022-03-24 17:48 |
| 33970 | [![33970__girls_panzer_saishuushou_part_1](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/33970__girls_panzer_saishuushou_part_1.jpg)](https://myanimelist.net/anime/33970/Girls___Panzer__Saishuushou_Part_1) | [Girls und Panzer das Finale](https://subsplease.org/shows/girls-und-panzer-das-finale) | Movie | 3 / 1 | **Finished Airing** | 7.95 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Girls+und+Panzer+das+Finale+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/33970__girls_panzer_saishuushou_part_1.txt) | ~12~ | 3568 | 2023-01-04 05:31 |
| 59010 | [![59010__yami_shibai_13](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/59010__yami_shibai_13.jpg)](https://myanimelist.net/anime/59010/Yami_Shibai_13) | [Yami Shibai 13](https://subsplease.org/shows/yami-shibai-13) | TV | 13 / 13 | **Finished Airing** | 6.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+13+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/59010__yami_shibai_13.txt) | ~11~ | 1307 | 2024-10-06 19:46 |
| 51417 | [![51417__engage_kiss](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51417__engage_kiss.jpg)](https://myanimelist.net/anime/51417/Engage_Kiss) | [Engage Kiss](https://subsplease.org/shows/engage-kiss) | TV | 13 / 13 | **Finished Airing** | 6.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Engage+Kiss+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51417__engage_kiss.txt) | ~11~ | 5847 | 2022-09-24 17:01 |
| 51381 | [![51381__rwby_hyousetsu_teikoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51381__rwby_hyousetsu_teikoku.jpg)](https://myanimelist.net/anime/51381/RWBY__Hyousetsu_Teikoku) | [RWBY - Hyousetsu Teikoku](https://subsplease.org/shows/rwby-hyousetsu-teikoku) | TV | 13 / 12 | **Finished Airing** | 6.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+RWBY+Hyousetsu+Teikoku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51381__rwby_hyousetsu_teikoku.txt) | ~11~ | 3405 | 2022-09-18 15:01 |
| 50425 | [![50425__fuufu_ijou_koibito_miman](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50425__fuufu_ijou_koibito_miman.jpg)](https://myanimelist.net/anime/50425/Fuufu_Ijou_Koibito_Miman) | [Fuufu Ijou, Koibito Miman](https://subsplease.org/shows/fuufu-ijou-koibito-miman) | TV | 12 / 12 | **Finished Airing** | 7.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fuufu+Ijou+Koibito+Miman+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50425__fuufu_ijou_koibito_miman.txt) | ~11~ | 4699 | 2022-12-25 15:01 |
| 49342 | [![49342__shin_ikkitousen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49342__shin_ikkitousen.jpg)](https://myanimelist.net/anime/49342/Shin_Ikkitousen) | [Shin Ikkitousen](https://subsplease.org/shows/shin-ikkitousen) | TV | 3 / 3 | **Finished Airing** | 5.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shin+Ikkitousen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49342__shin_ikkitousen.txt) | ~11~ | 2984 | 2022-05-31 13:01 |
| 48491 | [![48491__yama_no_susume_next_summit](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48491__yama_no_susume_next_summit.jpg)](https://myanimelist.net/anime/48491/Yama_no_Susume__Next_Summit) | [Yama no Susume - Next Summit](https://subsplease.org/shows/yama-no-susume-next-summit) | TV | 12 / 12 | **Finished Airing** | 7.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yama+no+Susume+Next+Summit+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48491__yama_no_susume_next_summit.txt) | ~11~ | 2571 | 2022-12-20 16:00 |
| 47790 | [![47790__sekai_saikou_no_ansatsusha_isekai_kizoku_ni_tensei_suru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47790__sekai_saikou_no_ansatsusha_isekai_kizoku_ni_tensei_suru.jpg)](https://myanimelist.net/anime/47790/Sekai_Saikou_no_Ansatsusha_Isekai_Kizoku_ni_Tensei_suru) | [Sekai Saikou no Ansatsusha, Isekai Kizoku ni Tensei suru](https://subsplease.org/shows/sekai-saikou-no-ansatsusha-isekai-kizoku-ni-tensei-suru) | TV | 12 / 12 | **Finished Airing** | 7.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sekai+Saikou+no+Ansatsusha+Isekai+Kizoku+ni+Tensei+suru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47790__sekai_saikou_no_ansatsusha_isekai_kizoku_ni_tensei_suru.txt) | ~11~ | 11171 | 2021-12-22 15:31 |
| 42351 | [![42351__senpai_ga_uzai_kouhai_no_hanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42351__senpai_ga_uzai_kouhai_no_hanashi.jpg)](https://myanimelist.net/anime/42351/Senpai_ga_Uzai_Kouhai_no_Hanashi) | [Senpai ga Uzai Kouhai no Hanashi](https://subsplease.org/shows/senpai-ga-uzai-kouhai-no-hanashi) | TV | 12 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Senpai+ga+Uzai+Kouhai+no+Hanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42351__senpai_ga_uzai_kouhai_no_hanashi.txt) | ~11~ | 6713 | 2021-12-25 17:02 |
| 55894 | [![55894__bokura_no_ame_iro_protocol](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55894__bokura_no_ame_iro_protocol.jpg)](https://myanimelist.net/anime/55894/Bokura_no_Ame-iro_Protocol) | [Bokura no Ameiro Protocol](https://subsplease.org/shows/bokura-no-ameiro-protocol) | TV | 12 / 12 | **Finished Airing** | 6.1 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bokura+no+Ameiro+Protocol+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55894__bokura_no_ame_iro_protocol.txt) | ~11~ | 4061 | 2023-12-23 19:05 |
| 50864 | [![50864__ooyukiumi_no_kaina](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50864__ooyukiumi_no_kaina.jpg)](https://myanimelist.net/anime/50864/Ooyukiumi_no_Kaina) | [Ooyukiumi no Kaina](https://subsplease.org/shows/ooyukiumi-no-kaina) | TV | 12 / 11 | **Finished Airing** | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ooyukiumi+no+Kaina+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50864__ooyukiumi_no_kaina.txt) | ~11~ | 4906 | 2023-12-28 04:13 |
| 41488 | [![41488__tensura_nikki_tensei_shitara_slime_datta_ken](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41488__tensura_nikki_tensei_shitara_slime_datta_ken.jpg)](https://myanimelist.net/anime/41488/Tensura_Nikki__Tensei_shitara_Slime_Datta_Ken) | [Tensura Nikki - Tensei Shitara Slime Datta Ken](https://subsplease.org/shows/tensura-nikki-tensei-shitara-slime-datta-ken) | TV | 12 / 12 | **Finished Airing** | 7.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tensura+Nikki+Tensei+Shitara+Slime+Datta+Ken+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41488__tensura_nikki_tensei_shitara_slime_datta_ken.txt) | ~11~ | 4657 | 2021-06-22 15:01 |
| 41456 | [![41456__sentouin_hakenshimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41456__sentouin_hakenshimasu.jpg)](https://myanimelist.net/anime/41456/Sentouin_Hakenshimasu) | [Sentouin, Hakenshimasu!](https://subsplease.org/shows/sentouin-hakenshimasu) | TV | 12 / 12 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sentouin+Hakenshimasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41456__sentouin_hakenshimasu.txt) | ~11~ | 6342 | 2021-06-20 12:02 |
| 30455 | [![30455__kancolle_itsuka_ano_umi_de](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/30455__kancolle_itsuka_ano_umi_de.jpg)](https://myanimelist.net/anime/30455/KanColle__Itsuka_Ano_Umi_de) | [KanColle S2](https://subsplease.org/shows/kancolle-s2) | TV | 8 / 8 | **Finished Airing** | 6.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+KanColle+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/30455__kancolle_itsuka_ano_umi_de.txt) | ~11~ | 2236 | 2023-03-25 16:31 |
| 53300 | [![53300__ojou_to_banken_kun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53300__ojou_to_banken_kun.jpg)](https://myanimelist.net/anime/53300/Ojou_to_Banken-kun) | [Ojou to Banken-kun](https://subsplease.org/shows/ojou-to-banken-kun) | TV | 13 / 13 | **Finished Airing** | 5.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ojou+to+Banken+kun+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53300__ojou_to_banken_kun.txt) | ~10~ | 3790 | 2023-12-21 17:21 |
| 51680 | [![51680__cool_doji_danshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51680__cool_doji_danshi.jpg)](https://myanimelist.net/anime/51680/Cool_Doji_Danshi) | [Cool Doji Danshi](https://subsplease.org/shows/cool-doji-danshi) | TV | 24 / 24 | **Finished Airing** | 7.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cool+Doji+Danshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51680__cool_doji_danshi.txt) | ~10~ | 2182 | 2023-03-27 18:00 |
| 51464 | [![51464__4_nin_wa_sorezore_uso_wo_tsuku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51464__4_nin_wa_sorezore_uso_wo_tsuku.jpg)](https://myanimelist.net/anime/51464/4-nin_wa_Sorezore_Uso_wo_Tsuku) | [4-nin wa Sorezore Uso wo Tsuku](https://subsplease.org/shows/4-nin-wa-sorezore-uso-wo-tsuku) | TV | 11 / 11 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+4+nin+wa+Sorezore+Uso+wo+Tsuku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51464__4_nin_wa_sorezore_uso_wo_tsuku.txt) | ~10~ | 2022 | 2022-12-24 19:46 |
| 51440 | [![51440__sasaki_to_miyano_movie_sotsugyou_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51440__sasaki_to_miyano_movie_sotsugyou_hen.jpg)](https://myanimelist.net/anime/51440/Sasaki_to_Miyano_Movie__Sotsugyou-hen) | [Sasaki to Miyano - Sotsugyou-hen](https://subsplease.org/shows/sasaki-to-miyano-sotsugyou-hen) | Movie | 1 / 1 | **Finished Airing** | 8.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sasaki+to+Miyano+Sotsugyou+hen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51440__sasaki_to_miyano_movie_sotsugyou_hen.txt) | ~10~ | 2451 | 2023-09-30 06:04 |
| 51098 | [![51098__shinobi_no_ittoki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51098__shinobi_no_ittoki.jpg)](https://myanimelist.net/anime/51098/Shinobi_no_Ittoki) | [Shinobi no Ittoki](https://subsplease.org/shows/shinobi-no-ittoki) | TV | 12 / 12 | **Finished Airing** | 6.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinobi+no+Ittoki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51098__shinobi_no_ittoki.txt) | ~10~ | 4328 | 2022-12-20 14:01 |
| 50248 | [![50248__birdie_wing_golf_girls_story](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50248__birdie_wing_golf_girls_story.jpg)](https://myanimelist.net/anime/50248/Birdie_Wing__Golf_Girls_Story) | [Birdie Wing - Golf Girls' Story](https://subsplease.org/shows/birdie-wing-golf-girls-story) | TV | 25 / 13 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Birdie+Wing+Golf+Girls+Story+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50248__birdie_wing_golf_girls_story.txt) | ~10~ | 3756 | 2023-06-23 18:31 |
| 48753 | [![48753__jahy_sama_wa_kujikenai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48753__jahy_sama_wa_kujikenai.jpg)](https://myanimelist.net/anime/48753/Jahy-sama_wa_Kujikenai) | [Jahy-sama wa Kujikenai!](https://subsplease.org/shows/jahy-sama-wa-kujikenai) | TV | 20 / 20 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jahy+sama+wa+Kujikenai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48753__jahy_sama_wa_kujikenai.txt) | ~10~ | 5637 | 2021-12-18 19:46 |
| 48675 | [![48675__kakkou_no_iinazuke](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48675__kakkou_no_iinazuke.jpg)](https://myanimelist.net/anime/48675/Kakkou_no_Iinazuke) | [Kakkou no Iinazuke](https://subsplease.org/shows/kakkou-no-iinazuke) | TV | 24 / 24 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kakkou+no+Iinazuke+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48675__kakkou_no_iinazuke.txt) | ~10~ | 4218 | 2022-10-02 04:02 |
| 48239 | [![48239__leadale_no_daichi_nite](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48239__leadale_no_daichi_nite.jpg)](https://myanimelist.net/anime/48239/Leadale_no_Daichi_nite) | [Leadale no Daichi nite](https://subsplease.org/shows/leadale-no-daichi-nite) | TV | 12 / 12 | **Finished Airing** | 6.95 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Leadale+no+Daichi+nite+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48239__leadale_no_daichi_nite.txt) | ~10~ | 6106 | 2022-03-23 14:17 |
| 45653 | [![45653__soredemo_ayumu_wa_yosetekuru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45653__soredemo_ayumu_wa_yosetekuru.jpg)](https://myanimelist.net/anime/45653/Soredemo_Ayumu_wa_Yosetekuru) | [Soredemo Ayumu wa Yosetekuru](https://subsplease.org/shows/soredemo-ayumu-wa-yosetekuru) | TV | 12 / 12 | **Finished Airing** | 7.02 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Soredemo+Ayumu+wa+Yosetekuru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45653__soredemo_ayumu_wa_yosetekuru.txt) | ~10~ | 4911 | 2022-09-23 16:01 |
| 43299 | [![43299__wonder_egg_priority](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43299__wonder_egg_priority.jpg)](https://myanimelist.net/anime/43299/Wonder_Egg_Priority) | [Wonder Egg Priority](https://subsplease.org/shows/wonder-egg-priority) | TV | 13 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Wonder+Egg+Priority+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43299__wonder_egg_priority.txt) | ~10~ | 8876 | 2021-06-30 03:56 |
| 42994 | [![42994__jashin_chan_dropkick_x](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42994__jashin_chan_dropkick_x.jpg)](https://myanimelist.net/anime/42994/Jashin-chan_Dropkick_X) | [Jashin-chan Dropkick X](https://subsplease.org/shows/jashin-chan-dropkick-x) | TV | 12 / 12 | **Finished Airing** | 7.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jashin+chan+Dropkick+X+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42994__jashin_chan_dropkick_x.txt) | ~10~ | 2520 | 2022-09-20 18:04 |
| 42587 | [![42587__yuuki_yuuna_wa_yuusha_de_aru_dai_mankai_no_shou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42587__yuuki_yuuna_wa_yuusha_de_aru_dai_mankai_no_shou.jpg)](https://myanimelist.net/anime/42587/Yuuki_Yuuna_wa_Yuusha_de_Aru__Dai_Mankai_no_Shou) | [Yuuki Yuuna wa Yuusha de Aru - Dai Mankai no Shou](https://subsplease.org/shows/yuuki-yuuna-wa-yuusha-de-aru-dai-mankai-no-shou) | TV | 12 / 12 | **Finished Airing** | 7.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuuki+Yuuna+wa+Yuusha+de+Aru+Dai+Mankai+no+Shou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42587__yuuki_yuuna_wa_yuusha_de_aru_dai_mankai_no_shou.txt) | ~10~ | 2446 | 2021-12-17 18:56 |
| 41710 | [![41710__genjitsu_shugi_yuusha_no_oukoku_saikenki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41710__genjitsu_shugi_yuusha_no_oukoku_saikenki.jpg)](https://myanimelist.net/anime/41710/Genjitsu_Shugi_Yuusha_no_Oukoku_Saikenki) | [Genjitsu Shugi Yuusha no Oukoku Saikenki](https://subsplease.org/shows/genjitsu-shugi-yuusha-no-oukoku-saikenki) | TV | 26 / 13 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Genjitsu+Shugi+Yuusha+no+Oukoku+Saikenki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41710__genjitsu_shugi_yuusha_no_oukoku_saikenki.txt) | ~10~ | 9172 | 2022-04-02 17:31 |
| 40852 | [![40852__dr_stone_stone_wars](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40852__dr_stone_stone_wars.jpg)](https://myanimelist.net/anime/40852/Dr_Stone__Stone_Wars) | [Dr. Stone S2](https://subsplease.org/shows/dr-stone-s2) | TV | 11 / 11 | **Finished Airing** | 8.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dr+Stone+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40852__dr_stone_stone_wars.txt) | ~10~ | 9675 | 2021-03-25 14:32 |
| 37744 | [![37744__isekai_cheat_magician](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37744__isekai_cheat_magician.jpg)](https://myanimelist.net/anime/37744/Isekai_Cheat_Magician) | [Isekai Cheat Magician](https://subsplease.org/shows/isekai-cheat-magician) | TV | 1 / 12 | **Finished Airing** | 5.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Cheat+Magician+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37744__isekai_cheat_magician.txt) | ~10~ | 2009 | 2021-07-08 01:02 |
| 33737 | [![33737__megaton_kyuu_musashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/33737__megaton_kyuu_musashi.jpg)](https://myanimelist.net/anime/33737/Megaton-kyuu_Musashi) | [Megaton-kyuu Musashi](https://subsplease.org/shows/megaton-kyuu-musashi) | TV | 13 / 13 | **Finished Airing** | 6.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Megaton+kyuu+Musashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/33737__megaton_kyuu_musashi.txt) | ~10~ | 2172 | 2024-07-18 15:34 |
| 235 | [![235__meitantei_conan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/235__meitantei_conan.jpg)](https://myanimelist.net/anime/235/Meitantei_Conan) | [Detective Conan - Kid vs Komei - The Targeted Lips](https://subsplease.org/shows/detective-conan) | TV | 1 / ? | Currently Airing | 8.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Detective+Conan+Kid+vs+Komei+The+Targeted+Lips+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/235__meitantei_conan.txt) | ~10~ | 1690 | 2025-01-18 12:31 |
| 54259 | [![54259__rokudou_no_onna_tachi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54259__rokudou_no_onna_tachi.jpg)](https://myanimelist.net/anime/54259/Rokudou_no_Onna-tachi) | [Rokudou no Onna-tachi](https://subsplease.org/shows/rokudou-no-onna-tachi) | TV | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rokudou+no+Onna+tachi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54259__rokudou_no_onna_tachi.txt) | ~10~ | 4672 | 2023-06-23 17:46 |
| 48556 | [![48556__takt_op_destiny](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48556__takt_op_destiny.jpg)](https://myanimelist.net/anime/48556/Takt_Op_Destiny) | [Takt Op. Destiny](https://subsplease.org/shows/takt-op-destiny) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Takt+Op+Destiny+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48556__takt_op_destiny.txt) | ~10~ | 7367 | 2021-12-21 17:01 |
| 40594 | [![40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari.jpg)](https://myanimelist.net/anime/40594/Tatoeba_Last_Dungeon_Mae_no_Mura_no_Shounen_ga_Joban_no_Machi_de_Kurasu_Youna_Monogatari) | [Tatoeba Last Dungeon Mae no Mura no Shounen ga Joban no Machi de Kurasu Youna Monogatari](https://subsplease.org/shows/tatoeba-last-dungeon-mae-no-mura-no-shounen-ga-joban-no-machi-de-kurasu-youna-monogatari) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tatoeba+Last+Dungeon+Mae+no+Mura+no+Shounen+ga+Joban+no+Machi+de+Kurasu+Youna+Monogatari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari.txt) | ~10~ | 6910 | 2021-03-22 14:31 |
| 37807 | [![37807__princess_principal_crown_handler_movie_1](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37807__princess_principal_crown_handler_movie_1.jpg)](https://myanimelist.net/anime/37807/Princess_Principal__Crown_Handler_Movie_1) | [Princess Principal - Crown Handler](https://subsplease.org/shows/princess-principal-crown-handler) | Movie | 2 / 1 | **Finished Airing** | 7.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Princess+Principal+Crown+Handler+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37807__princess_principal_crown_handler_movie_1.txt) | ~9~ | 2846 | 2023-04-16 22:26 |
| 57623 | [![57623__nijiyon_animation_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57623__nijiyon_animation_2.jpg)](https://myanimelist.net/anime/57623/Nijiyon_Animation_2) | [Nijiyon Animation S2](https://subsplease.org/shows/nijiyon-animation-s2) | TV | 12 / 12 | **Finished Airing** | 6.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nijiyon+Animation+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57623__nijiyon_animation_2.txt) | ~9~ | 1763 | 2024-06-21 13:16 |
| 57180 | [![57180__yami_shibai_12](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57180__yami_shibai_12.jpg)](https://myanimelist.net/anime/57180/Yami_Shibai_12) | [Yami Shibai 12](https://subsplease.org/shows/yami-shibai-12) | TV | 13 / 13 | **Finished Airing** | 5.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+12+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57180__yami_shibai_12.txt) | ~9~ | 1475 | 2024-04-07 19:45 |
| 55636 | [![55636__snack_basue](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55636__snack_basue.jpg)](https://myanimelist.net/anime/55636/Snack_Basue) | [Snack Basue](https://subsplease.org/shows/snack-basue) | TV | 13 / 13 | **Finished Airing** | 6.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Snack+Basue+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55636__snack_basue.txt) | ~9~ | 2595 | 2024-04-05 17:17 |
| 53671 | [![53671__love_live_nijigasaki_gakuen_school_idol_doukoukai_next_sky](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53671__love_live_nijigasaki_gakuen_school_idol_doukoukai_next_sky.jpg)](https://myanimelist.net/anime/53671/Love_Live_Nijigasaki_Gakuen_School_Idol_Doukoukai__Next_Sky) | [Love Live! Nijigasaki Gakuen School Idol Doukoukai - Next Sky](https://subsplease.org/shows/love-live-nijigasaki-gakuen-school-idol-doukoukai-next-sky) | OVA | 1 / 1 | **Finished Airing** | 7.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Nijigasaki+Gakuen+School+Idol+Doukoukai+Next+Sky+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53671__love_live_nijigasaki_gakuen_school_idol_doukoukai_next_sky.txt) | ~9~ | 2022 | 2023-10-28 22:33 |
| 53633 | [![53633__bullbuster](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53633__bullbuster.jpg)](https://myanimelist.net/anime/53633/Bullbuster) | [Bullbuster](https://subsplease.org/shows/bullbuster) | TV | 12 / 12 | **Finished Airing** | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bullbuster+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53633__bullbuster.txt) | ~9~ | 3544 | 2023-12-20 14:05 |
| 53587 | [![53587__the_marginal_service](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53587__the_marginal_service.jpg)](https://myanimelist.net/anime/53587/The_Marginal_Service) | [The Marginal Service](https://subsplease.org/shows/the-marginal-service) | TV | 12 / 12 | **Finished Airing** | 5.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Marginal+Service+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53587__the_marginal_service.txt) | ~9~ | 2836 | 2023-06-27 18:31 |
| 53213 | [![53213__revenger](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53213__revenger.jpg)](https://myanimelist.net/anime/53213/Revenger) | [Revenger](https://subsplease.org/shows/revenger) | TV | 12 / 12 | **Finished Airing** | 6.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Revenger+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53213__revenger.txt) | ~9~ | 4594 | 2023-03-23 13:31 |
| 51466 | [![51466__sekai_ga_horobiru_mae_ni_kimi_ni_aitai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51466__sekai_ga_horobiru_mae_ni_kimi_ni_aitai.jpg)](https://myanimelist.net/anime/51466/Sekai_ga_Horobiru_Mae_ni_Kimi_ni_Aitai) | [Sekai ga Horobiru Mae ni Kimi ni Aitai](https://subsplease.org/shows/sekai-ga-horobiru-mae-ni-kimi-ni-aitai) | Movie | 1 / 1 | **Finished Airing** | 4.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sekai+ga+Horobiru+Mae+ni+Kimi+ni+Aitai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51466__sekai_ga_horobiru_mae_ni_kimi_ni_aitai.txt) | ~9~ | 2267 | 2023-04-17 16:40 |
| 50923 | [![50923__mushikaburi_hime](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50923__mushikaburi_hime.jpg)](https://myanimelist.net/anime/50923/Mushikaburi-hime) | [Mushikaburi Hime](https://subsplease.org/shows/mushikaburi-hime) | TV | 12 / 12 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mushikaburi+Hime+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50923__mushikaburi_hime.txt) | ~9~ | 2818 | 2022-12-22 14:30 |
| 50871 | [![50871__alice_gear_aegis_expansion](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50871__alice_gear_aegis_expansion.jpg)](https://myanimelist.net/anime/50871/Alice_Gear_Aegis_Expansion) | [Alice Gear Aegis Expansion](https://subsplease.org/shows/alice-gear-aegis-expansion) | TV | 13 / 12 | **Finished Airing** | 5.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Alice+Gear+Aegis+Expansion+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50871__alice_gear_aegis_expansion.txt) | ~9~ | 2621 | 2023-06-19 12:00 |
| 50203 | [![50203__love_live_superstar_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50203__love_live_superstar_2nd_season.jpg)](https://myanimelist.net/anime/50203/Love_Live_Superstar_2nd_Season) | [Love Live! Superstar!! S2](https://subsplease.org/shows/love-live-superstar-s2) | TV | 12 / 12 | **Finished Airing** | 7.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Superstar+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50203__love_live_superstar_2nd_season.txt) | ~9~ | 1431 | 2022-10-11 01:53 |
| 48643 | [![48643__koi_wa_sekai_seifuku_no_ato_de](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48643__koi_wa_sekai_seifuku_no_ato_de.jpg)](https://myanimelist.net/anime/48643/Koi_wa_Sekai_Seifuku_no_Ato_de) | [Koi wa Sekai Seifuku no Ato de](https://subsplease.org/shows/koi-wa-sekai-seifuku-no-ato-de) | TV | 12 / 12 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koi+wa+Sekai+Seifuku+no+Ato+de+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48643__koi_wa_sekai_seifuku_no_ato_de.txt) | ~9~ | 5059 | 2022-06-24 14:01 |
| 47161 | [![47161__shikkakumon_no_saikyou_kenja](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47161__shikkakumon_no_saikyou_kenja.jpg)](https://myanimelist.net/anime/47161/Shikkakumon_no_Saikyou_Kenja) | [Shikkakumon no Saikyou Kenja](https://subsplease.org/shows/shikkakumon-no-saikyou-kenja) | TV | 12 / 12 | **Finished Airing** | 6.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shikkakumon+no+Saikyou+Kenja+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47161__shikkakumon_no_saikyou_kenja.txt) | ~9~ | 6416 | 2022-03-26 14:01 |
| 44248 | [![44248__fate_grand_carnival](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44248__fate_grand_carnival.jpg)](https://myanimelist.net/anime/44248/Fate_Grand_Carnival) | [Fate Grand Carnival](https://subsplease.org/shows/fate-grand-carnival) | OVA | 2 / 4 | **Finished Airing** | 7.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fate+Grand+Carnival+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44248__fate_grand_carnival.txt) | ~9~ | 3198 | 2022-07-04 20:59 |
| 40748 | [![40748__jujutsu_kaisen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40748__jujutsu_kaisen.jpg)](https://myanimelist.net/anime/40748/Jujutsu_Kaisen) | [Jujutsu Kaisen](https://subsplease.org/shows/jujutsu-kaisen) | TV | 48 / 24 | **Finished Airing** | 8.57 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jujutsu+Kaisen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40748__jujutsu_kaisen.txt) | ~9~ | 31944 | 2023-12-28 18:17 |
| 40594 | [![40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari.jpg)](https://myanimelist.net/anime/40594/Tatoeba_Last_Dungeon_Mae_no_Mura_no_Shounen_ga_Joban_no_Machi_de_Kurasu_Youna_Monogatari) | [Last Dungeon](https://subsplease.org/shows/tatoeba-last-dungeon-mae-no-mura-no-shounen-ga-joban-no-machi-de-kurasu-youna-monogatari) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Last+Dungeon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40594__tatoeba_last_dungeon_mae_no_mura_no_shounen_ga_joban_no_machi_de_kurasu_youna_monogatari.txt) | ~9~ | 6910 | 2021-03-22 14:31 |
| 53698 | [![53698__world_dai_star](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53698__world_dai_star.jpg)](https://myanimelist.net/anime/53698/World_Dai_Star) | [World Dai Star](https://subsplease.org/shows/world-dai-star) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+World+Dai+Star+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53698__world_dai_star.txt) | ~9~ | 2102 | 2023-06-25 15:31 |
| 51403 | [![51403__renai_flops](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51403__renai_flops.jpg)](https://myanimelist.net/anime/51403/Renai_Flops) | [Renai Flops](https://subsplease.org/shows/renai-flops) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Renai+Flops+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51403__renai_flops.txt) | ~9~ | 5020 | 2022-12-28 16:05 |
| 48441 | [![48441__the_legend_of_heroes_sen_no_kiseki_northern_war](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48441__the_legend_of_heroes_sen_no_kiseki_northern_war.jpg)](https://myanimelist.net/anime/48441/The_Legend_of_Heroes__Sen_no_Kiseki_-_Northern_War) | [The Legend of Heroes - Sen no Kiseki - Northern War](https://subsplease.org/shows/the-legend-of-heroes-sen-no-kiseki-northern-war) | TV | 12 / 12 | **Finished Airing** | 5.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Legend+of+Heroes+Sen+no+Kiseki+Northern+War+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48441__the_legend_of_heroes_sen_no_kiseki_northern_war.txt) | ~9~ | 3340 | 2023-03-24 13:31 |
| 41812 | [![41812__megami_ryou_no_ryoubo_kun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41812__megami_ryou_no_ryoubo_kun.jpg)](https://myanimelist.net/anime/41812/Megami-ryou_no_Ryoubo-kun) | [Megami-ryou no Ryoubo-kun.](https://subsplease.org/shows/megami-ryou-no-ryoubo-kun) | TV | 10 / 10 | **Finished Airing** | 6.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Megami+ryou+no+Ryoubo+kun+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41812__megami_ryou_no_ryoubo_kun.txt) | ~8~ | 4329 | 2021-09-15 16:03 |
| 56691 | [![56691__gekkan_mousou_kagaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/56691__gekkan_mousou_kagaku.jpg)](https://myanimelist.net/anime/56691/Gekkan_Mousou_Kagaku) | [Gekkan Mousou Kagaku](https://subsplease.org/shows/gekkan-mousou-kagaku) | TV | 12 / 12 | **Finished Airing** | 5.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gekkan+Mousou+Kagaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/56691__gekkan_mousou_kagaku.txt) | ~8~ | 2412 | 2024-03-28 15:31 |
| 55153 | [![55153__yuzuki_san_chi_no_yonkyoudai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55153__yuzuki_san_chi_no_yonkyoudai.jpg)](https://myanimelist.net/anime/55153/Yuzuki-san_Chi_no_Yonkyoudai) | [Yuzuki-san Chi no Yonkyoudai](https://subsplease.org/shows/yuzuki-san-chi-no-yonkyoudai) | TV | 12 / 12 | **Finished Airing** | 7.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuzuki+san+Chi+no+Yonkyoudai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55153__yuzuki_san_chi_no_yonkyoudai.txt) | ~8~ | 3156 | 2023-12-21 17:05 |
| 52274 | [![52274__nokemono_tachi_no_yoru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52274__nokemono_tachi_no_yoru.jpg)](https://myanimelist.net/anime/52274/Nokemono-tachi_no_Yoru) | [Nokemono-tachi no Yoru](https://subsplease.org/shows/nokemono-tachi-no-yoru) | TV | 13 / 13 | **Finished Airing** | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nokemono+tachi+no+Yoru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52274__nokemono_tachi_no_yoru.txt) | ~8~ | 3035 | 2023-04-02 13:31 |
| 51956 | [![51956__paradox_live_the_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51956__paradox_live_the_animation.jpg)](https://myanimelist.net/anime/51956/Paradox_Live_the_Animation) | [Paradox Live](https://subsplease.org/shows/paradox-live) | TV | 12 / 12 | **Finished Airing** | 6.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Paradox+Live+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51956__paradox_live_the_animation.txt) | ~8~ | 1979 | 2023-12-26 19:01 |
| 51139 | [![51139__kizuna_no_allele](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51139__kizuna_no_allele.jpg)](https://myanimelist.net/anime/51139/Kizuna_no_Allele) | [Kizuna no Allele](https://subsplease.org/shows/kizuna-no-allele) | TV | 24 / 12 | **Finished Airing** | 5.24 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kizuna+no+Allele+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51139__kizuna_no_allele.txt) | ~8~ | 2073 | 2023-12-20 17:50 |
| 50571 | [![50571__zanting_rang_wo_cha_gonglue](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50571__zanting_rang_wo_cha_gonglue.jpg)](https://myanimelist.net/anime/50571/Zanting_Rang_Wo_Cha_Gonglue) | [Kouryaku Wanted - Isekai Sukuimasu](https://subsplease.org/shows/kouryaku-wanted-isekai-sukuimasu) | ONA | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kouryaku+Wanted+Isekai+Sukuimasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50571__zanting_rang_wo_cha_gonglue.txt) | ~8~ | 2734 | 2023-12-22 16:35 |
| 50338 | [![50338__kunoichi_tsubaki_no_mune_no_uchi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50338__kunoichi_tsubaki_no_mune_no_uchi.jpg)](https://myanimelist.net/anime/50338/Kunoichi_Tsubaki_no_Mune_no_Uchi) | [Kunoichi Tsubaki no Mune no Uchi](https://subsplease.org/shows/kunoichi-tsubaki-no-mune-no-uchi) | TV | 13 / 13 | **Finished Airing** | 7.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kunoichi+Tsubaki+no+Mune+no+Uchi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50338__kunoichi_tsubaki_no_mune_no_uchi.txt) | ~8~ | 3004 | 2022-07-02 16:31 |
| 50287 | [![50287__kyuuketsuki_sugu_shinu_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50287__kyuuketsuki_sugu_shinu_2.jpg)](https://myanimelist.net/anime/50287/Kyuuketsuki_Sugu_Shinu_2) | [Kyuuketsuki Sugu Shinu S2](https://subsplease.org/shows/kyuuketsuki-sugu-shinu-s2) | TV | 12 / 12 | **Finished Airing** | 7.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kyuuketsuki+Sugu+Shinu+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50287__kyuuketsuki_sugu_shinu_2.txt) | ~8~ | 1981 | 2023-03-27 14:02 |
| 48414 | [![48414__sabikui_bisco](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48414__sabikui_bisco.jpg)](https://myanimelist.net/anime/48414/Sabikui_Bisco) | [Sabikui Bisco](https://subsplease.org/shows/sabikui-bisco) | TV | 12 / 12 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sabikui+Bisco+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48414__sabikui_bisco.txt) | ~8~ | 5688 | 2022-03-28 15:31 |
| 45425 | [![45425__slow_loop](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45425__slow_loop.jpg)](https://myanimelist.net/anime/45425/Slow_Loop) | [Slow Loop](https://subsplease.org/shows/slow-loop) | TV | 12 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Slow+Loop+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45425__slow_loop.txt) | ~8~ | 2821 | 2022-03-25 14:01 |
| 43556 | [![43556__tsurune_movie_hajimari_no_issha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43556__tsurune_movie_hajimari_no_issha.jpg)](https://myanimelist.net/anime/43556/Tsurune_Movie__Hajimari_no_Issha) | [Tsurune Movie - Hajimari no Issha](https://subsplease.org/shows/tsurune-movie-hajimari-no-issha) | Movie | 1 / 1 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsurune+Movie+Hajimari+no+Issha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43556__tsurune_movie_hajimari_no_issha.txt) | ~8~ | 2180 | 2023-06-19 04:33 |
| 43470 | [![43470__rikei_ga_koi_ni_ochita_no_de_shoumei_shitemita_heart](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43470__rikei_ga_koi_ni_ochita_no_de_shoumei_shitemita_heart.jpg)](https://myanimelist.net/anime/43470/Rikei_ga_Koi_ni_Ochita_no_de_Shoumei_shitemita_Heart) | [Rikei ga Koi ni Ochita no de Shoumei shitemita S2](https://subsplease.org/shows/rikei-ga-koi-ni-ochita-no-de-shoumei-shitemita-s2) | TV | 12 / 12 | **Finished Airing** | 7.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rikei+ga+Koi+ni+Ochita+no+de+Shoumei+shitemita+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43470__rikei_ga_koi_ni_ochita_no_de_shoumei_shitemita_heart.txt) | ~8~ | 2631 | 2022-06-17 16:47 |
| 41589 | [![41589__tokyo_mew_mew_new](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41589__tokyo_mew_mew_new.jpg)](https://myanimelist.net/anime/41589/Tokyo_Mew_Mew_New_♡) | [Tokyo Mew Mew New](https://subsplease.org/shows/tokyo-mew-mew-new) | TV | 24 / 12 | **Finished Airing** | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tokyo+Mew+Mew+New+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41589__tokyo_mew_mew_new.txt) | ~8~ | 2281 | 2023-06-20 16:02 |
| 41379 | [![41379__kimi_wa_kanata](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41379__kimi_wa_kanata.jpg)](https://myanimelist.net/anime/41379/Kimi_wa_Kanata) | [Kimi wa Kanata](https://subsplease.org/shows/kimi-wa-kanata) | Movie | 1 / 1 | **Finished Airing** | 5.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+wa+Kanata+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41379__kimi_wa_kanata.txt) | ~8~ | 2079 | 2021-10-22 16:49 |
| 41025 | [![41025__fumetsu_no_anata_e](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41025__fumetsu_no_anata_e.jpg)](https://myanimelist.net/anime/41025/Fumetsu_no_Anata_e) | [Fumetsu no Anata e](https://subsplease.org/shows/fumetsu-no-anata-e) | TV | 20 / 20 | **Finished Airing** | 8.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fumetsu+no+Anata+e+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41025__fumetsu_no_anata_e.txt) | ~8~ | 8831 | 2021-08-30 16:32 |
| 40904 | [![40904__bokutachi_no_remake](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40904__bokutachi_no_remake.jpg)](https://myanimelist.net/anime/40904/Bokutachi_no_Remake) | [Bokutachi no Remake](https://subsplease.org/shows/bokutachi-no-remake) | TV | 13 / 12 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bokutachi+no+Remake+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40904__bokutachi_no_remake.txt) | ~8~ | 5724 | 2021-09-25 14:32 |
| 39808 | [![39808__non_non_biyori_nonstop](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39808__non_non_biyori_nonstop.jpg)](https://myanimelist.net/anime/39808/Non_Non_Biyori_Nonstop) | [Non Non Biyori Nonstop](https://subsplease.org/shows/non-non-biyori-nonstop) | TV | 12 / 12 | **Finished Airing** | 8.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Non+Non+Biyori+Nonstop+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39808__non_non_biyori_nonstop.txt) | ~8~ | 3222 | 2021-03-28 17:53 |
| 39584 | [![39584__human_lost_ningen_shikkaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39584__human_lost_ningen_shikkaku.jpg)](https://myanimelist.net/anime/39584/Human_Lost__Ningen_Shikkaku) | [Human Lost](https://subsplease.org/shows/human-lost) | Movie | 1 / 1 | **Finished Airing** | 5.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Human+Lost+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39584__human_lost_ningen_shikkaku.txt) | ~8~ | 1379 | 2022-08-13 06:39 |
| 49721 | [![49721__karakai_jouzu_no_takagi_san_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49721__karakai_jouzu_no_takagi_san_3.jpg)](https://myanimelist.net/anime/49721/Karakai_Jouzu_no_Takagi-san_3) | [Karakai Jouzu no Takagi-san S3](https://subsplease.org/shows/karakai-jouzu-no-takagi-san-s3) | TV | 12 / 12 | **Finished Airing** | 8.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Karakai+Jouzu+no+Takagi+san+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49721__karakai_jouzu_no_takagi_san_3.txt) | ~8~ | 5254 | 2022-03-25 17:05 |
| 50917 | [![50917__prima_doll](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50917__prima_doll.jpg)](https://myanimelist.net/anime/50917/Prima_Doll) | [Prima Doll](https://subsplease.org/shows/prima-doll) | TV | 12 / 12 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Prima+Doll+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50917__prima_doll.txt) | ~7~ | 2544 | 2022-09-23 16:31 |
| 50429 | [![50429__aiyou_de_mishi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50429__aiyou_de_mishi.jpg)](https://myanimelist.net/anime/50429/Aiyou_de_Mishi) | [X and Y](https://subsplease.org/shows/x-and-y) | ONA | 16 / 16 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+X+and+Y+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50429__aiyou_de_mishi.txt) | ~7~ | 1796 | 2023-07-19 04:01 |
| 50348 | [![50348__peter_grill_to_kenja_no_jikan_super_extra](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50348__peter_grill_to_kenja_no_jikan_super_extra.jpg)](https://myanimelist.net/anime/50348/Peter_Grill_to_Kenja_no_Jikan__Super_Extra) | [Peter Grill to Kenja no Jikan S2](https://subsplease.org/shows/peter-grill-to-kenja-no-jikan-s2) | TV | 12 / 12 | **Finished Airing** | 5.94 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Peter+Grill+to+Kenja+no+Jikan+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50348__peter_grill_to_kenja_no_jikan_super_extra.txt) | ~7~ | 2588 | 2022-12-25 16:30 |
| 50002 | [![50002__edens_zero_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50002__edens_zero_2nd_season.jpg)](https://myanimelist.net/anime/50002/Edens_Zero_2nd_Season) | [Edens Zero](https://subsplease.org/shows/edens-zero) | TV | 25 / 25 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Edens+Zero+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50002__edens_zero_2nd_season.txt) | ~7~ | 4191 | 2023-09-30 18:01 |
| 49533 | [![49533__uchi_no_shishou_wa_shippo_ga_nai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49533__uchi_no_shishou_wa_shippo_ga_nai.jpg)](https://myanimelist.net/anime/49533/Uchi_no_Shishou_wa_Shippo_ga_Nai) | [Uchi no Shishou wa Shippo ga Nai](https://subsplease.org/shows/uchi-no-shishou-wa-shippo-ga-nai) | TV | 13 / 13 | **Finished Airing** | 6.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uchi+no+Shishou+wa+Shippo+ga+Nai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49533__uchi_no_shishou_wa_shippo_ga_nai.txt) | ~7~ | 2189 | 2022-12-23 14:30 |
| 49376 | [![49376__mou_ippon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49376__mou_ippon.jpg)](https://myanimelist.net/anime/49376/Mou_Ippon) | [Mou Ippon!](https://subsplease.org/shows/mou-ippon) | TV | 13 / 13 | **Finished Airing** | 7.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mou+Ippon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49376__mou_ippon.txt) | ~7~ | 2911 | 2023-04-02 17:35 |
| 48553 | [![48553__akebi_chan_no_sailor_fuku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48553__akebi_chan_no_sailor_fuku.jpg)](https://myanimelist.net/anime/48553/Akebi-chan_no_Sailor-fuku) | [Akebi-chan no Sailor-fuku](https://subsplease.org/shows/akebi-chan-no-sailor-fuku) | TV | 12 / 12 | **Finished Airing** | 7.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akebi+chan+no+Sailor+fuku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48553__akebi_chan_no_sailor_fuku.txt) | ~7~ | 4549 | 2022-03-26 17:01 |
| 48471 | [![48471__tsuki_to_laika_to_nosferatu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48471__tsuki_to_laika_to_nosferatu.jpg)](https://myanimelist.net/anime/48471/Tsuki_to_Laika_to_Nosferatu) | [Tsuki to Laika to Nosferatu](https://subsplease.org/shows/tsuki-to-laika-to-nosferatu) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsuki+to+Laika+to+Nosferatu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48471__tsuki_to_laika_to_nosferatu.txt) | ~7~ | 4914 | 2021-12-19 17:07 |
| 47257 | [![47257__shinigami_bocchan_to_kuro_maid](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47257__shinigami_bocchan_to_kuro_maid.jpg)](https://myanimelist.net/anime/47257/Shinigami_Bocchan_to_Kuro_Maid) | [Shinigami Bocchan to Kuro Maid](https://subsplease.org/shows/shinigami-bocchan-to-kuro-maid) | TV | 36 / 12 | **Finished Airing** | 7.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinigami+Bocchan+to+Kuro+Maid+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47257__shinigami_bocchan_to_kuro_maid.txt) | ~7~ | 4310 | 2024-06-23 14:02 |
| 46604 | [![46604__dolls_frontline](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46604__dolls_frontline.jpg)](https://myanimelist.net/anime/46604/Dolls_Frontline) | [Girls' Frontline](https://subsplease.org/shows/girls-frontline) | TV | 12 / 12 | **Finished Airing** | 5.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Girls+Frontline+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46604__dolls_frontline.txt) | ~7~ | 2428 | 2022-03-25 17:02 |
| 46471 | [![46471__tantei_wa_mou_shindeiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46471__tantei_wa_mou_shindeiru.jpg)](https://myanimelist.net/anime/46471/Tantei_wa_Mou_Shindeiru) | [Tantei wa Mou, Shindeiru.](https://subsplease.org/shows/tantei-wa-mou-shindeiru) | TV | 12 / 12 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tantei+wa+Mou+Shindeiru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46471__tantei_wa_mou_shindeiru.txt) | ~7~ | 5123 | 2021-09-19 13:32 |
| 44586 | [![44586__kakushigoto_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44586__kakushigoto_movie.jpg)](https://myanimelist.net/anime/44586/Kakushigoto_Movie) | [Kakushigoto Movie](https://subsplease.org/shows/kakushigoto-movie) | Movie | 1 / 1 | **Finished Airing** | 7.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kakushigoto+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44586__kakushigoto_movie.txt) | ~7~ | 2210 | 2021-08-07 03:35 |
| 42897 | [![42897__horimiya](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42897__horimiya.jpg)](https://myanimelist.net/anime/42897/Horimiya) | [Horimiya](https://subsplease.org/shows/horimiya) | TV | 13 / 13 | **Finished Airing** | 8.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Horimiya+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42897__horimiya.txt) | ~7~ | 9376 | 2021-04-03 17:05 |
| 42670 | [![42670__princess_connect_re_dive_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42670__princess_connect_re_dive_season_2.jpg)](https://myanimelist.net/anime/42670/Princess_Connect_Re_Dive_Season_2) | [Princess Connect! Re-Dive S2](https://subsplease.org/shows/princess-connect-re-dive-s2) | TV | 12 / 12 | **Finished Airing** | 7.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Princess+Connect+Re+Dive+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42670__princess_connect_re_dive_season_2.txt) | ~7~ | 3649 | 2022-03-28 16:01 |
| 41782 | [![41782__bang_dream_movie_poppin_dream](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41782__bang_dream_movie_poppin_dream.jpg)](https://myanimelist.net/anime/41782/BanG_Dream_Movie__Poppin_Dream) | [BanG Dream! Movie](https://subsplease.org/shows/bang-dream-movie) | Movie | 1 / 1 | **Finished Airing** | 7.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+BanG+Dream+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41782__bang_dream_movie_poppin_dream.txt) | ~7~ | 953 | 2022-09-05 00:56 |
| 41623 | [![41623__isekai_maou_to_shoukan_shoujo_no_dorei_majutsu_ω](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41623__isekai_maou_to_shoukan_shoujo_no_dorei_majutsu_%CF%89.jpg)](https://myanimelist.net/anime/41623/Isekai_Maou_to_Shoukan_Shoujo_no_Dorei_Majutsu_Ω) | [Isekai Maou to Shoukan Shoujo no Dorei Majutsu S2](https://subsplease.org/shows/isekai-maou-to-shoukan-shoujo-no-dorei-majutsu-s2) | TV | 10 / 10 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Maou+to+Shoukan+Shoujo+no+Dorei+Majutsu+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41623__isekai_maou_to_shoukan_shoujo_no_dorei_majutsu_%CF%89.txt) | ~7~ | 5424 | 2021-06-10 18:46 |
| 41530 | [![41530__magia_record_mahou_shoujo_madoka_magica_gaiden_2nd_season_kakusei_zenya](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41530__magia_record_mahou_shoujo_madoka_magica_gaiden_2nd_season_kakusei_zenya.jpg)](https://myanimelist.net/anime/41530/Magia_Record__Mahou_Shoujo_Madoka☆Magica_Gaiden_2nd_Season_-_Kakusei_Zenya) | [Magia Record S2](https://subsplease.org/shows/magia-record-s2) | TV | 9 / 8 | **Finished Airing** | 7.0 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Magia+Record+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41530__magia_record_mahou_shoujo_madoka_magica_gaiden_2nd_season_kakusei_zenya.txt) | ~7~ | 2175 | 2021-09-25 16:33 |
| 41402 | [![41402__mairimashita_iruma_kun_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41402__mairimashita_iruma_kun_2nd_season.jpg)](https://myanimelist.net/anime/41402/Mairimashita_Iruma-kun_2nd_Season) | [Mairimashita! Iruma-kun S2](https://subsplease.org/shows/mairimashita-iruma-kun-s2) | TV | 21 / 21 | **Finished Airing** | 8.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mairimashita+Iruma+kun+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41402__mairimashita_iruma_kun_2nd_season.txt) | ~7~ | 3890 | 2021-09-11 11:16 |
| 39783 | [![39783__5_toubun_no_hanayome](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39783__5_toubun_no_hanayome.jpg)](https://myanimelist.net/anime/39783/5-toubun_no_Hanayome_∬) | [Go-toubun no Hanayome S2](https://subsplease.org/shows/go-toubun-no-hanayome-s2) | TV | 12 / 12 | **Finished Airing** | 8.02 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Go+toubun+no+Hanayome+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39783__5_toubun_no_hanayome.txt) | ~7~ | 4264 | 2021-03-25 21:01 |
| 37984 | [![37984__kumo_desu_ga_nani_ka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37984__kumo_desu_ga_nani_ka.jpg)](https://myanimelist.net/anime/37984/Kumo_desu_ga_Nani_ka) | [Kumo desu ga, Nani ka](https://subsplease.org/shows/kumo-desu-ga-nani-ka) | TV | 24 / 24 | **Finished Airing** | 7.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kumo+desu+ga+Nani+ka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37984__kumo_desu_ga_nani_ka.txt) | ~7~ | 7801 | 2021-07-03 13:02 |
| 50284 | [![50284__technoroid_overmind](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50284__technoroid_overmind.jpg)](https://myanimelist.net/anime/50284/Technoroid__Overmind) | [Technoroid Overmind](https://subsplease.org/shows/technoroid-overmind) | TV | 12 / 12 | **Finished Airing** | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Technoroid+Overmind+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50284__technoroid_overmind.txt) | ~7~ | 1455 | 2023-03-29 16:31 |
| 50273 | [![50273__tomodachi_game](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50273__tomodachi_game.jpg)](https://myanimelist.net/anime/50273/Tomodachi_Game) | [Tomodachi Game](https://subsplease.org/shows/tomodachi-game) | TV | 12 / 12 | **Finished Airing** | 7.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tomodachi+Game+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50273__tomodachi_game.txt) | ~7~ | 3966 | 2022-06-21 15:01 |
| 48997 | [![48997__fantasy_bishoujo_juniku_ojisan_to](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48997__fantasy_bishoujo_juniku_ojisan_to.jpg)](https://myanimelist.net/anime/48997/Fantasy_Bishoujo_Juniku_Ojisan_to) | [Fantasy Bishoujo Juniku Ojisan to](https://subsplease.org/shows/fantasy-bishoujo-juniku-ojisan-to) | TV | 12 / 12 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fantasy+Bishoujo+Juniku+Ojisan+to+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48997__fantasy_bishoujo_juniku_ojisan_to.txt) | ~7~ | 4899 | 2022-03-29 16:31 |
| 48405 | [![48405__totsukuni_no_shoujo_2022](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48405__totsukuni_no_shoujo_2022.jpg)](https://myanimelist.net/anime/48405/Totsukuni_no_Shoujo_2022) | [Totsukuni no Shoujo](https://subsplease.org/shows/totsukuni-no-shoujo) | OVA | 3 / 1 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Totsukuni+no+Shoujo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48405__totsukuni_no_shoujo_2022.txt) | ~6~ | 1705 | 2022-08-05 17:17 |
| 41780 | [![41780__bang_dream_movie_episode_of_roselia_i_yakusoku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41780__bang_dream_movie_episode_of_roselia_i_yakusoku.jpg)](https://myanimelist.net/anime/41780/BanG_Dream_Movie__Episode_of_Roselia_-_I__Yakusoku) | [BanG Dream! Movie - Episode of Roselia](https://subsplease.org/shows/bang-dream-movie-episode-of-roselia) | Movie | 2 / 1 | **Finished Airing** | 7.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+BanG+Dream+Movie+Episode+of+Roselia+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41780__bang_dream_movie_episode_of_roselia_i_yakusoku.txt) | ~6~ | 878 | 2022-08-06 17:28 |
| 55166 | [![55166__yami_shibai_11](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/55166__yami_shibai_11.jpg)](https://myanimelist.net/anime/55166/Yami_Shibai_11) | [Yami Shibai 11](https://subsplease.org/shows/yami-shibai-11) | TV | 13 / 13 | **Finished Airing** | 5.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+11+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/55166__yami_shibai_11.txt) | ~6~ | 1454 | 2023-10-01 19:30 |
| 54803 | [![54803__captain_tsubasa_season_2_junior_youth_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54803__captain_tsubasa_season_2_junior_youth_hen.jpg)](https://myanimelist.net/anime/54803/Captain_Tsubasa_Season_2__Junior_Youth-hen) | [Captain Tsubasa S2](https://subsplease.org/shows/captain-tsubasa-s2) | TV | 39 / 39 | **Finished Airing** | 7.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Captain+Tsubasa+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54803__captain_tsubasa_season_2_junior_youth_hen.txt) | ~6~ | 1602 | 2024-06-30 09:02 |
| 54738 | [![54738__majutsushi_orphen_hagure_tabi_seiiki_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54738__majutsushi_orphen_hagure_tabi_seiiki_hen.jpg)](https://myanimelist.net/anime/54738/Majutsushi_Orphen_Hagure_Tabi__Seiiki-hen) | [Majutsushi Orphen Hagure Tabi S4](https://subsplease.org/shows/majutsushi-orphen-hagure-tabi-s4) | TV | 12 / 12 | **Finished Airing** | 6.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Majutsushi+Orphen+Hagure+Tabi+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54738__majutsushi_orphen_hagure_tabi_seiiki_hen.txt) | ~6~ | 2198 | 2023-06-28 12:31 |
| 53162 | [![53162__majutsushi_orphen_hagure_tabi_urbanrama_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53162__majutsushi_orphen_hagure_tabi_urbanrama_hen.jpg)](https://myanimelist.net/anime/53162/Majutsushi_Orphen_Hagure_Tabi__Urbanrama-hen) | [Majutsushi Orphen Hagure Tabi S3](https://subsplease.org/shows/majutsushi-orphen-hagure-tabi-s3) | TV | 12 / 12 | **Finished Airing** | 6.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Majutsushi+Orphen+Hagure+Tabi+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53162__majutsushi_orphen_hagure_tabi_urbanrama_hen.txt) | ~6~ | 1879 | 2023-04-05 12:37 |
| 50891 | [![50891__hoshi_no_samidare](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50891__hoshi_no_samidare.jpg)](https://myanimelist.net/anime/50891/Hoshi_no_Samidare) | [Hoshi no Samidare](https://subsplease.org/shows/hoshi-no-samidare) | TV | 25 / 24 | **Finished Airing** | 5.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hoshi+no+Samidare+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50891__hoshi_no_samidare.txt) | ~6~ | 2372 | 2022-12-23 19:46 |
| 48916 | [![48916__love_live_nijigasaki_gakuen_school_idol_doukoukai_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48916__love_live_nijigasaki_gakuen_school_idol_doukoukai_2nd_season.jpg)](https://myanimelist.net/anime/48916/Love_Live_Nijigasaki_Gakuen_School_Idol_Doukoukai_2nd_Season) | [Love Live! Nijigasaki Gakuen School Idol Doukoukai S2](https://subsplease.org/shows/love-live-nijigasaki-gakuen-school-idol-doukoukai-s2) | TV | 13 / 13 | **Finished Airing** | 7.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Nijigasaki+Gakuen+School+Idol+Doukoukai+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48916__love_live_nijigasaki_gakuen_school_idol_doukoukai_2nd_season.txt) | ~6~ | 1551 | 2022-06-25 13:31 |
| 48363 | [![48363__rpg_fudousan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48363__rpg_fudousan.jpg)](https://myanimelist.net/anime/48363/RPG_Fudousan) | [RPG Fudousan](https://subsplease.org/shows/rpg-fudousan) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+RPG+Fudousan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48363__rpg_fudousan.txt) | ~6~ | 2711 | 2022-06-22 13:33 |
| 46093 | [![46093__shiroi_suna_no_aquatope](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46093__shiroi_suna_no_aquatope.jpg)](https://myanimelist.net/anime/46093/Shiroi_Suna_no_Aquatope) | [Shiroi Suna no Aquatope](https://subsplease.org/shows/shiroi-suna-no-aquatope) | TV | 24 / 24 | **Finished Airing** | 7.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shiroi+Suna+no+Aquatope+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46093__shiroi_suna_no_aquatope.txt) | ~6~ | 3645 | 2021-12-16 17:31 |
| 44074 | [![44074__shiguang_dailiren](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44074__shiguang_dailiren.jpg)](https://myanimelist.net/anime/44074/Shiguang_Dailiren) | [Link Click](https://subsplease.org/shows/link-click) | ONA | 13 / 11 | **Finished Airing** | 8.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Link+Click+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44074__shiguang_dailiren.txt) | ~6~ | 1788 | 2021-08-28 20:28 |
| 44037 | [![44037__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44037__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita.jpg)](https://myanimelist.net/anime/44037/Shin_no_Nakama_ja_Nai_to_Yuusha_no_Party_wo_Oidasareta_node_Henkyou_de_Slow_Life_suru_Koto_ni_Shimashita) | [Shin no Nakama](https://subsplease.org/shows/shin-no-nakama) | TV | 13 / 13 | **Finished Airing** | 6.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shin+no+Nakama+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44037__shin_no_nakama_ja_nai_to_yuusha_no_party_wo_oidasareta_node_henkyou_de_slow_life_suru_koto_ni_shimashita.txt) | ~6~ | 7887 | 2021-12-29 14:32 |
| 43969 | [![43969__kanojo_mo_kanojo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43969__kanojo_mo_kanojo.jpg)](https://myanimelist.net/anime/43969/Kanojo_mo_Kanojo) | [Kanojo mo Kanojo](https://subsplease.org/shows/kanojo-mo-kanojo) | TV | 12 / 12 | **Finished Airing** | 6.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kanojo+mo+Kanojo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43969__kanojo_mo_kanojo.txt) | ~6~ | 4922 | 2021-09-17 19:02 |
| 43762 | [![43762__hula_fulla_dance](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43762__hula_fulla_dance.jpg)](https://myanimelist.net/anime/43762/Hula_Fulla_Dance) | [Hula Fulla Dance](https://subsplease.org/shows/hula-fulla-dance) | Movie | 1 / 1 | **Finished Airing** | 6.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hula+Fulla+Dance+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43762__hula_fulla_dance.txt) | ~6~ | 1044 | 2023-06-23 16:37 |
| 43523 | [![43523__tsuki_ga_michibiku_isekai_douchuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43523__tsuki_ga_michibiku_isekai_douchuu.jpg)](https://myanimelist.net/anime/43523/Tsuki_ga_Michibiku_Isekai_Douchuu) | [Tsuki ga Michibiku Isekai Douchuu](https://subsplease.org/shows/tsuki-ga-michibiku-isekai-douchuu) | TV | 12 / 12 | **Finished Airing** | 7.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsuki+ga+Michibiku+Isekai+Douchuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43523__tsuki_ga_michibiku_isekai_douchuu.txt) | ~6~ | 8280 | 2021-09-22 15:07 |
| 42072 | [![42072__kenja_no_deshi_wo_nanoru_kenja](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42072__kenja_no_deshi_wo_nanoru_kenja.jpg)](https://myanimelist.net/anime/42072/Kenja_no_Deshi_wo_Nanoru_Kenja) | [Kenja no Deshi wo Nanoru Kenja](https://subsplease.org/shows/kenja-no-deshi-wo-nanoru-kenja) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kenja+no+Deshi+wo+Nanoru+Kenja+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42072__kenja_no_deshi_wo_nanoru_kenja.txt) | ~6~ | 4558 | 2022-03-30 03:12 |
| 40454 | [![40454__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40454__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iii.jpg)](https://myanimelist.net/anime/40454/Dungeon_ni_Deai_wo_Motomeru_no_wa_Machigatteiru_Darou_ka_III) | [Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka S3](https://subsplease.org/shows/dungeon-ni-deai-wo-motomeru-no-wa-machigatteiru-darou-ka-s3) | TV | 13 / 12 | **Finished Airing** | 7.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dungeon+ni+Deai+wo+Motomeru+no+wa+Machigatteiru+Darou+ka+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40454__dungeon_ni_deai_wo_motomeru_no_wa_machigatteiru_darou_ka_iii.txt) | ~6~ | 5835 | 2021-04-29 23:49 |
| 40174 | [![40174__zombieland_saga_revenge](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40174__zombieland_saga_revenge.jpg)](https://myanimelist.net/anime/40174/Zombieland_Saga_Revenge) | [Zombieland Saga S2](https://subsplease.org/shows/zombieland-saga-s2) | TV | 12 / 12 | **Finished Airing** | 7.99 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Zombieland+Saga+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40174__zombieland_saga_revenge.txt) | ~6~ | 3448 | 2021-06-24 15:32 |
| 39990 | [![39990__vlad_love](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39990__vlad_love.jpg)](https://myanimelist.net/anime/39990/Vlad_Love) | [Vlad Love](https://subsplease.org/shows/vlad-love) | ONA | 12 / 12 | **Finished Airing** | 5.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vlad+Love+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39990__vlad_love.txt) | ~6~ | 1693 | 2021-03-13 21:31 |
| 33839 | [![33839__alice_in_deadly_school](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/33839__alice_in_deadly_school.jpg)](https://myanimelist.net/anime/33839/Alice_in_Deadly_School) | [Alice in Deadly School](https://subsplease.org/shows/alice-in-deadly-school) | OVA | 1 / 1 | **Finished Airing** | 5.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Alice+in+Deadly+School+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/33839__alice_in_deadly_school.txt) | ~6~ | 1808 | 2021-04-06 16:58 |
| 50404 | [![50404__xian_wang_de_richang_shenghuo_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50404__xian_wang_de_richang_shenghuo_3.jpg)](https://myanimelist.net/anime/50404/Xian_Wang_de_Richang_Shenghuo_3) | [The Daily Life of the Immortal King S3](https://subsplease.org/shows/the-daily-life-of-the-immortal-king-s3) | ONA | 12 / 12 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Daily+Life+of+the+Immortal+King+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50404__xian_wang_de_richang_shenghuo_3.txt) | ~6~ | 2940 | 2022-12-11 06:01 |
| 49160 | [![49160__shachiku_san_wa_youjo_yuurei_ni_iyasaretai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49160__shachiku_san_wa_youjo_yuurei_ni_iyasaretai.jpg)](https://myanimelist.net/anime/49160/Shachiku-san_wa_Youjo_Yuurei_ni_Iyasaretai) | [Shachiku-san wa Youjo Yuurei ni Iyasaretai](https://subsplease.org/shows/shachiku-san-wa-youjo-yuurei-ni-iyasaretai) | TV | 12 / 12 | **Finished Airing** | 7.1 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shachiku+san+wa+Youjo+Yuurei+ni+Iyasaretai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49160__shachiku_san_wa_youjo_yuurei_ni_iyasaretai.txt) | ~6~ | 1772 | 2022-06-23 13:31 |
| 48857 | [![48857__healer_girl](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48857__healer_girl.jpg)](https://myanimelist.net/anime/48857/Healer_Girl) | [Healer Girl](https://subsplease.org/shows/healer-girl) | TV | 12 / 12 | **Finished Airing** | 7.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Healer+Girl+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48857__healer_girl.txt) | ~6~ | 1696 | 2022-06-20 14:31 |
| 48849 | [![48849__sonny_boy](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48849__sonny_boy.jpg)](https://myanimelist.net/anime/48849/Sonny_Boy) | [Sonny Boy](https://subsplease.org/shows/sonny-boy) | TV | 12 / 12 | **Finished Airing** | 7.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sonny+Boy+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48849__sonny_boy.txt) | ~6~ | 5279 | 2021-09-30 16:31 |
| 45055 | [![45055__taishou_otome_otogibanashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45055__taishou_otome_otogibanashi.jpg)](https://myanimelist.net/anime/45055/Taishou_Otome_Otogibanashi) | [Taishou Otome Otogibanashi](https://subsplease.org/shows/taishou-otome-otogibanashi) | TV | 12 / 12 | **Finished Airing** | 7.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Taishou+Otome+Otogibanashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45055__taishou_otome_otogibanashi.txt) | ~6~ | 3007 | 2021-12-24 17:56 |
| 44524 | [![44524__isekai_meikyuu_de_harem_wo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44524__isekai_meikyuu_de_harem_wo.jpg)](https://myanimelist.net/anime/44524/Isekai_Meikyuu_de_Harem_wo) | [Isekai Meikyuu de Harem wo](https://subsplease.org/shows/isekai-meikyuu-de-harem-wo) | TV | 12 / 12 | **Finished Airing** | 6.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Meikyuu+de+Harem+wo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44524__isekai_meikyuu_de_harem_wo.txt) | ~6~ | 5191 | 2022-09-22 00:12 |
| 40685 | [![40685__super_cub](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40685__super_cub.jpg)](https://myanimelist.net/anime/40685/Super_Cub) | [Super Cub](https://subsplease.org/shows/super-cub) | TV | 12 / 12 | **Finished Airing** | 7.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Super+Cub+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40685__super_cub.txt) | ~6~ | 3127 | 2021-06-23 15:02 |
| 39586 | [![39586__hataraku_saibou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39586__hataraku_saibou.jpg)](https://myanimelist.net/anime/39586/Hataraku_Saibou) | [Hataraku Saibou S2](https://subsplease.org/shows/hataraku-saibou-s2) | TV | 8 / 8 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hataraku+Saibou+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39586__hataraku_saibou.txt) | ~6~ | 4149 | 2021-02-25 17:01 |
| 49738 | [![49738__heike_monogatari](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49738__heike_monogatari.jpg)](https://myanimelist.net/anime/49738/Heike_Monogatari) | [Heike Monogatari](https://subsplease.org/shows/heike-monogatari) | TV | 11 / 11 | **Finished Airing** | 7.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heike+Monogatari+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49738__heike_monogatari.txt) | ~6~ | 3145 | 2021-11-24 15:03 |
| 57995 | [![57995__bai_yao_pu_4th_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/57995__bai_yao_pu_4th_season.jpg)](https://myanimelist.net/anime/57995/Bai_Yao_Pu_4th_Season) | [Fairies Album S4](https://subsplease.org/shows/fairies-album-s4) | ONA | 12 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fairies+Album+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/57995__bai_yao_pu_4th_season.txt) | ~5~ | 935 | 2024-05-03 03:01 |
| 54118 | [![54118__idolish7_movie_live_4bit_beyond_the_period](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54118__idolish7_movie_live_4bit_beyond_the_period.jpg)](https://myanimelist.net/anime/54118/IDOLiSH7_Movie__LIVE_4bit_-_BEYOND_THE_PERiOD) | [IDOLiSH7 Movie - LIVE 4bit](https://subsplease.org/shows/idolish7-movie-live-4bit) | Movie | 2 / 2 | **Finished Airing** | 7.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+IDOLiSH7+Movie+LIVE+4bit+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54118__idolish7_movie_live_4bit_beyond_the_period.txt) | ~5~ | 954 | 2023-11-22 10:02 |
| 53132 | [![53132__uniteup](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53132__uniteup.jpg)](https://myanimelist.net/anime/53132/UniteUp) | [UniteUp!](https://subsplease.org/shows/uniteup) | TV | 12 / 12 | **Finished Airing** | 7.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+UniteUp+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53132__uniteup.txt) | ~5~ | 930 | 2023-04-15 16:01 |
| 52976 | [![52976__berserk_ougon_jidai_hen_memorial_edition](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52976__berserk_ougon_jidai_hen_memorial_edition.jpg)](https://myanimelist.net/anime/52976/Berserk__Ougon_Jidai-hen_-_Memorial_Edition) | [Berserk - The Golden Age Arc Memorial Edition](https://subsplease.org/shows/berserk-the-golden-age-arc-memorial-edition) | TV | 13 / 13 | **Finished Airing** | 7.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Berserk+The+Golden+Age+Arc+Memorial+Edition+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52976__berserk_ougon_jidai_hen_memorial_edition.txt) | ~5~ | 3488 | 2022-12-24 18:01 |
| 52826 | [![52826__tsurune_tsunagari_no_issha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52826__tsurune_tsunagari_no_issha.jpg)](https://myanimelist.net/anime/52826/Tsurune__Tsunagari_no_Issha) | [Tsurune S2](https://subsplease.org/shows/tsurune-s2) | TV | 13 / 13 | **Finished Airing** | 8.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsurune+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52826__tsurune_tsunagari_no_issha.txt) | ~5~ | 2837 | 2023-03-29 16:01 |
| 51923 | [![51923__warau_arsnotoria_sun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51923__warau_arsnotoria_sun.jpg)](https://myanimelist.net/anime/51923/Warau_Arsnotoria_Sun) | [Warau Arsnotoria Sun!](https://subsplease.org/shows/warau-arsnotoria-sun) | TV | 12 / 12 | **Finished Airing** | 5.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Warau+Arsnotoria+Sun+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51923__warau_arsnotoria_sun.txt) | ~5~ | 1887 | 2022-09-21 13:01 |
| 51586 | [![51586__d4dj_all_mix](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51586__d4dj_all_mix.jpg)](https://myanimelist.net/anime/51586/D4DJ_All_Mix) | [D4DJ All Mix](https://subsplease.org/shows/d4dj-all-mix) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+D4DJ+All+Mix+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51586__d4dj_all_mix.txt) | ~5~ | 1407 | 2023-03-26 16:31 |
| 50250 | [![50250__chiikawa](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50250__chiikawa.jpg)](https://myanimelist.net/anime/50250/Chiikawa) | [Chiikawa](https://subsplease.org/shows/chiikawa) | TV | 52 / ? | Currently Airing | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Chiikawa+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50250__chiikawa.txt) | ~5~ | 959 | 2024-11-28 17:57 |
| 49942 | [![49942__tales_of_luminaria_the_fateful_crossroad](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49942__tales_of_luminaria_the_fateful_crossroad.jpg)](https://myanimelist.net/anime/49942/Tales_of_Luminaria__The_Fateful_Crossroad) | [Tales of Luminaria - The Fateful Crossroad](https://subsplease.org/shows/tales-of-luminaria-the-fateful-crossroad) | ONA | 2 / 2 | **Finished Airing** | 6.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tales+of+Luminaria+The+Fateful+Crossroad+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49942__tales_of_luminaria_the_fateful_crossroad.txt) | ~5~ | 2080 | 2022-01-21 04:37 |
| 49605 | [![49605__ganbare_douki_chan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49605__ganbare_douki_chan.jpg)](https://myanimelist.net/anime/49605/Ganbare_Douki-chan) | [Ganbare Douki-chan](https://subsplease.org/shows/ganbare-douki-chan) | ONA | 12 / 12 | **Finished Airing** | 6.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ganbare+Douki+chan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49605__ganbare_douki_chan.txt) | ~5~ | 4792 | 2021-12-05 23:15 |
| 49385 | [![49385__kaijin_kaihatsu_bu_no_kuroitsu_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49385__kaijin_kaihatsu_bu_no_kuroitsu_san.jpg)](https://myanimelist.net/anime/49385/Kaijin_Kaihatsu-bu_no_Kuroitsu-san) | [Kaijin Kaihatsu-bu no Kuroitsu-san](https://subsplease.org/shows/kaijin-kaihatsu-bu-no-kuroitsu-san) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaijin+Kaihatsu+bu+no+Kuroitsu+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49385__kaijin_kaihatsu_bu_no_kuroitsu_san.txt) | ~5~ | 2856 | 2022-04-02 18:46 |
| 49283 | [![49283__bakuten_movie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49283__bakuten_movie.jpg)](https://myanimelist.net/anime/49283/Bakuten_Movie) | [Bakuten!! Movie](https://subsplease.org/shows/bakuten-movie) | Movie | 1 / 1 | **Finished Airing** | 7.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bakuten+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49283__bakuten_movie.txt) | ~5~ | 1170 | 2023-06-24 00:34 |
| 48761 | [![48761__saihate_no_paladin](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48761__saihate_no_paladin.jpg)](https://myanimelist.net/anime/48761/Saihate_no_Paladin) | [Saihate no Paladin](https://subsplease.org/shows/saihate-no-paladin) | TV | 13 / 12 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saihate+no+Paladin+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48761__saihate_no_paladin.txt) | ~5~ | 8155 | 2022-01-03 15:31 |
| 48573 | [![48573__uta_no_prince_sama_movie_maji_love_st_rish_tours](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48573__uta_no_prince_sama_movie_maji_love_st_rish_tours.jpg)](https://myanimelist.net/anime/48573/Uta_no☆Prince-sama♪_Movie__Maji_Love_ST☆RISH_Tours) | [Uta no Prince-sama Maji Love Starish Tours](https://subsplease.org/shows/uta-no-prince-sama-maji-love-starish-tours) | Movie | 2 / 1 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uta+no+Prince+sama+Maji+Love+Starish+Tours+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48573__uta_no_prince_sama_movie_maji_love_st_rish_tours.txt) | ~5~ | 816 | 2023-04-16 22:35 |
| 48573 | [![48573__uta_no_prince_sama_movie_maji_love_st_rish_tours](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48573__uta_no_prince_sama_movie_maji_love_st_rish_tours.jpg)](https://myanimelist.net/anime/48573/Uta_no☆Prince-sama♪_Movie__Maji_Love_ST☆RISH_Tours) | [Uta no Prince-sama Maji Love Starish Tours Movie](https://subsplease.org/shows/uta-no-prince-sama-maji-love-starish-tours) | Movie | 1 / 1 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uta+no+Prince+sama+Maji+Love+Starish+Tours+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48573__uta_no_prince_sama_movie_maji_love_st_rish_tours.txt) | ~5~ | 877 | 2023-04-16 22:35 |
| 44203 | [![44203__seirei_gensouki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44203__seirei_gensouki.jpg)](https://myanimelist.net/anime/44203/Seirei_Gensouki) | [Seirei Gensouki](https://subsplease.org/shows/seirei-gensouki) | TV | 12 / 12 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seirei+Gensouki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44203__seirei_gensouki.txt) | ~5~ | 6429 | 2021-09-20 18:57 |
| 43439 | [![43439__shadows_house](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43439__shadows_house.jpg)](https://myanimelist.net/anime/43439/Shadows_House) | [Shadows House](https://subsplease.org/shows/shadows-house) | TV | 13 / 13 | **Finished Airing** | 7.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shadows+House+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43439__shadows_house.txt) | ~5~ | 3930 | 2021-07-03 17:02 |
| 43007 | [![43007__osananajimi_ga_zettai_ni_makenai_love_comedy](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43007__osananajimi_ga_zettai_ni_makenai_love_comedy.jpg)](https://myanimelist.net/anime/43007/Osananajimi_ga_Zettai_ni_Makenai_Love_Comedy) | [Osananajimi ga Zettai ni Makenai Love Comedy](https://subsplease.org/shows/osananajimi-ga-zettai-ni-makenai-love-comedy) | TV | 12 / 12 | **Finished Airing** | 6.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Osananajimi+ga+Zettai+ni+Makenai+Love+Comedy+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43007__osananajimi_ga_zettai_ni_makenai_love_comedy.txt) | ~5~ | 2649 | 2021-06-30 13:02 |
| 42249 | [![42249__tokyo_revengers](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42249__tokyo_revengers.jpg)](https://myanimelist.net/anime/42249/Tokyo_Revengers) | [Tokyo Revengers](https://subsplease.org/shows/tokyo-revengers) | TV | 24 / 24 | **Finished Airing** | 7.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tokyo+Revengers+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42249__tokyo_revengers.txt) | ~5~ | 7362 | 2021-09-18 19:02 |
| 42091 | [![42091__shingeki_no_kyojin_chronicle](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42091__shingeki_no_kyojin_chronicle.jpg)](https://myanimelist.net/anime/42091/Shingeki_no_Kyojin__Chronicle) | [Shingeki no Kyojin - Chronicle](https://subsplease.org/shows/shingeki-no-kyojin-chronicle) | Movie | 1 / 1 | **Finished Airing** | 7.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shingeki+no+Kyojin+Chronicle+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42091__shingeki_no_kyojin_chronicle.txt) | ~5~ | 2826 | 2020-11-24 21:24 |
| 41694 | [![41694__hataraku_saibou_black](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41694__hataraku_saibou_black.jpg)](https://myanimelist.net/anime/41694/Hataraku_Saibou_Black) | [Hataraku Saibou Black](https://subsplease.org/shows/hataraku-saibou-black) | TV | 13 / 13 | **Finished Airing** | 7.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hataraku+Saibou+Black+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41694__hataraku_saibou_black.txt) | ~5~ | 4204 | 2021-03-18 17:32 |
| 41392 | [![41392__urasekai_picnic](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41392__urasekai_picnic.jpg)](https://myanimelist.net/anime/41392/Urasekai_Picnic) | [Urasekai Picnic](https://subsplease.org/shows/urasekai-picnic) | TV | 12 / 12 | **Finished Airing** | 6.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Urasekai+Picnic+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41392__urasekai_picnic.txt) | ~5~ | 3073 | 2021-03-22 15:31 |
| 40730 | [![40730__tian_guan_cifu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40730__tian_guan_cifu.jpg)](https://myanimelist.net/anime/40730/Tian_Guan_Cifu) | [Heaven Official's Blessing](https://subsplease.org/shows/heaven-officials-blessing) | ONA | 12 / 11 | **Finished Airing** | 8.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heaven+Official+s+Blessing+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40730__tian_guan_cifu.txt) | ~5~ | 1819 | 2021-02-17 07:45 |
| 40620 | [![40620__uramichi_oniisan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40620__uramichi_oniisan.jpg)](https://myanimelist.net/anime/40620/Uramichi_Oniisan) | [Uramichi Oniisan](https://subsplease.org/shows/uramichi-oniisan) | TV | 13 / 13 | **Finished Airing** | 7.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uramichi+Oniisan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40620__uramichi_oniisan.txt) | ~5~ | 2826 | 2021-09-27 16:02 |
| 40615 | [![40615__umibe_no_étranger](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40615__umibe_no_%C3%A9tranger.jpg)](https://myanimelist.net/anime/40615/Umibe_no_Étranger) | [Umibe no Etranger](https://subsplease.org/shows/umibe-no-etranger) | Movie | 1 / 1 | **Finished Airing** | 7.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Umibe+no+Etranger+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40615__umibe_no_%C3%A9tranger.txt) | ~5~ | 1552 | 2021-07-10 00:22 |
| 40608 | [![40608__muv_luv_alternative](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40608__muv_luv_alternative.jpg)](https://myanimelist.net/anime/40608/Muv-Luv_Alternative) | [Muv-Luv Alternative](https://subsplease.org/shows/muv-luv-alternative) | TV | 24 / 12 | **Finished Airing** | 5.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Muv+Luv+Alternative+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40608__muv_luv_alternative.txt) | ~5~ | 2562 | 2022-12-21 18:26 |
| 40590 | [![40590__utawarerumono_futari_no_hakuoro](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40590__utawarerumono_futari_no_hakuoro.jpg)](https://myanimelist.net/anime/40590/Utawarerumono__Futari_no_Hakuoro) | [Utawarerumono - Futari no Hakuoro](https://subsplease.org/shows/utawarerumono-futari-no-hakuoro) | TV | 28 / 28 | **Finished Airing** | 7.52 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Utawarerumono+Futari+no+Hakuoro+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40590__utawarerumono_futari_no_hakuoro.txt) | ~5~ | 2743 | 2022-12-25 12:09 |
| 40421 | [![40421__given_movie_1](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40421__given_movie_1.jpg)](https://myanimelist.net/anime/40421/Given_Movie_1) | [Given Movie](https://subsplease.org/shows/given-movie) | Movie | 1 / 1 | **Finished Airing** | 8.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Given+Movie+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40421__given_movie_1.txt) | ~5~ | 1542 | 2021-02-03 03:07 |
| 39761 | [![39761__saezuru_tori_wa_habatakanai_the_clouds_gather](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39761__saezuru_tori_wa_habatakanai_the_clouds_gather.jpg)](https://myanimelist.net/anime/39761/Saezuru_Tori_wa_Habatakanai__The_Clouds_Gather) | [Saezuru Tori wa Habatakanai - The Clouds Gather](https://subsplease.org/shows/saezuru-tori-wa-habatakanai) | Movie | 1 / 1 | **Finished Airing** | 7.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saezuru+Tori+wa+Habatakanai+The+Clouds+Gather+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39761__saezuru_tori_wa_habatakanai_the_clouds_gather.txt) | ~5~ | 940 | 2021-05-27 05:13 |
| 39617 | [![39617__yakusoku_no_neverland_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39617__yakusoku_no_neverland_2nd_season.jpg)](https://myanimelist.net/anime/39617/Yakusoku_no_Neverland_2nd_Season) | [Yakusoku no Neverland S2](https://subsplease.org/shows/yakusoku-no-neverland-s2) | TV | 12 / 11 | **Finished Airing** | 5.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yakusoku+no+Neverland+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39617__yakusoku_no_neverland_2nd_season.txt) | ~5~ | 8219 | 2021-03-25 19:04 |
| 38680 | [![38680__fruits_basket_1st_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38680__fruits_basket_1st_season.jpg)](https://myanimelist.net/anime/38680/Fruits_Basket_1st_Season) | [Fruits Basket (2019)](https://subsplease.org/shows/fruits-basket-2019) | TV | 13 / 25 | **Finished Airing** | 8.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fruits+Basket+2019+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38680__fruits_basket_1st_season.txt) | ~5~ | 2946 | 2021-06-28 17:32 |
| 38680 | [![38680__fruits_basket_1st_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38680__fruits_basket_1st_season.jpg)](https://myanimelist.net/anime/38680/Fruits_Basket_1st_Season) | [Fruits Basket (2019) S3](https://subsplease.org/shows/fruits-basket-2019) | TV | 13 / 25 | **Finished Airing** | 8.21 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fruits+Basket+2019+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38680__fruits_basket_1st_season.txt) | ~5~ | 2946 | 2021-06-28 17:32 |
| 38006 | [![38006__renmei_kuugun_koukuu_mahou_ongakutai_luminous_witches](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38006__renmei_kuugun_koukuu_mahou_ongakutai_luminous_witches.jpg)](https://myanimelist.net/anime/38006/Renmei_Kuugun_Koukuu_Mahou_Ongakutai_Luminous_Witches) | [Luminous Witches](https://subsplease.org/shows/luminous-witches) | TV | 12 / 12 | **Finished Airing** | 6.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Luminous+Witches+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38006__renmei_kuugun_koukuu_mahou_ongakutai_luminous_witches.txt) | ~5~ | 2171 | 2022-09-25 13:01 |
| 48804 | [![48804__isekai_shokudou_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48804__isekai_shokudou_2.jpg)](https://myanimelist.net/anime/48804/Isekai_Shokudou_2) | [Isekai Shokudou S2](https://subsplease.org/shows/isekai-shokudou-s2) | TV | 12 / 12 | **Finished Airing** | 7.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Isekai+Shokudou+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48804__isekai_shokudou_2.txt) | ~5~ | 3839 | 2021-12-17 18:31 |
| 48742 | [![48742__kono_healer_mendokusai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48742__kono_healer_mendokusai.jpg)](https://myanimelist.net/anime/48742/Kono_Healer_Mendokusai) | [Kono Healer, Mendokusai](https://subsplease.org/shows/kono-healer-mendokusai) | TV | 12 / 12 | **Finished Airing** | 6.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kono+Healer+Mendokusai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48742__kono_healer_mendokusai.txt) | ~5~ | 2459 | 2022-06-26 12:01 |
| 44276 | [![44276__kyuukyoku_shinka_shita_full_dive_rpg_ga_genjitsu_yori_mo_kusoge_dattara](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44276__kyuukyoku_shinka_shita_full_dive_rpg_ga_genjitsu_yori_mo_kusoge_dattara.jpg)](https://myanimelist.net/anime/44276/Kyuukyoku_Shinka_shita_Full_Dive_RPG_ga_Genjitsu_yori_mo_Kusoge_Dattara) | [Full Dive](https://subsplease.org/shows/full-dive) | TV | 12 / 12 | **Finished Airing** | 6.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Full+Dive+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44276__kyuukyoku_shinka_shita_full_dive_rpg_ga_genjitsu_yori_mo_kusoge_dattara.txt) | ~5~ | 4424 | 2021-06-23 14:32 |
| 53077 | [![53077__nijiyon_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53077__nijiyon_animation.jpg)](https://myanimelist.net/anime/53077/Nijiyon_Animation) | [Nijiyon Animation](https://subsplease.org/shows/nijiyon-animation) | TV | 15 / 12 | **Finished Airing** | 6.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nijiyon+Animation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53077__nijiyon_animation.txt) | ~4~ | 1560 | 2024-06-01 17:24 |
| 42625 | [![42625__heion_sedai_no_idaten_tachi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42625__heion_sedai_no_idaten_tachi.jpg)](https://myanimelist.net/anime/42625/Heion_Sedai_no_Idaten-tachi) | [Heion Sedai no Idaten-tachi](https://subsplease.org/shows/heion-sedai-no-idaten-tachi) | TV | 11 / 11 | **Finished Airing** | 7.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heion+Sedai+no+Idaten+tachi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42625__heion_sedai_no_idaten_tachi.txt) | ~4~ | 4455 | 2021-09-28 04:02 |
| 48830 | [![48830__free_movie_5_the_final_stroke_kouhen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48830__free_movie_5_the_final_stroke_kouhen.jpg)](https://myanimelist.net/anime/48830/Free_Movie_5__The_Final_Stroke_-_Kouhen) | [Free! - The Final Stroke](https://subsplease.org/shows/free-the-final-stroke) | Movie | 2 / 1 | **Finished Airing** | 7.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Free+The+Final+Stroke+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48830__free_movie_5_the_final_stroke_kouhen.txt) | ~4~ | 1486 | 2023-11-19 19:19 |
| 54858 | [![54858__hypnosis_mic_division_rap_battle_rhyme_anima](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54858__hypnosis_mic_division_rap_battle_rhyme_anima.jpg)](https://myanimelist.net/anime/54858/Hypnosis_Mic__Division_Rap_Battle_-_Rhyme_Anima__) | [Hypnosis Mic -Division Rap Battle- Rhyme Anima S2](https://subsplease.org/shows/hypnosis-mic-division-rap-battle-rhyme-anima-s2) | TV | 13 / 13 | **Finished Airing** | 6.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hypnosis+Mic+Division+Rap+Battle+Rhyme+Anima+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54858__hypnosis_mic_division_rap_battle_rhyme_anima.txt) | ~4~ | 1343 | 2023-12-29 17:32 |
| 54716 | [![54716__kibou_no_chikara_otona_precure_23](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54716__kibou_no_chikara_otona_precure_23.jpg)](https://myanimelist.net/anime/54716/Kibou_no_Chikara__Otona_Precure_23) | [Kibou no Chikara - Otona Precure '23](https://subsplease.org/shows/kibou-no-chikara-otona-precure-23) | TV | 12 / 12 | **Finished Airing** | 6.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kibou+no+Chikara+Otona+Precure+23+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54716__kibou_no_chikara_otona_precure_23.txt) | ~4~ | 1804 | 2023-12-23 11:50 |
| 53716 | [![53716__hirogaru_sky_precure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53716__hirogaru_sky_precure.jpg)](https://myanimelist.net/anime/53716/Hirogaru_Sky_Precure) | [Hirogaru Sky! Precure](https://subsplease.org/shows/hirogaru-sky-precure) | TV | 50 / 50 | **Finished Airing** | 7.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hirogaru+Sky+Precure+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53716__hirogaru_sky_precure.txt) | ~4~ | 1575 | 2024-01-28 01:31 |
| 50760 | [![50760__teppen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50760__teppen.jpg)](https://myanimelist.net/anime/50760/Teppen) | [Teppen](https://subsplease.org/shows/teppen) | TV | 12 / 12 | **Finished Airing** | 6.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Teppen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50760__teppen.txt) | ~4~ | 1385 | 2022-09-24 14:24 |
| 49854 | [![49854__getsuyoubi_no_tawawa_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49854__getsuyoubi_no_tawawa_2.jpg)](https://myanimelist.net/anime/49854/Getsuyoubi_no_Tawawa_2) | [Getsuyoubi no Tawawa S2](https://subsplease.org/shows/getsuyoubi-no-tawawa-s2) | ONA | 12 / 12 | **Finished Airing** | 6.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Getsuyoubi+no+Tawawa+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49854__getsuyoubi_no_tawawa_2.txt) | ~4~ | 3505 | 2021-12-05 23:21 |
| 49519 | [![49519__hakozume_kouban_joshi_no_gyakushuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49519__hakozume_kouban_joshi_no_gyakushuu.jpg)](https://myanimelist.net/anime/49519/Hakozume__Kouban_Joshi_no_Gyakushuu) | [Hakozume - Kouban Joshi no Gyakushuu](https://subsplease.org/shows/hakozume-kouban-joshi-no-gyakushuu) | TV | 13 / 13 | **Finished Airing** | 7.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hakozume+Kouban+Joshi+no+Gyakushuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49519__hakozume_kouban_joshi_no_gyakushuu.txt) | ~4~ | 2596 | 2022-03-30 15:32 |
| 49515 | [![49515__digimon_ghost_game](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49515__digimon_ghost_game.jpg)](https://myanimelist.net/anime/49515/Digimon_Ghost_Game) | [Digimon Ghost Game](https://subsplease.org/shows/digimon-ghost-game) | TV | 52 / 67 | **Finished Airing** | 6.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Digimon+Ghost+Game+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49515__digimon_ghost_game.txt) | ~4~ | 1329 | 2023-03-26 02:31 |
| 49052 | [![49052__ao_ashi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49052__ao_ashi.jpg)](https://myanimelist.net/anime/49052/Ao_Ashi) | [Ao Ashi](https://subsplease.org/shows/ao-ashi) | TV | 24 / 24 | **Finished Airing** | 8.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ao+Ashi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49052__ao_ashi.txt) | ~4~ | 2940 | 2022-09-24 12:01 |
| 48680 | [![48680__tesla_note](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48680__tesla_note.jpg)](https://myanimelist.net/anime/48680/Tesla_Note) | [Tesla Note](https://subsplease.org/shows/tesla-note) | TV | 13 / 13 | **Finished Airing** | 4.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tesla+Note+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48680__tesla_note.txt) | ~4~ | 1586 | 2021-12-26 14:32 |
| 48580 | [![48580__vanitas_no_karte](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48580__vanitas_no_karte.jpg)](https://myanimelist.net/anime/48580/Vanitas_no_Karte) | [Vanitas no Carte](https://subsplease.org/shows/vanitas-no-carte) | TV | 25 / 12 | **Finished Airing** | 7.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vanitas+no+Carte+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48580__vanitas_no_karte.txt) | ~4~ | 5265 | 2022-04-01 16:31 |
| 47391 | [![47391__seven_knights_revolution_eiyuu_no_keishousha](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47391__seven_knights_revolution_eiyuu_no_keishousha.jpg)](https://myanimelist.net/anime/47391/Seven_Knights_Revolution__Eiyuu_no_Keishousha) | [Seven Knights Revolution - Eiyuu no Keishousha](https://subsplease.org/shows/seven-knights-revolution-eiyuu-no-keishousha) | TV | 12 / 12 | **Finished Airing** | 6.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seven+Knights+Revolution+Eiyuu+no+Keishousha+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47391__seven_knights_revolution_eiyuu_no_keishousha.txt) | ~4~ | 1693 | 2021-06-20 17:03 |
| 46985 | [![46985__shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46985__shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei.jpg)](https://myanimelist.net/anime/46985/Shinka_no_Mi__Shiranai_Uchi_ni_Kachigumi_Jinsei) | [Shinka no Mi - Shiranai Uchi ni Kachigumi Jinsei](https://subsplease.org/shows/shinka-no-mi-shiranai-uchi-ni-kachigumi-jinsei) | TV | 12 / 12 | **Finished Airing** | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shinka+no+Mi+Shiranai+Uchi+ni+Kachigumi+Jinsei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46985__shinka_no_mi_shiranai_uchi_ni_kachigumi_jinsei.txt) | ~4~ | 4390 | 2021-12-20 19:31 |
| 45572 | [![45572__mahouka_koukou_no_yuutousei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45572__mahouka_koukou_no_yuutousei.jpg)](https://myanimelist.net/anime/45572/Mahouka_Koukou_no_Yuutousei) | [Mahouka Koukou no Yuutousei](https://subsplease.org/shows/mahouka-koukou-no-yuutousei) | TV | 13 / 13 | **Finished Airing** | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahouka+Koukou+no+Yuutousei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45572__mahouka_koukou_no_yuutousei.txt) | ~4~ | 4254 | 2021-09-25 16:02 |
| 45560 | [![45560__orient](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45560__orient.jpg)](https://myanimelist.net/anime/45560/Orient) | [Orient](https://subsplease.org/shows/orient) | TV | 24 / 12 | **Finished Airing** | 6.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Orient+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45560__orient.txt) | ~4~ | 2248 | 2022-09-26 17:03 |
| 44961 | [![44961__platinum_end](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44961__platinum_end.jpg)](https://myanimelist.net/anime/44961/Platinum_End) | [Platinum End](https://subsplease.org/shows/platinum-end) | TV | 24 / 24 | **Finished Airing** | 6.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Platinum+End+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44961__platinum_end.txt) | ~4~ | 4537 | 2022-03-24 20:01 |
| 43735 | [![43735__cue](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43735__cue.jpg)](https://myanimelist.net/anime/43735/Cue) | [Cue!](https://subsplease.org/shows/cue) | TV | 24 / 24 | **Finished Airing** | 6.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cue+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43735__cue.txt) | ~4~ | 1124 | 2022-06-24 18:31 |
| 43691 | [![43691__kageki_shoujo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43691__kageki_shoujo.jpg)](https://myanimelist.net/anime/43691/Kageki_Shoujo) | [Kageki Shoujo!!](https://subsplease.org/shows/kageki-shoujo) | TV | 13 / 13 | **Finished Airing** | 7.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kageki+Shoujo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43691__kageki_shoujo.txt) | ~4~ | 2191 | 2021-09-25 16:02 |
| 42941 | [![42941__uma_musume_pretty_derby_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42941__uma_musume_pretty_derby_season_2.jpg)](https://myanimelist.net/anime/42941/Uma_Musume__Pretty_Derby_Season_2) | [Uma Musume - Pretty Derby S2](https://subsplease.org/shows/uma-musume-pretty-derby-s2) | TV | 13 / 13 | **Finished Airing** | 8.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Uma+Musume+Pretty+Derby+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42941__uma_musume_pretty_derby_season_2.txt) | ~4~ | 1740 | 2021-03-29 16:01 |
| 42923 | [![42923__sk](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42923__sk.jpg)](https://myanimelist.net/anime/42923/SK∞) | [SK8 the Infinity](https://subsplease.org/shows/sk8-the-infinity) | TV | 13 / 12 | **Finished Airing** | 8.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+SK8+the+Infinity+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42923__sk.txt) | ~4~ | 3330 | 2021-04-03 18:32 |
| 42826 | [![42826__seijo_no_maryoku_wa_bannou_desu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42826__seijo_no_maryoku_wa_bannou_desu.jpg)](https://myanimelist.net/anime/42826/Seijo_no_Maryoku_wa_Bannou_desu) | [Seijo no Maryoku wa Bannou Desu](https://subsplease.org/shows/seijo-no-maryoku-wa-bannou-desu) | TV | 12 / 12 | **Finished Airing** | 7.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seijo+no+Maryoku+wa+Bannou+Desu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42826__seijo_no_maryoku_wa_bannou_desu.txt) | ~4~ | 4179 | 2021-06-22 15:31 |
| 42798 | [![42798__sayonara_watashi_no_cramer_movie_first_touch](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42798__sayonara_watashi_no_cramer_movie_first_touch.jpg)](https://myanimelist.net/anime/42798/Sayonara_Watashi_no_Cramer_Movie__First_Touch) | [Sayonara Watashi no Cramer - First Touch](https://subsplease.org/shows/sayonara-watashi-no-cramer-first-touch) | Movie | 1 / 1 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sayonara+Watashi+no+Cramer+First+Touch+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42798__sayonara_watashi_no_cramer_movie_first_touch.txt) | ~4~ | 738 | 2021-06-11 22:21 |
| 42774 | [![42774__sayonara_watashi_no_cramer](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42774__sayonara_watashi_no_cramer.jpg)](https://myanimelist.net/anime/42774/Sayonara_Watashi_no_Cramer) | [Sayonara Watashi no Cramer](https://subsplease.org/shows/sayonara-watashi-no-cramer) | TV | 13 / 13 | **Finished Airing** | 6.1 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sayonara+Watashi+no+Cramer+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42774__sayonara_watashi_no_cramer.txt) | ~4~ | 1057 | 2021-06-27 16:01 |
| 42340 | [![42340__meikyuu_black_company](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42340__meikyuu_black_company.jpg)](https://myanimelist.net/anime/42340/Meikyuu_Black_Company) | [Meikyuu Black Company](https://subsplease.org/shows/meikyuu-black-company) | TV | 12 / 12 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Meikyuu+Black+Company+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42340__meikyuu_black_company.txt) | ~4~ | 5840 | 2021-09-24 14:02 |
| 41103 | [![41103__koi_to_yobu_ni_wa_kimochi_warui](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41103__koi_to_yobu_ni_wa_kimochi_warui.jpg)](https://myanimelist.net/anime/41103/Koi_to_Yobu_ni_wa_Kimochi_Warui) | [Koi to Yobu ni wa Kimochi Warui](https://subsplease.org/shows/koi-to-yobu-ni-wa-kimochi-warui) | TV | 12 / 12 | **Finished Airing** | 7.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koi+to+Yobu+ni+wa+Kimochi+Warui+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41103__koi_to_yobu_ni_wa_kimochi_warui.txt) | ~4~ | 2219 | 2021-06-14 12:46 |
| 40530 | [![40530__jaku_chara_tomozaki_kun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40530__jaku_chara_tomozaki_kun.jpg)](https://myanimelist.net/anime/40530/Jaku-Chara_Tomozaki-kun) | [Jaku-Chara Tomozaki-kun](https://subsplease.org/shows/jaku-chara-tomozaki-kun) | TV | 14 / 12 | **Finished Airing** | 7.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jaku+Chara+Tomozaki+kun+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40530__jaku_chara_tomozaki_kun.txt) | ~4~ | 3832 | 2021-07-03 04:15 |
| 40526 | [![40526__dragon_ie_wo_kau](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40526__dragon_ie_wo_kau.jpg)](https://myanimelist.net/anime/40526/Dragon_Ie_wo_Kau) | [Dragon, Ie wo Kau.](https://subsplease.org/shows/dragon-ie-wo-kau) | TV | 12 / 12 | **Finished Airing** | 6.42 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dragon+Ie+wo+Kau+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40526__dragon_ie_wo_kau.txt) | ~4~ | 1987 | 2021-06-20 14:02 |
| 34566 | [![34566__boruto_naruto_next_generations](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/34566__boruto_naruto_next_generations.jpg)](https://myanimelist.net/anime/34566/Boruto__Naruto_Next_Generations) | [Boruto - Naruto Next Generations](https://subsplease.org/shows/boruto-naruto-next-generations) | TV | 52 / 293 | **Finished Airing** | 6.0 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Boruto+Naruto+Next+Generations+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/34566__boruto_naruto_next_generations.txt) | ~4~ | 3032 | 2023-03-26 09:04 |
| 50060 | [![50060__shadowverse_flame](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50060__shadowverse_flame.jpg)](https://myanimelist.net/anime/50060/Shadowverse_Flame) | [Shadowverse Flame](https://subsplease.org/shows/shadowverse-flame) | TV | 52 / 50 | **Finished Airing** | 6.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shadowverse+Flame+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50060__shadowverse_flame.txt) | ~4~ | 970 | 2024-09-28 02:32 |
| 49969 | [![49969__tribe_nine](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49969__tribe_nine.jpg)](https://myanimelist.net/anime/49969/Tribe_Nine) | [Tribe Nine](https://subsplease.org/shows/tribe-nine) | TV | 12 / 12 | **Finished Airing** | 6.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tribe+Nine+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49969__tribe_nine.txt) | ~4~ | 1484 | 2022-03-28 13:31 |
| 48406 | [![48406__re_main](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48406__re_main.jpg)](https://myanimelist.net/anime/48406/Re-Main) | [Re-Main](https://subsplease.org/shows/re-main) | TV | 12 / 12 | **Finished Airing** | 7.19 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Re+Main+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48406__re_main.txt) | ~4~ | 1675 | 2021-10-02 17:32 |
| 44516 | [![44516__koroshi_ai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44516__koroshi_ai.jpg)](https://myanimelist.net/anime/44516/Koroshi_Ai) | [Koroshi Ai](https://subsplease.org/shows/koroshi-ai) | TV | 12 / 12 | **Finished Airing** | 6.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Koroshi+Ai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44516__koroshi_ai.txt) | ~4~ | 2770 | 2022-03-30 14:46 |
| 41899 | [![41899__ore_dake_haireru_kakushi_dungeon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41899__ore_dake_haireru_kakushi_dungeon.jpg)](https://myanimelist.net/anime/41899/Ore_dake_Haireru_Kakushi_Dungeon) | [Ore dake Haireru Kakushi Dungeon](https://subsplease.org/shows/ore-dake-haireru-kakushi-dungeon) | TV | 12 / 12 | **Finished Airing** | 6.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ore+dake+Haireru+Kakushi+Dungeon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41899__ore_dake_haireru_kakushi_dungeon.txt) | ~4~ | 5339 | 2021-03-26 18:27 |
| 41312 | [![41312__kami_tachi_ni_hirowareta_otoko](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41312__kami_tachi_ni_hirowareta_otoko.jpg)](https://myanimelist.net/anime/41312/Kami-tachi_ni_Hirowareta_Otoko) | [Kami-tachi ni Hirowareta Otoko](https://subsplease.org/shows/kami-tachi-ni-hirowareta-otoko) | TV | 12 / 12 | **Finished Airing** | 6.97 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kami+tachi+ni+Hirowareta+Otoko+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41312__kami_tachi_ni_hirowareta_otoko.txt) | ~4~ | 2785 | 2020-12-20 15:01 |
| 40960 | [![40960__cheat_kusushi_no_slow_life_isekai_ni_tsukurou_drugstore](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40960__cheat_kusushi_no_slow_life_isekai_ni_tsukurou_drugstore.jpg)](https://myanimelist.net/anime/40960/Cheat_Kusushi_no_Slow_Life__Isekai_ni_Tsukurou_Drugstore) | [Cheat Kusushi no Slow Life - Isekai ni Tsukurou Drugstore](https://subsplease.org/shows/cheat-kusushi-no-slow-life-isekai-ni-tsukurou-drugstore) | TV | 12 / 12 | **Finished Airing** | 6.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cheat+Kusushi+no+Slow+Life+Isekai+ni+Tsukurou+Drugstore+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40960__cheat_kusushi_no_slow_life_isekai_ni_tsukurou_drugstore.txt) | ~4~ | 3597 | 2021-09-22 14:02 |
| 54638 | [![54638__kawagoe_boys_sing](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54638__kawagoe_boys_sing.jpg)](https://myanimelist.net/anime/54638/Kawagoe_Boys_Sing) | [Kawagoe Boys Sing](https://subsplease.org/shows/kawagoe-boys-sing) | TV | 12 / 12 | **Finished Airing** | 5.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kawagoe+Boys+Sing+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54638__kawagoe_boys_sing.txt) | ~3~ | 1139 | 2024-01-16 07:49 |
| 54142 | [![54142__cardfight_vanguard_divinez](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54142__cardfight_vanguard_divinez.jpg)](https://myanimelist.net/anime/54142/Cardfight_Vanguard__Divinez) | [Cardfight!! Vanguard - Divinez](https://subsplease.org/shows/cardfight-vanguard-divinez) | TV | 13 / 13 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+Divinez+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54142__cardfight_vanguard_divinez.txt) | ~3~ | 890 | 2024-04-19 23:42 |
| 53748 | [![53748__saint_seiya_knights_of_the_zodiac_battle_sanctuary_part_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53748__saint_seiya_knights_of_the_zodiac_battle_sanctuary_part_2.jpg)](https://myanimelist.net/anime/53748/Saint_Seiya__Knights_of_the_Zodiac_-_Battle_Sanctuary_Part_2) | [Knights of the Zodiac - Saint Seiya S2 Part 2](https://subsplease.org/shows/knights-of-the-zodiac-saint-seiya-s2-part-2) | ONA | 12 / 12 | **Finished Airing** | 6.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Knights+of+the+Zodiac+Saint+Seiya+S2+Part+2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53748__saint_seiya_knights_of_the_zodiac_battle_sanctuary_part_2.txt) | ~3~ | 1190 | 2024-06-12 03:26 |
| 53012 | [![53012__human_bug_daigaku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53012__human_bug_daigaku.jpg)](https://myanimelist.net/anime/53012/Human_Bug_Daigaku) | [Human Bug Daigaku](https://subsplease.org/shows/human-bug-daigaku) | TV | 12 / 12 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Human+Bug+Daigaku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53012__human_bug_daigaku.txt) | ~3~ | 1172 | 2022-12-21 14:31 |
| 52614 | [![52614__mix_meisei_story_2nd_season_nidome_no_natsu_sora_no_mukou_e](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52614__mix_meisei_story_2nd_season_nidome_no_natsu_sora_no_mukou_e.jpg)](https://myanimelist.net/anime/52614/Mix__Meisei_Story_2nd_Season_-_Nidome_no_Natsu_Sora_no_Mukou_e) | [Mix - Meisei Story S2](https://subsplease.org/shows/mix-meisei-story-s2) | TV | 24 / 24 | **Finished Airing** | 6.97 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mix+Meisei+Story+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52614__mix_meisei_story_2nd_season_nidome_no_natsu_sora_no_mukou_e.txt) | ~3~ | 1564 | 2023-09-23 10:01 |
| 51415 | [![51415__opus_colors](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51415__opus_colors.jpg)](https://myanimelist.net/anime/51415/OpusCOLORs) | [Opus.COLORs](https://subsplease.org/shows/opus-colors) | TV | 12 / 12 | **Finished Airing** | 5.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Opus+COLORs+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51415__opus_colors.txt) | ~3~ | 1080 | 2023-06-22 16:01 |
| 51371 | [![51371__bucchigire](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51371__bucchigire.jpg)](https://myanimelist.net/anime/51371/Bucchigire) | [Bucchigire!](https://subsplease.org/shows/bucchigire) | TV | 12 / 12 | **Finished Airing** | 6.0 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bucchigire+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51371__bucchigire.txt) | ~3~ | 1634 | 2022-09-24 17:25 |
| 51092 | [![51092__yuurei_deco](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51092__yuurei_deco.jpg)](https://myanimelist.net/anime/51092/Yuurei_Deco) | [Yurei Deco](https://subsplease.org/shows/yurei-deco) | TV | 12 / 12 | **Finished Airing** | 6.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yurei+Deco+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51092__yuurei_deco.txt) | ~3~ | 1429 | 2022-09-18 15:31 |
| 50999 | [![50999__extreme_hearts](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50999__extreme_hearts.jpg)](https://myanimelist.net/anime/50999/Extreme_Hearts) | [Extreme Hearts](https://subsplease.org/shows/extreme-hearts) | TV | 12 / 12 | **Finished Airing** | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Extreme+Hearts+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50999__extreme_hearts.txt) | ~3~ | 1206 | 2022-09-24 17:31 |
| 50955 | [![50955__onipan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50955__onipan.jpg)](https://myanimelist.net/anime/50955/Onipan) | [Onipan!](https://subsplease.org/shows/onipan) | TV | 12 / 60 | **Finished Airing** | 6.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Onipan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50955__onipan.txt) | ~3~ | 1591 | 2022-07-01 03:01 |
| 50552 | [![50552__yowamushi_pedal_limit_break](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50552__yowamushi_pedal_limit_break.jpg)](https://myanimelist.net/anime/50552/Yowamushi_Pedal__Limit_Break) | [Yowamushi Pedal S5](https://subsplease.org/shows/yowamushi-pedal-s5) | TV | 25 / 25 | **Finished Airing** | 7.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yowamushi+Pedal+S5+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50552__yowamushi_pedal_limit_break.txt) | ~3~ | 1616 | 2023-03-25 22:09 |
| 50438 | [![50438__yatogame_chan_kansatsu_nikki_yonsatsume](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50438__yatogame_chan_kansatsu_nikki_yonsatsume.jpg)](https://myanimelist.net/anime/50438/Yatogame-chan_Kansatsu_Nikki_Yonsatsume) | [Yatogame-chan Kansatsu Nikki S4](https://subsplease.org/shows/yatogame-chan-kansatsu-nikki-s4) | TV | 10 / 10 | **Finished Airing** | 6.3 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yatogame+chan+Kansatsu+Nikki+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50438__yatogame_chan_kansatsu_nikki_yonsatsume.txt) | ~3~ | 1058 | 2022-06-11 12:30 |
| 50281 | [![50281__delicious_party_precure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50281__delicious_party_precure.jpg)](https://myanimelist.net/anime/50281/Delicious_Party♡Precure) | [Delicious Party Precure](https://subsplease.org/shows/delicious-party-precure) | TV | 45 / 45 | **Finished Airing** | 6.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Delicious+Party+Precure+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50281__delicious_party_precure.txt) | ~3~ | 741 | 2023-01-29 01:31 |
| 50221 | [![50221__shine_post](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50221__shine_post.jpg)](https://myanimelist.net/anime/50221/Shine_Post) | [Shine Post](https://subsplease.org/shows/shine-post) | TV | 12 / 12 | **Finished Airing** | 7.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shine+Post+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50221__shine_post.txt) | ~3~ | 1746 | 2022-10-18 17:31 |
| 50204 | [![50204__tokyo_24_ku](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50204__tokyo_24_ku.jpg)](https://myanimelist.net/anime/50204/Tokyo_24-ku) | [Tokyo 24-ku](https://subsplease.org/shows/tokyo-24-ku) | TV | 13 / 12 | **Finished Airing** | 6.34 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tokyo+24+ku+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50204__tokyo_24_ku.txt) | ~3~ | 1959 | 2022-04-06 17:01 |
| 49514 | [![49514__gensou_sangokushi_tengen_reishinki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49514__gensou_sangokushi_tengen_reishinki.jpg)](https://myanimelist.net/anime/49514/Gensou_Sangokushi__Tengen_Reishinki) | [Gensou Sangokushi - Tengen Reishinki](https://subsplease.org/shows/gensou-sangokushi-tengen-reishinki) | TV | 12 / 12 | **Finished Airing** | 5.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gensou+Sangokushi+Tengen+Reishinki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49514__gensou_sangokushi_tengen_reishinki.txt) | ~3~ | 1429 | 2022-03-28 18:02 |
| 49304 | [![49304__seiken_densetsu_legend_of_mana_the_teardrop_crystal](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49304__seiken_densetsu_legend_of_mana_the_teardrop_crystal.jpg)](https://myanimelist.net/anime/49304/Seiken_Densetsu__Legend_of_Mana_-_The_Teardrop_Crystal) | [Seiken Densetsu - Legend of Mana - The Teardrop Crystal](https://subsplease.org/shows/seiken-densetsu-legend-of-mana-the-teardrop-crystal) | TV | 13 / 12 | **Finished Airing** | 5.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Seiken+Densetsu+Legend+of+Mana+The+Teardrop+Crystal+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49304__seiken_densetsu_legend_of_mana_the_teardrop_crystal.txt) | ~3~ | 1787 | 2023-02-20 00:36 |
| 49184 | [![49184__gunma_chan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49184__gunma_chan.jpg)](https://myanimelist.net/anime/49184/Gunma-chan) | [Gunma-chan](https://subsplease.org/shows/gunma-chan) | TV | 13 / 13 | **Finished Airing** | 5.95 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gunma+chan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49184__gunma_chan.txt) | ~3~ | 505 | 2023-05-17 05:02 |
| 48779 | [![48779__deaimon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48779__deaimon.jpg)](https://myanimelist.net/anime/48779/Deaimon) | [Deaimon](https://subsplease.org/shows/deaimon) | TV | 12 / 12 | **Finished Airing** | 7.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Deaimon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48779__deaimon.txt) | ~3~ | 3300 | 2022-06-22 15:31 |
| 48776 | [![48776__build_divide_code_black](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48776__build_divide_code_black.jpg)](https://myanimelist.net/anime/48776/Build_Divide__Code_Black) | [Build Divide - Code Black](https://subsplease.org/shows/build-divide-code-black) | TV | 12 / 12 | **Finished Airing** | 6.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Build+Divide+Code+Black+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48776__build_divide_code_black.txt) | ~3~ | 1460 | 2021-12-25 17:01 |
| 48649 | [![48649__fuuto_tantei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48649__fuuto_tantei.jpg)](https://myanimelist.net/anime/48649/Fuuto_Tantei) | [Fuuto Tantei](https://subsplease.org/shows/fuuto-tantei) | TV | 12 / 12 | **Finished Airing** | 7.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fuuto+Tantei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48649__fuuto_tantei.txt) | ~3~ | 2303 | 2022-10-16 16:01 |
| 48644 | [![48644__gyakuten_sekai_no_denchi_shoujo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48644__gyakuten_sekai_no_denchi_shoujo.jpg)](https://myanimelist.net/anime/48644/Gyakuten_Sekai_no_Denchi_Shoujo) | [Gyakuten Sekai no Denchi Shoujo](https://subsplease.org/shows/gyakuten-sekai-no-denchi-shoujo) | TV | 12 / 12 | **Finished Airing** | 6.31 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gyakuten+Sekai+no+Denchi+Shoujo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48644__gyakuten_sekai_no_denchi_shoujo.txt) | ~3~ | 1620 | 2021-12-27 15:02 |
| 48470 | [![48470__d_cide_traumerei_the_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48470__d_cide_traumerei_the_animation.jpg)](https://myanimelist.net/anime/48470/D_Cide_Traumerei_the_Animation) | [D_Cide Traumerei the Animation](https://subsplease.org/shows/d_cide-traumerei-the-animation) | TV | 13 / 13 | **Finished Airing** | 5.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+D+Cide+Traumerei+the+Animation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48470__d_cide_traumerei_the_animation.txt) | ~3~ | 1542 | 2021-10-02 15:01 |
| 47639 | [![47639__blue_reflection_ray](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47639__blue_reflection_ray.jpg)](https://myanimelist.net/anime/47639/Blue_Reflection_Ray) | [Blue Reflection Ray](https://subsplease.org/shows/blue-reflection-ray) | TV | 24 / 24 | **Finished Airing** | 5.85 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Blue+Reflection+Ray+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47639__blue_reflection_ray.txt) | ~3~ | 1191 | 2021-09-24 17:57 |
| 47250 | [![47250__jouran_the_princess_of_snow_and_blood](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/47250__jouran_the_princess_of_snow_and_blood.jpg)](https://myanimelist.net/anime/47250/Jouran__The_Princess_of_Snow_and_Blood) | [Joran The Princess of Snow and Blood](https://subsplease.org/shows/joran-the-princess-of-snow-and-blood) | TV | 12 / 12 | **Finished Airing** | 6.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Joran+The+Princess+of+Snow+and+Blood+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/47250__jouran_the_princess_of_snow_and_blood.txt) | ~3~ | 1921 | 2021-06-15 17:17 |
| 44940 | [![44940__world_trigger_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44940__world_trigger_3rd_season.jpg)](https://myanimelist.net/anime/44940/World_Trigger_3rd_Season) | [World Trigger S3](https://subsplease.org/shows/world-trigger-s3) | TV | 14 / 14 | **Finished Airing** | 8.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+World+Trigger+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44940__world_trigger_3rd_season.txt) | ~3~ | 3268 | 2022-01-22 18:32 |
| 44275 | [![44275__selection_project](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44275__selection_project.jpg)](https://myanimelist.net/anime/44275/Selection_Project) | [Selection Project](https://subsplease.org/shows/selection-project) | TV | 13 / 13 | **Finished Airing** | 7.26 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Selection+Project+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44275__selection_project.txt) | ~3~ | 1540 | 2021-12-24 14:02 |
| 44274 | [![44274__puraore_pride_of_orange](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44274__puraore_pride_of_orange.jpg)](https://myanimelist.net/anime/44274/Puraore_Pride_of_Orange) | [Puraore! Pride of Orange](https://subsplease.org/shows/puraore-pride-of-orange) | TV | 12 / 12 | **Finished Airing** | 6.24 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Puraore+Pride+of+Orange+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44274__puraore_pride_of_orange.txt) | ~3~ | 1344 | 2021-12-22 15:02 |
| 43814 | [![43814__deatte_5_byou_de_battle](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43814__deatte_5_byou_de_battle.jpg)](https://myanimelist.net/anime/43814/Deatte_5-byou_de_Battle) | [Deatte 5-byou de Battle](https://subsplease.org/shows/deatte-5-byou-de-battle) | TV | 12 / 12 | **Finished Airing** | 6.76 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Deatte+5+byou+de+Battle+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43814__deatte_5_byou_de_battle.txt) | ~3~ | 3561 | 2021-09-27 17:32 |
| 42627 | [![42627__peach_boy_riverside](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42627__peach_boy_riverside.jpg)](https://myanimelist.net/anime/42627/Peach_Boy_Riverside) | [Peach Boy Riverside](https://subsplease.org/shows/peach-boy-riverside) | TV | 12 / 12 | **Finished Airing** | 6.24 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Peach+Boy+Riverside+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42627__peach_boy_riverside.txt) | ~3~ | 4573 | 2021-09-16 14:32 |
| 42590 | [![42590__mashiro_no_oto](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42590__mashiro_no_oto.jpg)](https://myanimelist.net/anime/42590/Mashiro_no_Oto) | [Mashiro no Oto](https://subsplease.org/shows/mashiro-no-oto) | TV | 12 / 12 | **Finished Airing** | 7.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mashiro+no+Oto+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42590__mashiro_no_oto.txt) | ~3~ | 2258 | 2021-06-18 18:46 |
| 42506 | [![42506__world_witches_hasshin_shimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42506__world_witches_hasshin_shimasu.jpg)](https://myanimelist.net/anime/42506/World_Witches_Hasshin_Shimasu) | [World Witches Hasshin Shimasu!](https://subsplease.org/shows/world-witches-hasshin-shimasu) | TV | 12 / 12 | **Finished Airing** | 6.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+World+Witches+Hasshin+Shimasu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42506__world_witches_hasshin_shimasu.txt) | ~3~ | 874 | 2021-03-31 17:49 |
| 42307 | [![42307__subarashiki_kono_sekai_the_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42307__subarashiki_kono_sekai_the_animation.jpg)](https://myanimelist.net/anime/42307/Subarashiki_Kono_Sekai_The_Animation) | [Subarashiki Kono Sekai The Animation](https://subsplease.org/shows/subarashiki-kono-sekai-the-animation) | TV | 12 / 12 | **Finished Airing** | 6.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Subarashiki+Kono+Sekai+The+Animation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42307__subarashiki_kono_sekai_the_animation.txt) | ~3~ | 1971 | 2021-06-25 17:27 |
| 42129 | [![42129__bem_movie_become_human](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42129__bem_movie_become_human.jpg)](https://myanimelist.net/anime/42129/Bem_Movie__Become_Human) | [Bem Movie - Become Human](https://subsplease.org/shows/bem-movie-become-human) | Movie | 1 / 1 | **Finished Airing** | 6.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bem+Movie+Become+Human+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42129__bem_movie_become_human.txt) | ~3~ | 1074 | 2020-10-30 00:43 |
| 41833 | [![41833__kyuuketsuki_sugu_shinu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41833__kyuuketsuki_sugu_shinu.jpg)](https://myanimelist.net/anime/41833/Kyuuketsuki_Sugu_Shinu) | [Kyuuketsuki Sugu Shinu](https://subsplease.org/shows/kyuuketsuki-sugu-shinu) | TV | 12 / 12 | **Finished Airing** | 7.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kyuuketsuki+Sugu+Shinu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41833__kyuuketsuki_sugu_shinu.txt) | ~3~ | 2399 | 2021-12-20 15:03 |
| 41762 | [![41762__tenchi_souzou_design_bu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41762__tenchi_souzou_design_bu.jpg)](https://myanimelist.net/anime/41762/Tenchi_Souzou_Design-bu) | [Tenchi Souzou Design-bu](https://subsplease.org/shows/tenchi-souzou-design-bu) | TV | 13 / 12 | **Finished Airing** | 7.16 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tenchi+Souzou+Design+bu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41762__tenchi_souzou_design_bu.txt) | ~3~ | 1653 | 2021-04-01 15:31 |
| 41611 | [![41611__wan_sheng_jie](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41611__wan_sheng_jie.jpg)](https://myanimelist.net/anime/41611/Wan_Sheng_Jie) | [All Saints Street](https://subsplease.org/shows/all-saints-street) | ONA | 8 / 12 | **Finished Airing** | 7.97 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+All+Saints+Street+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41611__wan_sheng_jie.txt) | ~3~ | 1094 | 2023-10-03 08:49 |
| 41265 | [![41265__mars_red](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41265__mars_red.jpg)](https://myanimelist.net/anime/41265/Mars_Red) | [Mars Red](https://subsplease.org/shows/mars-red) | TV | 13 / 13 | **Finished Airing** | 6.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mars+Red+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41265__mars_red.txt) | ~3~ | 2601 | 2021-06-28 18:02 |
| 41169 | [![41169__love_live_superstar](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41169__love_live_superstar.jpg)](https://myanimelist.net/anime/41169/Love_Live_Superstar) | [Love Live! Superstar!!](https://subsplease.org/shows/love-live-superstar) | TV | 12 / 12 | **Finished Airing** | 7.93 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Superstar+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41169__love_live_superstar.txt) | ~3~ | 1831 | 2021-10-19 01:02 |
| 41109 | [![41109__log_horizon_entaku_houkai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41109__log_horizon_entaku_houkai.jpg)](https://myanimelist.net/anime/41109/Log_Horizon__Entaku_Houkai) | [Log Horizon S3](https://subsplease.org/shows/log-horizon-s3) | TV | 12 / 12 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Log+Horizon+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41109__log_horizon_entaku_houkai.txt) | ~3~ | 6593 | 2021-03-31 12:02 |
| 40870 | [![40870__ssss_dynazenon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40870__ssss_dynazenon.jpg)](https://myanimelist.net/anime/40870/SSSSDynazenon) | [SSSS.Dynazenon](https://subsplease.org/shows/ssss-dynazenon) | TV | 12 / 12 | **Finished Airing** | 7.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+SSSS+Dynazenon+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40870__ssss_dynazenon.txt) | ~3~ | 3701 | 2021-06-18 14:02 |
| 40729 | [![40729__nomad_megalo_box_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40729__nomad_megalo_box_2.jpg)](https://myanimelist.net/anime/40729/Nomad__Megalo_Box_2) | [Nomad - Megalo Box 2](https://subsplease.org/shows/nomad-megalo-box-2) | TV | 13 / 13 | **Finished Airing** | 8.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Nomad+Megalo+Box+2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40729__nomad_megalo_box_2.txt) | ~3~ | 4498 | 2021-06-27 15:02 |
| 38959 | [![38959__lord_el_melloi_ii_sei_no_jikenbo_rail_zeppelin_grace_note](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38959__lord_el_melloi_ii_sei_no_jikenbo_rail_zeppelin_grace_note.jpg)](https://myanimelist.net/anime/38959/Lord_El-Melloi_II-sei_no_Jikenbo__Rail_Zeppelin_Grace_Note) | [Lord El-Melloi II Case Files](https://subsplease.org/shows/lord-el-melloi-ii-case-files) | TV | 1 / 13 | **Finished Airing** | 7.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lord+El+Melloi+II+Case+Files+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38959__lord_el_melloi_ii_sei_no_jikenbo_rail_zeppelin_grace_note.txt) | ~3~ | 2306 | 2021-12-31 18:24 |
| 34572 | [![34572__black_clover](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/34572__black_clover.jpg)](https://myanimelist.net/anime/34572/Black_Clover) | [Black Clover](https://subsplease.org/shows/black-clover) | TV | 25 / 170 | **Finished Airing** | 8.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Black+Clover+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/34572__black_clover.txt) | ~3~ | 3687 | 2021-03-30 10:26 |
| 52045 | [![52045__obey_me_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52045__obey_me_season_2.jpg)](https://myanimelist.net/anime/52045/Obey_Me_Season_2) | [Obey Me! S2](https://subsplease.org/shows/obey-me-s2) | ONA | 12 / 12 | **Finished Airing** | 7.1 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Obey+Me+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52045__obey_me_season_2.txt) | ~3~ | 738 | 2022-12-30 09:00 |
| 50421 | [![50421__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50421__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming.jpg)](https://myanimelist.net/anime/50421/Shi_Cao_Lao_Long_Bei_Guan_Yi_E_Long_Zhi_Ming) | [A Herbivorous Dragon of 5000 Years Gets Unfairly Villainized](https://subsplease.org/shows/a-herbivorous-dragon-of-5000-years-gets-unfairly-villainized) | ONA | 12 / 12 | **Finished Airing** | 6.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+A+Herbivorous+Dragon+of+5000+Years+Gets+Unfairly+Villainized+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50421__shi_cao_lao_long_bei_guan_yi_e_long_zhi_ming.txt) | ~3~ | 2180 | 2022-10-08 05:01 |
| 43767 | [![43767__night_head_2041](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43767__night_head_2041.jpg)](https://myanimelist.net/anime/43767/Night_Head_2041) | [Night Head 2041](https://subsplease.org/shows/night-head-2041) | TV | 12 / 12 | **Finished Airing** | 6.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Night+Head+2041+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43767__night_head_2041.txt) | ~3~ | 1903 | 2021-09-29 18:02 |
| 40750 | [![40750__kaifuku_jutsushi_no_yarinaoshi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40750__kaifuku_jutsushi_no_yarinaoshi.jpg)](https://myanimelist.net/anime/40750/Kaifuku_Jutsushi_no_Yarinaoshi) | [Kaifuku Jutsushi no Yarinaoshi](https://subsplease.org/shows/kaifuku-jutsushi-no-yarinaoshi) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaifuku+Jutsushi+no+Yarinaoshi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40750__kaifuku_jutsushi_no_yarinaoshi.txt) | ~3~ | 3989 | 2021-03-31 15:38 |
| 38192 | [![38192__sakugan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38192__sakugan.jpg)](https://myanimelist.net/anime/38192/Sakugan) | [Sakugan](https://subsplease.org/shows/sakugan) | TV | 12 / 12 | **Finished Airing** | 6.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sakugan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38192__sakugan.txt) | ~3~ | 4158 | 2021-12-23 15:31 |
| 31433 | [![31433__ginga_eiyuu_densetsu_die_neue_these_kaikou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/31433__ginga_eiyuu_densetsu_die_neue_these_kaikou.jpg)](https://myanimelist.net/anime/31433/Ginga_Eiyuu_Densetsu__Die_Neue_These_-_Kaikou) | [Legend of the Galactic Heroes - Die Neue These](https://subsplease.org/shows/legend-of-the-galactic-heroes-die-neue-these) | TV | 24 / 12 | **Finished Airing** | 7.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Legend+of+the+Galactic+Heroes+Die+Neue+These+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/31433__ginga_eiyuu_densetsu_die_neue_these_kaikou.txt) | ~3~ | 2818 | 2022-12-16 00:01 |
| 48590 | [![48590__mini_dragon](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48590__mini_dragon.jpg)](https://myanimelist.net/anime/48590/Mini_Dragon) | [Kobayashi-san Chi no Maid Dragon S2 Shorts](https://subsplease.org/shows/kobayashi-san-chi-no-maid-dragon-s2-shorts) | ONA | 16 / 13 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kobayashi+san+Chi+no+Maid+Dragon+S2+Shorts+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48590__mini_dragon.txt) | ~2~ | 3326 | 2021-09-11 00:12 |
| 48488 | [![48488__higurashi_no_naku_koro_ni_sotsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48488__higurashi_no_naku_koro_ni_sotsu.jpg)](https://myanimelist.net/anime/48488/Higurashi_no_Naku_Koro_ni_Sotsu) | [Higurashi no Naku Koro ni Sotsu](https://subsplease.org/shows/higurashi-no-naku-koro-ni-sotsu) | TV | 15 / 15 | **Finished Airing** | 6.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Higurashi+no+Naku+Koro+ni+Sotsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48488__higurashi_no_naku_koro_ni_sotsu.txt) | ~2~ | 3851 | 2021-09-30 15:32 |
| 53414 | [![53414__lupin_zero](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/53414__lupin_zero.jpg)](https://myanimelist.net/anime/53414/Lupin_Zero) | [Lupin Zero](https://subsplease.org/shows/lupin-zero) | ONA | 6 / 6 | **Finished Airing** | 7.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lupin+Zero+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/53414__lupin_zero.txt) | ~2~ | 2200 | 2023-01-13 15:01 |
| 50862 | [![50862__estab_life_great_escape](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50862__estab_life_great_escape.jpg)](https://myanimelist.net/anime/50862/Estab-Life__Great_Escape) | [Estab-Life - Great Escape](https://subsplease.org/shows/estab-life-great-escape) | TV | 12 / 12 | **Finished Airing** | 6.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Estab+Life+Great+Escape+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50862__estab_life_great_escape.txt) | ~2~ | 1333 | 2022-06-01 23:01 |
| 49780 | [![49780__atasha_kawashiri_kodama_da_yo_dangerous_lifehacker_no_tadareta_seikatsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49780__atasha_kawashiri_kodama_da_yo_dangerous_lifehacker_no_tadareta_seikatsu.jpg)](https://myanimelist.net/anime/49780/Atasha_Kawashiri_Kodama_da_yo__Dangerous_Lifehacker_no_Tadareta_Seikatsu) | [Atasha Kawajiri Kodama Da yo](https://subsplease.org/shows/atasha-kawajiri-kodama-da-yo) | TV | 24 / 24 | **Finished Airing** | 5.72 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Atasha+Kawajiri+Kodama+Da+yo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49780__atasha_kawashiri_kodama_da_yo_dangerous_lifehacker_no_tadareta_seikatsu.txt) | ~2~ | 1066 | 2022-08-11 18:55 |
| 49692 | [![49692__heroine_tarumono_kiraware_heroine_to_naisho_no_oshigoto](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49692__heroine_tarumono_kiraware_heroine_to_naisho_no_oshigoto.jpg)](https://myanimelist.net/anime/49692/Heroine_Tarumono_Kiraware_Heroine_to_Naisho_no_Oshigoto) | [Heroine Tarumono! Kiraware Heroine to Naisho no Oshigoto](https://subsplease.org/shows/heroine-tarumono-kiraware-heroine-to-naisho-no-oshigoto) | TV | 12 / 12 | **Finished Airing** | 7.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heroine+Tarumono+Kiraware+Heroine+to+Naisho+no+Oshigoto+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49692__heroine_tarumono_kiraware_heroine_to_naisho_no_oshigoto.txt) | ~2~ | 1867 | 2022-06-23 15:03 |
| 49040 | [![49040__lupin_iii_part_6](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49040__lupin_iii_part_6.jpg)](https://myanimelist.net/anime/49040/Lupin_III__Part_6) | [Lupin III - Part 6](https://subsplease.org/shows/lupin-iii-part-6) | TV | 25 / 24 | **Finished Airing** | 7.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Lupin+III+Part+6+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49040__lupin_iii_part_6.txt) | ~2~ | 2645 | 2022-03-26 17:31 |
| 48777 | [![48777__build_divide_code_white](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48777__build_divide_code_white.jpg)](https://myanimelist.net/anime/48777/Build_Divide__Code_White) | [Build Divide - Code White](https://subsplease.org/shows/build-divide-code-white) | TV | 12 / 12 | **Finished Airing** | 6.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Build+Divide+Code+White+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48777__build_divide_code_white.txt) | ~2~ | 880 | 2022-06-25 17:01 |
| 48775 | [![48775__kaginado](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48775__kaginado.jpg)](https://myanimelist.net/anime/48775/Kaginado) | [Kaginado](https://subsplease.org/shows/kaginado) | TV | 24 / 12 | **Finished Airing** | 7.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaginado+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48775__kaginado.txt) | ~2~ | 1218 | 2022-06-28 16:00 |
| 48702 | [![48702__dance_dance_danseur](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48702__dance_dance_danseur.jpg)](https://myanimelist.net/anime/48702/Dance_Dance_Danseur) | [Dance Dance Danseur](https://subsplease.org/shows/dance-dance-danseur) | TV | 11 / 11 | **Finished Airing** | 7.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dance+Dance+Danseur+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48702__dance_dance_danseur.txt) | ~2~ | 1797 | 2022-06-17 18:47 |
| 48567 | [![48567__visual_prison](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48567__visual_prison.jpg)](https://myanimelist.net/anime/48567/Visual_Prison) | [Visual Prison](https://subsplease.org/shows/visual-prison) | TV | 12 / 12 | **Finished Airing** | 6.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Visual+Prison+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48567__visual_prison.txt) | ~2~ | 984 | 2021-12-24 16:32 |
| 48492 | [![48492__scarlet_nexus](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48492__scarlet_nexus.jpg)](https://myanimelist.net/anime/48492/Scarlet_Nexus) | [Scarlet Nexus](https://subsplease.org/shows/scarlet-nexus) | TV | 26 / 26 | **Finished Airing** | 5.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Scarlet+Nexus+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48492__scarlet_nexus.txt) | ~2~ | 2543 | 2021-12-23 13:32 |
| 48466 | [![48466__kyoukai_senki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48466__kyoukai_senki.jpg)](https://myanimelist.net/anime/48466/Kyoukai_Senki) | [Kyoukai Senki](https://subsplease.org/shows/kyoukai-senki) | TV | 25 / 13 | **Finished Airing** | 6.32 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kyoukai+Senki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48466__kyoukai_senki.txt) | ~2~ | 2255 | 2022-06-27 16:01 |
| 45665 | [![45665__fairy_ranmaru_anata_no_kokoro_otasuke_shimasu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45665__fairy_ranmaru_anata_no_kokoro_otasuke_shimasu.jpg)](https://myanimelist.net/anime/45665/Fairy_Ranmaru__Anata_no_Kokoro_Otasuke_Shimasu) | [Fairy Ranmaru](https://subsplease.org/shows/fairy-ranmaru) | TV | 12 / 12 | **Finished Airing** | 5.9 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fairy+Ranmaru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45665__fairy_ranmaru_anata_no_kokoro_otasuke_shimasu.txt) | ~2~ | 508 | 2021-06-24 15:02 |
| 44081 | [![44081__b_project_netsuretsu_love_call](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44081__b_project_netsuretsu_love_call.jpg)](https://myanimelist.net/anime/44081/B-Project__NetsuretsuLove_Call) | [B-Project S3](https://subsplease.org/shows/b-project-s3) | TV | 12 / 12 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+B+Project+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44081__b_project_netsuretsu_love_call.txt) | ~2~ | 864 | 2023-12-18 18:45 |
| 44055 | [![44055__sasaki_to_miyano](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44055__sasaki_to_miyano.jpg)](https://myanimelist.net/anime/44055/Sasaki_to_Miyano) | [Sasaki to Miyano](https://subsplease.org/shows/sasaki-to-miyano) | TV | 13 / 12 | **Finished Airing** | 8.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sasaki+to+Miyano+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44055__sasaki_to_miyano.txt) | ~2~ | 1587 | 2022-07-27 04:39 |
| 43763 | [![43763__cestvs_the_roman_fighter](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43763__cestvs_the_roman_fighter.jpg)](https://myanimelist.net/anime/43763/Cestvs__The_Roman_Fighter) | [Cestvs - The Roman Fighter](https://subsplease.org/shows/cestvs-the-roman-fighter) | TV | 11 / 11 | **Finished Airing** | 5.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cestvs+The+Roman+Fighter+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43763__cestvs_the_roman_fighter.txt) | ~2~ | 928 | 2021-06-23 18:42 |
| 43756 | [![43756__bakuten](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43756__bakuten.jpg)](https://myanimelist.net/anime/43756/Bakuten) | [Bakuten!!](https://subsplease.org/shows/bakuten) | TV | 12 / 12 | **Finished Airing** | 7.49 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bakuten+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43756__bakuten.txt) | ~2~ | 778 | 2021-06-24 18:16 |
| 43741 | [![43741__getter_robo_arc](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43741__getter_robo_arc.jpg)](https://myanimelist.net/anime/43741/Getter_Robo_Arc) | [Getter Robo Arc](https://subsplease.org/shows/getter-robo-arc) | TV | 13 / 13 | **Finished Airing** | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Getter+Robo+Arc+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43741__getter_robo_arc.txt) | ~2~ | 1257 | 2021-09-26 12:37 |
| 42544 | [![42544__kaizoku_oujo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42544__kaizoku_oujo.jpg)](https://myanimelist.net/anime/42544/Kaizoku_Oujo) | [Kaizoku Oujo](https://subsplease.org/shows/kaizoku-oujo) | TV | 12 / 12 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kaizoku+Oujo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42544__kaizoku_oujo.txt) | ~2~ | 4304 | 2021-10-24 04:06 |
| 42395 | [![42395__shakunetsu_kabaddi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42395__shakunetsu_kabaddi.jpg)](https://myanimelist.net/anime/42395/Shakunetsu_Kabaddi) | [Shakunetsu Kabaddi](https://subsplease.org/shows/shakunetsu-kabaddi) | TV | 12 / 12 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shakunetsu+Kabaddi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42395__shakunetsu_kabaddi.txt) | ~2~ | 1220 | 2021-06-18 18:01 |
| 42321 | [![42321__battle_athletess_daiundoukai_restart](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42321__battle_athletess_daiundoukai_restart.jpg)](https://myanimelist.net/anime/42321/Battle_Athletess_Daiundoukai_ReSTART) | [Battle Athletess Daiundoukai ReSTART!](https://subsplease.org/shows/battle-athletess-daiundoukai-restart) | TV | 12 / 12 | **Finished Airing** | 5.11 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Battle+Athletess+Daiundoukai+ReSTART+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42321__battle_athletess_daiundoukai_restart.txt) | ~2~ | 742 | 2021-06-26 16:32 |
| 41946 | [![41946__shuumatsu_no_harem](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41946__shuumatsu_no_harem.jpg)](https://myanimelist.net/anime/41946/Shuumatsu_no_Harem) | [Shuumatsu no Harem](https://subsplease.org/shows/shuumatsu-no-harem) | TV | 11 / 11 | **Finished Airing** | 5.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shuumatsu+no+Harem+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41946__shuumatsu_no_harem.txt) | ~2~ | 2477 | 2022-03-18 17:31 |
| 41915 | [![41915__zuihou_de_zhaohuan_shi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41915__zuihou_de_zhaohuan_shi.jpg)](https://myanimelist.net/anime/41915/Zuihou_de_Zhaohuan_Shi) | [The Last Summoner](https://subsplease.org/shows/the-last-summoner) | ONA | 12 / 12 | **Finished Airing** | 6.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Last+Summoner+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41915__zuihou_de_zhaohuan_shi.txt) | ~2~ | 2254 | 2022-07-05 05:01 |
| 41619 | [![41619__munou_na_nana](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41619__munou_na_nana.jpg)](https://myanimelist.net/anime/41619/Munou_na_Nana) | [Munou na Nana](https://subsplease.org/shows/munou-na-nana) | TV | 13 / 13 | **Finished Airing** | 7.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Munou+na+Nana+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41619__munou_na_nana.txt) | ~2~ | 3106 | 2020-12-27 13:31 |
| 41380 | [![41380__100_man_no_inochi_no_ue_ni_ore_wa_tatteiru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41380__100_man_no_inochi_no_ue_ni_ore_wa_tatteiru.jpg)](https://myanimelist.net/anime/41380/100-man_no_Inochi_no_Ue_ni_Ore_wa_Tatteiru) | [100-man no Inochi no Ue ni Ore wa Tatte Iru](https://subsplease.org/shows/100-man-no-inochi-no-ue-ni-ore-wa-tatte-iru) | TV | 24 / 12 | **Finished Airing** | 6.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+100+man+no+Inochi+no+Ue+ni+Ore+wa+Tatte+Iru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41380__100_man_no_inochi_no_ue_ni_ore_wa_tatteiru.txt) | ~2~ | 3309 | 2021-09-24 16:32 |
| 41074 | [![41074__digimon_adventure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41074__digimon_adventure.jpg)](https://myanimelist.net/anime/41074/Digimon_Adventure_) | [Digimon Adventure (2020)](https://subsplease.org/shows/digimon-adventure-2020) | TV | 50 / 67 | **Finished Airing** | 6.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Digimon+Adventure+2020+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41074__digimon_adventure.txt) | ~2~ | 1222 | 2021-09-26 02:32 |
| 41006 | [![41006__higurashi_no_naku_koro_ni_gou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41006__higurashi_no_naku_koro_ni_gou.jpg)](https://myanimelist.net/anime/41006/Higurashi_no_Naku_Koro_ni_Gou) | [Higurashi no Naku Koro ni Gou](https://subsplease.org/shows/higurashi-no-naku-koro-ni-gou) | TV | 24 / 24 | **Finished Airing** | 7.2 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Higurashi+no+Naku+Koro+ni+Gou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41006__higurashi_no_naku_koro_ni_gou.txt) | ~2~ | 3938 | 2021-03-18 16:32 |
| 40961 | [![40961__hortensia_saga](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40961__hortensia_saga.jpg)](https://myanimelist.net/anime/40961/Hortensia_Saga) | [Hortensia Saga](https://subsplease.org/shows/hortensia-saga) | TV | 12 / 12 | **Finished Airing** | 5.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hortensia+Saga+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40961__hortensia_saga.txt) | ~2~ | 2100 | 2021-03-24 19:49 |
| 40930 | [![40930__azur_lane_bisoku_zenshin](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40930__azur_lane_bisoku_zenshin.jpg)](https://myanimelist.net/anime/40930/Azur_Lane__Bisoku_Zenshin) | [Azur Lane - Bisoku Zenshin!](https://subsplease.org/shows/azur-lane-bisoku-zenshin) | TV | 12 / 12 | **Finished Airing** | 7.01 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Azur+Lane+Bisoku+Zenshin+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40930__azur_lane_bisoku_zenshin.txt) | ~2~ | 1869 | 2021-03-29 17:00 |
| 40908 | [![40908__kemono_jihen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40908__kemono_jihen.jpg)](https://myanimelist.net/anime/40908/Kemono_Jihen) | [Kemono Jihen](https://subsplease.org/shows/kemono-jihen) | TV | 12 / 12 | **Finished Airing** | 7.37 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kemono+Jihen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40908__kemono_jihen.txt) | ~2~ | 4061 | 2021-03-28 13:31 |
| 40879 | [![40879__love_live_nijigasaki_gakuen_school_idol_doukoukai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40879__love_live_nijigasaki_gakuen_school_idol_doukoukai.jpg)](https://myanimelist.net/anime/40879/Love_Live_Nijigasaki_Gakuen_School_Idol_Doukoukai) | [Love Live! Nijigasaki Gakuen School Idol Doukoukai](https://subsplease.org/shows/love-live-nijigasaki-gakuen-school-idol-doukoukai) | TV | 13 / 13 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+Live+Nijigasaki+Gakuen+School+Idol+Doukoukai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40879__love_live_nijigasaki_gakuen_school_idol_doukoukai.txt) | ~2~ | 1629 | 2020-12-26 14:06 |
| 40682 | [![40682__kingdom_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40682__kingdom_3rd_season.jpg)](https://myanimelist.net/anime/40682/Kingdom_3rd_Season) | [Kingdom S3](https://subsplease.org/shows/kingdom-s3) | TV | 26 / 26 | **Finished Airing** | 8.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kingdom+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40682__kingdom_3rd_season.txt) | ~2~ | 1883 | 2021-10-17 18:41 |
| 40646 | [![40646__yes_ka_no_ka_hanbun_ka](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40646__yes_ka_no_ka_hanbun_ka.jpg)](https://myanimelist.net/anime/40646/Yes_ka_No_ka_Hanbun_ka) | [Yes ka No ka Hanbun ka](https://subsplease.org/shows/yes-ka-no-ka-hanbun-ka) | Movie | 1 / 1 | **Finished Airing** | 7.06 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yes+ka+No+ka+Hanbun+ka+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40646__yes_ka_no_ka_hanbun_ka.txt) | ~2~ | 746 | 2021-04-30 16:37 |
| 40497 | [![40497__mahouka_koukou_no_rettousei_raihousha_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40497__mahouka_koukou_no_rettousei_raihousha_hen.jpg)](https://myanimelist.net/anime/40497/Mahouka_Koukou_no_Rettousei__Raihousha-hen) | [Mahouka Koukou no Rettousei S2](https://subsplease.org/shows/mahouka-koukou-no-rettousei-s2) | TV | 13 / 13 | **Finished Airing** | 7.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Mahouka+Koukou+no+Rettousei+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40497__mahouka_koukou_no_rettousei_raihousha_hen.txt) | ~2~ | 5020 | 2020-12-26 17:01 |
| 40085 | [![40085__maesetsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40085__maesetsu.jpg)](https://myanimelist.net/anime/40085/Maesetsu) | [Maesetsu!](https://subsplease.org/shows/maesetsu) | TV | 12 / 12 | **Finished Airing** | 5.8 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maesetsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40085__maesetsu.txt) | ~2~ | 680 | 2020-12-27 15:31 |
| 39893 | [![39893__muteking_the_dancing_hero](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39893__muteking_the_dancing_hero.jpg)](https://myanimelist.net/anime/39893/Muteking_the_Dancing_Hero) | [Muteking the Dancing Hero](https://subsplease.org/shows/muteking-the-dancing-hero) | TV | 12 / 12 | **Finished Airing** | 5.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Muteking+the+Dancing+Hero+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39893__muteking_the_dancing_hero.txt) | ~2~ | 827 | 2021-12-18 17:31 |
| 39469 | [![39469__tsugu_tsugumomo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39469__tsugu_tsugumomo.jpg)](https://myanimelist.net/anime/39469/Tsugu_Tsugumomo) | [Tsugumomo S2 OVA](https://subsplease.org/shows/tsugumomo-s2) | TV | 1 / 12 | **Finished Airing** | 7.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsugumomo+S2+OVA+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39469__tsugu_tsugumomo.txt) | ~2~ | 952 | 2020-11-06 00:54 |
| 38749 | [![38749__blade_runner_black_lotus](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38749__blade_runner_black_lotus.jpg)](https://myanimelist.net/anime/38749/Blade_Runner__Black_Lotus) | [Blade Runner - Black Lotus](https://subsplease.org/shows/blade-runner-black-lotus) | TV | 13 / 13 | **Finished Airing** | 6.29 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Blade+Runner+Black+Lotus+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38749__blade_runner_black_lotus.txt) | ~2~ | 2258 | 2022-02-06 05:01 |
| 38476 | [![38476__heya_camp](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38476__heya_camp.jpg)](https://myanimelist.net/anime/38476/Heya_Camp△) | [Heya Camp](https://subsplease.org/shows/heya-camp) | TV | 1 / 12 | **Finished Airing** | 7.36 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Heya+Camp+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38476__heya_camp.txt) | ~2~ | 1289 | 2021-02-25 18:55 |
| 38091 | [![38091__hachigatsu_no_cinderella_nine](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38091__hachigatsu_no_cinderella_nine.jpg)](https://myanimelist.net/anime/38091/Hachigatsu_no_Cinderella_Nine) | [Hachigatsu no Cinderella Nine](https://subsplease.org/shows/hachigatsu-no-cinderella-nine) | TV | 1 / 12 | **Finished Airing** | 6.12 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hachigatsu+no+Cinderella+Nine+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38091__hachigatsu_no_cinderella_nine.txt) | ~2~ | 541 | 2021-10-01 04:17 |
| 35335 | [![35335__musashino](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/35335__musashino.jpg)](https://myanimelist.net/anime/35335/Musashino) | [Musashino!](https://subsplease.org/shows/musashino) | TV | 12 / 12 | **Finished Airing** | 4.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Musashino+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/35335__musashino.txt) | ~2~ | 1010 | 2022-09-17 15:30 |
| 32455 | [![32455__gekidol_actidol_project](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/32455__gekidol_actidol_project.jpg)](https://myanimelist.net/anime/32455/Gekidol__Actidol_Project) | [Gekidol](https://subsplease.org/shows/gekidol) | TV | 13 / 12 | **Finished Airing** | 5.51 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gekidol+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/32455__gekidol_actidol_project.txt) | ~2~ | 930 | 2021-03-23 12:31 |
| 44069 | [![44069__xian_wang_de_richang_shenghuo_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44069__xian_wang_de_richang_shenghuo_2.jpg)](https://myanimelist.net/anime/44069/Xian_Wang_de_Richang_Shenghuo_2) | [The Daily Life of the Immortal King S2](https://subsplease.org/shows/the-daily-life-of-the-immortal-king-s2) | ONA | 12 / 12 | **Finished Airing** | 7.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Daily+Life+of+the+Immortal+King+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44069__xian_wang_de_richang_shenghuo_2.txt) | ~2~ | 2409 | 2022-01-08 03:03 |
| 41462 | [![41462__bang_dream_film_live_2nd_stage](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41462__bang_dream_film_live_2nd_stage.jpg)](https://myanimelist.net/anime/41462/BanG_Dream_Film_Live_2nd_Stage) | [BanG Dream! Film Live 2nd Stage](https://subsplease.org/shows/bang-dream-film-live-2nd-stage) | Movie | 4 / 1 | **Finished Airing** | 7.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+BanG+Dream+Film+Live+2nd+Stage+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41462__bang_dream_film_live_2nd_stage.txt) | ~2~ | 897 | 2022-08-07 21:50 |
| 40911 | [![40911__yuukoku_no_moriarty](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40911__yuukoku_no_moriarty.jpg)](https://myanimelist.net/anime/40911/Yuukoku_no_Moriarty) | [Yuukoku no Moriarty](https://subsplease.org/shows/yuukoku-no-moriarty) | TV | 24 / 11 | **Finished Airing** | 8.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yuukoku+no+Moriarty+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40911__yuukoku_no_moriarty.txt) | ~2~ | 2468 | 2021-06-27 15:43 |
| 40842 | [![40842__idoly_pride](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40842__idoly_pride.jpg)](https://myanimelist.net/anime/40842/Idoly_Pride) | [Idoly Pride](https://subsplease.org/shows/idoly-pride) | TV | 12 / 12 | **Finished Airing** | 7.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Idoly+Pride+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40842__idoly_pride.txt) | ~2~ | 1009 | 2021-03-28 16:31 |
| 40776 | [![40776__haikyuu_to_the_top_part_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40776__haikyuu_to_the_top_part_2.jpg)](https://myanimelist.net/anime/40776/Haikyuu_To_the_Top_Part_2) | [Haikyuu!! To The Top](https://subsplease.org/shows/haikyuu-to-the-top) | TV | 12 / 12 | **Finished Airing** | 8.55 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Haikyuu+To+The+Top+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40776__haikyuu_to_the_top_part_2.txt) | ~2~ | 3962 | 2020-12-18 19:52 |
| 40550 | [![40550__assault_lily_bouquet](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40550__assault_lily_bouquet.jpg)](https://myanimelist.net/anime/40550/Assault_Lily__Bouquet) | [Assault Lily Bouquet](https://subsplease.org/shows/assault-lily-bouquet) | TV | 12 / 12 | **Finished Airing** | 6.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Assault+Lily+Bouquet+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40550__assault_lily_bouquet.txt) | ~2~ | 1878 | 2020-12-25 13:01 |
| 38853 | [![38853__ex_arm](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38853__ex_arm.jpg)](https://myanimelist.net/anime/38853/Ex-Arm) | [Ex-Arm](https://subsplease.org/shows/ex-arm) | TV | 12 / 12 | **Finished Airing** | 2.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ex+Arm+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38853__ex_arm.txt) | ~2~ | 1566 | 2021-03-28 17:02 |
| 50470 | [![50470__kami_kuzu_idol](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50470__kami_kuzu_idol.jpg)](https://myanimelist.net/anime/50470/Kami_Kuzu☆Idol) | [Kami Kuzu Idol](https://subsplease.org/shows/kami-kuzu-idol) | TV | 10 / 10 | **Finished Airing** | 6.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kami+Kuzu+Idol+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50470__kami_kuzu_idol.txt) | ~1~ | 1693 | 2022-09-02 23:04 |
| 40956 | [![40956__enen_no_shouboutai_ni_no_shou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40956__enen_no_shouboutai_ni_no_shou.jpg)](https://myanimelist.net/anime/40956/Enen_no_Shouboutai__Ni_no_Shou) | [Enen no Shouboutai S2](https://subsplease.org/shows/enen-no-shouboutai-s2) | TV | 10 / 24 | **Finished Airing** | 7.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Enen+no+Shouboutai+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40956__enen_no_shouboutai_ni_no_shou.txt) | ~1~ | 5606 | 2020-12-11 17:56 |
| 54143 | [![54143__cardfight_vanguard_divinez_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/54143__cardfight_vanguard_divinez_season_2.jpg)](https://myanimelist.net/anime/54143/Cardfight_Vanguard__Divinez_Season_2) | [Cardfight!! Vanguard - Divinez S2](https://subsplease.org/shows/cardfight-vanguard-divinez-s2) | TV | 13 / 13 | **Finished Airing** | 7.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+Divinez+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/54143__cardfight_vanguard_divinez_season_2.txt) | ~1~ | 756 | 2024-10-11 23:42 |
| 52079 | [![52079__cardfight_vanguard_will_dress_season_3](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52079__cardfight_vanguard_will_dress_season_3.jpg)](https://myanimelist.net/anime/52079/Cardfight_Vanguard__will_Dress_Season_3) | [Cardfight!! Vanguard will+Dress S3](https://subsplease.org/shows/cardfight-vanguard-willdress-s3) | TV | 13 / 13 | **Finished Airing** | 6.57 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+will+Dress+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52079__cardfight_vanguard_will_dress_season_3.txt) | ~1~ | 684 | 2023-10-06 23:41 |
| 50985 | [![50985__chimimo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50985__chimimo.jpg)](https://myanimelist.net/anime/50985/Chimimo) | [Chimimo](https://subsplease.org/shows/chimimo) | TV | 12 / 12 | **Finished Airing** | 6.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Chimimo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50985__chimimo.txt) | ~1~ | 994 | 2022-09-22 17:33 |
| 50599 | [![50599__yami_shibai_10](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50599__yami_shibai_10.jpg)](https://myanimelist.net/anime/50599/Yami_Shibai_10) | [Yami Shibai 10](https://subsplease.org/shows/yami-shibai-10) | TV | 13 / 13 | **Finished Airing** | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+10+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50599__yami_shibai_10.txt) | ~1~ | 642 | 2022-04-03 19:30 |
| 50379 | [![50379__shoot_goal_to_the_future](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50379__shoot_goal_to_the_future.jpg)](https://myanimelist.net/anime/50379/Shoot_Goal_to_the_Future) | [Shoot! Goal to the Future](https://subsplease.org/shows/shoot-goal-to-the-future) | TV | 13 / 13 | **Finished Airing** | 5.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shoot+Goal+to+the+Future+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50379__shoot_goal_to_the_future.txt) | ~1~ | 822 | 2022-09-24 15:01 |
| 50185 | [![50185__ryman_s_club](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50185__ryman_s_club.jpg)](https://myanimelist.net/anime/50185/Rymans_Club) | [Ryman's Club](https://subsplease.org/shows/rymans-club) | TV | 12 / 12 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ryman+s+Club+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50185__ryman_s_club.txt) | ~1~ | 1390 | 2022-04-16 18:31 |
| 50099 | [![50099__shin_tennis_no_oujisama_u_17_world_cup](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50099__shin_tennis_no_oujisama_u_17_world_cup.jpg)](https://myanimelist.net/anime/50099/Shin_Tennis_no_Oujisama__U-17_World_Cup) | [The Prince of Tennis II - U-17 World Cup](https://subsplease.org/shows/the-prince-of-tennis-ii-u-17-world-cup) | TV | 13 / 13 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Prince+of+Tennis+II+U+17+World+Cup+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50099__shin_tennis_no_oujisama_u_17_world_cup.txt) | ~1~ | 1150 | 2022-09-28 16:01 |
| 49820 | [![49820__cardfight_vanguard_will_dress_season_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49820__cardfight_vanguard_will_dress_season_2.jpg)](https://myanimelist.net/anime/49820/Cardfight_Vanguard__will_Dress_Season_2) | [Cardfight!! Vanguard will+Dress S2](https://subsplease.org/shows/cardfight-vanguard-willdress-s2) | TV | 12 / 12 | **Finished Airing** | 6.71 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+will+Dress+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49820__cardfight_vanguard_will_dress_season_2.txt) | ~1~ | 552 | 2023-03-31 23:41 |
| 49691 | [![49691__gunjou_no_fanfare](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49691__gunjou_no_fanfare.jpg)](https://myanimelist.net/anime/49691/Gunjou_no_Fanfare) | [Gunjou no Fanfare](https://subsplease.org/shows/gunjou-no-fanfare) | TV | 13 / 13 | **Finished Airing** | 6.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gunjou+no+Fanfare+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49691__gunjou_no_fanfare.txt) | ~1~ | 858 | 2022-06-25 16:01 |
| 49551 | [![49551__hanabi_chan_wa_okuregachi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49551__hanabi_chan_wa_okuregachi.jpg)](https://myanimelist.net/anime/49551/Hanabi-chan_wa_Okuregachi) | [Hanabi-chan wa Okuregachi](https://subsplease.org/shows/hanabi-chan-wa-okuregachi) | TV | 12 / 12 | **Finished Airing** | 6.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hanabi+chan+wa+Okuregachi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49551__hanabi_chan_wa_okuregachi.txt) | ~1~ | 907 | 2022-09-25 14:00 |
| 49522 | [![49522__toutotsu_ni_egypt_shin_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49522__toutotsu_ni_egypt_shin_2.jpg)](https://myanimelist.net/anime/49522/Toutotsu_ni_Egypt_Shin_2) | [Toutotsu ni Egypt Shin S2](https://subsplease.org/shows/toutotsu-ni-egypt-shin-s2) | ONA | 10 / 10 | **Finished Airing** | 6.62 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Toutotsu+ni+Egypt+Shin+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49522__toutotsu_ni_egypt_shin_2.txt) | ~1~ | 804 | 2023-03-14 17:00 |
| 49292 | [![49292__deep_insanity_the_lost_child](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49292__deep_insanity_the_lost_child.jpg)](https://myanimelist.net/anime/49292/Deep_Insanity__The_Lost_Child) | [Deep Insanity - The Lost Child](https://subsplease.org/shows/deep-insanity-the-lost-child) | TV | 12 / 12 | **Finished Airing** | 5.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Deep+Insanity+The+Lost+Child+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49292__deep_insanity_the_lost_child.txt) | ~1~ | 1887 | 2021-12-28 16:33 |
| 49285 | [![49285__waccha_primagi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49285__waccha_primagi.jpg)](https://myanimelist.net/anime/49285/Waccha_PriMagi) | [Waccha PriMagi!](https://subsplease.org/shows/waccha-primagi) | TV | 51 / 51 | **Finished Airing** | 7.02 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Waccha+PriMagi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49285__waccha_primagi.txt) | ~1~ | 699 | 2022-10-09 02:02 |
| 45783 | [![45783__saiyuuki_reload_zeroin](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45783__saiyuuki_reload_zeroin.jpg)](https://myanimelist.net/anime/45783/Saiyuuki_Reload__Zeroin) | [Saiyuuki Reload - Zeroin](https://subsplease.org/shows/saiyuuki-reload-zeroin) | TV | 13 / 13 | **Finished Airing** | 6.66 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Saiyuuki+Reload+Zeroin+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45783__saiyuuki_reload_zeroin.txt) | ~1~ | 1383 | 2022-03-31 15:05 |
| 45577 | [![45577__idolish7_third_beat](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45577__idolish7_third_beat.jpg)](https://myanimelist.net/anime/45577/IDOLiSH7_Third_Beat) | [IDOLiSH7 S3](https://subsplease.org/shows/idolish7-s3) | TV | 30 / 13 | **Finished Airing** | 8.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+IDOLiSH7+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45577__idolish7_third_beat.txt) | ~1~ | 534 | 2023-02-26 16:02 |
| 44387 | [![44387__sankaku_mado_no_sotogawa_wa_yoru](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44387__sankaku_mado_no_sotogawa_wa_yoru.jpg)](https://myanimelist.net/anime/44387/Sankaku_Mado_no_Sotogawa_wa_Yoru) | [Sankaku Mado no Sotogawa wa Yoru](https://subsplease.org/shows/sankaku-mado-no-sotogawa-wa-yoru) | TV | 12 / 12 | **Finished Airing** | 6.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sankaku+Mado+no+Sotogawa+wa+Yoru+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44387__sankaku_mado_no_sotogawa_wa_yoru.txt) | ~1~ | 1131 | 2021-12-19 14:31 |
| 44191 | [![44191__tropical_rouge_precure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44191__tropical_rouge_precure.jpg)](https://myanimelist.net/anime/44191/Tropical-Rouge_Precure) | [Tropical-Rouge! Precure](https://subsplease.org/shows/tropical-rouge-precure) | TV | 46 / 46 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tropical+Rouge+Precure+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44191__tropical_rouge_precure.txt) | ~1~ | 582 | 2022-01-30 01:31 |
| 43771 | [![43771__vazzrock_the_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43771__vazzrock_the_animation.jpg)](https://myanimelist.net/anime/43771/Vazzrock_The_Animation) | [Vazzrock the Animation](https://subsplease.org/shows/vazzrock-the-animation) | TV | 13 / 13 | **Finished Airing** | 5.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Vazzrock+the+Animation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43771__vazzrock_the_animation.txt) | ~1~ | 584 | 2022-12-27 15:31 |
| 43591 | [![43591__hetalia_world_stars](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43591__hetalia_world_stars.jpg)](https://myanimelist.net/anime/43591/Hetalia_World★Stars) | [Hetalia World Stars](https://subsplease.org/shows/hetalia-world-stars) | ONA | 12 / 12 | **Finished Airing** | 6.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hetalia+World+Stars+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43591__hetalia_world_stars.txt) | ~1~ | 551 | 2021-06-16 16:01 |
| 42981 | [![42981__idolls](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42981__idolls.jpg)](https://myanimelist.net/anime/42981/Idolls) | [Idolls!](https://subsplease.org/shows/idolls) | TV | 10 / 10 | **Finished Airing** | 5.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Idolls+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42981__idolls.txt) | ~1~ | 511 | 2021-03-12 16:00 |
| 42959 | [![42959__yatogame_chan_kansatsu_nikki_sansatsume](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42959__yatogame_chan_kansatsu_nikki_sansatsume.jpg)](https://myanimelist.net/anime/42959/Yatogame-chan_Kansatsu_Nikki_Sansatsume) | [Yatogame-chan Kansatsu Nikki S3](https://subsplease.org/shows/yatogame-chan-kansatsu-nikki-s3) | TV | 12 / 12 | **Finished Airing** | 6.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yatogame+chan+Kansatsu+Nikki+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42959__yatogame_chan_kansatsu_nikki_sansatsume.txt) | ~1~ | 801 | 2021-03-28 13:30 |
| 42892 | [![42892__baraou_no_souretsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42892__baraou_no_souretsu.jpg)](https://myanimelist.net/anime/42892/Baraou_no_Souretsu) | [Baraou no Souretsu](https://subsplease.org/shows/baraou-no-souretsu) | TV | 25 / 24 | **Finished Airing** | 6.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Baraou+no+Souretsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42892__baraou_no_souretsu.txt) | ~1~ | 1048 | 2022-06-26 14:02 |
| 42822 | [![42822__kai_byoui_ramune](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42822__kai_byoui_ramune.jpg)](https://myanimelist.net/anime/42822/Kai_Byoui_Ramune) | [Kai Byoui Ramune](https://subsplease.org/shows/kai-byoui-ramune) | TV | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kai+Byoui+Ramune+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42822__kai_byoui_ramune.txt) | ~1~ | 1183 | 2021-03-27 17:01 |
| 42808 | [![42808__shenmue_the_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42808__shenmue_the_animation.jpg)](https://myanimelist.net/anime/42808/Shenmue_the_Animation) | [Shenmue the Animation](https://subsplease.org/shows/shenmue-the-animation) | TV | 13 / 13 | **Finished Airing** | 6.64 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shenmue+the+Animation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42808__shenmue_the_animation.txt) | ~1~ | 1823 | 2022-05-01 04:01 |
| 41917 | [![41917__min_diao_ju_yi_wen_lu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41917__min_diao_ju_yi_wen_lu.jpg)](https://myanimelist.net/anime/41917/Min_Diao_Ju_Yi_Wen_Lu) | [Bureau of Paranormal Investigation](https://subsplease.org/shows/bureau-of-paranormal-investigation) | ONA | 12 / 12 | **Finished Airing** | 6.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bureau+of+Paranormal+Investigation+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41917__min_diao_ju_yi_wen_lu.txt) | ~1~ | 1316 | 2023-02-03 21:19 |
| 41834 | [![41834__king_s_raid_ishi_wo_tsugumono_tachi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41834__king_s_raid_ishi_wo_tsugumono_tachi.jpg)](https://myanimelist.net/anime/41834/Kings_Raid__Ishi_wo_Tsugumono-tachi) | [King's Raid - Ishi wo Tsugu Mono-tachi](https://subsplease.org/shows/kings-raid-ishi-wo-tsugu-mono-tachi) | TV | 26 / 26 | **Finished Airing** | 6.04 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+King+s+Raid+Ishi+wo+Tsugu+Mono+tachi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41834__king_s_raid_ishi_wo_tsugumono_tachi.txt) | ~1~ | 1444 | 2021-03-26 17:25 |
| 41688 | [![41688__toutotsu_ni_egypt_shin](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41688__toutotsu_ni_egypt_shin.jpg)](https://myanimelist.net/anime/41688/Toutotsu_ni_Egypt_Shin) | [Toutotsu ni Egypt Kami](https://subsplease.org/shows/toutotsu-ni-egypt-shin) | ONA | 10 / 10 | **Finished Airing** | 6.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Toutotsu+ni+Egypt+Kami+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41688__toutotsu_ni_egypt_shin.txt) | ~1~ | 461 | 2021-02-08 04:00 |
| 41556 | [![41556__maiko_san_chi_no_makanai_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41556__maiko_san_chi_no_makanai_san.jpg)](https://myanimelist.net/anime/41556/Maiko-san_Chi_no_Makanai-san) | [Maiko-san Chi no Makanai-san](https://subsplease.org/shows/maiko-san-chi-no-makanai-san) | TV | 12 / 12 | **Finished Airing** | 7.03 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maiko+san+Chi+no+Makanai+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41556__maiko_san_chi_no_makanai_san.txt) | ~1~ | 954 | 2022-01-27 04:01 |
| 41521 | [![41521__wixoss_diva_a_live](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41521__wixoss_diva_a_live.jpg)](https://myanimelist.net/anime/41521/WIXOSS_DivaALive) | [WIXOSS Diva(A)Live](https://subsplease.org/shows/wixoss-divaalive) | TV | 12 / 12 | **Finished Airing** | 5.6 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+WIXOSS+Diva+A+Live+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41521__wixoss_diva_a_live.txt) | ~1~ | 697 | 2021-03-26 16:32 |
| 41433 | [![41433__akudama_drive](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41433__akudama_drive.jpg)](https://myanimelist.net/anime/41433/Akudama_Drive) | [Akudama Drive](https://subsplease.org/shows/akudama-drive) | TV | 12 / 12 | **Finished Airing** | 7.58 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Akudama+Drive+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41433__akudama_drive.txt) | ~1~ | 4599 | 2020-12-24 13:02 |
| 41389 | [![41389__tonikaku_kawaii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41389__tonikaku_kawaii.jpg)](https://myanimelist.net/anime/41389/Tonikaku_Kawaii) | [Tonikaku Kawaii](https://subsplease.org/shows/tonikaku-kawaii) | TV | 15 / 12 | **Finished Airing** | 7.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tonikaku+Kawaii+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41389__tonikaku_kawaii.txt) | ~1~ | 3347 | 2022-11-28 16:56 |
| 40964 | [![40964__back_arrow](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40964__back_arrow.jpg)](https://myanimelist.net/anime/40964/Back_Arrow) | [Back Arrow](https://subsplease.org/shows/back-arrow) | TV | 24 / 24 | **Finished Airing** | 6.33 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Back+Arrow+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40964__back_arrow.txt) | ~1~ | 1728 | 2021-06-18 16:32 |
| 40907 | [![40907__world_trigger_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40907__world_trigger_2nd_season.jpg)](https://myanimelist.net/anime/40907/World_Trigger_2nd_Season) | [World Trigger S2](https://subsplease.org/shows/world-trigger-s2) | TV | 12 / 12 | **Finished Airing** | 8.05 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+World+Trigger+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40907__world_trigger_2nd_season.txt) | ~1~ | 2644 | 2021-04-03 18:31 |
| 40571 | [![40571__majo_no_tabitabi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40571__majo_no_tabitabi.jpg)](https://myanimelist.net/anime/40571/Majo_no_Tabitabi) | [Majo no Tabitabi](https://subsplease.org/shows/majo-no-tabitabi) | TV | 12 / 12 | **Finished Airing** | 7.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Majo+no+Tabitabi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40571__majo_no_tabitabi.txt) | ~1~ | 4487 | 2020-12-18 13:02 |
| 39790 | [![39790__adachi_to_shimamura](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39790__adachi_to_shimamura.jpg)](https://myanimelist.net/anime/39790/Adachi_to_Shimamura) | [Adachi to Shimamura](https://subsplease.org/shows/adachi-to-shimamura) | TV | 12 / 12 | **Finished Airing** | 7.09 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Adachi+to+Shimamura+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39790__adachi_to_shimamura.txt) | ~1~ | 2491 | 2020-12-24 17:39 |
| 39681 | [![39681__d4dj_first_mix](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39681__d4dj_first_mix.jpg)](https://myanimelist.net/anime/39681/D4DJ_First_Mix) | [D4DJ First Mix](https://subsplease.org/shows/d4dj-first-mix) | TV | 13 / 13 | **Finished Airing** | 7.61 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+D4DJ+First+Mix+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39681__d4dj_first_mix.txt) | ~1~ | 817 | 2021-01-29 14:03 |
| 37262 | [![37262__ta_ga_tame_no_alchemist](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37262__ta_ga_tame_no_alchemist.jpg)](https://myanimelist.net/anime/37262/Ta_ga_Tame_no_Alchemist) | [Ta ga Tame no Alchemist](https://subsplease.org/shows/ta-ga-tame-no-alchemist) | Movie | 1 / 1 | **Finished Airing** | 6.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ta+ga+Tame+no+Alchemist+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37262__ta_ga_tame_no_alchemist.txt) | ~1~ | 1182 | 2021-02-05 00:18 |
| 36458 | [![36458__soukou_musume_senki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/36458__soukou_musume_senki.jpg)](https://myanimelist.net/anime/36458/Soukou_Musume_Senki) | [Soukou Musume Senki](https://subsplease.org/shows/soukou-musume-senki) | TV | 12 / 12 | **Finished Airing** | 5.78 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Soukou+Musume+Senki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/36458__soukou_musume_senki.txt) | ~1~ | 1202 | 2021-03-24 19:36 |
| 36028 | [![36028__golden_kamuy](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/36028__golden_kamuy.jpg)](https://myanimelist.net/anime/36028/Golden_Kamuy) | [Golden Kamuy](https://subsplease.org/shows/golden-kamuy) | TV | 25 / 12 | **Finished Airing** | 7.88 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Golden+Kamuy+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/36028__golden_kamuy.txt) | ~1~ | 3775 | 2023-06-26 15:01 |
| 52273 | [![52273__saint_seiya_knights_of_the_zodiac_battle_sanctuary](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/52273__saint_seiya_knights_of_the_zodiac_battle_sanctuary.jpg)](https://myanimelist.net/anime/52273/Saint_Seiya__Knights_of_the_Zodiac_-_Battle_Sanctuary) | [Knights of the Zodiac - Saint Seiya S2](https://subsplease.org/shows/knights-of-the-zodiac-saint-seiya-s2) | ONA | 12 / 12 | **Finished Airing** | 6.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Knights+of+the+Zodiac+Saint+Seiya+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/52273__saint_seiya_knights_of_the_zodiac_battle_sanctuary.txt) | ~1~ | 656 | 2022-10-09 20:01 |
| 41930 | [![41930__kamisama_ni_natta_hi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41930__kamisama_ni_natta_hi.jpg)](https://myanimelist.net/anime/41930/Kamisama_ni_Natta_Hi) | [Kamisama ni Natta Hi](https://subsplease.org/shows/kamisama-ni-natta-hi) | TV | 12 / 12 | **Finished Airing** | 6.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kamisama+ni+Natta+Hi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41930__kamisama_ni_natta_hi.txt) | ~1~ | 3820 | 2020-12-26 16:31 |
| 41364 | [![41364__one_room_third_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41364__one_room_third_season.jpg)](https://myanimelist.net/anime/41364/One_Room__Third_Season) | [One Room S3](https://subsplease.org/shows/one-room-s3) | TV | 12 / 12 | **Finished Airing** | 6.43 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+One+Room+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41364__one_room_third_season.txt) | ~1~ | 731 | 2020-12-21 18:15 |
| 40752 | [![40752__bishounen_tanteidan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40752__bishounen_tanteidan.jpg)](https://myanimelist.net/anime/40752/Bishounen_Tanteidan) | [Bishounen Tanteidan](https://subsplease.org/shows/bishounen-tanteidan) | TV | 12 / 12 | **Finished Airing** | 7.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bishounen+Tanteidan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40752__bishounen_tanteidan.txt) | ~1~ | 1428 | 2021-06-26 18:32 |
| 37599 | [![37599__magatsu_wahrheit_zuerst](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37599__magatsu_wahrheit_zuerst.jpg)](https://myanimelist.net/anime/37599/Magatsu_Wahrheit__Zuerst) | [Magatsu Wahrheit - Zuerst](https://subsplease.org/shows/magatsu-wahrheit-zuerst) | TV | 12 / 12 | **Finished Airing** | 6.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Magatsu+Wahrheit+Zuerst+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37599__magatsu_wahrheit_zuerst.txt) | ~1~ | 1136 | 2020-12-29 15:02 |
| 41573 | [![41573__majutsushi_orphen_hagure_tabi_kimluck_hen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41573__majutsushi_orphen_hagure_tabi_kimluck_hen.jpg)](https://myanimelist.net/anime/41573/Majutsushi_Orphen_Hagure_Tabi__Kimluck-hen) | [Majutsushi Orphen Hagure Tabi S2](https://subsplease.org/shows/majutsushi-orphen-hagure-tabi-s2) | TV | 11 / 11 | **Finished Airing** | 6.19 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Majutsushi+Orphen+Hagure+Tabi+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41573__majutsushi_orphen_hagure_tabi_kimluck_hen.txt) | ~0~ | 1266 | 2021-03-31 13:02 |
| 51203 | [![51203__meng_qi_shi_shen_huanxi_zhui_hun](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/51203__meng_qi_shi_shen_huanxi_zhui_hun.jpg)](https://myanimelist.net/anime/51203/Meng_Qi_Shi_Shen__Huanxi_Zhui_Hun) | [Cinderella Chef S3](https://subsplease.org/shows/cinderella-chef-s3) | ONA | 12 / 12 | **Finished Airing** | 7.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cinderella+Chef+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/51203__meng_qi_shi_shen_huanxi_zhui_hun.txt) | ~0~ | 553 | 2022-09-13 05:01 |
| 50789 | [![50789__jantama_pong](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50789__jantama_pong.jpg)](https://myanimelist.net/anime/50789/Jantama_Pong☆) | [Jantama Pong](https://subsplease.org/shows/jantama-pong) | TV | 12 / 11 | **Finished Airing** | 6.08 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Jantama+Pong+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50789__jantama_pong.txt) | ~0~ | 1003 | 2022-06-17 17:00 |
| 50537 | [![50537__bai_yao_pu_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50537__bai_yao_pu_3rd_season.jpg)](https://myanimelist.net/anime/50537/Bai_Yao_Pu_3rd_Season) | [Fairies Album S3](https://subsplease.org/shows/fairies-album-s3) | ONA | 12 / 12 | **Finished Airing** | 7.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Fairies+Album+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50537__bai_yao_pu_3rd_season.txt) | ~0~ | 472 | 2022-10-03 05:01 |
| 50160 | [![50160__kingdom_4th_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50160__kingdom_4th_season.jpg)](https://myanimelist.net/anime/50160/Kingdom_4th_Season) | [Kingdom S4](https://subsplease.org/shows/kingdom-s4) | TV | 26 / 26 | **Finished Airing** | 8.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kingdom+S4+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50160__kingdom_4th_season.txt) | ~0~ | 2363 | 2022-10-01 18:31 |
| 50021 | [![50021__dou_shen_ji](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/50021__dou_shen_ji.jpg)](https://myanimelist.net/anime/50021/Dou_Shen_Ji) | [Ancient Girls Frame](https://subsplease.org/shows/ancient-girls-frame) | ONA | 12 / 12 | **Finished Airing** | 5.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ancient+Girls+Frame+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/50021__dou_shen_ji.txt) | ~0~ | 761 | 2021-12-29 04:26 |
| 49819 | [![49819__cardfight_vanguard_will_dress](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49819__cardfight_vanguard_will_dress.jpg)](https://myanimelist.net/anime/49819/Cardfight_Vanguard__will_Dress) | [Cardfight!! Vanguard will+Dress](https://subsplease.org/shows/cardfight-vanguard-willdress) | TV | 13 / 13 | **Finished Airing** | 6.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+will+Dress+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49819__cardfight_vanguard_will_dress.txt) | ~0~ | 393 | 2022-09-26 15:41 |
| 49556 | [![49556__love_all_play](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49556__love_all_play.jpg)](https://myanimelist.net/anime/49556/Love_All_Play) | [Love All Play](https://subsplease.org/shows/love-all-play) | TV | 24 / 24 | **Finished Airing** | 6.53 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Love+All+Play+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49556__love_all_play.txt) | ~0~ | 1002 | 2022-09-24 10:03 |
| 49338 | [![49338__hakuouki_ova_2021](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49338__hakuouki_ova_2021.jpg)](https://myanimelist.net/anime/49338/Hakuouki_OVA_2021) | [Hakuouki OVA](https://subsplease.org/shows/hakuouki-ova) | OVA | 3 / 3 | **Finished Airing** | 7.07 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hakuouki+OVA+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49338__hakuouki_ova_2021.txt) | ~0~ | 895 | 2022-01-29 18:37 |
| 49263 | [![49263__yaku_nara_mug_cup_mo_niban_gama](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49263__yaku_nara_mug_cup_mo_niban_gama.jpg)](https://myanimelist.net/anime/49263/Yaku_nara_Mug_Cup_mo__Niban_Gama) | [Yakunara Mug Cup mo S2](https://subsplease.org/shows/yakunara-mug-cup-mo-s2) | TV | 24 / 12 | **Finished Airing** | 6.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yakunara+Mug+Cup+mo+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49263__yaku_nara_mug_cup_mo_niban_gama.txt) | ~0~ | 1047 | 2021-12-17 20:19 |
| 49110 | [![49110__yami_shibai_9](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/49110__yami_shibai_9.jpg)](https://myanimelist.net/anime/49110/Yami_Shibai_9) | [Yami Shibai 9](https://subsplease.org/shows/yami-shibai-9) | TV | 13 / 13 | **Finished Airing** | 5.91 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+9+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/49110__yami_shibai_9.txt) | ~0~ | 576 | 2021-10-03 19:30 |
| 48641 | [![48641__obey_me](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/48641__obey_me.jpg)](https://myanimelist.net/anime/48641/Obey_Me) | [Obey Me!](https://subsplease.org/shows/obey-me) | ONA | 12 / 12 | **Finished Airing** | 7.15 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Obey+Me+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/48641__obey_me.txt) | ~0~ | 792 | 2021-12-31 08:01 |
| 46118 | [![46118__wave_surfing_yappe_tv](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/46118__wave_surfing_yappe_tv.jpg)](https://myanimelist.net/anime/46118/Wave_Surfing_Yappe_TV) | [Wave!! Surfing Yappe!! (TV)](https://subsplease.org/shows/wave-surfing-yappe-tv) | TV | 12 / 12 | **Finished Airing** | 5.86 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Wave+Surfing+Yappe+TV+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/46118__wave_surfing_yappe_tv.txt) | ~0~ | 478 | 2021-03-29 18:31 |
| 45587 | [![45587__itazuraguma_no_gloomy](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/45587__itazuraguma_no_gloomy.jpg)](https://myanimelist.net/anime/45587/Itazuraguma_no_Gloomy) | [Itazuraguma no Gloomy](https://subsplease.org/shows/itazuraguma-no-gloomy) | TV | 12 / 12 | **Finished Airing** | 5.28 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Itazuraguma+no+Gloomy+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/45587__itazuraguma_no_gloomy.txt) | ~0~ | 435 | 2021-06-28 15:30 |
| 44208 | [![44208__yami_shibai_8](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44208__yami_shibai_8.jpg)](https://myanimelist.net/anime/44208/Yami_Shibai_8) | [Yami Shibai 8](https://subsplease.org/shows/yami-shibai-8) | TV | 13 / 13 | **Finished Airing** | 5.82 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yami+Shibai+8+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44208__yami_shibai_8.txt) | ~0~ | 490 | 2021-04-04 19:30 |
| 44064 | [![44064__liehuo_jiao_chou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44064__liehuo_jiao_chou.jpg)](https://myanimelist.net/anime/44064/Liehuo_Jiao_Chou) | [Drowning Sorrows in Raging Fire](https://subsplease.org/shows/drowning-sorrows-in-raging-fire) | ONA | 12 / 12 | **Finished Airing** | 7.27 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Drowning+Sorrows+in+Raging+Fire+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44064__liehuo_jiao_chou.txt) | ~0~ | 969 | 2021-12-16 03:03 |
| 44041 | [![44041__sd_gundam_world_heroes](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44041__sd_gundam_world_heroes.jpg)](https://myanimelist.net/anime/44041/SD_Gundam_World_Heroes) | [SD Gundam World Heroes](https://subsplease.org/shows/sd-gundam-world-heroes) | ONA | 24 / 24 | **Finished Airing** | 5.74 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+SD+Gundam+World+Heroes+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44041__sd_gundam_world_heroes.txt) | ~0~ | 400 | 2021-09-16 10:01 |
| 44040 | [![44040__abciee_shuugyou_nikki](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/44040__abciee_shuugyou_nikki.jpg)](https://myanimelist.net/anime/44040/Abciee_Shuugyou_Nikki) | [Abciee Shuugyou Nikki](https://subsplease.org/shows/abciee-shuugyou-nikki) | TV | 12 / 12 | **Finished Airing** | 5.44 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Abciee+Shuugyou+Nikki+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/44040__abciee_shuugyou_nikki.txt) | ~0~ | 366 | 2021-03-24 18:00 |
| 43001 | [![43001__youjo_shachou](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/43001__youjo_shachou.jpg)](https://myanimelist.net/anime/43001/Youjo_Shachou) | [Youjo Shachou](https://subsplease.org/shows/youjo-shachou) | ONA | 1 / 13 | **Finished Airing** | 6.59 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Youjo+Shachou+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/43001__youjo_shachou.txt) | ~0~ | 642 | 2021-01-01 03:10 |
| 42946 | [![42946__kusoge_tte_iuna_animation](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42946__kusoge_tte_iuna_animation.jpg)](https://myanimelist.net/anime/42946/Kusoge_tte_Iuna_Animation) | [Kusoge-tte Iuna!](https://subsplease.org/shows/kusoge-tte-iuna) | ONA | 12 / 12 | **Finished Airing** | 5.38 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kusoge+tte+Iuna+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42946__kusoge_tte_iuna_animation.txt) | ~0~ | 360 | 2021-01-12 03:00 |
| 42883 | [![42883__sore_dake_ga_neck](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42883__sore_dake_ga_neck.jpg)](https://myanimelist.net/anime/42883/Sore_dake_ga_Neck) | [Sore dake ga Neck](https://subsplease.org/shows/sore-dake-ga-neck) | TV | 12 / 12 | **Finished Airing** | 5.67 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sore+dake+ga+Neck+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42883__sore_dake_ga_neck.txt) | ~0~ | 310 | 2021-01-04 18:50 |
| 42862 | [![42862__otona_no_bouguya_san_ii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42862__otona_no_bouguya_san_ii.jpg)](https://myanimelist.net/anime/42862/Otona_no_Bouguya-san_II) | [Otona no Bouguya-san S2](https://subsplease.org/shows/otona-no-bouguya-san-s2) | ONA | 12 / 12 | **Finished Airing** | 5.5 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Otona+no+Bouguya+san+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42862__otona_no_bouguya_san_ii.txt) | ~0~ | 733 | 2021-03-19 15:31 |
| 42832 | [![42832__tales_of_crestoria_toga_waga_wo_shoite_kare_wa_tatsu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42832__tales_of_crestoria_toga_waga_wo_shoite_kare_wa_tatsu.jpg)](https://myanimelist.net/anime/42832/Tales_of_Crestoria__Toga_Waga_wo_Shoite_Kare_wa_Tatsu) | [Tales of Crestoria - Toga Waga wo Shoite Kare wa Tatsu](https://subsplease.org/shows/tales-of-crestoria-toga-waga-wo-shoite-kare-wa-tatsu) | TV Special | 1 / 1 | **Finished Airing** | 6.57 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tales+of+Crestoria+Toga+Waga+wo+Shoite+Kare+wa+Tatsu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42832__tales_of_crestoria_toga_waga_wo_shoite_kare_wa_tatsu.txt) | ~0~ | 1003 | 2020-10-18 17:49 |
| 42825 | [![42825__project_scard_praeter_no_kizu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42825__project_scard_praeter_no_kizu.jpg)](https://myanimelist.net/anime/42825/Project_Scard__Praeter_no_Kizu) | [Project Scard - Praeter no Kizu](https://subsplease.org/shows/project-scard-praeter-no-kizu) | TV | 13 / 13 | **Finished Airing** | 5.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Project+Scard+Praeter+no+Kizu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42825__project_scard_praeter_no_kizu.txt) | ~0~ | 992 | 2021-04-02 17:57 |
| 42668 | [![42668__taisou_zamurai](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42668__taisou_zamurai.jpg)](https://myanimelist.net/anime/42668/Taisou_Zamurai) | [Taisou Zamurai](https://subsplease.org/shows/taisou-zamurai) | TV | 11 / 11 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Taisou+Zamurai+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42668__taisou_zamurai.txt) | ~0~ | 1037 | 2020-12-19 17:31 |
| 42657 | [![42657__himitsukessha_taka_no_tsume_golden_spell](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42657__himitsukessha_taka_no_tsume_golden_spell.jpg)](https://myanimelist.net/anime/42657/Himitsukessha_Taka_no_Tsume__Golden_Spell) | [Himitsukessha Taka no Tsume - Golden Spell](https://subsplease.org/shows/himitsukessha-taka-no-tsume-golden-spell) | TV | 12 / 12 | **Finished Airing** | 6.18 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Himitsukessha+Taka+no+Tsume+Golden+Spell+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42657__himitsukessha_taka_no_tsume_golden_spell.txt) | ~0~ | 234 | 2020-12-20 18:30 |
| 42571 | [![42571__dogeza_de_tanondemita](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42571__dogeza_de_tanondemita.jpg)](https://myanimelist.net/anime/42571/Dogeza_de_Tanondemita) | [Dogeza de Tanondemita](https://subsplease.org/shows/dogeza-de-tanondemita) | TV | 12 / 12 | **Finished Airing** | 5.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dogeza+de+Tanondemita+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42571__dogeza_de_tanondemita.txt) | ~0~ | 1038 | 2020-12-30 15:40 |
| 42568 | [![42568__yaku_nara_mug_cup_mo](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42568__yaku_nara_mug_cup_mo.jpg)](https://myanimelist.net/anime/42568/Yaku_nara_Mug_Cup_mo) | [Yakunara Mug Cup mo](https://subsplease.org/shows/yakunara-mug-cup-mo) | TV | 24 / 12 | **Finished Airing** | 6.54 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Yakunara+Mug+Cup+mo+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42568__yaku_nara_mug_cup_mo.txt) | ~0~ | 1079 | 2021-06-21 16:57 |
| 42516 | [![42516__cardfight_vanguard_overdress](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42516__cardfight_vanguard_overdress.jpg)](https://myanimelist.net/anime/42516/Cardfight_Vanguard__overDress) | [Cardfight!! Vanguard overDress](https://subsplease.org/shows/cardfight-vanguard-overdress) | TV | 25 / 12 | **Finished Airing** | 5.97 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+overDress+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42516__cardfight_vanguard_overdress.txt) | ~0~ | 375 | 2021-12-27 15:41 |
| 42514 | [![42514__anime_kapibara_san](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42514__anime_kapibara_san.jpg)](https://myanimelist.net/anime/42514/Anime_Kapibara-san) | [Anime Kapibara-san](https://subsplease.org/shows/anime-kapibara-san) | TV | 24 / 24 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Anime+Kapibara+san+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42514__anime_kapibara_san.txt) | ~0~ | 288 | 2021-03-25 23:30 |
| 42391 | [![42391__osomatsu_san_3rd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42391__osomatsu_san_3rd_season.jpg)](https://myanimelist.net/anime/42391/Osomatsu-san_3rd_Season) | [Osomatsu-san S3](https://subsplease.org/shows/osomatsu-san-s3) | TV | 25 / 25 | **Finished Airing** | 7.39 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Osomatsu+san+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42391__osomatsu_san_3rd_season.txt) | ~0~ | 454 | 2021-03-29 18:01 |
| 42250 | [![42250__bungou_stray_dogs_wan](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/42250__bungou_stray_dogs_wan.jpg)](https://myanimelist.net/anime/42250/Bungou_Stray_Dogs_Wan) | [Bungou Stray Dogs Wan!](https://subsplease.org/shows/bungou-stray-dogs-wan) | TV | 12 / 12 | **Finished Airing** | 7.98 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Bungou+Stray+Dogs+Wan+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/42250__bungou_stray_dogs_wan.txt) | ~0~ | 760 | 2021-03-30 16:30 |
| 41911 | [![41911__hanyou_no_yashahime_sengoku_otogizoushi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41911__hanyou_no_yashahime_sengoku_otogizoushi.jpg)](https://myanimelist.net/anime/41911/Hanyou_no_Yashahime__Sengoku_Otogizoushi) | [Hanyou no Yashahime](https://subsplease.org/shows/hanyou-no-yashahime) | TV | 48 / 24 | **Finished Airing** | 6.7 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hanyou+no+Yashahime+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41911__hanyou_no_yashahime_sengoku_otogizoushi.txt) | ~0~ | 1732 | 2022-03-26 18:14 |
| 41783 | [![41783__iwa_kakeru_sport_climbing_girls](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41783__iwa_kakeru_sport_climbing_girls.jpg)](https://myanimelist.net/anime/41783/Iwa_Kakeru_Sport_Climbing_Girls) | [Iwa Kakeru! Sport Climbing Girls](https://subsplease.org/shows/iwa-kakeru-sport-climbing-girls) | TV | 12 / 12 | **Finished Airing** | 6.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Iwa+Kakeru+Sport+Climbing+Girls+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41783__iwa_kakeru_sport_climbing_girls.txt) | ~0~ | 1671 | 2020-12-19 19:01 |
| 41574 | [![41574__guraburu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41574__guraburu.jpg)](https://myanimelist.net/anime/41574/Guraburu) | [Guraburu!](https://subsplease.org/shows/guraburu) | TV | 12 / 12 | **Finished Airing** | 5.79 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Guraburu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41574__guraburu.txt) | ~0~ | 440 | 2020-12-24 14:01 |
| 41520 | [![41520__show_by_rock_stars](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41520__show_by_rock_stars.jpg)](https://myanimelist.net/anime/41520/Show_by_Rock_Stars) | [Show by Rock!! Stars!!](https://subsplease.org/shows/show-by-rock-stars) | TV | 12 / 12 | **Finished Airing** | 7.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Show+by+Rock+Stars+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41520__show_by_rock_stars.txt) | ~0~ | 617 | 2021-03-25 14:02 |
| 41372 | [![41372__senyoku_no_sigrdrifa](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41372__senyoku_no_sigrdrifa.jpg)](https://myanimelist.net/anime/41372/Senyoku_no_Sigrdrifa) | [Senyoku no Sigrdrifa](https://subsplease.org/shows/senyoku-no-sigrdrifa) | TV | 13 / 12 | **Finished Airing** | 6.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Senyoku+no+Sigrdrifa+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41372__senyoku_no_sigrdrifa.txt) | ~0~ | 1549 | 2020-12-26 16:01 |
| 41345 | [![41345__noblesse](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41345__noblesse.jpg)](https://myanimelist.net/anime/41345/Noblesse) | [Noblesse](https://subsplease.org/shows/noblesse) | TV | 13 / 13 | **Finished Airing** | 6.89 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Noblesse+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41345__noblesse.txt) | ~0~ | 2127 | 2020-12-30 14:01 |
| 41283 | [![41283__cardfight_vanguard_gaiden_if](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/41283__cardfight_vanguard_gaiden_if.jpg)](https://myanimelist.net/anime/41283/Cardfight_Vanguard_Gaiden__If) | [Cardfight!! Vanguard Gaiden - If](https://subsplease.org/shows/cardfight-vanguard-gaiden-if) | TV | 9 / 25 | **Finished Airing** | 6.48 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Cardfight+Vanguard+Gaiden+If+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/41283__cardfight_vanguard_gaiden_if.txt) | ~0~ | 186 | 2020-11-27 23:41 |
| 40974 | [![40974__kuma_kuma_kuma_bear](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40974__kuma_kuma_kuma_bear.jpg)](https://myanimelist.net/anime/40974/Kuma_Kuma_Kuma_Bear) | [Kuma Kuma Kuma Bear](https://subsplease.org/shows/kuma-kuma-kuma-bear) | TV | 12 / 12 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kuma+Kuma+Kuma+Bear+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40974__kuma_kuma_kuma_bear.txt) | ~0~ | 2461 | 2020-12-23 13:01 |
| 40958 | [![40958__rail_romanesque](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40958__rail_romanesque.jpg)](https://myanimelist.net/anime/40958/Rail_Romanesque) | [Rail Romanesque](https://subsplease.org/shows/rail-romanesque) | TV | 12 / 12 | **Finished Airing** | 5.17 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Rail+Romanesque+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40958__rail_romanesque.txt) | ~0~ | 638 | 2020-12-18 17:00 |
| 40957 | [![40957__shin_chuuka_ichiban_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40957__shin_chuuka_ichiban_2nd_season.jpg)](https://myanimelist.net/anime/40957/Shin_Chuuka_Ichiban_2nd_Season) | [Shin Chuuka Ichiban!](https://subsplease.org/shows/shin-chuuka-ichiban) | TV | 12 / 12 | **Finished Airing** | 6.68 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shin+Chuuka+Ichiban+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40957__shin_chuuka_ichiban_2nd_season.txt) | ~0~ | 503 | 2021-03-29 16:42 |
| 40906 | [![40906__dragon_quest_dai_no_daibouken_2020](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40906__dragon_quest_dai_no_daibouken_2020.jpg)](https://myanimelist.net/anime/40906/Dragon_Quest__Dai_no_Daibouken_2020) | [Dragon Quest - Dai no Daibouken (2020)](https://subsplease.org/shows/dragon-quest-dai-no-daibouken-2020) | TV | 51 / 100 | **Finished Airing** | 7.73 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Dragon+Quest+Dai+no+Daibouken+2020+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40906__dragon_quest_dai_no_daibouken_2020.txt) | ~0~ | 1599 | 2022-10-22 02:04 |
| 40901 | [![40901__toji_no_miko_kizamishi_issen_no_tomoshibi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40901__toji_no_miko_kizamishi_issen_no_tomoshibi.jpg)](https://myanimelist.net/anime/40901/Toji_no_Miko__Kizamishi_Issen_no_Tomoshibi) | [Toji no Miko - Kizamishi Issen no Tomoshibi](https://subsplease.org/shows/toji-no-miko-kizamishi-issen-no-tomoshibi) | OVA | 2 / 2 | **Finished Airing** | 6.63 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Toji+no+Miko+Kizamishi+Issen+no+Tomoshibi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40901__toji_no_miko_kizamishi_issen_no_tomoshibi.txt) | ~0~ | 808 | 2020-11-29 18:29 |
| 40885 | [![40885__can_ci_pin_fangzhu_xingkong](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40885__can_ci_pin_fangzhu_xingkong.jpg)](https://myanimelist.net/anime/40885/Can_Ci_Pin__Fangzhu_Xingkong) | [The Defective](https://subsplease.org/shows/the-defective) | ONA | 16 / 16 | **Finished Airing** | 6.97 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+The+Defective+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40885__can_ci_pin_fangzhu_xingkong.txt) | ~0~ | 712 | 2021-11-05 03:02 |
| 40833 | [![40833__inu_to_neko_docchi_mo_katteru_to_mainichi_tanoshii](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40833__inu_to_neko_docchi_mo_katteru_to_mainichi_tanoshii.jpg)](https://myanimelist.net/anime/40833/Inu_to_Neko_Docchi_mo_Katteru_to_Mainichi_Tanoshii) | [Inu to Neko Docchimo Katteru to Mainichi Tanoshii](https://subsplease.org/shows/inu-to-neko-docchimo-katteru-to-mainichi-tanoshii) | TV | 24 / 24 | **Finished Airing** | 7.23 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Inu+to+Neko+Docchimo+Katteru+to+Mainichi+Tanoshii+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40833__inu_to_neko_docchi_mo_katteru_to_mainichi_tanoshii.txt) | ~0~ | 491 | 2021-03-26 18:00 |
| 40803 | [![40803__hypnosis_mic_division_rap_battle_rhyme_anima](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40803__hypnosis_mic_division_rap_battle_rhyme_anima.jpg)](https://myanimelist.net/anime/40803/Hypnosis_Mic__Division_Rap_Battle_-_Rhyme_Anima) | [Hypnosis Mic -Division Rap Battle- Rhyme Anima](https://subsplease.org/shows/hypnosis-mic-division-rap-battle-rhyme-anima) | TV | 13 / 13 | **Finished Airing** | 6.81 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Hypnosis+Mic+Division+Rap+Battle+Rhyme+Anima+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40803__hypnosis_mic_division_rap_battle_rhyme_anima.txt) | ~0~ | 576 | 2020-12-25 16:31 |
| 40786 | [![40786__skate_leading_stars](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40786__skate_leading_stars.jpg)](https://myanimelist.net/anime/40786/Skate-Leading☆Stars) | [Skate Leading Stars](https://subsplease.org/shows/skate-leading-stars) | TV | 12 / 12 | **Finished Airing** | 6.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Skate+Leading+Stars+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40786__skate_leading_stars.txt) | ~0~ | 540 | 2021-03-14 14:02 |
| 40679 | [![40679__2_43_seiin_koukou_danshi_volley_bu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40679__2_43_seiin_koukou_danshi_volley_bu.jpg)](https://myanimelist.net/anime/40679/243__Seiin_Koukou_Danshi_Volley-bu) | [2.43 - Seiin Koukou Danshi Volley-bu](https://subsplease.org/shows/2-43-seiin-koukou-danshi-volley-bu) | TV | 12 / 12 | **Finished Airing** | 6.14 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+2+43+Seiin+Koukou+Danshi+Volley+bu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40679__2_43_seiin_koukou_danshi_volley_bu.txt) | ~0~ | 1370 | 2021-03-25 18:47 |
| 40610 | [![40610__healin_good_precure](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40610__healin_good_precure.jpg)](https://myanimelist.net/anime/40610/Healin_Good♡Precure) | [Healin Good Precure](https://subsplease.org/shows/healin-good-precure) | TV | 19 / 45 | **Finished Airing** | 6.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Healin+Good+Precure+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40610__healin_good_precure.txt) | ~0~ | 264 | 2021-02-21 01:33 |
| 40595 | [![40595__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40595__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen.jpg)](https://myanimelist.net/anime/40595/Kimi_to_Boku_no_Saigo_no_Senjou_Aruiwa_Sekai_ga_Hajimaru_Seisen) | [Kimi to Boku no Saigo no Senjou, Arui wa Sekai ga Hajimaru Seisen](https://subsplease.org/shows/kimi-to-boku-no-saigo-no-senjou-arui-wa-sekai-ga-hajimaru-seisen) | TV | 12 / 12 | **Finished Airing** | 6.69 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Kimi+to+Boku+no+Saigo+no+Senjou+Arui+wa+Sekai+ga+Hajimaru+Seisen+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40595__kimi_to_boku_no_saigo_no_senjou_aruiwa_sekai_ga_hajimaru_seisen.txt) | ~0~ | 3112 | 2020-12-23 18:59 |
| 40506 | [![40506__shadowverse](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40506__shadowverse.jpg)](https://myanimelist.net/anime/40506/Shadowverse) | [Shadowverse](https://subsplease.org/shows/shadowverse) | TV | 25 / 48 | **Finished Airing** | 5.75 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shadowverse+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40506__shadowverse.txt) | ~0~ | 358 | 2021-03-30 18:41 |
| 40504 | [![40504__major_2nd_2nd_season](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40504__major_2nd_2nd_season.jpg)](https://myanimelist.net/anime/40504/Major_2nd_2nd_Season) | [Major 2nd S2](https://subsplease.org/shows/major-2nd-s2) | TV | 6 / 25 | **Finished Airing** | 7.47 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Major+2nd+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40504__major_2nd_2nd_season.txt) | ~0~ | 500 | 2020-11-07 12:01 |
| 40488 | [![40488__futsal_boys](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40488__futsal_boys.jpg)](https://myanimelist.net/anime/40488/Futsal_Boys) | [Futsal Boys!!!!!](https://subsplease.org/shows/futsal-boys) | TV | 12 / 12 | **Finished Airing** | 5.46 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Futsal+Boys+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40488__futsal_boys.txt) | ~0~ | 831 | 2022-03-27 14:32 |
| 40397 | [![40397__maoujou_de_oyasumi](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40397__maoujou_de_oyasumi.jpg)](https://myanimelist.net/anime/40397/Maoujou_de_Oyasumi) | [Maou-jou de Oyasumi](https://subsplease.org/shows/maou-jou-de-oyasumi) | TV | 12 / 12 | **Finished Airing** | 7.96 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Maou+jou+de+Oyasumi+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40397__maoujou_de_oyasumi.txt) | ~0~ | 2926 | 2020-12-21 18:01 |
| 40359 | [![40359__ikebukuro_west_gate_park](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40359__ikebukuro_west_gate_park.jpg)](https://myanimelist.net/anime/40359/Ikebukuro_West_Gate_Park) | [Ikebukuro West Gate Park](https://subsplease.org/shows/ikebukuro-west-gate-park) | TV | 12 / 12 | **Finished Airing** | 6.87 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ikebukuro+West+Gate+Park+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40359__ikebukuro_west_gate_park.txt) | ~0~ | 1292 | 2020-12-22 13:01 |
| 40358 | [![40358__gal_to_kyouryuu](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40358__gal_to_kyouryuu.jpg)](https://myanimelist.net/anime/40358/Gal_to_Kyouryuu) | [Gal to Kyouryuu](https://subsplease.org/shows/gal-to-kyouryuu) | TV | 5 / 12 | **Finished Airing** | 6.45 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gal+to+Kyouryuu+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40358__gal_to_kyouryuu.txt) | ~0~ | 691 | 2020-12-19 17:31 |
| 40272 | [![40272__a3_season_autumn_winter](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/40272__a3_season_autumn_winter.jpg)](https://myanimelist.net/anime/40272/A3_Season_Autumn___Winter) | [A3! Season Autumn & Winter](https://subsplease.org/shows/a3-season-autumn-winter) | TV | 12 / 12 | **Finished Airing** | 7.25 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+A3+Season+Autumn+Winter+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/40272__a3_season_autumn_winter.txt) | ~0~ | 252 | 2020-12-28 16:02 |
| 39917 | [![39917__sabiiro_no_armor_reimei](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39917__sabiiro_no_armor_reimei.jpg)](https://myanimelist.net/anime/39917/Sabiiro_no_Armor__Reimei) | [Sabiiro no Armor - Reimei](https://subsplease.org/shows/sabiiro-no-armor-reimei) | TV | 12 / 12 | **Finished Airing** | 3.83 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Sabiiro+no+Armor+Reimei+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39917__sabiiro_no_armor_reimei.txt) | ~0~ | 784 | 2022-03-27 14:31 |
| 39725 | [![39725__i_chu_halfway_through_the_idol](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39725__i_chu_halfway_through_the_idol.jpg)](https://myanimelist.net/anime/39725/I★Chu__Halfway_Through_the_Idol) | [I-Chu - Halfway Through the Idol](https://subsplease.org/shows/i-chu-halfway-through-the-idol) | TV | 12 / 12 | **Finished Airing** | 6.56 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+I+Chu+Halfway+Through+the+Idol+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39725__i_chu_halfway_through_the_idol.txt) | ~0~ | 387 | 2021-03-24 15:02 |
| 39609 | [![39609__ochikobore_fruit_tart](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/39609__ochikobore_fruit_tart.jpg)](https://myanimelist.net/anime/39609/Ochikobore_Fruit_Tart) | [Ochikobore Fruit Tart](https://subsplease.org/shows/ochikobore-fruit-tart) | TV | 12 / 12 | **Finished Airing** | 6.77 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Ochikobore+Fruit+Tart+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/39609__ochikobore_fruit_tart.txt) | ~0~ | 1090 | 2020-12-28 13:31 |
| 38669 | [![38669__tsukiuta_the_animation_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38669__tsukiuta_the_animation_2.jpg)](https://myanimelist.net/anime/38669/Tsukiuta_The_Animation_2) | [Tsukiuta. The Animation S2](https://subsplease.org/shows/tsukiuta-the-animation-s2) | TV | 13 / 13 | **Finished Airing** | 6.65 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsukiuta+The+Animation+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38669__tsukiuta_the_animation_2.txt) | ~0~ | 221 | 2020-12-30 14:31 |
| 38440 | [![38440__shikizakura](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38440__shikizakura.jpg)](https://myanimelist.net/anime/38440/Shikizakura) | [Shikizakura](https://subsplease.org/shows/shikizakura) | TV | 12 / 12 | **Finished Airing** | 5.84 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Shikizakura+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38440__shikizakura.txt) | ~0~ | 1606 | 2021-12-26 03:02 |
| 38337 | [![38337__gochuumon_wa_usagi_desu_ka_bloom](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38337__gochuumon_wa_usagi_desu_ka_bloom.jpg)](https://myanimelist.net/anime/38337/Gochuumon_wa_Usagi_desu_ka_Bloom) | [Gochuumon wa Usagi Desu ka S3](https://subsplease.org/shows/gochuumon-wa-usagi-desu-ka-s3) | TV | 12 / 12 | **Finished Airing** | 7.92 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Gochuumon+wa+Usagi+Desu+ka+S3+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38337__gochuumon_wa_usagi_desu_ka_bloom.txt) | ~0~ | 1725 | 2020-12-26 13:01 |
| 38005 | [![38005__strike_witches_road_to_berlin](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/38005__strike_witches_road_to_berlin.jpg)](https://myanimelist.net/anime/38005/Strike_Witches__Road_to_Berlin) | [Strike Witches - Road to Berlin](https://subsplease.org/shows/strike-witches-road-to-berlin) | TV | 12 / 12 | **Finished Airing** | 7.35 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Strike+Witches+Road+to+Berlin+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/38005__strike_witches_road_to_berlin.txt) | ~0~ | 1063 | 2020-12-23 17:06 |
| 37962 | [![37962__idolish7_second_beat](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37962__idolish7_second_beat.jpg)](https://myanimelist.net/anime/37962/IDOLiSH7_Second_Beat) | [IDOLiSH7 S2](https://subsplease.org/shows/idolish7-s2) | TV | 11 / 15 | **Finished Airing** | 8.13 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+IDOLiSH7+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37962__idolish7_second_beat.txt) | ~0~ | 253 | 2020-12-27 15:31 |
| 37008 | [![37008__tsukipro_the_animation_2](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/images/37008__tsukipro_the_animation_2.jpg)](https://myanimelist.net/anime/37008/Tsukipro_The_Animation_2) | [Tsukipro The Animation S2](https://subsplease.org/shows/tsukipro-the-animation-s2) | TV | 13 / 13 | **Finished Airing** | 6.41 | [Search](https://nyaa.si/?f=0&c=1_0&q=subsplease+Tsukipro+The+Animation+S2+1080p+mkv) | [Download](https://huggingface.co/datasets/deepghs/subsplease_animes/resolve/main/magnets/37008__tsukipro_the_animation_2.txt) | ~0~ | 443 | 2021-12-29 14:02 |
|
isp-uv-es/CloudSEN12Plus | isp-uv-es | "2025-01-01T00:54:09Z" | 3,760 | 7 | [
"task_categories:image-segmentation",
"language:en",
"license:cc0-1.0",
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"modality:tabular",
"modality:text",
"modality:geospatial",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"clouds",
"earth-observation",
"remote-sensing",
"sentinel-2",
"deep-learning",
"multi-spectral",
"satellite",
"geospatial"
] | [
"image-segmentation"
] | "2024-08-07T18:27:00Z" | ---
license: cc0-1.0
task_categories:
- image-segmentation
language:
- en
tags:
- clouds
- earth-observation
- remote-sensing
- sentinel-2
- deep-learning
- multi-spectral
- satellite
- geospatial
pretty_name: cloudsen12
size_categories:
- 100K<n<1M
---
# 🚨 New Dataset Version Released!
## We are excited to announce the release of **Version [1.1]** of our dataset!
## This update includes:
- **[L2A & L1C support]**.
- **[Temporal support]**.
- **[Check the data without downloading (Cloud-optimized properties)]**.
# 📥 Go to: https://huggingface.co/datasets/tacofoundation/cloudsen12 and follow the instructions in colab
<center>
<img src="cloudsen12.gif" alt="drawing" width="35%"/>
</center>
**CloudSEN12+** is a significant extension of the [CloudSEN12](https://cloudsen12.github.io/) dataset, doubling the number of expert-reviewed labels and making it, by
a large margin, the largest cloud detection dataset to date for Sentinel-2. All labels from the previous version have been curated and refined, enhancing the
dataset's trustworthiness. This new release is licensed **under CC0**, which puts it in the public domain and allows anyone to use, modify, and distribute
it without permission or attribution.
## Data Folder order
The CloudSEN12+ dataset is organized into `train`, `val`, and `test` splits. The images have
been padded from 509x509 to 512x512 and 2000x2000 to 2048x2048 to ensure that the patches are divisible by 32. The padding is filled with zeros on the left and bottom sides of the image. For
those who prefer traditional storage formats, GeoTIFF files are available in our [ScienceDataBank](https://www.scidb.cn/en/detail?dataSetId=2036f4657b094edfbb099053d6024b08&version=V1) repository.
<center>
<img src="https://cdn-uploads.huggingface.co/production/uploads/6402474cfa1acad600659e92/9UA4U3WObVeq7BAcf37-C.png" alt="drawing" width="50%"/>
</center>
*CloudSEN12+ spatial coverage. The terms p509 and p2000 denote the patch size 509 × 509 and 2000 × 2000, respectively. ‘high’, ‘scribble’, and ‘nolabel’ refer to the types of expert-labeled annotations*
**ML-STAC Snippet**
```python
import mlstac
dataset = mlstac.load('isp-uv-es/CloudSEN12Plus')
```
**Sensor: Sentinel2 - MSI**
**ML-STAC Task: image-segmentation**
**ML-STAC Dataset Version: 1.0.0**
**Data raw repository: [https://cloudsen12.github.io/](https://cloudsen12.github.io/)**
**Dataset discussion: [https://huggingface.co/datasets/isp-uv-es/CloudSEN12Plus/discussions](https://huggingface.co/datasets/isp-uv-es/CloudSEN12Plus/discussions)**
**Split_strategy: stratified**
**Paper: [https://www.sciencedirect.com/science/article/pii/S2352340924008163](https://www.sciencedirect.com/science/article/pii/S2352340924008163)**
## Data Providers
|Name|Role|URL|
| :---: | :---: | :---: |
|Image & Signal Processing|['host']|https://isp.uv.es/|
|ESA|['producer']|https://www.esa.int/|
## Curators
|Name|Organization|URL|
| :---: | :---: | :---: |
|Cesar Aybar|Image & Signal Processing|http://csaybar.github.io/|
## Labels
For human **_high-quality_** labels (also UnetMobV2_V2 & UnetMobV2_V1 predictions).
|Name|Value|
| :---: | :---: |
|clear|0|
|thick-cloud|1|
|thin-cloud|2|
|cloud-shadow|3|
For human **_scribble_** labels.
|Name|Value|
| :---: | :---: |
|clear|0|
|thick-cloud border|1|
|thick-cloud center|2|
|thin-cloud border|3|
|thin-cloud center|4|
|cloud-shadow border|5|
|cloud-shadow center|6|
## Dimensions
|Axis|Name|Description|
| :---: | :---: | :---: |
|0|C|Spectral bands|
|1|H|Height|
|2|W|Width|
## Spectral Bands
|Name|Common Name|Description|Center Wavelength|Full Width Half Max|Index|
| :---: | :---: | :---: | :---: | :---: | :---: |
|B01|coastal aerosol|Band 1 - Coastal aerosol - 60m|443.5|17.0|0|
|B02|blue|Band 2 - Blue - 10m|496.5|53.0|1|
|B03|green|Band 3 - Green - 10m|560.0|34.0|2|
|B04|red|Band 4 - Red - 10m|664.5|29.0|3|
|B05|red edge 1|Band 5 - Vegetation red edge 1 - 20m|704.5|13.0|4|
|B06|red edge 2|Band 6 - Vegetation red edge 2 - 20m|740.5|13.0|5|
|B07|red edge 3|Band 7 - Vegetation red edge 3 - 20m|783.0|18.0|6|
|B08|NIR|Band 8 - Near infrared - 10m|840.0|114.0|7|
|B8A|red edge 4|Band 8A - Vegetation red edge 4 - 20m|864.5|19.0|8|
|B09|water vapor|Band 9 - Water vapor - 60m|945.0|18.0|9|
|B10|cirrus|Band 10 - Cirrus - 60m|1375.5|31.0|10|
|B11|SWIR 1|Band 11 - Shortwave infrared 1 - 20m|1613.5|89.0|11|
|B12|SWIR 2|Band 12 - Shortwave infrared 2 - 20m|2199.5|173.0|12|
|CM1| Cloud Mask 1| Expert-labeled image. |-|-|13|
|CM2| Cloud Mask 2| UnetMobV2-V1 labeled image. |-|-|14|
## Data Structure
We use the `.mls` format to store the data on Hugging Face and GeoTIFF for ScienceDataBank.
## Folder Structure
The **fixed/** folder contains high and scribble labels, which have been improved in this new version. These changes have already been integrated.
The **demo/** folder contains examples illustrating how to utilize the models trained with CloudSEN12 to estimate the hardness and trustworthiness indices.
The **images/** folder contains the CloudSEN12+ imagery
## Download
The code below can be used to download the dataset using the `mlstac` library. For a more detailed example, please refer to the `examples` section in our
website [https://cloudsen12.github.io/](https://cloudsen12.github.io/).
```python
import mlstac
import matplotlib.pyplot as plt
import numpy as np
ds = mlstac.load(snippet="isp-uv-es/CloudSEN12Plus")
subset = ds.metadata[(ds.metadata["split"] == "test") & (ds.metadata["label_type"] == "high") & (ds.metadata["proj_shape"] == 509)][10:14]
datacube = mlstac.get_data(dataset=subset)
```
Make a plot of the data point downloaded
```python
datapoint = datacube[2]
datapoint_rgb = np.moveaxis(datapoint[[3, 2, 1]], 0, -1) / 5_000
fig, ax = plt.subplots(1, 3, figsize=(10, 5))
ax[0].imshow(datapoint_rgb)
ax[0].set_title("RGB")
ax[1].imshow(datapoint[13], cmap="gray")
ax[1].set_title("Human label")
ax[2].imshow(datapoint[14], cmap="gray")
ax[2].set_title("UnetMobV2 v1.0")
```
![image/png](https://cdn-uploads.huggingface.co/production/uploads/6402474cfa1acad600659e92/scVhZf3rkB3uWkZZ6Epmu.png)
## Citation
Cite the dataset as:
```bibtex
@article{aybar2024cloudsen12+,
title={CloudSEN12+: The largest dataset of expert-labeled pixels for cloud and cloud shadow detection in Sentinel-2},
author={Aybar, Cesar and Bautista, Lesly and Montero, David and Contreras, Julio and Ayala, Daryl and Prudencio, Fernando and Loja, Jhomira and Ysuhuaylas, Luis and Herrera, Fernando and Gonzales, Karen and others},
journal={Data in Brief},
pages={110852},
year={2024},
DOI={10.1016/j.dib.2024.110852},
publisher={Elsevier}
}
``` |
EPFL-CVLAB-SPACECRAFT/SwissCube | EPFL-CVLAB-SPACECRAFT | "2024-12-04T15:18:34Z" | 3,753 | 1 | [
"license:mit",
"modality:image",
"region:us"
] | null | "2024-10-31T09:10:06Z" | ---
license: mit
---
|
hf-internal-testing/dummy_image_text_data | hf-internal-testing | "2023-02-08T10:34:38Z" | 3,751 | 1 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-02-08T10:34:30Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1944983.0
num_examples: 20
download_size: 1690123
dataset_size: 1944983.0
---
# Dataset Card for "dummy_image_text_data"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
lowercaseonly/cghd | lowercaseonly | "2025-01-06T00:09:38Z" | 3,717 | 1 | [
"task_categories:object-detection",
"task_categories:image-segmentation",
"language:en",
"language:de",
"license:cc-by-3.0",
"size_categories:1K<n<10K",
"modality:image",
"modality:text",
"library:datasets",
"library:mlcroissant",
"region:us"
] | [
"object-detection",
"image-segmentation"
] | "2023-05-21T12:20:21Z" | ---
license: cc-by-3.0
pretty_name: A Public Ground-Truth Dataset for Handwritten Circuit Diagram Images
size_categories:
- 1K<n<10K
task_categories:
- object-detection
- image-segmentation
language:
- en
- de
---
# Public Ground-Truth Dataset for Handwritten Circuit Diagrams (GTDB-HD)
This repository contains images of hand-drawn electrical circuit diagrams, accompanied by bounding box annotations for object detection and segmentation ground-truth files. This dataset is intended to train (e.g. neural network) models for the purpose of the extraction of electrical graphs from raster graphics.
## Structure
The folder structure is made up as follows:
```
gtdh-hd
│ README.md # This File
│ classes.json # Classes List
│ classes_color.json # Classes to Color Map
│ classes_discontinuous.json # Classes Morphology Info
│ classes_ports.json # Electrical Port Descriptions for Classes
│ consistency.py # Dataset Statistics and Consistency Check
| loader.py # Simple Dataset Loader and Storage Functions
│ segmentation.py # Multiclass Segmentation Generation
│ utils.py # Helper Functions
│ requirements.txt # Requirements for Scripts
└───drafter_D
│ └───annotations # Bounding Box Annotations
│ │ │ CX_DY_PZ.xml
│ │ │ ...
│ │
│ └───images # Raw Images
│ │ │ CX_DY_PZ.jpg
│ │ │ ...
│ │
│ └───instances # Instance Segmentation Polygons
│ │ │ CX_DY_PZ.json
│ │ │ ...
│ │
│ └───segmentation # Binary Segmentation Maps (Strokes vs. Background)
│ │ │ CX_DY_PZ.jpg
│ │ │ ...
...
```
Where:
- `D` is the (globally) running number of a drafter
- `X` is the (globally) running number of the circuit (12 Circuits per Drafter)
- `Y` is the Local Number of the Circuit's Drawings (2 Drawings per Circuit)
- `Z` is the Local Number of the Drawing's Image (4 Pictures per Drawing)
### Image Files
Every image is RGB-colored and either stored as `jpg`, `jpeg` or `png` (both uppercase and lowercase suffixes exist).
### Bounding Box Annotations
A complete list of class labels including a suggested mapping table to integer numbers for training and prediction purposes can be found in `classes.json`. The annotations contains **BB**s (Bounding Boxes) of **RoI**s (Regions of Interest) like electrical symbols or texts within the raw images and are stored in the [PASCAL VOC](http://host.robots.ox.ac.uk/pascal/VOC/) format.
Please note: *For every Raw image in the dataset, there is an accompanying bounding box annotation file.*
#### Known Labeled Issues
- C25_D1_P4 cuts off a text
- C27 cuts off some texts
- C29_D1_P1 has one additional text
- C31_D2_P4 has a text less
- C33_D1_P4 has a text less
- C46_D2_P2 cuts off a text
### Instance Segmentation
For every binary segmentation map, there is an accompanying polygonal annotation file for instance segmentation purposes, which is stored in the [labelme](https://github.com/wkentaro/labelme) format. Note that the contained polygons are quite coarse, intended to be used in conjunction with the binary segmentation maps for connection extraction and to tell individual instances with overlapping BBs apart.
### Segmentation Maps
Binary Segmentation images are available for some samples and bear the same resolution as the respective image files. They are considered to contain only black and white pixels indicating areas of drawings strokes and background respectively.
### Netlists
For some images, there are also netlist files available, which are stored in the [ASC](http://ltwiki.org/LTspiceHelp/LTspiceHelp/Spice_Netlist.htm) format.
### Consistency and Statistics
This repository comes with a stand-alone script to:
- Obtain Statistics on
- Class Distribution
- BB Sizes
- Check the BB Consistency
- Classes with Regards to the `classes.json`
- Counts between Pictures of the same Drawing
- Ensure a uniform writing style of the Annotation Files (indent)
The respective script is called without arguments to operate on the **entire** dataset:
```
$ python3 consistency.py
```
Note that due to a complete re-write of the annotation data, the script takes several seconds to finish. A drafter can be specified as CLI argument to restrict the evaluation (for example drafter 15):
```
$ python3 consistency.py 15
```
### Multi-Class (Instance) Segmentation Processing
This dataset comes with a script to process both new and existing (instance) segmentation files. It is invoked as follows:
```
$ python3 segmentation.py <command> <drafter_id> <target> <source>
```
Where:
- `<command>` has to be one of:
- `transform`
- Converts existing BB Annotations to Polygon Annotations
- Default target folder: `instances`
- Existing polygon files will not be overridden in the default settings, hence this command will take no effect in an completely populated dataset.
- Intended to be invoked after adding new binary segmentation maps
- **This step has to be performed before all other commands**
- `wire`
- Generates Wire Describing Polygons
- Default target folder: `wires`
- `keypoint`
- Generates Keypoints for Component Terminals
- Default target folder: `keypoints`
- `create`
- Generates Multi-Class segmentation Maps
- Default target folder: `segmentation_multi_class`
- `refine`
- Refines Coarse Polygon Annotations to precisely match the annotated objects
- Default target folder: `instances_refined`
- For instance segmentation purposes
- `pipeline`
- executes `wire`,`keypoint` and `refine` stacked, with one common `source` and `target` folder
- Default target folder: `instances_refined`
- `assign`
- Connector Point to Port Type Assignment by Geometric Transformation Matching
- `<drafter_id>` **optionally** restricts the process to one of the drafters
- `<target>` **optionally** specifies a divergent target folder for results to be placed in
- `<source>` **optionally** specifies a divergent source folder to read from
Please note that source and target folders are **always** subfolders inside the individual drafter folders. Specifying source and target folders allows stacking the results of individual processing steps. For example, to perform the entire pipeline for drafter 20 manually, use:
```
python3 segmentation.py wire 20 instances_processed instances
python3 segmentation.py keypoint 20 instances_processed instances_processed
python3 segmentation.py refine 20 instances_processed instances_processed
```
### Dataset Loader
This dataset is also shipped with a set of loader and writer functions, which are internally used by the segmentation and consistency scripts and can be used for training. The dataset loader is simple, framework-agnostic and has been prepared to be callable from any location in the file system. Basic usage:
```
from loader import read_dataset
db_bb = read_dataset() # Read all BB Annotations
db_seg = read_dataset(segmentation=True) # Read all Polygon Annotations
db_bb_val = read_dataset(drafter=12) # Read Drafter 12 BB Annotations
len(db_bb) # Get The Amount of Samples
db_bb[5] # Get an Arbitrary Sample
db = read_images(drafter=12) # Returns a list of (Image, Annotation) pairs
db = read_snippets(drafter=12) # Returns a list of (Image, Annotation) pairs
```
## Citation
If you use this dataset for scientific publications, please consider citing us as follows:
```
@inproceedings{thoma2021public,
title={A Public Ground-Truth Dataset for Handwritten Circuit Diagram Images},
author={Thoma, Felix and Bayer, Johannes and Li, Yakun and Dengel, Andreas},
booktitle={International Conference on Document Analysis and Recognition},
pages={20--27},
year={2021},
organization={Springer}
}
```
## How to Contribute
If you want to contribute to the dataset as a drafter or in case of any further questions, please send an email to: <[email protected]> (corresponding author), <[email protected]>, <[email protected]>
## Guidelines
These guidelines are used throughout the generation of the dataset. They can be used as an instruction for participants and data providers.
### Drafter Guidelines
- 12 Circuits should be drawn, each of them twice (24 drawings in total)
- Most important: The drawing should be as natural to the drafter as possible
- Free-Hand sketches are preferred, using rulers and drawing Template stencils should be avoided unless it appears unnatural to the drafter
- Different types of pens/pencils should be used for different drawings
- Different kinds of (colored, structured, ruled, lined) paper should be used
- One symbol set (European/American) should be used throughout one drawing (consistency)
- It is recommended to use the symbol set that the drafter is most familiar with
- It is **strongly** recommended to share the first one or two circuits for review by the dataset organizers before drawing the rest to avoid problems (complete redrawing in worst case)
### Image Capturing Guidelines
- For each drawing, 4 images should be taken (96 images in total per drafter)
- Angle should vary
- Lighting should vary
- Moderate (e.g. motion) blur is allowed
- All circuit-related aspects of the drawing must be _human-recognizable_
- The drawing should be the main part of the image, but _naturally_ occurring objects from the environment are welcomed
- The first image should be _clean_, i.e. ideal capturing conditions
- Kinks and Buckling can be applied to the drawing between individual image capturing
- Try to use the file name convention (`CX_DY_PZ.jpg`) as early as possible
- The circuit range `X` will be given to you
- `Y` should be `1` or `2` for the drawing
- `Z` should be `1`,`2`,`3` or `4` for the picture
### Object Annotation Guidelines
- General Placement
- A **RoI** must be **completely** surrounded by its **BB**
- A **BB** should be as tight as possible to the **RoI**
- In case of connecting lines not completely touching the symbol, the BB should be extended (only by a small margin) to enclose those gaps (especially considering junctions)
- Characters that are part of the **essential symbol definition** should be included in the BB (e.g. the `+` of a polarized capacitor should be included in its BB)
- **Junction** annotations
- Used for actual junction points (Connection of three or more wire segments with a small solid circle)
- Used for connection of three or more straight line wire segments where a physical connection can be inferred by context (i.e. can be distinguished from **crossover**)
- Used for wire line corners
- Redundant Junction Points should **not** be annotated (small solid circle in the middle of a straight line segment)
- Should not be used for corners or junctions that are part of the symbol definition (e.g. Transistors)
- **Crossover** Annotations
- If dashed/dotted line: BB should cover the two next dots/dashes
- **Text** annotations
- Individual Text Lines should be annotated Individually
- Text Blocks should only be annotated If Related to Circuit or Circuit's Components
- Semantically meaningful chunks of information should be annotated Individually
- component characteristics enclosed in a single annotation (e.g. __100Ohms__, __10%__ tolerance, __5V__ max voltage)
- Component Names and Types (e.g. __C1__, __R5__, __ATTINY2313__)
- Custom Component Terminal Labels (i.e. __Integrated Circuit__ Pins)
- Circuit Descriptor (e.g. "Radio Amplifier")
- Texts not related to the Circuit should be ignored
- e.g. Brief paper, Company Logos
- Drafters auxiliary markings for internal organization like "D12"
- Texts on Surrounding or Background Papers
- Characters which are part of the essential symbol definition should __not__ be annotated as Text dedicatedly
- e.g. Schmitt Trigger __S__, AND gate __&__, motor __M__, polarized capacitor __+__
- Only add terminal text annotation if the terminal is not part of the essential symbol definition
- **Table** cells should be annotated independently
- **Operation Amplifiers**
- Both the triangular US symbols and the european IC-like symbols symbols for OpAmps should be labeled `operational_amplifier`
- The `+` and `-` signs at the OpAmp's input terminals are considered essential and should therefore not be annotated as texts
- **Complex Components**
- Both the entire Component and its sub-Components and internal connections should be annotated:
| Complex Component | Annotation |
| ----------------- | ------------------------------------------------------ |
| Optocoupler | 0. `optocoupler` as Overall Annotation |
| | 1. `diode.light_emitting` |
| | 2. `transistor.photo` (or `resistor.photo`) |
| | 3. `optical` if LED and Photo-Sensor arrows are shared |
| | Then the arrows area should be includes in all |
| Relay | 0. `relay` as Overall Annotation |
| (also for | 1. `inductor` |
| coupled switches) | 2. `switch` |
| | 3. `mechanical` for the dashed line between them |
| Transformer | 0. `transformer` as Overall Annotation |
| | 1. `inductor` or `inductor.coupled` (watch the dot) |
| | 3. `magnetic` for the core |
#### Rotation Annotations
The rotation (integer in degrees) should capture the overall rotation of the symbol shape. However, the position of the terminals should also be taken into consideration. Under idealized circumstances (no perspective distortion and accurately drawn symbols according to the symbol library), these two requirements coincide. For pathological cases, however, in which the shape and the set of terminals (or even individual terminals) are conflicting, the rotation should compromise between all factors.
Rotation annotations are currently work in progress. They should be provided for at least the following classes:
- "voltage.dc"
- "resistor"
- "capacitor.unpolarized"
- "diode"
- "transistor.bjt"
#### Text Annotations
- The Character Sequence in the Text Label Annotations should describe the actual Characters depicted in the respective Bounding Box as Precisely as Possible
- Bounding Box Annotations of class `text`
- Bear an additional `<text>` tag in which their content is given as string
- The `Omega` and `Mikro` Symbols are escaped respectively
- Currently Work in Progress
- The utils script allows for migrating text annotations from one annotation file to another: `python3 utils.py source target`
### Segmentation Map Guidelines
- Areas of __Intended__ drawing strokes (ink and pencil abrasion respectively) should be marked black, all other pixels (background) should be white
- shining through the paper (from the rear side or other sheets) should be considered background
### Polygon Annotation Guidelines
0. Before starting, make sure the respective files exist for the image sample to be polygon-annotated:
- BB Annotations (Pascal VOC XML File)
- (Binary) Segmentation Map
1. Transform the BB annotations into raw polygons
- Use: `python3 segmentation.py transform`
2. Refine the Polygons
- **To Avoid Embedding Image Data into the resulting JSON**, use: `labelme --nodata`
- Just make sure there are no overlaps between instances
- Especially take care about overlaps with structural elements like junctions and crossovers
3. Generate Multi-Class Segmentation Maps from the refined polygons
- Use: `python3 segmentation.py create`
- Use the generated images for a visual inspection
- After spotting problems, continue with Step 2
### Terminal Annotation Guidelines
```
labelme --labels "connector" --config "{shift_auto_shape_color: 1}" --nodata
```
## Licence
The entire content of this repository, including all image files, annotation files as well as source code, metadata and documentation, has been published under the [Creative Commons Attribution Share Alike Licence 3.0](https://creativecommons.org/licenses/by-sa/3.0/).
|
davanstrien/MAMe2 | davanstrien | "2023-07-27T09:27:06Z" | 3,716 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-07-26T11:20:15Z" | ---
dataset_info:
config_name: '256'
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': Albumen photograph
'1': Bronze
'2': Ceramic
'3': Clay
'4': Engraving
'5': Etching
'6': Faience
'7': Glass
'8': Gold
'9': Graphite
'10': Hand-colored engraving
'11': Hand-colored etching
'12': Iron
'13': Ivory
'14': Limestone
'15': Lithograph
'16': Marble
'17': Oil on canvas
'18': Pen and brown ink
'19': Polychromed wood
'20': Porcelain
'21': Silk and metal thread
'22': Silver
'23': Steel
'24': Wood
'25': Wood engraving
'26': Woodblock
'27': Woodcut
'28': Woven fabric
- name: Museum
dtype: string
- name: Museum-based instance ID
dtype: string
- name: Width
dtype: float32
- name: Height
dtype: float32
- name: Product size
dtype: float32
- name: Aspect ratio
dtype: float32
splits:
- name: train
num_bytes: 441294458.5
num_examples: 20300
- name: validation
num_bytes: 26810584.95
num_examples: 1450
- name: test
num_bytes: 362018531.291
num_examples: 15657
download_size: 723376699
dataset_size: 830123574.7409999
configs:
- config_name: '256'
data_files:
- split: train
path: 256/train-*
- split: validation
path: 256/validation-*
- split: test
path: 256/test-*
---
# Dataset Card for "MAMe2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
alkzar90/ddpm-rl-finetuning-evals | alkzar90 | "2024-10-01T05:14:36Z" | 3,710 | 0 | [
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-08-03T17:06:37Z" | ---
license: apache-2.0
size_categories:
- 100K<n<1M
pretty_name: evalhrf
configs:
- config_name: ddpm-celebahq
data_files:
- split: train
path: data/ddpm-celebahq/train/batch_*.zip
- config_name: ddpm-celebahq-mini
data_files:
- split: train
path: data/celebahq-baseline-mini/train/batch_*.zip
- config_name: ddpo-aesthetic
data_files:
- split: train
path: data/ddpo-aesthetic-celebahq/train/batch_*.zip
- config_name: ddpo-aesthetic-fix-lr
data_files:
- split: train
path: data/ddpo-aesthetic-fix-lr/train/batch_*.zip
- config_name: ddpo-compressibility
data_files:
- split: train
path: data/ddpo-compressibility-celebahq/train/batch_*.zip
- config_name: ddpo-compressibility-mini
data_files:
- split: train
path: data/ddpo-compressibility-celebahq-mini/train/batch_*.zip
- config_name: ddpo-incompressibility
data_files:
- split: train
path: data/ddpo-incompressibility-celebahq/train/batch_*.zip
- config_name: ddpo-incompressibility-mini
data_files:
- split: train
path: data/ddpo-incompressibility-celebahq-mini/train/batch_*.zip
- config_name: hrf-compressibility-window-baseline1
data_files:
- split: train
path: data/hrf-compressibility-window-baseline-seed1/train/batch_*.zip
- config_name: hrf-compressibility-window-baseline2
data_files:
- split: train
path: data/hrf-compressibility-window-baseline-seed2/train/batch_*.zip
- config_name: hrf-compressibility-window-baseline3
data_files:
- split: train
path: data/hrf-compressibility-window-baseline-seed3/train/batch_*.zip
- config_name: hrf-compressibility-window-early1
data_files:
- split: train
path: data/hrf-compressibility-window-early-seed1/train/batch_*.zip
- config_name: hrf-compressibility-window-early2
data_files:
- split: train
path: data/hrf-compressibility-window-early-seed2/train/batch_*.zip
- config_name: hrf-compressibility-window-early3
data_files:
- split: train
path: data/hrf-compressibility-window-early-seed3/train/batch_*.zip
- config_name: hrf-compressibility-window-later1
data_files:
- split: train
path: data/hrf-compressibility-window-later-seed1/train/batch_*.zip
- config_name: hrf-compressibility-window-later2
data_files:
- split: train
path: data/hrf-compressibility-window-later-seed2/train/batch_*.zip
- config_name: hrf-compressibility-window-later3
data_files:
- split: train
path: data/hrf-compressibility-window-later-seed3/train/batch_*.zip
- config_name: hrf-incompressibility-window-baseline1
data_files:
- split: train
path: data/hrf-incompressibility-window-baseline-seed1/train/batch_*.zip
- config_name: hrf-incompressibility-window-baseline2
data_files:
- split: train
path: data/hrf-incompressibility-window-baseline-seed2/train/batch_*.zip
- config_name: hrf-incompressibility-window-baseline3
data_files:
- split: train
path: data/hrf-incompressibility-window-baseline-seed3/train/batch_*.zip
- config_name: hrf-incompressibility-window-early1
data_files:
- split: train
path: data/hrf-incompressibility-window-early-seed1/train/batch_*.zip
- config_name: hrf-incompressibility-window-early2
data_files:
- split: train
path: data/hrf-incompressibility-window-early-seed2/train/batch_*.zip
- config_name: hrf-incompressibility-window-early3
data_files:
- split: train
path: data/hrf-incompressibility-window-early-seed3/train/batch_*.zip
- config_name: hrf-incompressibility-window-later1
data_files:
- split: train
path: data/hrf-incompressibility-window-later-seed1/train/batch_*.zip
- config_name: hrf-incompressibility-window-later2
data_files:
- split: train
path: data/hrf-incompressibility-window-later-seed2/train/batch_*.zip
- config_name: hrf-incompressibility-window-later3
data_files:
- split: train
path: data/hrf-incompressibility-window-later-seed3/train/batch_*.zip
- config_name: hrf-aesthetic-window-baseline1
data_files:
- split: train
path: data/hrf-aesthetic-window-baseline-seed1/train/batch_*.zip
- config_name: hrf-aesthetic-window-baseline2
data_files:
- split: train
path: data/hrf-aesthetic-window-baseline-seed2/train/batch_*.zip
- config_name: hrf-aesthetic-window-baseline3
data_files:
- split: train
path: data/hrf-aesthetic-window-baseline-seed3/train/batch_*.zip
- config_name: hrf-aesthetic-window-early1
data_files:
- split: train
path: data/hrf-aesthetic-window-early-seed1/train/batch_*.zip
- config_name: hrf-aesthetic-window-early2
data_files:
- split: train
path: data/hrf-aesthetic-window-early-seed2/train/batch_*.zip
- config_name: hrf-aesthetic-window-early3
data_files:
- split: train
path: data/hrf-aesthetic-window-early-seed3/train/batch_*.zip
- config_name: hrf-aesthetic-window-later1
data_files:
- split: train
path: data/hrf-aesthetic-window-later-seed1/train/batch_*.zip
- config_name: hrf-aesthetic-window-later2
data_files:
- split: train
path: data/hrf-aesthetic-window-later-seed2/train/batch_*.zip
- config_name: hrf-aesthetic-window-later3
data_files:
- split: train
path: data/hrf-aesthetic-window-later-seed3/train/batch_*.zip
- config_name: hrf-aesthetic-window-adaptive
data_files:
- split: train
path: data/hrf-aesthetic-window-adaptive/train/batch_*.zip
- config_name: hrf-compressibility-window-adaptive
data_files:
- split: train
path: data/hrf-compressibility-window-adaptive/train/batch_*.zip
- config_name: hrf-incompressibility-window-adaptive
data_files:
- split: train
path: data/hrf-incompressibility-window-adaptive/train/batch_*.zip
- config_name: hrf-compressibility-window-adaptive2
data_files:
- split: train
path: data/hrf-compressibility-window-adaptive2/train/batch_*.zip
- config_name: hrf-aesthetic-window-adaptive2
data_files:
- split: train
path: data/hrf-aesthetic-window-adaptive2/train/batch_*.zip
dataset_info:
- config_name: ddpm-celebahq
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-baseline1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-baseline2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-baseline3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-early1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-early2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-early3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-later1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-later2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-compressibility-window-later3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-baseline1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-baseline2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-baseline3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-early1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-early2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-early3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-later1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-later2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-incompressibility-window-later3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-baseline1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-baseline2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-baseline3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-early1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-early2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-early3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-later1
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-later2
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: hrf-aesthetic-window-later3
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: ddpo-aesthetic
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: ddpo-compressibility
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: ddpo-incompressibility
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: ddpo-aesthetic-fix-lr
features:
- name: image
dtype: image
- name: reward
dtype: float
- config_name: ddpm-celebahq-mini
features:
- name: image
dtype: image
- config_name: ddpo-compressibility-mini
features:
- name: image
dtype: image
- config_name: ddpo-incompressibility-mini
features:
- name: image
dtype: image
- config_name: hrf-aesthetic-window-adaptive
features:
- name: image
dtype: image
- config_name: hrf-compressibility-window-adaptive
features:
- name: image
dtype: image
- config_name: hrf-incompressibility-window-adaptive
features:
- name: image
dtype: image
- config_name: hrf-aesthetic-window-adaptive2
features:
- name: image
dtype: image
- config_name: hrf-compressibility-window-adaptive2
features:
- name: image
dtype: image
---
# Dataset Card for Eval Finetuning Diffusion Models with Reinforcement Learning
XYZ |
Avelina/smollm-corpus | Avelina | "2025-01-11T16:41:28Z" | 3,704 | 5 | [
"task_categories:text-generation",
"language:en",
"license:odc-by",
"size_categories:100M<n<1B",
"region:us"
] | [
"text-generation"
] | "2025-01-11T01:39:39Z" | ---
license: odc-by
dataset_info:
- config_name: default
features:
- name: text
dtype: string
configs:
- config_name: default
data_files:
- split: train
path: data*/train-*
task_categories:
- text-generation
language:
- en
size_categories:
- 100M<n<1B
---
# SmolLM-Corpus: Now shuffled and sharded!
This is a version of the SmolLM-Corpus where the 3 subsets have been interleved, shuffled and sharded as 23698 `jsonl.zst` files for easy streaming!
The dataset is comprised of the `cosmopedia-v2` and `fineweb-edu-dedup` subsets from the original [SmolLM-Corpus repo](https://huggingface.co/datasets/HuggingFaceTB/smollm-corpus), with the `python-edu` subset being pulled from my [python-edu repo](https://huggingface.co/datasets/Avelina/python-edu).
## Dataset Structure
The dataset is split into 24 subdirectories, with the first 23 containing 1000 shards and the 24th containing the final 698. The repository is structured as follows:
```
data00/
├── train-00000-of-23698.jsonl.zst
├── ...
└── train-00999-of-23698.jsonl.zst
data01/
├── train-01000-of-23698.jsonl.zst
├── ...
└── train-01999-of-23698.jsonl.zst
...
data22/
├── train-22000-of-23698.jsonl.zst
├── ...
└── train-22999-of-23698.jsonl.zst
data23/
├── train-23000-of-23698.jsonl.zst
├── ...
└── train-23697-of-23698.jsonl.zst
```
In general, you can obtain the exact download URL for all shards using the following python function:
```py
def get_url_from_shard( index: int ) -> str:
if index >= 23_698:
raise ValueError( f'Shard index must be less than 23,698 but received {index}' )
group = index // 1000
return f'https://huggingface.co/datasets/Avelina/smollm-corpus/resolve/main/data{group:02d}/train-{index:05d}-of-23698.jsonl.zst'
```
## Generation Code
Here is the code which was used to generate the shuffled shards. Note the use of non-contiguous interleaving in an attempt to uniformly pull documents from across entire subsets to loosely decouple shard index from original document position.
Please make sure you `pip install zstandard`!!!
```py
import tqdm
import datasets
from datasets import load_dataset
# Output directory and file format. Note that the file extension enforces zst compression is used.
OUTPUT_FMT = '/YOUR/FILE/PATH/HERE/data/train-{index:05d}-of-{num_shards:05d}.jsonl.zst'
# Total number of shards giving approximately 10,000 documents per shard
OUTPUT_NUM_SHARDS = 23698
# Grab the three datasets
ds_python = load_dataset( 'Avelina/python-edu' )
ds_cosmo = load_dataset( 'HuggingFaceTB/smollm-corpus', 'cosmopedia-v2' )
ds_edu = load_dataset( 'HuggingFaceTB/smollm-corpus', 'fineweb-edu-dedup' )
# Retain only the text columns and the train splits
ds_python = ds_python.select_columns( 'text' )[ 'train' ]
ds_cosmo = ds_cosmo.select_columns( 'text' )[ 'train' ]
ds_edu = ds_edu.select_columns( 'text' )[ 'train' ]
# Iterate over all shards with a nice progbar
for index in tqdm.tqdm( range( OUTPUT_NUM_SHARDS ) ):
# Get non-contiguous in-memory sub-shards for the three datasets
curr_python = ds_python.shard( num_shards=OUTPUT_NUM_SHARDS, index=index, contiguous=False, keep_in_memory=True )
curr_cosmo = ds_cosmo.shard( num_shards=OUTPUT_NUM_SHARDS, index=index, contiguous=False, keep_in_memory=True )
curr_edu = ds_edu.shard( num_shards=OUTPUT_NUM_SHARDS, index=index, contiguous=False, keep_in_memory=True )
# Concatenate the sub-shards
curr_shard = datasets.concatenate_datasets( [ curr_python, curr_cosmo, curr_edu ] )
# Deterministically shuffle using the current shard index for reproducibility
curr_shard = curr_shard.shuffle( seed=index, keep_in_memory=True )
# Dump the shards to .jsonl.zst
curr_shard.to_json( OUTPUT_FMT.format( index=index, num_shards=OUTPUT_NUM_SHARDS ) )
```
## In-Memory Decompression
Zstandard was chosen as it enables trivial in-memory decompression to minimise the storage impact of the dataset. Here is some example code which creates a python generator that yields each json line from a compressed shard stored at `file_name`, and a second function which creates a python generator that parses and yields the compressed shard.
```py
import json
from json import JSONDecodeError
import zstandard
def read_lines_zst( file_name ):
# Open the file for reading in binary mode
with open( file_name, 'rb' ) as file_handle:
# Initialise an empty buffer
buffer = ''
# Create a reader for the opened file
reader = zstandard.ZstdDecompressor( max_window_size=2**31 ).stream_reader( file_handle )
while True:
# Read a chunk of up to 128MB
chunk = reader.read( 2**27 ).decode()
# If chunk is empty we've reached the end of the file and can break out
if not chunk:
break
# Combine any prior buffer with the current chunk and split by newline
lines = ( buffer + chunk ).split( '\n' )
# Yield the full lines so far
for line in lines[ : -1 ]:
yield line
# The last 'line' is incomplete, so place in buffer for next chunk
buffer = lines[ -1 ]
# Always remember to close your reader!
reader.close()
def parse_jsonl_zst( file_name ):
# Iterate over the yielded lines of the compressed shard
for i, line in enumerate( read_lines_zst( file_name ) ):
try:
# Convert the line into a python dict and yield the text field
yield json.loads( line )[ 'text' ]
except ( KeyError, JSONDecodeError ):
# Catch KeyError for 'text' not present in dict
# Catch JSONDecodeError for malformed line
print( f'JSON error @ shard={file_name}, line={i}' )
```
Of course you *could* use HuggingFace's in-built streaming mechanics to handle things for you, but in my experience that approach is less reliable, doesn't handle `JSONDecodeError`s if there are malformed lines, can cause memory leaks, and has forced sharding behaviour when used inside a multi-worker PyTorch `DataLoader` which I've not yet found a way to disable! |
NeelNanda/pile-10k | NeelNanda | "2022-10-14T21:27:22Z" | 3,689 | 17 | [
"license:bigscience-bloom-rail-1.0",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-10-02T20:59:26Z" | ---
license: bigscience-bloom-rail-1.0
---
The first 10K elements of [The Pile](https://pile.eleuther.ai/), useful for debugging models trained on it. See the [HuggingFace page for the full Pile](https://huggingface.co/datasets/the_pile) for more info. Inspired by [stas' great resource](https://huggingface.co/datasets/stas/openwebtext-10k) doing the same for OpenWebText |
lmms-lab/VizWiz-VQA | lmms-lab | "2024-03-08T05:11:16Z" | 3,670 | 4 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-01-04T10:31:44Z" | ---
dataset_info:
features:
- name: question_id
dtype: string
- name: image
dtype: image
- name: question
dtype: string
- name: answers
sequence: string
- name: category
dtype: string
splits:
- name: val
num_bytes: 2097998373.0
num_examples: 4319
- name: test
num_bytes: 3982325314.0
num_examples: 8000
download_size: 6050372614
dataset_size: 6080323687.0
---
# Dataset Card for "VizWiz-VQA"
<p align="center" width="100%">
<img src="https://i.postimg.cc/g0QRgMVv/WX20240228-113337-2x.png" width="100%" height="80%">
</p>
# Large-scale Multi-modality Models Evaluation Suite
> Accelerating the development of large-scale multi-modality models (LMMs) with `lmms-eval`
🏠 [Homepage](https://lmms-lab.github.io/) | 📚 [Documentation](docs/README.md) | 🤗 [Huggingface Datasets](https://huggingface.co/lmms-lab)
# This Dataset
This is a formatted version of [VizWiz-VQA](https://vizwiz.org/tasks-and-datasets/vqa/). It is used in our `lmms-eval` pipeline to allow for one-click evaluations of large multi-modality models.
```
@inproceedings{gurari2018vizwiz,
title={Vizwiz grand challenge: Answering visual questions from blind people},
author={Gurari, Danna and Li, Qing and Stangl, Abigale J and Guo, Anhong and Lin, Chi and Grauman, Kristen and Luo, Jiebo and Bigham, Jeffrey P},
booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition},
pages={3608--3617},
year={2018}
}
```
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
eduagarcia-temp/OSCAR-2301_meta | eduagarcia-temp | "2023-08-28T14:07:22Z" | 3,666 | 0 | [
"size_categories:10M<n<100M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-08-27T20:24:54Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: text
dtype: string
- name: meta
struct:
- name: categories
sequence: string
- name: dedup
struct:
- name: exact_norm
struct:
- name: cluster_main_idx
dtype: int64
- name: cluster_size
dtype: int64
- name: exact_hash_idx
dtype: int64
- name: is_duplicate
dtype: bool
- name: minhash
struct:
- name: cluster_main_idx
dtype: int64
- name: cluster_size
dtype: int64
- name: is_duplicate
dtype: bool
- name: minhash_idx
dtype: int64
- name: harmful_pp
dtype: float64
- name: identification
struct:
- name: label
dtype: string
- name: prob
dtype: float64
- name: quality_warnings
sequence: string
- name: sentence_identifications
list:
- name: label
dtype: string
- name: prob
dtype: float64
- name: tlsh
dtype: string
- name: warc_headers
struct:
- name: content-length
dtype: int64
- name: content-type
dtype: string
- name: warc-block-digest
dtype: string
- name: warc-date
dtype: string
- name: warc-identified-content-language
dtype: string
- name: warc-record-id
dtype: string
- name: warc-refers-to
dtype: string
- name: warc-target-uri
dtype: string
- name: warc-type
dtype: string
splits:
- name: train
num_bytes: 127702717461
num_examples: 18031400
download_size: 40317121912
dataset_size: 127702717461
---
# Dataset Card for "OSCAR-2301_meta"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
klue/klue | klue | "2024-01-04T14:05:57Z" | 3,650 | 66 | [
"task_categories:fill-mask",
"task_categories:question-answering",
"task_categories:text-classification",
"task_categories:text-generation",
"task_categories:token-classification",
"task_ids:extractive-qa",
"task_ids:named-entity-recognition",
"task_ids:natural-language-inference",
"task_ids:parsing",
"task_ids:semantic-similarity-scoring",
"task_ids:text-scoring",
"task_ids:topic-classification",
"annotations_creators:expert-generated",
"language_creators:expert-generated",
"multilinguality:monolingual",
"source_datasets:original",
"language:ko",
"license:cc-by-sa-4.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2105.09680",
"region:us",
"relation-extraction"
] | [
"fill-mask",
"question-answering",
"text-classification",
"text-generation",
"token-classification"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- expert-generated
language_creators:
- expert-generated
language:
- ko
license:
- cc-by-sa-4.0
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- fill-mask
- question-answering
- text-classification
- text-generation
- token-classification
task_ids:
- extractive-qa
- named-entity-recognition
- natural-language-inference
- parsing
- semantic-similarity-scoring
- text-scoring
- topic-classification
paperswithcode_id: klue
pretty_name: KLUE
config_names:
- dp
- mrc
- ner
- nli
- re
- sts
- wos
- ynat
tags:
- relation-extraction
dataset_info:
- config_name: dp
features:
- name: sentence
dtype: string
- name: index
list: int32
- name: word_form
list: string
- name: lemma
list: string
- name: pos
list: string
- name: head
list: int32
- name: deprel
list: string
splits:
- name: train
num_bytes: 7899965
num_examples: 10000
- name: validation
num_bytes: 1557462
num_examples: 2000
download_size: 3742577
dataset_size: 9457427
- config_name: mrc
features:
- name: title
dtype: string
- name: context
dtype: string
- name: news_category
dtype: string
- name: source
dtype: string
- name: guid
dtype: string
- name: is_impossible
dtype: bool
- name: question_type
dtype: int32
- name: question
dtype: string
- name: answers
sequence:
- name: answer_start
dtype: int32
- name: text
dtype: string
splits:
- name: train
num_bytes: 46505593
num_examples: 17554
- name: validation
num_bytes: 15583017
num_examples: 5841
download_size: 30098472
dataset_size: 62088610
- config_name: ner
features:
- name: sentence
dtype: string
- name: tokens
sequence: string
- name: ner_tags
sequence:
class_label:
names:
'0': B-DT
'1': I-DT
'2': B-LC
'3': I-LC
'4': B-OG
'5': I-OG
'6': B-PS
'7': I-PS
'8': B-QT
'9': I-QT
'10': B-TI
'11': I-TI
'12': O
splits:
- name: train
num_bytes: 19891905
num_examples: 21008
- name: validation
num_bytes: 4937563
num_examples: 5000
download_size: 5265887
dataset_size: 24829468
- config_name: nli
features:
- name: guid
dtype: string
- name: source
dtype: string
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype:
class_label:
names:
'0': entailment
'1': neutral
'2': contradiction
splits:
- name: train
num_bytes: 5719882
num_examples: 24998
- name: validation
num_bytes: 673260
num_examples: 3000
download_size: 2056116
dataset_size: 6393142
- config_name: re
features:
- name: guid
dtype: string
- name: sentence
dtype: string
- name: subject_entity
struct:
- name: word
dtype: string
- name: start_idx
dtype: int32
- name: end_idx
dtype: int32
- name: type
dtype: string
- name: object_entity
struct:
- name: word
dtype: string
- name: start_idx
dtype: int32
- name: end_idx
dtype: int32
- name: type
dtype: string
- name: label
dtype:
class_label:
names:
'0': no_relation
'1': org:dissolved
'2': org:founded
'3': org:place_of_headquarters
'4': org:alternate_names
'5': org:member_of
'6': org:members
'7': org:political/religious_affiliation
'8': org:product
'9': org:founded_by
'10': org:top_members/employees
'11': org:number_of_employees/members
'12': per:date_of_birth
'13': per:date_of_death
'14': per:place_of_birth
'15': per:place_of_death
'16': per:place_of_residence
'17': per:origin
'18': per:employee_of
'19': per:schools_attended
'20': per:alternate_names
'21': per:parents
'22': per:children
'23': per:siblings
'24': per:spouse
'25': per:other_family
'26': per:colleagues
'27': per:product
'28': per:religion
'29': per:title
- name: source
dtype: string
splits:
- name: train
num_bytes: 11145426
num_examples: 32470
- name: validation
num_bytes: 2559272
num_examples: 7765
download_size: 8190257
dataset_size: 13704698
- config_name: sts
features:
- name: guid
dtype: string
- name: source
dtype: string
- name: sentence1
dtype: string
- name: sentence2
dtype: string
- name: labels
struct:
- name: label
dtype: float64
- name: real-label
dtype: float64
- name: binary-label
dtype:
class_label:
names:
'0': negative
'1': positive
splits:
- name: train
num_bytes: 2832889
num_examples: 11668
- name: validation
num_bytes: 122641
num_examples: 519
download_size: 1587855
dataset_size: 2955530
- config_name: wos
features:
- name: guid
dtype: string
- name: domains
list: string
- name: dialogue
list:
- name: role
dtype: string
- name: text
dtype: string
- name: state
list: string
splits:
- name: train
num_bytes: 26676970
num_examples: 8000
- name: validation
num_bytes: 3488911
num_examples: 1000
download_size: 6358855
dataset_size: 30165881
- config_name: ynat
features:
- name: guid
dtype: string
- name: title
dtype: string
- name: label
dtype:
class_label:
names:
'0': IT과학
'1': 경제
'2': 사회
'3': 생활문화
'4': 세계
'5': 스포츠
'6': 정치
- name: url
dtype: string
- name: date
dtype: string
splits:
- name: train
num_bytes: 10109584
num_examples: 45678
- name: validation
num_bytes: 2039181
num_examples: 9107
download_size: 5012303
dataset_size: 12148765
configs:
- config_name: dp
data_files:
- split: train
path: dp/train-*
- split: validation
path: dp/validation-*
- config_name: mrc
data_files:
- split: train
path: mrc/train-*
- split: validation
path: mrc/validation-*
- config_name: ner
data_files:
- split: train
path: ner/train-*
- split: validation
path: ner/validation-*
- config_name: nli
data_files:
- split: train
path: nli/train-*
- split: validation
path: nli/validation-*
- config_name: re
data_files:
- split: train
path: re/train-*
- split: validation
path: re/validation-*
- config_name: sts
data_files:
- split: train
path: sts/train-*
- split: validation
path: sts/validation-*
- config_name: wos
data_files:
- split: train
path: wos/train-*
- split: validation
path: wos/validation-*
- config_name: ynat
data_files:
- split: train
path: ynat/train-*
- split: validation
path: ynat/validation-*
---
# Dataset Card for KLUE
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-instances)
- [Data Splits](#data-instances)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
## Dataset Description
- **Homepage:** https://klue-benchmark.com/
- **Repository:** https://github.com/KLUE-benchmark/KLUE
- **Paper:** [KLUE: Korean Language Understanding Evaluation](https://arxiv.org/abs/2105.09680)
- **Leaderboard:** [Leaderboard](https://klue-benchmark.com/leaderboard)
- **Point of Contact:** https://github.com/KLUE-benchmark/KLUE/issues
### Dataset Summary
KLUE is a collection of 8 tasks to evaluate natural language understanding capability of Korean language models. We deliberately select the 8 tasks, which are Topic Classification, Semantic Textual Similarity, Natural Language Inference, Named Entity Recognition, Relation Extraction, Dependency Parsing, Machine Reading Comprehension, and Dialogue State Tracking.
### Supported Tasks and Leaderboards
Topic Classification, Semantic Textual Similarity, Natural Language Inference, Named Entity Recognition, Relation Extraction, Dependency Parsing, Machine Reading Comprehension, and Dialogue State Tracking
### Languages
`ko-KR`
## Dataset Structure
### Data Instances
#### ynat
An example of 'train' looks as follows.
```
{'date': '2016.06.30. 오전 10:36',
'guid': 'ynat-v1_train_00000',
'label': 3,
'title': '유튜브 내달 2일까지 크리에이터 지원 공간 운영',
'url': 'https://news.naver.com/main/read.nhn?mode=LS2D&mid=shm&sid1=105&sid2=227&oid=001&aid=0008508947'}
```
#### sts
An example of 'train' looks as follows.
```
{'guid': 'klue-sts-v1_train_00000',
'labels': {'label': 3.7, 'real-label': 3.714285714285714, 'binary-label': 1},
'sentence1': '숙소 위치는 찾기 쉽고 일반적인 한국의 반지하 숙소입니다.',
'sentence2': '숙박시설의 위치는 쉽게 찾을 수 있고 한국의 대표적인 반지하 숙박시설입니다.',
'source': 'airbnb-rtt'}
```
#### nli
An example of 'train' looks as follows.
```
{'guid': 'klue-nli-v1_train_00000',
'hypothesis': '힛걸 진심 최고로 멋지다.',
'label': 0,
'premise': '힛걸 진심 최고다 그 어떤 히어로보다 멋지다',
'source': 'NSMC'}
```
#### ner
An example of 'train' looks as follows.
```
{'tokens': ['특', '히', ' ', '영', '동', '고', '속', '도', '로', ' ', '강', '릉', ' ', '방', '향', ' ', '문', '막', '휴', '게', '소', '에', '서', ' ', '만', '종', '분', '기', '점', '까', '지', ' ', '5', '㎞', ' ', '구', '간', '에', '는', ' ', '승', '용', '차', ' ', '전', '용', ' ', '임', '시', ' ', '갓', '길', '차', '로', '제', '를', ' ', '운', '영', '하', '기', '로', ' ', '했', '다', '.'],
'ner_tags': [12, 12, 12, 2, 3, 3, 3, 3, 3, 12, 2, 3, 12, 12, 12, 12, 2, 3, 3, 3, 3, 12, 12, 12, 2, 3, 3, 3, 3, 12, 12, 12, 8, 9, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
'sentence': '특히 <영동고속도로:LC> <강릉:LC> 방향 <문막휴게소:LC>에서 <만종분기점:LC>까지 <5㎞:QT> 구간에는 승용차 전용 임시 갓길차로제를 운영하기로 했다.'}
```
#### re
An example of 'train' looks as follows.
```
{'guid': 'klue-re-v1_train_00000',
'label': 0,
'object_entity': {'word': '조지 해리슨',
'start_idx': 13,
'end_idx': 18,
'type': 'PER'},
'sentence': '〈Something〉는 조지 해리슨이 쓰고 비틀즈가 1969년 앨범 《Abbey Road》에 담은 노래다.',
'source': 'wikipedia',
'subject_entity': {'word': '비틀즈',
'start_idx': 24,
'end_idx': 26,
'type': 'ORG'}}
```
#### dp
An example of 'train' looks as follows.
```
{'deprel': ['NP', 'NP_OBJ', 'VP', 'NP', 'NP_SBJ', 'NP', 'NP_MOD', 'NP_CNJ', 'NP_CNJ', 'NP', 'NP', 'NP_OBJ', 'AP', 'VP'],
'head': [2, 3, 14, 5, 14, 7, 10, 10, 10, 11, 12, 14, 14, 0],
'index': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
'lemma': ['해당', '그림 을', '보 면', '디즈니', '공주 들 이', '브리트니', '스피어스 의', '앨범 이나', '뮤직 비디오 ,', '화보', '속', '모습 을', '똑같이', '재연 하 였 다 .'],
'pos': ['NNG', 'NNG+JKO', 'VV+EC', 'NNP', 'NNG+XSN+JKS', 'NNP', 'NNP+JKG', 'NNG+JC', 'NNG+NNG+SP', 'NNG', 'NNG', 'NNG+JKO', 'MAG', 'NNG+XSA+EP+EF+SF'],
'sentence': '해당 그림을 보면 디즈니 공주들이 브리트니 스피어스의 앨범이나 뮤직비디오, 화보 속 모습을 똑같이 재연했다.',
'word_form': ['해당', '그림을', '보면', '디즈니', '공주들이', '브리트니', '스피어스의', '앨범이나', '뮤직비디오,', '화보', '속', '모습을', '똑같이', '재연했다.']}
```
#### mrc
An example of 'train' looks as follows.
```
{'answers': {'answer_start': [478, 478], 'text': ['한 달가량', '한 달']},
'context': '올여름 장마가 17일 제주도에서 시작됐다. 서울 등 중부지방은 예년보다 사나흘 정도 늦은 이달 말께 장마가 시작될 전망이다.17일 기상청에 따르면 제주도 남쪽 먼바다에 있는 장마전선의 영향으로 이날 제주도 산간 및 내륙지역에 호우주의보가 내려지면서 곳곳에 100㎜에 육박하는 많은 비가 내렸다. 제주의 장마는 평년보다 2~3일, 지난해보다는 하루 일찍 시작됐다. 장마는 고온다습한 북태평양 기단과 한랭 습윤한 오호츠크해 기단이 만나 형성되는 장마전선에서 내리는 비를 뜻한다.장마전선은 18일 제주도 먼 남쪽 해상으로 내려갔다가 20일께 다시 북상해 전남 남해안까지 영향을 줄 것으로 보인다. 이에 따라 20~21일 남부지방에도 예년보다 사흘 정도 장마가 일찍 찾아올 전망이다. 그러나 장마전선을 밀어올리는 북태평양 고기압 세력이 약해 서울 등 중부지방은 평년보다 사나흘가량 늦은 이달 말부터 장마가 시작될 것이라는 게 기상청의 설명이다. 장마전선은 이후 한 달가량 한반도 중남부를 오르내리며 곳곳에 비를 뿌릴 전망이다. 최근 30년간 평균치에 따르면 중부지방의 장마 시작일은 6월24~25일이었으며 장마기간은 32일, 강수일수는 17.2일이었다.기상청은 올해 장마기간의 평균 강수량이 350~400㎜로 평년과 비슷하거나 적을 것으로 내다봤다. 브라질 월드컵 한국과 러시아의 경기가 열리는 18일 오전 서울은 대체로 구름이 많이 끼지만 비는 오지 않을 것으로 예상돼 거리 응원에는 지장이 없을 전망이다.',
'guid': 'klue-mrc-v1_train_12759',
'is_impossible': False,
'news_category': '종합',
'question': '북태평양 기단과 오호츠크해 기단이 만나 국내에 머무르는 기간은?',
'question_type': 1,
'source': 'hankyung',
'title': '제주도 장마 시작 … 중부는 이달 말부터'}
```
#### wos
An example of 'train' looks as follows.
```
{'dialogue': [{'role': 'user',
'text': '쇼핑을 하려는데 서울 서쪽에 있을까요?',
'state': ['관광-종류-쇼핑', '관광-지역-서울 서쪽']},
{'role': 'sys',
'text': '서울 서쪽에 쇼핑이 가능한 곳이라면 노량진 수산물 도매시장이 있습니다.',
'state': []},
{'role': 'user',
'text': '오 네 거기 주소 좀 알려주세요.',
'state': ['관광-종류-쇼핑', '관광-지역-서울 서쪽', '관광-이름-노량진 수산물 도매시장']},
{'role': 'sys', 'text': '노량진 수산물 도매시장의 주소는 서울 동작구 93806입니다.', 'state': []},
{'role': 'user',
'text': '알려주시는김에 연락처랑 평점도 좀 알려주세요.',
'state': ['관광-종류-쇼핑', '관광-지역-서울 서쪽', '관광-이름-노량진 수산물 도매시장']},
{'role': 'sys', 'text': '그럼. 연락처는 6182006591이고 평점은 4점입니다.', 'state': []},
{'role': 'user',
'text': '와 감사합니다.',
'state': ['관광-종류-쇼핑', '관광-지역-서울 서쪽', '관광-이름-노량진 수산물 도매시장']},
{'role': 'sys', 'text': '감사합니다.', 'state': []}],
'domains': ['관광'],
'guid': 'wos-v1_train_00001'}
```
### Data Fields
#### ynat
+ `guid`: a `string` feature
+ `title`: a `string` feature
+ `label`: a classification label, with possible values `IT과학`(0), `경제`(1), `사회`(2), `생활문화`(3), `세계`(4), `스포츠`(5), `정치`(6)
+ `url`: a `string` feature
+ `date`: a `string` feature
#### sts
+ `guid`: a `string` feature
+ `source`: a `string` feature
+ `sentence1`: a `string` feature
+ `sentence2`: a `string` feature
+ `labels`: a dictionary feature containing
+ `label`: a `float64` feature
+ `real-label`: a `float64` feature
+ `binary-label`: a classification label, with possible values `negative`(0), `positive`(1)
#### nli
+ `guid`: a `string` feature
+ `source`: a `string` feature
+ `premise`: a `string` feature
+ `hypothesis`: a `string` feature
+ `label`: a classification label, with possible values `entailment`(0), `neutral`(1), `contradiction`(2)
#### ner
+ `sentence`: a `string` feature
+ `tokens`: a list of a `string` feature (tokenization is at character level)
+ `ner_tags`: a list of classification labels, with possible values including `B-DT`(0), `I-DT`(1),
`B-LC`(2), `I-LC`(3), `B-OG`(4), `I-OG`(5), `B-PS`(6), `I-PS`(7), `B-QT`(8), `I-QT`(9), `B-TI`(10),
`I-TI`(11), `O`(12)
#### re
+ `guid`: a `string` feature
+ `sentence`: a `string` feature
+ `subject_entity`: a dictionary feature containing
+ `word`: a `string` feature
+ `start_idx`: a `int32` feature
+ `end_idx`: a `int32` feature
+ `type`: a `string` feature
+ `object_entity`: a dictionary feature containing
+ `word`: a `string` feature
+ `start_idx`: a `int32` feature
+ `end_idx`: a `int32` feature
+ `type`: a `string` feature
+ `label`: a list of labels, with possible values including `no_relation`(0), `org:dissolved`(1),
`org:founded`(2), `org:place_of_headquarters`(3), `org:alternate_names`(4), `org:member_of`(5),
`org:members`(6), `org:political/religious_affiliation`(7), `org:product`(8), `org:founded_by`(9),`org:top_members/employees`(10),
`org:number_of_employees/members`(11), `per:date_of_birth`(12), `per:date_of_death`(13), `per:place_of_birth`(14),
`per:place_of_death`(15), `per:place_of_residence`(16), `per:origin`(17), `per:employee_of`(18),
`per:schools_attended`(19), `per:alternate_names`(20), `per:parents`(21), `per:children`(22),
`per:siblings`(23), `per:spouse`(24), `per:other_family`(25), `per:colleagues`(26), `per:product`(27),
`per:religion`(28), `per:title`(29),
+ `source`: a `string` feature
#### dp
+ `sentence`: a `string` feature
+ `index`: a list of `int32` feature
+ `word_form`: a list of `string` feature
+ `lemma`: a list of `string` feature
+ `pos`: a list of `string` feature
+ `head`: a list of `int32` feature
+ `deprel`: a list of `string` feature
#### mrc
+ `title`: a `string` feature
+ `context`: a `string` feature
+ `news_category`: a `string` feature
+ `source`: a `string` feature
+ `guid`: a `string` feature
+ `is_impossible`: a `bool` feature
+ `question_type`: a `int32` feature
+ `question`: a `string` feature
+ `answers`: a dictionary feature containing
+ `answer_start`: a `int32` feature
+ `text`: a `string` feature
#### wos
+ `guid`: a `string` feature
+ `domains`: a `string` feature
+ `dialogue`: a list of dictionary feature containing
+ `role`: a `string` feature
+ `text`: a `string` feature
+ `state`: a `string` feature
### Data Splits
#### ynat
You can see more details in [here](https://klue-benchmark.com/tasks/66/data/description).
+ train: 45,678
+ validation: 9,107
#### sts
You can see more details in [here](https://klue-benchmark.com/tasks/67/data/description).
+ train: 11,668
+ validation: 519
#### nli
You can see more details in [here](https://klue-benchmark.com/tasks/68/data/description).
+ train: 24,998
+ validation: 3,000
#### ner
You can see more details in [here](https://klue-benchmark.com/tasks/69/overview/description).
+ train: 21,008
+ validation: 5,000
#### re
You can see more details in [here](https://klue-benchmark.com/tasks/70/overview/description).
+ train: 32,470
+ validation: 7,765
#### dp
You can see more details in [here](https://klue-benchmark.com/tasks/71/data/description).
+ train: 10,000
+ validation: 2,000
#### mrc
You can see more details in [here](https://klue-benchmark.com/tasks/72/overview/description).
+ train: 17,554
+ validation: 5,841
#### wos
You can see more details in [here](https://klue-benchmark.com/tasks/73/overview/description).
+ train: 8,000
+ validation: 1,000
## Dataset Creation
### Curation Rationale
[Needs More Information]
### Source Data
#### Initial Data Collection and Normalization
[Needs More Information]
#### Who are the source language producers?
[Needs More Information]
### Annotations
#### Annotation process
[Needs More Information]
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
[Needs More Information]
## Considerations for Using the Data
### Social Impact of Dataset
[Needs More Information]
### Discussion of Biases
[Needs More Information]
### Other Known Limitations
[Needs More Information]
## Additional Information
### Dataset Curators
[Needs More Information]
### Licensing Information
[Needs More Information]
### Citation Information
```
@misc{park2021klue,
title={KLUE: Korean Language Understanding Evaluation},
author={Sungjoon Park and Jihyung Moon and Sungdong Kim and Won Ik Cho and Jiyoon Han and Jangwon Park and Chisung Song and Junseong Kim and Yongsook Song and Taehwan Oh and Joohong Lee and Juhyun Oh and Sungwon Lyu and Younghoon Jeong and Inkwon Lee and Sangwoo Seo and Dongjun Lee and Hyunwoo Kim and Myeonghwa Lee and Seongbo Jang and Seungwon Do and Sunkyoung Kim and Kyungtae Lim and Jongwon Lee and Kyumin Park and Jamin Shin and Seonghyun Kim and Lucy Park and Alice Oh and Jungwoo Ha and Kyunghyun Cho},
year={2021},
eprint={2105.09680},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
### Contributions
Thanks to [@jungwhank](https://github.com/jungwhank), [@bzantium](https://github.com/bzantium) for adding this dataset. |
PolyAI/minds14 | PolyAI | "2024-09-10T13:25:16Z" | 3,640 | 81 | [
"task_categories:automatic-speech-recognition",
"task_ids:keyword-spotting",
"annotations_creators:expert-generated",
"annotations_creators:crowdsourced",
"annotations_creators:machine-generated",
"language_creators:crowdsourced",
"language_creators:expert-generated",
"multilinguality:multilingual",
"language:en",
"language:fr",
"language:it",
"language:es",
"language:pt",
"language:de",
"language:nl",
"language:ru",
"language:pl",
"language:cs",
"language:ko",
"language:zh",
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"arxiv:2104.08524",
"region:us",
"speech-recognition"
] | [
"automatic-speech-recognition"
] | "2022-04-05T07:46:13Z" | ---
annotations_creators:
- expert-generated
- crowdsourced
- machine-generated
language_creators:
- crowdsourced
- expert-generated
language:
- en
- fr
- it
- es
- pt
- de
- nl
- ru
- pl
- cs
- ko
- zh
license:
- cc-by-4.0
multilinguality:
- multilingual
size_categories:
- 10K<n<100K
task_categories:
- automatic-speech-recognition
task_ids:
- keyword-spotting
pretty_name: MInDS-14
language_bcp47:
- en
- en-GB
- en-US
- en-AU
- fr
- it
- es
- pt
- de
- nl
- ru
- pl
- cs
- ko
- zh
tags:
- speech-recognition
---
# MInDS-14
## Dataset Description
- **Fine-Tuning script:** [pytorch/audio-classification](https://github.com/huggingface/transformers/tree/main/examples/pytorch/audio-classification)
- **Paper:** [Multilingual and Cross-Lingual Intent Detection from Spoken Data](https://arxiv.org/abs/2104.08524)
- **Total amount of disk used:** ca. 500 MB
MINDS-14 is training and evaluation resource for intent detection task with spoken data. It covers 14
intents extracted from a commercial system in the e-banking domain, associated with spoken examples in 14 diverse language varieties.
## Example
MInDS-14 can be downloaded and used as follows:
```py
from datasets import load_dataset
minds_14 = load_dataset("PolyAI/minds14", "fr-FR") # for French
# to download all data for multi-lingual fine-tuning uncomment following line
# minds_14 = load_dataset("PolyAI/all", "all")
# see structure
print(minds_14)
# load audio sample on the fly
audio_input = minds_14["train"][0]["audio"] # first decoded audio sample
intent_class = minds_14["train"][0]["intent_class"] # first transcription
intent = minds_14["train"].features["intent_class"].names[intent_class]
# use audio_input and language_class to fine-tune your model for audio classification
```
## Dataset Structure
We show detailed information the example configurations `fr-FR` of the dataset.
All other configurations have the same structure.
### Data Instances
**fr-FR**
- Size of downloaded dataset files: 471 MB
- Size of the generated dataset: 300 KB
- Total amount of disk used: 471 MB
An example of a datainstance of the config `fr-FR` looks as follows:
```
{
"path": "/home/patrick/.cache/huggingface/datasets/downloads/extracted/3ebe2265b2f102203be5e64fa8e533e0c6742e72268772c8ac1834c5a1a921e3/fr-FR~ADDRESS/response_4.wav",
"audio": {
"path": "/home/patrick/.cache/huggingface/datasets/downloads/extracted/3ebe2265b2f102203be5e64fa8e533e0c6742e72268772c8ac1834c5a1a921e3/fr-FR~ADDRESS/response_4.wav",
"array": array(
[0.0, 0.0, 0.0, ..., 0.0, 0.00048828, -0.00024414], dtype=float32
),
"sampling_rate": 8000,
},
"transcription": "je souhaite changer mon adresse",
"english_transcription": "I want to change my address",
"intent_class": 1,
"lang_id": 6,
}
```
### Data Fields
The data fields are the same among all splits.
- **path** (str): Path to the audio file
- **audio** (dict): Audio object including loaded audio array, sampling rate and path ot audio
- **transcription** (str): Transcription of the audio file
- **english_transcription** (str): English transcription of the audio file
- **intent_class** (int): Class id of intent
- **lang_id** (int): Id of language
### Data Splits
Every config only has the `"train"` split containing of *ca.* 600 examples.
## Dataset Creation
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
All datasets are licensed under the [Creative Commons license (CC-BY)](https://creativecommons.org/licenses/).
### Citation Information
```
@article{DBLP:journals/corr/abs-2104-08524,
author = {Daniela Gerz and
Pei{-}Hao Su and
Razvan Kusztos and
Avishek Mondal and
Michal Lis and
Eshan Singhal and
Nikola Mrksic and
Tsung{-}Hsien Wen and
Ivan Vulic},
title = {Multilingual and Cross-Lingual Intent Detection from Spoken Data},
journal = {CoRR},
volume = {abs/2104.08524},
year = {2021},
url = {https://arxiv.org/abs/2104.08524},
eprinttype = {arXiv},
eprint = {2104.08524},
timestamp = {Mon, 26 Apr 2021 17:25:10 +0200},
biburl = {https://dblp.org/rec/journals/corr/abs-2104-08524.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
```
### Contributions
Thanks to [@patrickvonplaten](https://github.com/patrickvonplaten) for adding this dataset
|
zh-plus/tiny-imagenet | zh-plus | "2022-07-12T09:04:30Z" | 3,639 | 65 | [
"task_categories:image-classification",
"task_ids:multi-class-image-classification",
"annotations_creators:crowdsourced",
"language_creators:crowdsourced",
"multilinguality:monolingual",
"source_datasets:extended|imagenet-1k",
"language:en",
"size_categories:100K<n<1M",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"image-classification"
] | "2022-07-01T03:33:16Z" | ---
annotations_creators:
- crowdsourced
extra_gated_prompt: "By clicking on \u201CAccess repository\u201D below, you also\
\ agree to ImageNet Terms of Access:\n[RESEARCHER_FULLNAME] (the \"Researcher\"\
) has requested permission to use the ImageNet database (the \"Database\") at Princeton\
\ University and Stanford University. In exchange for such permission, Researcher\
\ hereby agrees to the following terms and conditions:\n1. Researcher shall use\
\ the Database only for non-commercial research and educational purposes.\n2. Princeton\
\ University, Stanford University and Hugging Face make no representations or warranties\
\ regarding the Database, including but not limited to warranties of non-infringement\
\ or fitness for a particular purpose.\n3. Researcher accepts full responsibility\
\ for his or her use of the Database and shall defend and indemnify the ImageNet\
\ team, Princeton University, Stanford University and Hugging Face, including their\
\ employees, Trustees, officers and agents, against any and all claims arising from\
\ Researcher's use of the Database, including but not limited to Researcher's use\
\ of any copies of copyrighted images that he or she may create from the Database.\n\
4. Researcher may provide research associates and colleagues with access to the\
\ Database provided that they first agree to be bound by these terms and conditions.\n\
5. Princeton University, Stanford University and Hugging Face reserve the right\
\ to terminate Researcher's access to the Database at any time.\n6. If Researcher\
\ is employed by a for-profit, commercial entity, Researcher's employer shall also\
\ be bound by these terms and conditions, and Researcher hereby represents that\
\ he or she is fully authorized to enter into this agreement on behalf of such employer.\n\
7. The law of the State of New Jersey shall apply to all disputes under this agreement."
language:
- en
language_creators:
- crowdsourced
license: []
multilinguality:
- monolingual
paperswithcode_id: imagenet
pretty_name: Tiny-ImageNet
size_categories:
- 100K<n<1M
source_datasets:
- extended|imagenet-1k
task_categories:
- image-classification
task_ids:
- multi-class-image-classification
---
# Dataset Card for tiny-imagenet
## Dataset Description
- **Homepage:** https://www.kaggle.com/c/tiny-imagenet
- **Repository:** [Needs More Information]
- **Paper:** http://cs231n.stanford.edu/reports/2017/pdfs/930.pdf
- **Leaderboard:** https://paperswithcode.com/sota/image-classification-on-tiny-imagenet-1
### Dataset Summary
Tiny ImageNet contains 200 classes of colored images downsized to 64×64 pixels. Each class has 500 training images, 50 validation images, and 50 test images, for a total of 100,000 training images.
### Languages
The class labels in the dataset are in English.
## Dataset Structure
### Data Instances
```json
{
  'image': <PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=64x64 at 0x1A800E8E190>,
'label': 15
}
```
### Data Fields
- image: A `PIL.Image.Image` object containing the image. Note that when accessing the image column (`dataset[0]["image"]`), the image file is automatically decoded. Decoding a large number of image files might take a significant amount of time. Thus it is important to query the sample index before the `"image"` column, i.e. `dataset[0]["image"]` should always be preferred over `dataset["image"][0]`.
- label: an int classification label. -1 for test set as the labels are missing. Check `classes.py` for the map of numbers & labels.
### Data Splits
| | Train | Valid |
| ------------ | ------ | ----- |
| # of samples | 100000 | 10000 |
## Usage
### Example
#### Load Dataset
```python
def example_usage():
tiny_imagenet = load_dataset('Maysee/tiny-imagenet', split='train')
print(tiny_imagenet[0])
if __name__ == '__main__':
example_usage()
``` |
Omartificial-Intelligence-Space/FineWeb2-MSA | Omartificial-Intelligence-Space | "2024-12-15T11:17:57Z" | 3,618 | 1 | [
"language:ar",
"license:odc-by",
"size_categories:100M<n<1B",
"format:text",
"modality:text",
"library:datasets",
"library:mlcroissant",
"region:us",
"arabicf",
"fineweb",
"MSA"
] | null | "2024-12-13T12:26:16Z" | ---
license: odc-by
language:
- ar
tags:
- arabicf
- fineweb
- MSA
pretty_name: FineWeb2 MSA
size_categories:
- 10M<n<100M
---
# FineWeb2 MSA Arabic
![image/png](https://cdn-uploads.huggingface.co/production/uploads/628f7a71dd993507cfcbe587/7QWU4U2orwaXAZGC3lWy0.png)
This is the MSA Arabic Portion of The [FineWeb2](https://huggingface.co/datasets/HuggingFaceFW/fineweb-2#additional-information) Dataset.
This dataset contains a rich collection of text in **Modern Standard Arabic (MSA)** (ISO 639-3: arb), the standardized variety of Arabic within the Afro-Asiatic language family.
With over **439 million words** and **1.4 million documents**, it serves as a valuable resource for NLP development and linguistic research focused on Modern Standard Arabic.
## Purpose of This Repository
This repository provides easy access to the **Arabic portion - MSA** of the extensive **FineWeb2** dataset. My primary goal is to make this valuable data more accessible and impactful for researchers, developers, and anyone working on **Arabic** natural language processing (NLP) projects.
By focusing on Arabic, I aim to:
- **Simplify Access**: Provide a direct and streamlined way to download the Arabic portion of the dataset without navigating through the larger collection.
- **Promote Research**: Enable more efficient use of Arabic text data for NLP, LLMs, and linguistic research.
- **Empower the Community**: Support Arabic language processing and contribute to the growth of multilingual NLP capabilities.
- **Encourage Collaboration**: Foster an environment where researchers and developers can build impactful applications using Arabic data.
## Credit to the Original Work
The dataset is released under the [Open Data Commons Attribution License (ODC-By) v1.0](https://opendatacommons.org/licenses/by/1-0/), with additional usage subject to CommonCrawl's Terms of Use.
### Citation
If you use this dataset, please cite it as follows:
```bibtex
@software{penedo2024fineweb-2,
author = {Penedo, Guilherme and Kydlíček, Hynek and Sabolčec, Vinko and Messmer, Bettina and Foroutan, Negar and Jaggi, Martin and von Werra, Leandro and Wolf, Thomas},
title = {FineWeb2: A sparkling update with 1000s of languages},
month = dec,
year = 2024,
doi = {10.57967/hf/3744},
url = {https://huggingface.co/datasets/HuggingFaceFW/fineweb-2}
}
```
 |
JetBrains-Research/commit-chronicle | JetBrains-Research | "2023-10-05T10:50:00Z" | 3,605 | 7 | [
"task_categories:text-generation",
"task_categories:summarization",
"language:code",
"language:en",
"license:other",
"size_categories:10M<n<100M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2308.07655",
"region:us",
"code",
"commit_message_generation"
] | [
"text-generation",
"summarization"
] | "2023-08-08T15:54:44Z" | ---
license: other
language:
- code
- en
task_categories:
- text-generation
- summarization
tags:
- code
- commit_message_generation
pretty_name: CommitChronicle
size_categories:
- 1M<n<10M
dataset_info:
- config_name: default
features:
- name: author
dtype: int64
- name: date
dtype: string
- name: timezone
dtype: int64
- name: hash
dtype: string
- name: message
dtype: string
- name: mods
list:
- name: change_type
dtype: string
- name: old_path
dtype: string
- name: new_path
dtype: string
- name: diff
dtype: string
- name: language
dtype: string
- name: license
dtype: string
- name: repo
dtype: string
- name: original_message
dtype: string
splits:
- name: test
num_bytes: 5760117409
num_examples: 1486267
- name: train
num_bytes: 30084265848
num_examples: 7659458
- name: validation
num_bytes: 5905326070
num_examples: 1554042
download_size: 14168436205
dataset_size: 41749709327
- config_name: subset_cmg
features:
- name: author
dtype: int64
- name: date
dtype: string
- name: timezone
dtype: int64
- name: hash
dtype: string
- name: message
dtype: string
- name: mods
list:
- name: change_type
dtype: string
- name: old_path
dtype: string
- name: new_path
dtype: string
- name: diff
dtype: string
- name: language
dtype: string
- name: license
dtype: string
- name: repo
dtype: string
- name: original_message
dtype: string
splits:
- name: test
num_bytes: 772774959
num_examples: 204336
download_size: 258151047
dataset_size: 772774959
- config_name: subset_llm
features:
- name: author
dtype: int64
- name: date
dtype: string
- name: timezone
dtype: int64
- name: hash
dtype: string
- name: message
dtype: string
- name: mods
list:
- name: change_type
dtype: string
- name: old_path
dtype: string
- name: new_path
dtype: string
- name: diff
dtype: string
- name: language
dtype: string
- name: license
dtype: string
- name: repo
dtype: string
- name: original_message
dtype: string
splits:
- name: test
num_bytes: 15121048
num_examples: 4025
download_size: 5068039
dataset_size: 15121048
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- config_name: subset_cmg
data_files:
- split: test
path: subset_cmg/test-*
- config_name: subset_llm
data_files:
- split: test
path: subset_llm/test-*
---
# 📜 CommitChronicle 🔮
This is the dataset for commit message generation (and/or completion), introduced in the paper "From Commit Message Generation to History-Aware Commit Message Completion", ASE 2023.
Its key features:
* *large-scale and multilingual*: contains 10.7M commits from 11.9k GitHub repositories in 20 programming languages;
* *diverse*: avoids restrictive filtering on commit messages or commit diffs structure;
* *suitable for experiments with commit history*: provides metadata about commit authors and dates and uses split-by-project.
## Dataset Creation
> 🔍 For further details, please refer to:
> * **Paper**: [https://arxiv.org/abs/2308.07655](https://arxiv.org/abs/2308.07655)
> * **Repository**: [https://github.com/JetBrains-Research/commit_message_generation](https://github.com/JetBrains-Research/commit_message_generation)
We used the [GitHub Search](https://seart-ghs.si.usi.ch/) tool and the official GitHub API to select relevant repositories with permissive licenses (Apache, BSD 3-clause, MIT).
On February 9th, 2023, we collected all commits made since 2017 from these repositories via [PyDriller](https://github.com/ishepard/pydriller).
Next, we extensively cleaned the data, including filtering outliers, dropping commits from bot authors, and dropping duplicates. Note: to avoid disclosing personal information, we replaced the commit authors' names and emails with unique identifiers.
## Dataset Structure
### Data Instances
Each data instance in the dataset is a commit. [A commit example](https://github.com/saridormi/commit_chronicle/commit/a7fb3b64184f0af5b08285cce14b9139baa94049) would look like the following:
```
{
'repo': 'saridormi/commit_chronicle',
'hash': 'a7fb3b64184f0af5b08285cce14b9139baa94049',
'author': 123,
'date': '05.07.2021 15:10:07',
'timezone': 0,
'license': 'MIT License',
'language': 'Jupyter Notebook',
'message': 'Add license badge to readme',
'original_message': 'Add license badge to readme',
'mods': [{'change_type': 'MODIFY',
'new_path': 'README.md',
'old_path': 'README.md'
'diff': '@@ -1,6 +1,6 @@\n'
' # Commits dataset\n'
' \n'
'-> :heavy_exclamation_mark: **TODO:** license\n'
'+![GitHub](https://img.shields.io/github/license/saridormi/commits_dataset?style=for-the-badge)\n'}],
}
```
### Data Fields
Each example has the following fields:
| **Field** | **Description** |
|:------------------:|:----------------------------------------:|
| `repo` | Commit repository. |
| `hash` | Commit hash. |
| `author` | Unique id for commit author |
| `date` | Commit date (from author). |
| `timezone` | Commit timezone (from author). |
| `license` | Commit repository's license. |
| `language` | Commit repository's main language. |
| `message` | Commit message (after processing). |
| `original_message` | Commit message (without any processing). |
| `mods` | List of file modifications from commit. |
Each file modification has the following fields:
| **Field** | **Description** |
|:-------------:|:-------------------------------------------------------------------------------------------------:|
| `change_type` | Type of change to current file. One of: `ADD`, `COPY`, `RENAME`, `DELETE`, `MODIFY` or `UNKNOWN`. |
| `old_path` | Path to file before change (might be empty). |
| `new_path` | Path to file after change (might be empty). |
| `diff` | `git diff` for current file. |
### Data Splits
We provide the following configurations:
* `default`
* `train`: full training split (7.66M commits)
* `validation`: full validation split (1.55M commits)
* `test`: full test split (1.49M commits)
* `subset_cmg`
* `test`: test subset used for experiments with CMG approaches (204k commits)
* `subset_llm`
* `test`: test subset used for experiments with a LLM (4k commits)
## Considerations for Using the Data
> Adopted from [the Stack](https://huggingface.co/datasets/bigcode/the-stack).
The released dataset may contain sensitive information such as emails, IP addresses, and API/ssh keys that have previously been published to public repositories on GitHub. In the event that the dataset contains personal information, researchers should only use public, non-personal information in support of conducting and publishing their open-access research.
Personal information should not be used for spamming purposes, including sending unsolicited emails or selling of personal information.
The dataset is a collection of commits from repositories with various licenses. Any use of all or part of the code gathered in this dataset must abide by the terms of the original licenses, including attribution clauses when relevant. We facilitate this by providing provenance information for each data point.
## Citation
```
TODO
``` |
Jackmin108/bert-base-uncased-refined-web-segment0 | Jackmin108 | "2023-08-17T17:45:25Z" | 3,597 | 0 | [
"size_categories:100M<n<1B",
"format:parquet",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2023-08-17T11:48:12Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: input_ids
sequence: int32
- name: length
dtype: int64
splits:
- name: train
num_bytes: 234885131268
num_examples: 100000000
download_size: 10689166809
dataset_size: 234885131268
---
# Dataset Card for "bert-base-uncased-refined-web-segment0"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Major-TOM/Core-S2L1C | Major-TOM | "2024-08-29T16:19:01Z" | 3,581 | 20 | [
"license:cc-by-sa-4.0",
"size_categories:1M<n<10M",
"format:parquet",
"modality:image",
"modality:tabular",
"modality:text",
"modality:geospatial",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2402.12095",
"region:us",
"earth-observation",
"remote-sensing",
"sentinel-2",
"multi-spectral",
"satellite",
"geospatial"
] | null | "2024-02-25T16:42:11Z" | ---
license: cc-by-sa-4.0
tags:
- earth-observation
- remote-sensing
- sentinel-2
- multi-spectral
- satellite
- geospatial
size_categories:
- 1M<n<10M
dataset_info:
- config_name: default
features:
- name: product_id
dtype: string
- name: grid_cell
dtype: string
- name: product_datetime
dtype: string
- name: thumbnail
dtype: image
- name: B01
dtype: binary
- name: B02
dtype: binary
- name: B03
dtype: binary
- name: B04
dtype: binary
- name: B05
dtype: binary
- name: B06
dtype: binary
- name: B07
dtype: binary
- name: B08
dtype: binary
- name: B8A
dtype: binary
- name: B09
dtype: binary
- name: B10
dtype: binary
- name: B11
dtype: binary
- name: B12
dtype: binary
- name: cloud_mask
dtype: binary
configs:
- config_name: default
data_files: images/*.parquet
- config_name: metadata
data_files: metadata.parquet
---
# Core-S2L1C
Contains a global coverage of Sentinel-2 (Level 1C) patches, each of size 1,068 x 1,068 pixels.
| Source | Sensing Type | Number of Patches | Patch Size | Total Pixels |
|--------|--------------|-------------------|------------|--------------|
|Sentinel-2 Level-1C |Optical Multispectral|2,245,886|1,068x1,068|2.56 Trillion|
## Content
| Column | Details | Resolution |
|--------|---------|------------|
| B01 | Coastal aerosol, 442.7 nm (S2A), 442.3 nm (S2B) | 60m |
| B02 | Blue, 492.4 nm (S2A), 492.1 nm (S2B) | 10m |
| B03 | Green, 559.8 nm (S2A), 559.0 nm (S2B) | 10m |
| B04 | Red, 664.6 nm (S2A), 665.0 nm (S2B) | 10m |
| B05 | Vegetation red edge, 704.1 nm (S2A), 703.8 nm (S2B) | 20m |
| B06 | Vegetation red edge, 740.5 nm (S2A), 739.1 nm (S2B) | 20m |
| B07 | Vegetation red edge, 782.8 nm (S2A), 779.7 nm (S2B) | 20m |
| B08 | NIR, 832.8 nm (S2A), 833.0 nm (S2B) | 10m |
| B8A | Narrow NIR, 864.7 nm (S2A), 864.0 nm (S2B) | 20m |
| B09 | Water vapour, 945.1 nm (S2A), 943.2 nm (S2B) | 60m |
| B10 | SWIR – Cirrus, 1373.5 nm (S2A), 1376.9 nm (S2B) | 60m |
| B11 | SWIR, 1613.7 nm (S2A), 1610.4 nm (S2B) | 20m |
| B12 | SWIR, 2202.4 nm (S2A), 2185.7 nm (S2B) | 20m |
| cloud_mask | Cloud Mask produced by [SEnSeI](https://huggingface.co/aliFrancis/SEnSeIv2) | 10m |
| thumbnail | RGB composite [B04, B03, B02] saved as png | 10m |
## Spatial Coverage
This is a global monotemporal dataset. Nearly every piece of Earth captured by Sentinel-2 is contained at least once in this dataset (and only once, excluding some marginal overlaps).
The following figure demonstrates the spatial coverage (only black pixels are absent):
![image/png](https://cdn-uploads.huggingface.co/production/uploads/6304c06eeb6d777a838eab63/2KTarfsM0a1dNYEbXriUH.png)
## Example Use
Interface scripts are available at https://github.com/ESA-PhiLab/Major-TOM
Here's a sneak peek with a thumbnail image:
```python
from fsspec.parquet import open_parquet_file
import pyarrow.parquet as pq
from io import BytesIO
from PIL import Image
PARQUET_FILE = 'part_03900' # parquet number
ROW_INDEX = 42 # row number (about 500 per parquet)
url = "https://huggingface.co/datasets/Major-TOM/Core-S2L1C/resolve/main/images/{}.parquet".format(PARQUET_FILE)
with open_parquet_file(url,columns = ["thumbnail"]) as f:
with pq.ParquetFile(f) as pf:
first_row_group = pf.read_row_group(ROW_INDEX, columns=['thumbnail'])
stream = BytesIO(first_row_group['thumbnail'][0].as_py())
image = Image.open(stream)
```
## Cite
[![arxiv](https://img.shields.io/badge/Open_Access-arxiv:2402.12095-b31b1b)](https://arxiv.org/abs/2402.12095/)
```latex
@inproceedings{Major_TOM,
title={Major TOM: Expandable Datasets for Earth Observation},
author={Alistair Francis and Mikolaj Czerkawski},
year={2024},
booktitle={IGARSS 2024 - 2024 IEEE International Geoscience and Remote Sensing Symposium},
eprint={2402.12095},
archivePrefix={arXiv},
primaryClass={cs.CV}
}
```
Powered by [Φ-lab, European Space Agency (ESA) 🛰️](https://huggingface.co/ESA-philab) |
TIGER-Lab/MMEB-eval | TIGER-Lab | "2024-10-28T16:42:34Z" | 3,581 | 4 | [
"language:en",
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2410.05160",
"region:us",
"ranking"
] | null | "2024-10-08T00:40:40Z" | ---
dataset_info:
- config_name: A-OKVQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 14048199
num_examples: 1000
download_size: 1168340
dataset_size: 14048199
- config_name: CIFAR-100
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 1519890
num_examples: 1000
download_size: 20544
dataset_size: 1519890
- config_name: CIRR
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 70162098
num_examples: 1000
download_size: 1565489
dataset_size: 70162098
- config_name: ChartQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 14354641
num_examples: 1000
download_size: 1434448
dataset_size: 14354641
- config_name: Country211
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 3678000
num_examples: 1000
download_size: 31556
dataset_size: 3678000
- config_name: DocVQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 23044459
num_examples: 1000
download_size: 1734476
dataset_size: 23044459
- config_name: EDIS
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 184208708
num_examples: 1000
download_size: 3350382
dataset_size: 184208708
- config_name: FashionIQ
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 71169665
num_examples: 1000
download_size: 1729457
dataset_size: 71169665
- config_name: GQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 40809641
num_examples: 1000
download_size: 1764457
dataset_size: 40809641
- config_name: HatefulMemes
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 184890
num_examples: 1000
download_size: 9972
dataset_size: 184890
- config_name: ImageNet-1K
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 28773890
num_examples: 1000
download_size: 185019
dataset_size: 28773890
- config_name: ImageNet-A
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 28772890
num_examples: 1000
download_size: 147780
dataset_size: 28772890
- config_name: ImageNet-R
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 3456890
num_examples: 1000
download_size: 23656
dataset_size: 3456890
- config_name: InfographicsVQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 19114439
num_examples: 1000
download_size: 1439837
dataset_size: 19114439
- config_name: MSCOCO
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 97759085
num_examples: 1000
download_size: 1681753
dataset_size: 97759085
- config_name: MSCOCO_i2t
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 60201740
num_examples: 1000
download_size: 1785583
dataset_size: 60201740
- config_name: MSCOCO_t2i
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 87127008
num_examples: 1000
download_size: 1296167
dataset_size: 87127008
- config_name: N24News
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 630658
num_examples: 1000
download_size: 110698
dataset_size: 630658
- config_name: NIGHTS
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 75116000
num_examples: 1000
download_size: 1528646
dataset_size: 75116000
- config_name: OK-VQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 15332578
num_examples: 1000
download_size: 1564823
dataset_size: 15332578
- config_name: OVEN
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 717934263
num_examples: 1000
download_size: 406792141
dataset_size: 717934263
- config_name: ObjectNet
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 2036000
num_examples: 1000
download_size: 27132
dataset_size: 2036000
- config_name: Place365
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 7045000
num_examples: 1000
download_size: 89866
dataset_size: 7045000
- config_name: RefCOCO
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 96493941
num_examples: 1000
download_size: 1858145
dataset_size: 96493941
- config_name: RefCOCO-Matching
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 145712476
num_examples: 1000
download_size: 2879385
dataset_size: 145712476
- config_name: SUN397
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 7990000
num_examples: 1000
download_size: 118447
dataset_size: 7990000
- config_name: ScienceQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 23870406
num_examples: 1000
download_size: 958782
dataset_size: 23870406
- config_name: TextVQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 17435986
num_examples: 1000
download_size: 1571656
dataset_size: 17435986
- config_name: VOC2007
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 368000
num_examples: 1000
download_size: 13813
dataset_size: 368000
- config_name: VisDial
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 67989850
num_examples: 1000
download_size: 1730820
dataset_size: 67989850
- config_name: Visual7W
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 22047066
num_examples: 1000
download_size: 1564788
dataset_size: 22047066
- config_name: Visual7W-Pointing
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 94906832
num_examples: 1000
download_size: 1299380
dataset_size: 94906832
- config_name: VisualNews_i2t
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 118329649
num_examples: 1000
download_size: 81491360
dataset_size: 118329649
- config_name: VisualNews_t2i
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 97176206
num_examples: 1000
download_size: 1763677
dataset_size: 97176206
- config_name: VizWiz
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 20550246
num_examples: 1000
download_size: 1425789
dataset_size: 20550246
- config_name: WebQA
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 197701404
num_examples: 1000
download_size: 3257136
dataset_size: 197701404
- config_name: Wiki-SS-NQ
features:
- name: qry_text
dtype: string
- name: qry_img_path
dtype: string
- name: tgt_text
sequence: string
- name: tgt_img_path
sequence: string
splits:
- name: test
num_bytes: 74583207
num_examples: 1000
download_size: 1900579
dataset_size: 74583207
configs:
- config_name: A-OKVQA
data_files:
- split: test
path: A-OKVQA/test-*
- config_name: CIFAR-100
data_files:
- split: test
path: CIFAR-100/test-*
- config_name: CIRR
data_files:
- split: test
path: CIRR/test-*
- config_name: ChartQA
data_files:
- split: test
path: ChartQA/test-*
- config_name: Country211
data_files:
- split: test
path: Country211/test-*
- config_name: DocVQA
data_files:
- split: test
path: DocVQA/test-*
- config_name: EDIS
data_files:
- split: test
path: EDIS/test-*
- config_name: FashionIQ
data_files:
- split: test
path: FashionIQ/test-*
- config_name: GQA
data_files:
- split: test
path: GQA/test-*
- config_name: HatefulMemes
data_files:
- split: test
path: HatefulMemes/test-*
- config_name: ImageNet-1K
data_files:
- split: test
path: ImageNet-1K/test-*
- config_name: ImageNet-A
data_files:
- split: test
path: ImageNet-A/test-*
- config_name: ImageNet-R
data_files:
- split: test
path: ImageNet-R/test-*
- config_name: InfographicsVQA
data_files:
- split: test
path: InfographicsVQA/test-*
- config_name: MSCOCO
data_files:
- split: test
path: MSCOCO/test-*
- config_name: MSCOCO_i2t
data_files:
- split: test
path: MSCOCO_i2t/test-*
- config_name: MSCOCO_t2i
data_files:
- split: test
path: MSCOCO_t2i/test-*
- config_name: N24News
data_files:
- split: test
path: N24News/test-*
- config_name: NIGHTS
data_files:
- split: test
path: NIGHTS/test-*
- config_name: OK-VQA
data_files:
- split: test
path: OK-VQA/test-*
- config_name: OVEN
data_files:
- split: test
path: OVEN/test-*
- config_name: ObjectNet
data_files:
- split: test
path: ObjectNet/test-*
- config_name: Place365
data_files:
- split: test
path: Place365/test-*
- config_name: RefCOCO
data_files:
- split: test
path: RefCOCO/test-*
- config_name: RefCOCO-Matching
data_files:
- split: test
path: RefCOCO-Matching/test-*
- config_name: SUN397
data_files:
- split: test
path: SUN397/test-*
- config_name: ScienceQA
data_files:
- split: test
path: ScienceQA/test-*
- config_name: TextVQA
data_files:
- split: test
path: TextVQA/test-*
- config_name: VOC2007
data_files:
- split: test
path: VOC2007/test-*
- config_name: VisDial
data_files:
- split: test
path: VisDial/test-*
- config_name: Visual7W
data_files:
- split: test
path: Visual7W/test-*
- config_name: Visual7W-Pointing
data_files:
- split: test
path: Visual7W-Pointing/test-*
- config_name: VisualNews_i2t
data_files:
- split: test
path: VisualNews_i2t/test-*
- config_name: VisualNews_t2i
data_files:
- split: test
path: VisualNews_t2i/test-*
- config_name: VizWiz
data_files:
- split: test
path: VizWiz/test-*
- config_name: WebQA
data_files:
- split: test
path: WebQA/test-*
- config_name: Wiki-SS-NQ
data_files:
- split: test
path: Wiki-SS-NQ/test-*
license: apache-2.0
language:
- en
tags:
- ranking
pretty_name: MMEB
size_categories:
- 10K<n<100K
---
# Massive Multimodal Embedding Benchmark
We compile a large set of evaluation tasks to understand the capabilities of multimodal embedding models. This benchmark covers 4 meta tasks and 36 datasets meticulously selected for evaluation.
The dataset is published in our paper [VLM2Vec: Training Vision-Language Models for Massive Multimodal Embedding Tasks](https://arxiv.org/abs/2410.05160).
## Dataset Usage
For each dataset, we have 1000 examples for evaluation. Each example contains a query and a set of targets. Both the query and target could be any combination of image and text. The first one in the candidate list is the groundtruth target.
## Statistics
We show the statistics of all the datasets as follows:
<img width="900" alt="abs" src="statistics.png">
## Per-dataset Results
We list the performance of different embedding models in the following:
<img width="900" alt="abs" src="leaderboard.png">
## Submission
We will set up a formal leaderboard soon. If you want to add your results to the leaderboard, please send an email to us at [email protected].
## Cite Us
```
@article{jiang2024vlm2vec,
title={VLM2Vec: Training Vision-Language Models for Massive Multimodal Embedding Tasks},
author={Jiang, Ziyan and Meng, Rui and Yang, Xinyi and Yavuz, Semih and Zhou, Yingbo and Chen, Wenhu},
journal={arXiv preprint arXiv:2410.05160},
year={2024}
}
``` |
google/xtreme_s | google | "2024-09-10T13:12:26Z" | 3,573 | 58 | [
"task_categories:automatic-speech-recognition",
"annotations_creators:expert-generated",
"annotations_creators:crowdsourced",
"annotations_creators:machine-generated",
"language_creators:crowdsourced",
"language_creators:expert-generated",
"multilinguality:multilingual",
"source_datasets:extended|multilingual_librispeech",
"source_datasets:extended|covost2",
"language:afr",
"language:amh",
"language:ara",
"language:asm",
"language:ast",
"language:azj",
"language:bel",
"language:ben",
"language:bos",
"language:cat",
"language:ceb",
"language:cmn",
"language:ces",
"language:cym",
"language:dan",
"language:deu",
"language:ell",
"language:eng",
"language:spa",
"language:est",
"language:fas",
"language:ful",
"language:fin",
"language:tgl",
"language:fra",
"language:gle",
"language:glg",
"language:guj",
"language:hau",
"language:heb",
"language:hin",
"language:hrv",
"language:hun",
"language:hye",
"language:ind",
"language:ibo",
"language:isl",
"language:ita",
"language:jpn",
"language:jav",
"language:kat",
"language:kam",
"language:kea",
"language:kaz",
"language:khm",
"language:kan",
"language:kor",
"language:ckb",
"language:kir",
"language:ltz",
"language:lug",
"language:lin",
"language:lao",
"language:lit",
"language:luo",
"language:lav",
"language:mri",
"language:mkd",
"language:mal",
"language:mon",
"language:mar",
"language:msa",
"language:mlt",
"language:mya",
"language:nob",
"language:npi",
"language:nld",
"language:nso",
"language:nya",
"language:oci",
"language:orm",
"language:ory",
"language:pan",
"language:pol",
"language:pus",
"language:por",
"language:ron",
"language:rus",
"language:bul",
"language:snd",
"language:slk",
"language:slv",
"language:sna",
"language:som",
"language:srp",
"language:swe",
"language:swh",
"language:tam",
"language:tel",
"language:tgk",
"language:tha",
"language:tur",
"language:ukr",
"language:umb",
"language:urd",
"language:uzb",
"language:vie",
"language:wol",
"language:xho",
"language:yor",
"language:yue",
"language:zul",
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"arxiv:2203.10752",
"arxiv:2205.12446",
"arxiv:2007.10310",
"region:us",
"speech-recognition"
] | [
"automatic-speech-recognition"
] | "2022-03-04T14:10:40Z" | ---
annotations_creators:
- expert-generated
- crowdsourced
- machine-generated
language_creators:
- crowdsourced
- expert-generated
language:
- afr
- amh
- ara
- asm
- ast
- azj
- bel
- ben
- bos
- cat
- ceb
- cmn
- ces
- cym
- dan
- deu
- ell
- eng
- spa
- est
- fas
- ful
- fin
- tgl
- fra
- gle
- glg
- guj
- hau
- heb
- hin
- hrv
- hun
- hye
- ind
- ibo
- isl
- ita
- jpn
- jav
- kat
- kam
- kea
- kaz
- khm
- kan
- kor
- ckb
- kir
- ltz
- lug
- lin
- lao
- lit
- luo
- lav
- mri
- mkd
- mal
- mon
- mar
- msa
- mlt
- mya
- nob
- npi
- nld
- nso
- nya
- oci
- orm
- ory
- pan
- pol
- pus
- por
- ron
- rus
- bul
- snd
- slk
- slv
- sna
- som
- srp
- swe
- swh
- tam
- tel
- tgk
- tha
- tur
- ukr
- umb
- urd
- uzb
- vie
- wol
- xho
- yor
- yue
- zul
license:
- cc-by-4.0
multilinguality:
- multilingual
size_categories:
- 10K<n<100K
source_datasets:
- extended|multilingual_librispeech
- extended|covost2
task_categories:
- automatic-speech-recognition
task_ids: []
paperswithcode_id: librispeech-1
pretty_name: 'The Cross-lingual TRansfer Evaluation of Multilingual Encoders for Speech
(XTREME-S) benchmark is a benchmark designed to evaluate speech representations
across languages, tasks, domains and data regimes. It covers 102 languages from
10+ language families, 3 different domains and 4 task families: speech recognition,
translation, classification and retrieval.'
tags:
- speech-recognition
---
# XTREME-S
## Dataset Description
- **Fine-Tuning script:** [research-projects/xtreme-s](https://github.com/huggingface/transformers/tree/master/examples/research_projects/xtreme-s)
- **Paper:** [XTREME-S: Evaluating Cross-lingual Speech Representations](https://arxiv.org/abs/2203.10752)
- **Leaderboard:** [TODO(PVP)]()
- **FLEURS amount of disk used:** 350 GB
- **Multilingual Librispeech amount of disk used:** 2700 GB
- **Voxpopuli amount of disk used:** 400 GB
- **Covost2 amount of disk used:** 70 GB
- **Minds14 amount of disk used:** 5 GB
- **Total amount of disk used:** ca. 3500 GB
The Cross-lingual TRansfer Evaluation of Multilingual Encoders for Speech (XTREME-S) benchmark is a benchmark designed to evaluate speech representations across languages, tasks, domains and data regimes. It covers 102 languages from 10+ language families, 3 different domains and 4 task families: speech recognition, translation, classification and retrieval.
***TLDR; XTREME-S is the first speech benchmark that is both diverse, fully accessible, and reproducible. All datasets can be downloaded with a single line of code.
An easy-to-use and flexible fine-tuning script is provided and actively maintained.***
XTREME-S covers speech recognition with Fleurs, Multilingual LibriSpeech (MLS) and VoxPopuli, speech translation with CoVoST-2, speech classification with LangID (Fleurs) and intent classification (MInds-14) and finally speech(-text) retrieval with Fleurs. Each of the tasks covers a subset of the 102 languages included in XTREME-S, from various regions:
- **Western Europe**: *Asturian, Bosnian, Catalan, Croatian, Danish, Dutch, English, Finnish, French, Galician, German, Greek, Hungarian, Icelandic, Irish, Italian, Kabuverdianu, Luxembourgish, Maltese, Norwegian, Occitan, Portuguese, Spanish, Swedish, Welsh*
- **Eastern Europe**: *Armenian, Belarusian, Bulgarian, Czech, Estonian, Georgian, Latvian, Lithuanian, Macedonian, Polish, Romanian, Russian, Serbian, Slovak, Slovenian, Ukrainian*
- **Central-Asia/Middle-East/North-Africa**: *Arabic, Azerbaijani, Hebrew, Kazakh, Kyrgyz, Mongolian, Pashto, Persian, Sorani-Kurdish, Tajik, Turkish, Uzbek*
- **Sub-Saharan Africa**: *Afrikaans, Amharic, Fula, Ganda, Hausa, Igbo, Kamba, Lingala, Luo, Northern-Sotho, Nyanja, Oromo, Shona, Somali, Swahili, Umbundu, Wolof, Xhosa, Yoruba, Zulu*
- **South-Asia**: *Assamese, Bengali, Gujarati, Hindi, Kannada, Malayalam, Marathi, Nepali, Oriya, Punjabi, Sindhi, Tamil, Telugu, Urdu*
- **South-East Asia**: *Burmese, Cebuano, Filipino, Indonesian, Javanese, Khmer, Lao, Malay, Maori, Thai, Vietnamese*
- **CJK languages**: *Cantonese and Mandarin Chinese, Japanese, Korean*
## Design principles
### Diversity
XTREME-S aims for task, domain and language
diversity. Tasks should be diverse and cover several domains to
provide a reliable evaluation of model generalization and
robustness to noisy naturally-occurring speech in different
environments. Languages should be diverse to ensure that
models can adapt to a wide range of linguistic and phonological
phenomena.
### Accessibility
The sub-dataset for each task can be downloaded
with a **single line of code** as shown in [Supported Tasks](#supported-tasks).
Each task is available under a permissive license that allows the use and redistribution
of the data for research purposes. Tasks have been selected based on their usage by
pre-existing multilingual pre-trained models, for simplicity.
### Reproducibility
We produce fully **open-sourced, maintained and easy-to-use** fine-tuning scripts
for each task as shown under [Fine-tuning Example](#fine-tuning-and-evaluation-example).
XTREME-S encourages submissions that leverage publicly available speech and text datasets. Users should detail which data they use.
In general, we encourage settings that can be reproduced by the community, but also encourage the exploration of new frontiers for speech representation learning.
## Fine-tuning and Evaluation Example
We provide a fine-tuning script under [**research-projects/xtreme-s**](https://github.com/huggingface/transformers/tree/master/examples/research_projects/xtreme-s).
The fine-tuning script is written in PyTorch and allows one to fine-tune and evaluate any [Hugging Face model](https://huggingface.co/models) on XTREME-S.
The example script is actively maintained by [@anton-l](https://github.com/anton-l) and [@patrickvonplaten](https://github.com/patrickvonplaten). Feel free
to reach out via issues or pull requests on GitHub if you have any questions.
## Leaderboards
The leaderboard for the XTREME-S benchmark can be found at [this address (TODO(PVP))]().
## Supported Tasks
Note that the suppoprted tasks are focused particularly on linguistic aspect of speech,
while nonlinguistic/paralinguistic aspects of speech relevant to e.g. speech synthesis or voice conversion are **not** evaluated.
<p align="center">
<img src="https://github.com/patrickvonplaten/scientific_images/raw/master/xtreme_s.png" alt="Datasets used in XTREME"/>
</p>
### 1. Speech Recognition (ASR)
We include three speech recognition datasets: FLEURS-ASR, MLS and VoxPopuli (optionally BABEL). Multilingual fine-tuning is used for these three datasets.
#### FLEURS-ASR
*FLEURS-ASR* is the speech version of the FLORES machine translation benchmark, covering 2000 n-way parallel sentences in n=102 languages.
```py
from datasets import load_dataset
fleurs_asr = load_dataset("google/xtreme_s", "fleurs.af_za") # for Afrikaans
# to download all data for multi-lingual fine-tuning uncomment following line
# fleurs_asr = load_dataset("google/xtreme_s", "fleurs.all")
# see structure
print(fleurs_asr)
# load audio sample on the fly
audio_input = fleurs_asr["train"][0]["audio"] # first decoded audio sample
transcription = fleurs_asr["train"][0]["transcription"] # first transcription
# use `audio_input` and `transcription` to fine-tune your model for ASR
# for analyses see language groups
all_language_groups = fleurs_asr["train"].features["lang_group_id"].names
lang_group_id = fleurs_asr["train"][0]["lang_group_id"]
all_language_groups[lang_group_id]
```
#### Multilingual LibriSpeech (MLS)
*MLS* is a large multilingual corpus derived from read audiobooks from LibriVox and consists of 8 languages. For this challenge the training data is limited to 10-hours splits.
```py
from datasets import load_dataset
mls = load_dataset("google/xtreme_s", "mls.pl") # for Polish
# to download all data for multi-lingual fine-tuning uncomment following line
# mls = load_dataset("google/xtreme_s", "mls.all")
# see structure
print(mls)
# load audio sample on the fly
audio_input = mls["train"][0]["audio"] # first decoded audio sample
transcription = mls["train"][0]["transcription"] # first transcription
# use `audio_input` and `transcription` to fine-tune your model for ASR
```
#### VoxPopuli
*VoxPopuli* is a large-scale multilingual speech corpus for representation learning and semi-supervised learning, from which we use the speech recognition dataset. The raw data is collected from 2009-2020 European Parliament event recordings. We acknowledge the European Parliament for creating and sharing these materials.
**VoxPopuli has to download the whole dataset 100GB since languages
are entangled into each other - maybe not worth testing here due to the size**
```py
from datasets import load_dataset
voxpopuli = load_dataset("google/xtreme_s", "voxpopuli.ro") # for Romanian
# to download all data for multi-lingual fine-tuning uncomment following line
# voxpopuli = load_dataset("google/xtreme_s", "voxpopuli.all")
# see structure
print(voxpopuli)
# load audio sample on the fly
audio_input = voxpopuli["train"][0]["audio"] # first decoded audio sample
transcription = voxpopuli["train"][0]["transcription"] # first transcription
# use `audio_input` and `transcription` to fine-tune your model for ASR
```
#### (Optionally) BABEL
*BABEL* from IARPA is a conversational speech recognition dataset in low-resource languages. First, download LDC2016S06, LDC2016S12, LDC2017S08, LDC2017S05 and LDC2016S13. BABEL is the only dataset in our benchmark who is less easily accessible, so you will need to sign in to get access to it on LDC. Although not officially part of the XTREME-S ASR datasets, BABEL is often used for evaluating speech representations on a difficult domain (phone conversations).
```py
from datasets import load_dataset
babel = load_dataset("google/xtreme_s", "babel.as")
```
**The above command is expected to fail with a nice error message,
explaining how to download BABEL**
The following should work:
```py
from datasets import load_dataset
babel = load_dataset("google/xtreme_s", "babel.as", data_dir="/path/to/IARPA_BABEL_OP1_102_LDC2016S06.zip")
# see structure
print(babel)
# load audio sample on the fly
audio_input = babel["train"][0]["audio"] # first decoded audio sample
transcription = babel["train"][0]["transcription"] # first transcription
# use `audio_input` and `transcription` to fine-tune your model for ASR
```
### 2. Speech Translation (ST)
We include the CoVoST-2 dataset for automatic speech translation.
#### CoVoST-2
The *CoVoST-2* benchmark has become a commonly used dataset for evaluating automatic speech translation. It covers language pairs from English into 15 languages, as well as 21 languages into English. We use only the "X->En" direction to evaluate cross-lingual representations. The amount of supervision varies greatly in this setting, from one hour for Japanese->English to 180 hours for French->English. This makes pretraining particularly useful to enable such few-shot learning. We enforce multiligual fine-tuning for simplicity. Results are splitted in high/med/low-resource language pairs as explained in the [paper (TODO(PVP))].
```py
from datasets import load_dataset
covost_2 = load_dataset("google/xtreme_s", "covost2.id.en") # for Indonesian to English
# to download all data for multi-lingual fine-tuning uncomment following line
# covost_2 = load_dataset("google/xtreme_s", "covost2.all")
# see structure
print(covost_2)
# load audio sample on the fly
audio_input = covost_2["train"][0]["audio"] # first decoded audio sample
transcription = covost_2["train"][0]["transcription"] # first transcription
translation = covost_2["train"][0]["translation"] # first translation
# use audio_input and translation to fine-tune your model for AST
```
### 3. Speech Classification
We include two multilingual speech classification datasets: FLEURS-LangID and Minds-14.
#### Language Identification - FLEURS-LangID
LangID can often be a domain classification, but in the case of FLEURS-LangID, recordings are done in a similar setting across languages and the utterances correspond to n-way parallel sentences, in the exact same domain, making this task particularly relevant for evaluating LangID. The setting is simple, FLEURS-LangID is splitted in train/valid/test for each language. We simply create a single train/valid/test for LangID by merging all.
```py
from datasets import load_dataset
fleurs_langID = load_dataset("google/xtreme_s", "fleurs.all") # to download all data
# see structure
print(fleurs_langID)
# load audio sample on the fly
audio_input = fleurs_langID["train"][0]["audio"] # first decoded audio sample
language_class = fleurs_langID["train"][0]["lang_id"] # first id class
language = fleurs_langID["train"].features["lang_id"].names[language_class]
# use audio_input and language_class to fine-tune your model for audio classification
```
#### Intent classification - Minds-14
Minds-14 is an intent classification made from e-banking speech datasets in 14 languages, with 14 intent labels. We impose a single multilingual fine-tuning to increase the size of the train and test sets and reduce the variance associated with the small size of the dataset per language.
```py
from datasets import load_dataset
minds_14 = load_dataset("google/xtreme_s", "minds14.fr-FR") # for French
# to download all data for multi-lingual fine-tuning uncomment following line
# minds_14 = load_dataset("google/xtreme_s", "minds14.all")
# see structure
print(minds_14)
# load audio sample on the fly
audio_input = minds_14["train"][0]["audio"] # first decoded audio sample
intent_class = minds_14["train"][0]["intent_class"] # first transcription
intent = minds_14["train"].features["intent_class"].names[intent_class]
# use audio_input and language_class to fine-tune your model for audio classification
```
### 4. (Optionally) Speech Retrieval
We optionally include one speech retrieval dataset: FLEURS-Retrieval as explained in the [FLEURS paper](https://arxiv.org/abs/2205.12446).
#### FLEURS-Retrieval
FLEURS-Retrieval provides n-way parallel speech and text data. Similar to how XTREME for text leverages Tatoeba to evaluate bitext mining a.k.a sentence translation retrieval, we use FLEURS-Retrieval to evaluate the quality of fixed-size representations of speech utterances. Our goal is to incentivize the creation of fixed-size speech encoder for speech retrieval. The system has to retrieve the English "key" utterance corresponding to the speech translation of "queries" in 15 languages. Results have to be reported on the test sets of FLEURS-Retrieval whose utterances are used as queries (and keys for English). We augment the English keys with a large number of utterances to make the task more difficult.
```py
from datasets import load_dataset
fleurs_retrieval = load_dataset("google/xtreme_s", "fleurs.af_za") # for Afrikaans
# to download all data for multi-lingual fine-tuning uncomment following line
# fleurs_retrieval = load_dataset("google/xtreme_s", "fleurs.all")
# see structure
print(fleurs_retrieval)
# load audio sample on the fly
audio_input = fleurs_retrieval["train"][0]["audio"] # decoded audio sample
text_sample_pos = fleurs_retrieval["train"][0]["transcription"] # positive text sample
text_sample_neg = fleurs_retrieval["train"][1:20]["transcription"] # negative text samples
# use `audio_input`, `text_sample_pos`, and `text_sample_neg` to fine-tune your model for retrieval
```
Users can leverage the training (and dev) sets of FLEURS-Retrieval with a ranking loss to build better cross-lingual fixed-size representations of speech.
## Dataset Structure
The XTREME-S benchmark is composed of the following datasets:
- [FLEURS](https://huggingface.co/datasets/google/fleurs#dataset-structure)
- [Multilingual Librispeech (MLS)](https://huggingface.co/datasets/facebook/multilingual_librispeech#dataset-structure)
Note that for MLS, XTREME-S uses `path` instead of `file` and `transcription` instead of `text`.
- [Voxpopuli](https://huggingface.co/datasets/facebook/voxpopuli#dataset-structure)
- [Minds14](https://huggingface.co/datasets/polyai/minds14#dataset-structure)
- [Covost2](https://huggingface.co/datasets/covost2#dataset-structure)
Note that for Covost2, XTREME-S uses `path` instead of `file` and `transcription` instead of `sentence`.
- [BABEL](https://huggingface.co/datasets/ldc/iarpa_babel#dataset-structure)
Please click on the link of the dataset cards to get more information about its dataset structure.
## Dataset Creation
The XTREME-S benchmark is composed of the following datasets:
- [FLEURS](https://huggingface.co/datasets/google/fleurs#dataset-creation)
- [Multilingual Librispeech (MLS)](https://huggingface.co/datasets/facebook/multilingual_librispeech#dataset-creation)
- [Voxpopuli](https://huggingface.co/datasets/facebook/voxpopuli#dataset-creation)
- [Minds14](https://huggingface.co/datasets/polyai/minds14#dataset-creation)
- [Covost2](https://huggingface.co/datasets/covost2#dataset-creation)
- [BABEL](https://huggingface.co/datasets/ldc/iarpa_babel#dataset-creation)
Please visit the corresponding dataset cards to get more information about the source data.
## Considerations for Using the Data
### Social Impact of Dataset
This dataset is meant to encourage the development of speech technology in a lot more languages of the world. One of the goal is to give equal access to technologies like speech recognition or speech translation to everyone, meaning better dubbing or better access to content from the internet (like podcasts, streaming or videos).
### Discussion of Biases
Most datasets have a fair distribution of gender utterances (e.g. the newly introduced FLEURS dataset). While many languages are covered from various regions of the world, the benchmark misses many languages that are all equally important. We believe technology built through XTREME-S should generalize to all languages.
### Other Known Limitations
The benchmark has a particular focus on read-speech because common evaluation benchmarks like CoVoST-2 or LibriSpeech evaluate on this type of speech. There is sometimes a known mismatch between performance obtained in a read-speech setting and a more noisy setting (in production for instance). Given the big progress that remains to be made on many languages, we believe better performance on XTREME-S should still correlate well with actual progress made for speech understanding.
## Additional Information
All datasets are licensed under the [Creative Commons license (CC-BY)](https://creativecommons.org/licenses/).
### Citation Information
#### XTREME-S
```
@article{conneau2022xtreme,
title={XTREME-S: Evaluating Cross-lingual Speech Representations},
author={Conneau, Alexis and Bapna, Ankur and Zhang, Yu and Ma, Min and von Platen, Patrick and Lozhkov, Anton and Cherry, Colin and Jia, Ye and Rivera, Clara and Kale, Mihir and others},
journal={arXiv preprint arXiv:2203.10752},
year={2022}
}
```
#### MLS
```
@article{Pratap2020MLSAL,
title={MLS: A Large-Scale Multilingual Dataset for Speech Research},
author={Vineel Pratap and Qiantong Xu and Anuroop Sriram and Gabriel Synnaeve and Ronan Collobert},
journal={ArXiv},
year={2020},
volume={abs/2012.03411}
}
```
#### VoxPopuli
```
@article{wang2021voxpopuli,
title={Voxpopuli: A large-scale multilingual speech corpus for representation learning, semi-supervised learning and interpretation},
author={Wang, Changhan and Riviere, Morgane and Lee, Ann and Wu, Anne and Talnikar, Chaitanya and Haziza, Daniel and Williamson, Mary and Pino, Juan and Dupoux, Emmanuel},
journal={arXiv preprint arXiv:2101.00390},
year={2021}
}
```
#### CoVoST 2
```
@article{DBLP:journals/corr/abs-2007-10310,
author = {Changhan Wang and
Anne Wu and
Juan Miguel Pino},
title = {CoVoST 2: {A} Massively Multilingual Speech-to-Text Translation Corpus},
journal = {CoRR},
volume = {abs/2007.10310},
year = {2020},
url = {https://arxiv.org/abs/2007.10310},
eprinttype = {arXiv},
eprint = {2007.10310},
timestamp = {Thu, 12 Aug 2021 15:37:06 +0200},
biburl = {https://dblp.org/rec/journals/corr/abs-2007-10310.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
```
#### Minds14
```
@article{gerz2021multilingual,
title={Multilingual and cross-lingual intent detection from spoken data},
author={Gerz, Daniela and Su, Pei-Hao and Kusztos, Razvan and Mondal, Avishek and Lis, Micha{\l} and Singhal, Eshan and Mrk{\v{s}}i{\'c}, Nikola and Wen, Tsung-Hsien and Vuli{\'c}, Ivan},
journal={arXiv preprint arXiv:2104.08524},
year={2021}
}
```
### Contributions
Thanks to [@patrickvonplaten](https://github.com/patrickvonplaten), [@anton-l](https://github.com/anton-l), [@aconneau](https://github.com/aconneau) for adding this dataset
|
big-banyan-tree/BBT_CommonCrawl_2024 | big-banyan-tree | "2024-10-11T08:17:41Z" | 3,563 | 3 | [
"language:en",
"license:mit",
"size_categories:10M<n<100M",
"format:arrow",
"modality:text",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-10-09T05:43:50Z" | ---
license: mit
language:
- en
pretty_name: BBT-CC24
size_categories:
- 10M<n<100M
configs:
- config_name: script_extraction
data_files: "script_extraction/*.arrow"
- config_name: ipmaxmind
data_files: "ipmaxmind/*.arrow"
---
# Context
BigBanyanTree is an initiative to empower colleges to set up their data engineering clusters, and drive interest towards data processing and analysis using tools such as Apache Spark. The data provided here is the direct result of this initiative. The data was processed by [Gautam](https://www.linkedin.com/in/gautam-menon-9a30a3233/) and [Suchit](https://www.linkedin.com/in/suchitg04/), under the guidance of [Harsh Singhal](https://www.linkedin.com/in/harshsinghal/).
# Content
Each `arrow` file contains a table with fields extracted from Common Crawl WARC files.
The datasets provided are derived from processing randomly sampled 900 WARC files from the [2024-33 CommonCrawl dump](https://data.commoncrawl.org/crawl-data/CC-MAIN-2024-33/index.html).
The MaxMind database used to enrich WARC data with geolocation information is GeoLite2-City_20240903 (released on 3rd Sept. 2024).
## <span style="color:red">⚠️ WARNING ⚠️</span>
The **URLs** and **IP addresses** extracted in this dataset are sourced from **publicly available Common Crawl data dumps**. Please be aware that:
- The data may contain **inaccuracies** or **outdated information**.
- **No validation or filtering** has been performed on the extracted URLs or IP addresses.
- As the data has **not been filtered**, it may contain URLs promoting **obscene or objectionable content**.
- Use this data **with caution**, especially for tasks involving personal or sensitive information.
## Disclaimer
These data points are included solely for the purpose of:
- **Analyzing domain distributions**
- **IP metadata analysis** |
alvations/c4p0-x1-en-es | alvations | "2024-03-24T03:55:08Z" | 3,552 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-03-23T10:03:20Z" | ---
dataset_info:
features:
- name: source
dtype: string
- name: target
dtype: string
- name: target_backto_source
dtype: string
- name: raw_target
list:
- name: generated_text
dtype: string
- name: raw_target_backto_source
list:
- name: generated_text
dtype: string
- name: prompt
dtype: string
- name: reverse_prompt
dtype: string
- name: source_langid
dtype: string
- name: target_langid
dtype: string
- name: target_backto_source_langid
dtype: string
- name: doc_id
dtype: int64
- name: sent_id
dtype: int64
- name: timestamp
dtype: string
- name: url
dtype: string
- name: doc_hash
dtype: string
splits:
- name: train
num_bytes: 2308
num_examples: 2
download_size: 15489
dataset_size: 2308
configs:
- config_name: default
data_files:
- split: train
path: 1f24f16745a166b0/train-*
---
|
Hemabhushan/capstone_sakuga_preproc_optical_flow | Hemabhushan | "2024-11-21T03:57:26Z" | 3,549 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-21T18:37:50Z" | ---
dataset_info:
- config_name: sample_subset
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 30244617541
num_examples: 2396
download_size: 5461228507
dataset_size: 30244617541
- config_name: seg1_part1
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482846975
num_examples: 2498
download_size: 5683747736
dataset_size: 31482846975
- config_name: seg1_part10
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533368384
num_examples: 2498
download_size: 5651523132
dataset_size: 31533368384
- config_name: seg1_part11
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31495470684
num_examples: 2498
download_size: 5646719194
dataset_size: 31495470684
- config_name: seg1_part12
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533346986
num_examples: 2498
download_size: 5705163694
dataset_size: 31533346986
- config_name: seg1_part14
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31432281831
num_examples: 2498
download_size: 5627562296
dataset_size: 31432281831
- config_name: seg1_part15
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508092434
num_examples: 2498
download_size: 5647225033
dataset_size: 31508092434
- config_name: seg1_part18
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482843486
num_examples: 2498
download_size: 5703102313
dataset_size: 31482843486
- config_name: seg1_part2
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31457575891
num_examples: 2498
download_size: 5650519682
dataset_size: 31457575891
- config_name: seg1_part20
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31470181418
num_examples: 2498
download_size: 5625192608
dataset_size: 31470181418
- config_name: seg1_part21
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508103461
num_examples: 2498
download_size: 5680819286
dataset_size: 31508103461
- config_name: seg1_part24
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31520754590
num_examples: 2498
download_size: 5697959164
dataset_size: 31520754590
- config_name: seg1_part25
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482806663
num_examples: 2498
download_size: 5628329196
dataset_size: 31482806663
- config_name: seg1_part26
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533316255
num_examples: 2498
download_size: 5662161621
dataset_size: 31533316255
- config_name: seg1_part27
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533349249
num_examples: 2498
download_size: 5654417461
dataset_size: 31533349249
- config_name: seg1_part28
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533349763
num_examples: 2498
download_size: 5644209592
dataset_size: 31533349763
- config_name: seg1_part29
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508101677
num_examples: 2498
download_size: 5725512822
dataset_size: 31508101677
- config_name: seg1_part30
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31520696316
num_examples: 2498
download_size: 5649748978
dataset_size: 31520696316
- config_name: seg1_part31
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31470211581
num_examples: 2498
download_size: 5691521624
dataset_size: 31470211581
- config_name: seg1_part32
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31520703122
num_examples: 2498
download_size: 5611392470
dataset_size: 31520703122
- config_name: seg1_part33
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533395644
num_examples: 2498
download_size: 5765660331
dataset_size: 31533395644
- config_name: seg1_part34
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482802747
num_examples: 2498
download_size: 5654024836
dataset_size: 31482802747
- config_name: seg1_part35
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508072238
num_examples: 2498
download_size: 5632935439
dataset_size: 31508072238
- config_name: seg1_part36
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508118773
num_examples: 2498
download_size: 5708713170
dataset_size: 31508118773
- config_name: seg1_part39
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508101149
num_examples: 2498
download_size: 5697274819
dataset_size: 31508101149
- config_name: seg1_part4
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482829874
num_examples: 2498
download_size: 5700440041
dataset_size: 31482829874
- config_name: seg1_part40
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31508069004
num_examples: 2498
download_size: 5640935450
dataset_size: 31508069004
- config_name: seg1_part41
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31558603213
num_examples: 2498
download_size: 5713447755
dataset_size: 31558603213
- config_name: seg1_part42
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31482828955
num_examples: 2498
download_size: 5640954061
dataset_size: 31482828955
- config_name: seg1_part43
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31520704278
num_examples: 2498
download_size: 5736520090
dataset_size: 31520704278
- config_name: seg1_part44
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31166950691
num_examples: 2471
download_size: 5640666013
dataset_size: 31166950691
- config_name: seg1_part45
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
splits:
- name: train
num_bytes: 0
num_examples: 0
download_size: 6857
dataset_size: 0
- config_name: seg1_part6
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31545984682
num_examples: 2498
download_size: 5674259234
dataset_size: 31545984682
- config_name: seg1_part8
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31545987289
num_examples: 2498
download_size: 5733443343
dataset_size: 31545987289
- config_name: seg1_part9
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: word_count
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: frames
dtype: 'null'
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31533382844
num_examples: 2498
download_size: 5634081955
dataset_size: 31533382844
- config_name: seg2_part1
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: prev_conv
list:
- name: content
list:
- name: text
dtype: string
- name: type
dtype: string
- name: role
dtype: string
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31284870750
num_examples: 2498
download_size: 6023339313
dataset_size: 31284870750
- config_name: seg2_part3
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: prev_conv
list:
- name: content
list:
- name: text
dtype: string
- name: type
dtype: string
- name: role
dtype: string
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31297486132
num_examples: 2498
download_size: 6033401989
dataset_size: 31297486132
- config_name: seg2_part4
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: prev_conv
list:
- name: content
list:
- name: text
dtype: string
- name: type
dtype: string
- name: role
dtype: string
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31209057858
num_examples: 2498
download_size: 6033150218
dataset_size: 31209057858
- config_name: seg2_part7
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: prev_conv
list:
- name: content
list:
- name: text
dtype: string
- name: type
dtype: string
- name: role
dtype: string
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31310131354
num_examples: 2498
download_size: 6026279130
dataset_size: 31310131354
- config_name: seg3_part1
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31091165546
num_examples: 2498
download_size: 6013722850
dataset_size: 31091165546
- config_name: seg3_part3
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 30964835031
num_examples: 2498
download_size: 5981318118
dataset_size: 30964835031
- config_name: seg3_part4
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31141675653
num_examples: 2498
download_size: 6035418048
dataset_size: 31141675653
- config_name: seg3_part7
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31230125953
num_examples: 2498
download_size: 6080001698
dataset_size: 31230125953
- config_name: seg4_part1
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31230126642
num_examples: 2498
download_size: 6074698944
dataset_size: 31230126642
- config_name: seg4_part3
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31040618574
num_examples: 2498
download_size: 5968129650
dataset_size: 31040618574
- config_name: seg4_part5
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31015355157
num_examples: 2498
download_size: 6027043124
dataset_size: 31015355157
- config_name: seg4_part6
features:
- name: identifier
dtype: string
- name: hash_identifier
dtype: string
- name: url_link
dtype: string
- name: scene_start_time
dtype: string
- name: scene_end_time
dtype: string
- name: frame_number
dtype: float64
- name: key_frame_number
dtype: float64
- name: anime_tags
dtype: string
- name: user_tags
dtype: string
- name: text_description
dtype: string
- name: aesthetic_score
dtype: float64
- name: dynamic_score
dtype: float64
- name: rating
dtype: string
- name: text_prob
dtype: float64
- name: width
dtype: int64
- name: height
dtype: int64
- name: file_ext
dtype: string
- name: fps
dtype: float64
- name: Taxonomy_Time
dtype: string
- name: Taxonomy_Venue
dtype: string
- name: Taxonomy_Media
dtype: string
- name: Taxonomy_Filming
dtype: string
- name: Taxonomy_Composition
dtype: string
- name: Taxonomy_Character
dtype: string
- name: __index_level_0__
dtype: int64
- name: video_id
dtype: string
- name: video_segment_no
dtype: int64
- name: word_count
dtype: int64
- name: video_frames
sequence:
sequence:
sequence:
sequence: float32
splits:
- name: train
num_bytes: 31116405163
num_examples: 2498
download_size: 6079250810
dataset_size: 31116405163
configs:
- config_name: sample_subset
data_files:
- split: train
path: sample_subset/train-*
- config_name: seg1_part1
data_files:
- split: train
path: seg1_part1/train-*
- config_name: seg1_part10
data_files:
- split: train
path: seg1_part10/train-*
- config_name: seg1_part11
data_files:
- split: train
path: seg1_part11/train-*
- config_name: seg1_part12
data_files:
- split: train
path: seg1_part12/train-*
- config_name: seg1_part14
data_files:
- split: train
path: seg1_part14/train-*
- config_name: seg1_part15
data_files:
- split: train
path: seg1_part15/train-*
- config_name: seg1_part18
data_files:
- split: train
path: seg1_part18/train-*
- config_name: seg1_part2
data_files:
- split: train
path: seg1_part2/train-*
- config_name: seg1_part20
data_files:
- split: train
path: seg1_part20/train-*
- config_name: seg1_part21
data_files:
- split: train
path: seg1_part21/train-*
- config_name: seg1_part24
data_files:
- split: train
path: seg1_part24/train-*
- config_name: seg1_part25
data_files:
- split: train
path: seg1_part25/train-*
- config_name: seg1_part26
data_files:
- split: train
path: seg1_part26/train-*
- config_name: seg1_part27
data_files:
- split: train
path: seg1_part27/train-*
- config_name: seg1_part28
data_files:
- split: train
path: seg1_part28/train-*
- config_name: seg1_part29
data_files:
- split: train
path: seg1_part29/train-*
- config_name: seg1_part30
data_files:
- split: train
path: seg1_part30/train-*
- config_name: seg1_part31
data_files:
- split: train
path: seg1_part31/train-*
- config_name: seg1_part32
data_files:
- split: train
path: seg1_part32/train-*
- config_name: seg1_part33
data_files:
- split: train
path: seg1_part33/train-*
- config_name: seg1_part34
data_files:
- split: train
path: seg1_part34/train-*
- config_name: seg1_part35
data_files:
- split: train
path: seg1_part35/train-*
- config_name: seg1_part36
data_files:
- split: train
path: seg1_part36/train-*
- config_name: seg1_part39
data_files:
- split: train
path: seg1_part39/train-*
- config_name: seg1_part4
data_files:
- split: train
path: seg1_part4/train-*
- config_name: seg1_part40
data_files:
- split: train
path: seg1_part40/train-*
- config_name: seg1_part41
data_files:
- split: train
path: seg1_part41/train-*
- config_name: seg1_part42
data_files:
- split: train
path: seg1_part42/train-*
- config_name: seg1_part43
data_files:
- split: train
path: seg1_part43/train-*
- config_name: seg1_part44
data_files:
- split: train
path: seg1_part44/train-*
- config_name: seg1_part45
data_files:
- split: train
path: seg1_part45/train-*
- config_name: seg1_part6
data_files:
- split: train
path: seg1_part6/train-*
- config_name: seg1_part8
data_files:
- split: train
path: seg1_part8/train-*
- config_name: seg1_part9
data_files:
- split: train
path: seg1_part9/train-*
- config_name: seg2_part1
data_files:
- split: train
path: seg2_part1/train-*
- config_name: seg2_part3
data_files:
- split: train
path: seg2_part3/train-*
- config_name: seg2_part4
data_files:
- split: train
path: seg2_part4/train-*
- config_name: seg2_part7
data_files:
- split: train
path: seg2_part7/train-*
- config_name: seg3_part1
data_files:
- split: train
path: seg3_part1/train-*
- config_name: seg3_part3
data_files:
- split: train
path: seg3_part3/train-*
- config_name: seg3_part4
data_files:
- split: train
path: seg3_part4/train-*
- config_name: seg3_part7
data_files:
- split: train
path: seg3_part7/train-*
- config_name: seg4_part1
data_files:
- split: train
path: seg4_part1/train-*
- config_name: seg4_part3
data_files:
- split: train
path: seg4_part3/train-*
- config_name: seg4_part5
data_files:
- split: train
path: seg4_part5/train-*
- config_name: seg4_part6
data_files:
- split: train
path: seg4_part6/train-*
---
|
openlifescienceai/medmcqa | openlifescienceai | "2024-01-04T14:32:02Z" | 3,536 | 127 | [
"task_categories:question-answering",
"task_categories:multiple-choice",
"task_ids:multiple-choice-qa",
"task_ids:open-domain-qa",
"annotations_creators:no-annotation",
"language_creators:expert-generated",
"multilinguality:monolingual",
"source_datasets:original",
"language:en",
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"question-answering",
"multiple-choice"
] | "2022-05-06T08:43:24Z" | ---
annotations_creators:
- no-annotation
language_creators:
- expert-generated
language:
- en
license:
- apache-2.0
multilinguality:
- monolingual
size_categories:
- 100K<n<1M
source_datasets:
- original
task_categories:
- question-answering
- multiple-choice
task_ids:
- multiple-choice-qa
- open-domain-qa
paperswithcode_id: medmcqa
pretty_name: MedMCQA
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: opa
dtype: string
- name: opb
dtype: string
- name: opc
dtype: string
- name: opd
dtype: string
- name: cop
dtype:
class_label:
names:
'0': a
'1': b
'2': c
'3': d
- name: choice_type
dtype: string
- name: exp
dtype: string
- name: subject_name
dtype: string
- name: topic_name
dtype: string
splits:
- name: train
num_bytes: 131903297
num_examples: 182822
- name: test
num_bytes: 1399350
num_examples: 6150
- name: validation
num_bytes: 2221428
num_examples: 4183
download_size: 88311487
dataset_size: 135524075
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
# Dataset Card for MedMCQA
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
  - [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
## Dataset Description
- **Homepage:** https://medmcqa.github.io
- **Repository:** https://github.com/medmcqa/medmcqa
- **Paper:** [MedMCQA: A Large-scale Multi-Subject Multi-Choice Dataset for Medical domain Question Answering](https://proceedings.mlr.press/v174/pal22a)
- **Leaderboard:** https://paperswithcode.com/dataset/medmcqa
- **Point of Contact:** [Aaditya Ura](mailto:[email protected])
### Dataset Summary
MedMCQA is a large-scale, Multiple-Choice Question Answering (MCQA) dataset designed to address real-world medical entrance exam questions.
MedMCQA has more than 194k high-quality AIIMS & NEET PG entrance exam MCQs covering 2.4k healthcare topics and 21 medical subjects, collected with an average token length of 12.77 and high topical diversity.
Each sample contains a question, correct answer(s), and other options which require a deeper language understanding as it tests the 10+ reasoning abilities of a model across a wide range of medical subjects & topics. A detailed explanation of the solution, along with the above information, is provided in this study.
MedMCQA provides an open-source dataset for the Natural Language Processing community.
It is expected that this dataset would facilitate future research toward achieving better QA systems.
The dataset contains questions about the following topics:
- Anesthesia
- Anatomy
- Biochemistry
- Dental
- ENT
- Forensic Medicine (FM)
- Obstetrics and Gynecology (O&G)
- Medicine
- Microbiology
- Ophthalmology
- Orthopedics
- Pathology
- Pediatrics
- Pharmacology
- Physiology
- Psychiatry
- Radiology
- Skin
- Preventive & Social Medicine (PSM)
- Surgery
### Supported Tasks and Leaderboards
multiple-choice-QA, open-domain-QA: The dataset can be used to train a model for multiple-choice question answering and open-domain question answering. Questions in these exams are challenging and generally require deeper domain and language understanding, as they test 10+ reasoning abilities across a wide range of medical subjects & topics.
### Languages
The questions and answers are available in English.
## Dataset Structure
### Data Instances
```
{
"question":"A 40-year-old man presents with 5 days of productive cough and fever. Pseudomonas aeruginosa is isolated from a pulmonary abscess. CBC shows an acute effect characterized by marked leukocytosis (50,000 mL) and the differential count reveals a shift to left in granulocytes. Which of the following terms best describes these hematologic findings?",
"exp": "Circulating levels of leukocytes and their precursors may occasionally reach very high levels (>50,000 WBC mL). These extreme elevations are sometimes called leukemoid reactions because they are similar to the white cell counts observed in leukemia, from which they must be distinguished. The leukocytosis occurs initially because of the accelerated release of granulocytes from the bone marrow (caused by cytokines, including TNF and IL-1) There is a rise in the number of both mature and immature neutrophils in the blood, referred to as a shift to the left. In contrast to bacterial infections, viral infections (including infectious mononucleosis) are characterized by lymphocytosis Parasitic infestations and certain allergic reactions cause eosinophilia, an increase in the number of circulating eosinophils. Leukopenia is defined as an absolute decrease in the circulating WBC count.",
"cop":1,
"opa":"Leukemoid reaction",
"opb":"Leukopenia",
"opc":"Myeloid metaplasia",
"opd":"Neutrophilia",
"subject_name":"Pathology",
"topic_name":"Basic Concepts and Vascular changes of Acute Inflammation",
"id":"4e1715fe-0bc3-494e-b6eb-2d4617245aef",
"choice_type":"single"
}
```
### Data Fields
- `id` : a string question identifier for each example
- `question` : question text (a string)
- `opa` : Option A
- `opb` : Option B
- `opc` : Option C
- `opd` : Option D
- `cop` : Correct option, i.e., 1,2,3,4
- `choice_type` ({"single", "multi"}): Question choice type.
- "single": Single-choice question, where each choice contains a single option.
- "multi": Multi-choice question, where each choice contains a combination of multiple suboptions.
- `exp` : Expert's explanation of the answer
- `subject_name` : Medical Subject name of the particular question
- `topic_name` : Medical topic name from the particular subject
### Data Splits
The goal of MedMCQA is to emulate the rigor of real-world medical exams. To enable that, a predefined split of the dataset is provided. The split is by exams instead of the given questions. This also ensures the reusability and generalization ability of the models.
The training set of MedMCQA consists of all the collected mock & online test series, whereas the test set consists of all AIIMS PG exam MCQs (years 1991-present). The development set consists of NEET PG exam MCQs (years 2001-present) to approximate real exam evaluation.
Similar questions from the train, test, and dev sets were removed based on similarity. The final split sizes are as follows:
| | Train | Test | Valid |
| ----- | ------ | ----- | ---- |
| Question #| 182,822 | 6,150 | 4,183|
| Vocab | 94,231 | 11,218 | 10,800 |
| Max Ques tokens | 220 | 135| 88 |
| Max Ans tokens | 38 | 21 | 25 |
## Dataset Creation
### Curation Rationale
Before this attempt, very few works had been done to construct biomedical MCQA datasets (Vilares and Gómez-Rodríguez, 2019), and they are (1) mostly small, containing up to a few thousand questions, and (2) cover a limited number of medical topics and subjects. This paper addresses the aforementioned limitations by introducing MedMCQA, a new large-scale, Multiple-Choice Question Answering
(MCQA) dataset designed to address real-world medical entrance exam questions.
### Source Data
#### Initial Data Collection and Normalization
Historical Exam questions from official websites - AIIMS & NEET PG (1991- present)
The raw data is collected from open websites and books
#### Who are the source language producers?
The dataset was created by Ankit Pal, Logesh Kumar Umapathi and Malaikannan Sankarasubbu
### Annotations
#### Annotation process
The dataset does not contain any additional annotations.
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
[Needs More Information]
## Considerations for Using the Data
### Social Impact of Dataset
[Needs More Information]
### Discussion of Biases
[Needs More Information]
### Other Known Limitations
[Needs More Information]
## Additional Information
### Dataset Curators
[Needs More Information]
### Licensing Information
[Needs More Information]
### Citation Information
If you find this useful in your research, please consider citing the dataset paper
```
@InProceedings{pmlr-v174-pal22a,
title = {MedMCQA: A Large-scale Multi-Subject Multi-Choice Dataset for Medical domain Question Answering},
author = {Pal, Ankit and Umapathi, Logesh Kumar and Sankarasubbu, Malaikannan},
booktitle = {Proceedings of the Conference on Health, Inference, and Learning},
pages = {248--260},
year = {2022},
editor = {Flores, Gerardo and Chen, George H and Pollard, Tom and Ho, Joyce C and Naumann, Tristan},
volume = {174},
series = {Proceedings of Machine Learning Research},
month = {07--08 Apr},
publisher = {PMLR},
pdf = {https://proceedings.mlr.press/v174/pal22a/pal22a.pdf},
url = {https://proceedings.mlr.press/v174/pal22a.html},
abstract = {This paper introduces MedMCQA, a new large-scale, Multiple-Choice Question Answering (MCQA) dataset designed to address real-world medical entrance exam questions. More than 194k high-quality AIIMS & NEET PG entrance exam MCQs covering 2.4k healthcare topics and 21 medical subjects are collected with an average token length of 12.77 and high topical diversity. Each sample contains a question, correct answer(s), and other options which requires a deeper language understanding as it tests the 10+ reasoning abilities of a model across a wide range of medical subjects & topics. A detailed explanation of the solution, along with the above information, is provided in this study.}
}
```
### Contributions
Thanks to [@monk1337](https://github.com/monk1337) for adding this dataset. |
common-canvas/commoncatalog-cc-by | common-canvas | "2024-05-16T19:01:29Z" | 3,529 | 26 | [
"task_categories:text-to-image",
"language:en",
"license:cc-by-4.0",
"size_categories:10M<n<100M",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2310.16825",
"region:us"
] | [
"text-to-image"
] | "2024-04-22T18:07:35Z" | ---
license: cc-by-4.0
dataset_info:
features:
- name: jpg
dtype: image
- name: blip2_caption
dtype: string
- name: caption
dtype: string
- name: licensename
dtype: string
- name: licenseurl
dtype: string
- name: width
dtype: int32
- name: height
dtype: int32
- name: original_width
dtype: int32
- name: original_height
dtype: int32
- name: photoid
dtype: int64
- name: uid
dtype: string
- name: unickname
dtype: string
- name: datetaken
dtype: timestamp[us]
- name: dateuploaded
dtype: int64
- name: capturedevice
dtype: string
- name: title
dtype: string
- name: usertags
dtype: string
- name: machinetags
dtype: string
- name: longitude
dtype: float64
- name: latitude
dtype: float64
- name: accuracy
dtype: int64
- name: pageurl
dtype: string
- name: downloadurl
dtype: string
- name: serverid
dtype: int64
- name: farmid
dtype: int64
- name: secret
dtype: string
- name: secretoriginal
dtype: string
- name: ext
dtype: string
- name: url
dtype: string
- name: key
dtype: string
- name: status
dtype: string
- name: error_message
dtype: string
- name: exif
dtype: string
- name: sha256
dtype: string
- name: description
dtype: string
task_categories:
- text-to-image
language:
- en
---
# Dataset Card for CommonCatalog CC-BY
This dataset is a large collection of high-resolution Creative Commons images (composed of different licenses, see paper Table 1 in the Appendix) collected in 2014 from users of Yahoo Flickr.
The dataset contains images of up to 4k resolution, making this one of the highest resolution captioned image datasets.
## Dataset Details
### Dataset Description
We provide synthetic captions for approximately 100 million high-resolution images collected from Yahoo Flickr Creative Commons (YFCC).
- **Curated by:** Aaron Gokaslan
- **Language(s) (NLP):** en
- **License:** See relevant yaml tag / dataset name.
### Dataset Sources
<!-- Provide the basic links for the dataset. -->
- **Repository:** https://github.com/mosaicml/diffusion
- **Paper:** https://arxiv.org/abs/2310.16825
- **Demo:** See CommonCanvas Gradios
## Uses
We use CommonCatalog to train a family of latent diffusion models called CommonCanvas.
The goal is to produce a model that is competitive with Stable Diffusion 2, but to do so using an easily accessible dataset of known provenance.
Doing so makes replicating the model significantly easier, and provides a clearer mechanism for applying training-data attribution techniques.
### Direct Use
Training text-to-image models
Training image-to-text models
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
* Crafting content that is offensive or injurious towards individuals, including negative portrayals of their living conditions, cultural backgrounds, religious beliefs, etc.
* Deliberately creating or spreading content that is discriminatory or reinforces harmful stereotypes.
* Falsely representing individuals without their permission.
* Generating sexual content that may be seen by individuals without their consent.
* Producing or disseminating false or misleading information.
* Creating content that depicts extreme violence or bloodshed.
* Distributing content that modifies copyrighted or licensed material in a way that breaches its usage terms.
## Dataset Structure
The dataset is divided into 10 subsets, each containing parquet files of about 4 GB. Each subfolder within contains a resolution range of the images and their respective aspect ratios.
The dataset is also divided along images licensed for commercial use (C) and those that are not (NC).
## Dataset Creation
### Curation Rationale
Creating a standardized, accessible dataset with synthetic caption and releasing it so other people can train on a common dataset for open source image generation.
### Source Data
Yahoo Flickr Creative Commons 100M Dataset and Synthetically Generated Caption Data.
#### Data Collection and Processing
All synthetic captions were generated with BLIP2. See paper for more details.
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
Users of Flickr
## Bias, Risks, and Limitations
See the Yahoo Flickr Creative Commons 100M dataset for more information. The information was collected circa 2014 and is known to have a bias towards internet-connected Western countries. Some areas such as the Global South lack representation.
## Citation
**BibTeX:**
```
@article{gokaslan2023commoncanvas,
title={CommonCanvas: An Open Diffusion Model Trained with Creative-Commons Images},
author={Gokaslan, Aaron and Cooper, A Feder and Collins, Jasmine and Seguin, Landan and Jacobson, Austin and Patel, Mihir and Frankle, Jonathan and Stephenson, Cory and Kuleshov, Volodymyr},
journal={arXiv preprint arXiv:2310.16825},
year={2023}
}
```
## Dataset Card Authors
[Aaron Gokaslan](https://huggingface.co/Skylion007)
## Dataset Card Contact
[Aaron Gokaslan](https://huggingface.co/Skylion007)
|
andstor/methods2test_small | andstor | "2024-11-03T09:40:11Z" | 3,527 | 0 | [
"task_categories:text-generation",
"language:en",
"license:mit",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2203.12776",
"region:us",
"unit test",
"java",
"code"
] | [
"text-generation"
] | "2023-12-17T20:26:53Z" | ---
language:
- en
license: mit
task_categories:
- text-generation
configs:
- config_name: fm
data_files:
- split: train
path: data/fm/train-*
- split: test
path: data/fm/test-*
- split: validation
path: data/fm/validation-*
- config_name: fm_indented
data_files:
- split: train
path: data/fm_indented/train-*
- split: test
path: data/fm_indented/test-*
- split: validation
path: data/fm_indented/validation-*
- config_name: fm+t
data_files:
- split: train
path: data/fm+t/train-*
- split: test
path: data/fm+t/test-*
- split: validation
path: data/fm+t/validation-*
- config_name: fm+fc
data_files:
- split: train
path: data/fm+fc/train-*
- split: test
path: data/fm+fc/test-*
- split: validation
path: data/fm+fc/validation-*
- config_name: fm+fc+t+tc
data_files:
- split: train
path: data/fm+fc+t+tc/train-*
- split: test
path: data/fm+fc+t+tc/test-*
- split: validation
path: data/fm+fc+t+tc/validation-*
- config_name: fm+fc+c
data_files:
- split: train
path: data/fm+fc+c/train-*
- split: test
path: data/fm+fc+c/test-*
- split: validation
path: data/fm+fc+c/validation-*
- config_name: fm+fc+c+t+tc
data_files:
- split: train
path: data/fm+fc+c+t+tc/train-*
- split: test
path: data/fm+fc+c+t+tc/test-*
- split: validation
path: data/fm+fc+c+t+tc/validation-*
- config_name: fm+fc+c+m
data_files:
- split: train
path: data/fm+fc+c+m/train-*
- split: test
path: data/fm+fc+c+m/test-*
- split: validation
path: data/fm+fc+c+m/validation-*
- config_name: fm+fc+c+m+t+tc
data_files:
- split: train
path: data/fm+fc+c+m+t+tc/train-*
- split: test
path: data/fm+fc+c+m+t+tc/test-*
- split: validation
path: data/fm+fc+c+m+t+tc/validation-*
- config_name: fm+fc+c+m+f
data_files:
- split: train
path: data/fm+fc+c+m+f/train-*
- split: test
path: data/fm+fc+c+m+f/test-*
- split: validation
path: data/fm+fc+c+m+f/validation-*
- config_name: fm+fc+c+m+f+t+tc
data_files:
- split: train
path: data/fm+fc+c+m+f+t+tc/train-*
- split: test
path: data/fm+fc+c+m+f+t+tc/test-*
- split: validation
path: data/fm+fc+c+m+f+t+tc/validation-*
- config_name: t
data_files:
- split: train
path: data/t/train-*
- split: test
path: data/t/test-*
- split: validation
path: data/t/validation-*
- config_name: t_indented
data_files:
- split: train
path: data/t_indented/train-*
- split: test
path: data/t_indented/test-*
- split: validation
path: data/t_indented/validation-*
- config_name: t+tc
data_files:
- split: train
path: data/t+tc/train-*
- split: test
path: data/t+tc/test-*
- split: validation
path: data/t+tc/validation-*
dataset_info:
- config_name: fm
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 4696431
num_examples: 7440
- name: test
num_bytes: 642347
num_examples: 1017
- name: validation
num_bytes: 662917
num_examples: 953
download_size: 2633268
dataset_size: 6001695
- config_name: fm+fc
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 5387123
num_examples: 7440
- name: test
num_bytes: 738049
num_examples: 1017
- name: validation
num_bytes: 757167
num_examples: 953
download_size: 2925807
dataset_size: 6882339
- config_name: fm+fc+c
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 5906873
num_examples: 7440
- name: test
num_bytes: 820149
num_examples: 1017
- name: validation
num_bytes: 824441
num_examples: 953
download_size: 3170873
dataset_size: 7551463
- config_name: fm+fc+c+m
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 11930672
num_examples: 7440
- name: test
num_bytes: 1610045
num_examples: 1017
- name: validation
num_bytes: 1553249
num_examples: 953
download_size: 5406454
dataset_size: 15093966
- config_name: fm+fc+c+m+f
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 12722890
num_examples: 7440
- name: test
num_bytes: 1713683
num_examples: 1017
- name: validation
num_bytes: 1654607
num_examples: 953
download_size: 5753116
dataset_size: 16091180
- config_name: fm+fc+c+m+f+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 18332635
num_examples: 7440
- name: test
num_bytes: 2461169
num_examples: 1017
- name: validation
num_bytes: 2510969
num_examples: 953
download_size: 8280985
dataset_size: 23304773
- config_name: fm+fc+c+m+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 17537661
num_examples: 7440
- name: test
num_bytes: 2357359
num_examples: 1017
- name: validation
num_bytes: 2409506
num_examples: 953
download_size: 8178222
dataset_size: 22304526
- config_name: fm+fc+c+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 11445562
num_examples: 7440
- name: test
num_bytes: 1565365
num_examples: 1017
- name: validation
num_bytes: 1676986
num_examples: 953
download_size: 5944482
dataset_size: 14687913
- config_name: fm+fc+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 10923038
num_examples: 7440
- name: test
num_bytes: 1483265
num_examples: 1017
- name: validation
num_bytes: 1609296
num_examples: 953
download_size: 5715335
dataset_size: 14015599
- config_name: fm+t
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 8889443
num_examples: 7440
- name: test
num_bytes: 1207763
num_examples: 1017
- name: validation
num_bytes: 1336798
num_examples: 953
download_size: 4898458
dataset_size: 11434004
- config_name: fm_indented
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 5054397
num_examples: 7440
- name: test
num_bytes: 692948
num_examples: 1017
- name: validation
num_bytes: 714462
num_examples: 953
download_size: 2703115
dataset_size: 6461807
- config_name: t
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 4316096
num_examples: 7440
- name: test
num_bytes: 582266
num_examples: 1017
- name: validation
num_bytes: 689647
num_examples: 953
download_size: 2434024
dataset_size: 5588009
- config_name: t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 5648321
num_examples: 7440
- name: test
num_bytes: 761386
num_examples: 1017
- name: validation
num_bytes: 867350
num_examples: 953
download_size: 3024686
dataset_size: 7277057
- config_name: t_indented
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 4606253
num_examples: 7440
- name: test
num_bytes: 623576
num_examples: 1017
- name: validation
num_bytes: 734221
num_examples: 953
download_size: 2496661
dataset_size: 5964050
tags:
- unit test
- java
- code
pretty_name: Methods2Test Small
---
## Dataset Description
Microsoft created the `methods2test` dataset, consisting of Java Junit test cases with their corresponding focal methods.
It contains 780k pairs of JUnit test cases and focal methods which were extracted from a total of 91K Java open-source projects hosted on GitHub.
This is a smaller subset of the assembled version of the `methods2test` dataset.
It provides convenient access to the different context levels based on the raw source code (e.g. newlines are preserved). The test cases and associated classes are also made available.
The subset is created by randomly selecting only one sample from each of the 91k projects.
The mapping between test case and focal methods is based on heuristics rules and Java developer's best practice.
More information can be found here:
- [methods2test Github repo](https://github.com/microsoft/methods2test)
- [Methods2Test: A dataset of focal methods mapped to test cases](https://arxiv.org/pdf/2203.12776.pdf)
## Dataset Schema
```
t: <TEST_CASE>
t+tc: <TEST_CLASS_NAME> <TEST_CASE>
fm: <FOCAL_METHOD>
fm+t: <FOCAL_METHOD> <TEST_CASE>
fm+fc: <FOCAL_CLASS_NAME> <FOCAL_METHOD>
fm+fc+t+tc: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <TEST_CLASS_NAME> <TEST_CASE>
fm+fc+c: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS>
fm+fc+c+t+tc: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <TEST_CLASS_NAME> <TEST_CASE>
fm+fc+c+m: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES>
fm+fc+c+m+t+tc: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES> <TEST_CLASS_NAME> <TEST_CASE>
fm+fc+c+m+f: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES> <FIELDS>
fm+fc+c+m+f+t+tc: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES> <FIELDS> <TEST_CLASS_NAME> <TEST_CASE>
```
## Focal Context
- fm: this representation incorporates exclusively the source
code of the focal method. Intuitively, this contains the most
important information for generating accurate test cases for
the given method.
- fm+fc: this representation adds the focal class name, which
can provide meaningful semantic information to the model.
- fm+fc+c: this representation adds the signatures of the constructor methods of the focal class. The idea behind this
augmentation is that the test case may require instantiating
an object of the focal class in order to properly test the focal
method.
- fm+fc+c+m: this representation adds the signatures of the
other public methods in the focal class. The rationale that motivated this inclusion is that the test case may need to
invoke other auxiliary methods within the class (e.g., getters,
setters) to set up or tear down the testing environment.
- fm+fc+c+m+f: this representation adds the public fields of
the focal class. The motivation is that test cases may need to
inspect the status of the public fields to properly test a focal
method.
The test case along with the class name is also provided for each focal context.
![image/png](https://huggingface.co/datasets/andstor/methods2test_small/resolve/main/focal_context.png)
The different levels of focal contexts are the following:
```
fm: focal method
fm+fc: focal method + focal class name
fm+fc+c: focal method + focal class name + constructor signatures
fm+fc+c+m: focal method + focal class name + constructor signatures + public method signatures
fm+fc+c+m+f: focal method + focal class name + constructor signatures + public method signatures + public fields
``` |
google-research-datasets/natural_questions | google-research-datasets | "2024-03-11T16:19:34Z" | 3,523 | 92 | [
"task_categories:question-answering",
"task_ids:open-domain-qa",
"annotations_creators:no-annotation",
"language_creators:crowdsourced",
"multilinguality:monolingual",
"source_datasets:original",
"language:en",
"license:cc-by-sa-3.0",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"question-answering"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- no-annotation
language_creators:
- crowdsourced
language:
- en
license:
- cc-by-sa-3.0
multilinguality:
- monolingual
size_categories:
- 100K<n<1M
source_datasets:
- original
task_categories:
- question-answering
task_ids:
- open-domain-qa
paperswithcode_id: natural-questions
pretty_name: Natural Questions
dataset_info:
- config_name: default
features:
- name: id
dtype: string
- name: document
struct:
- name: html
dtype: string
- name: title
dtype: string
- name: tokens
sequence:
- name: end_byte
dtype: int64
- name: is_html
dtype: bool
- name: start_byte
dtype: int64
- name: token
dtype: string
- name: url
dtype: string
- name: question
struct:
- name: text
dtype: string
- name: tokens
sequence: string
- name: long_answer_candidates
sequence:
- name: end_byte
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: start_token
dtype: int64
- name: top_level
dtype: bool
- name: annotations
sequence:
- name: id
dtype: string
- name: long_answer
struct:
- name: candidate_index
dtype: int64
- name: end_byte
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: start_token
dtype: int64
- name: short_answers
sequence:
- name: end_byte
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: start_token
dtype: int64
- name: text
dtype: string
- name: yes_no_answer
dtype:
class_label:
names:
'0': 'NO'
'1': 'YES'
splits:
- name: train
num_bytes: 143039948860
num_examples: 307373
- name: validation
num_bytes: 3451288641
num_examples: 7830
download_size: 56843626971
dataset_size: 146491237501
- config_name: dev
features:
- name: id
dtype: string
- name: document
struct:
- name: title
dtype: string
- name: url
dtype: string
- name: html
dtype: string
- name: tokens
sequence:
- name: token
dtype: string
- name: is_html
dtype: bool
- name: start_byte
dtype: int64
- name: end_byte
dtype: int64
- name: question
struct:
- name: text
dtype: string
- name: tokens
sequence: string
- name: long_answer_candidates
sequence:
- name: start_token
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: end_byte
dtype: int64
- name: top_level
dtype: bool
- name: annotations
sequence:
- name: id
dtype: string
- name: long_answer
struct:
- name: start_token
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: end_byte
dtype: int64
- name: candidate_index
dtype: int64
- name: short_answers
sequence:
- name: start_token
dtype: int64
- name: end_token
dtype: int64
- name: start_byte
dtype: int64
- name: end_byte
dtype: int64
- name: text
dtype: string
- name: yes_no_answer
dtype:
class_label:
names:
'0': 'NO'
'1': 'YES'
splits:
- name: validation
num_bytes: 3451288639
num_examples: 7830
download_size: 1337126358
dataset_size: 3451288639
configs:
- config_name: default
data_files:
- split: train
path: default/train-*
- split: validation
path: default/validation-*
- config_name: dev
data_files:
- split: validation
path: dev/validation-*
---
# Dataset Card for Natural Questions
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [https://ai.google.com/research/NaturalQuestions/dataset](https://ai.google.com/research/NaturalQuestions/dataset)
- **Repository:** [https://github.com/google-research-datasets/natural-questions](https://github.com/google-research-datasets/natural-questions)
- **Paper:** [https://research.google/pubs/pub47761/](https://research.google/pubs/pub47761/)
- **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Size of downloaded dataset files:** 45.07 GB
- **Size of the generated dataset:** 99.80 GB
- **Total amount of disk used:** 144.87 GB
### Dataset Summary
The NQ corpus contains questions from real users, and it requires QA systems to
read and comprehend an entire Wikipedia article that may or may not contain the
answer to the question. The inclusion of real user questions, and the
requirement that solutions should read an entire page to find the answer, cause
NQ to be a more realistic and challenging task than prior QA datasets.
### Supported Tasks and Leaderboards
[https://ai.google.com/research/NaturalQuestions](https://ai.google.com/research/NaturalQuestions)
### Languages
en
## Dataset Structure
### Data Instances
- **Size of downloaded dataset files:** 45.07 GB
- **Size of the generated dataset:** 99.80 GB
- **Total amount of disk used:** 144.87 GB
An example of 'train' looks as follows. This is a toy example.
```
{
"id": "797803103760793766",
"document": {
"title": "Google",
"url": "http://www.wikipedia.org/Google",
"html": "<html><body><h1>Google Inc.</h1><p>Google was founded in 1998 By:<ul><li>Larry</li><li>Sergey</li></ul></p></body></html>",
"tokens":[
{"token": "<h1>", "start_byte": 12, "end_byte": 16, "is_html": True},
{"token": "Google", "start_byte": 16, "end_byte": 22, "is_html": False},
{"token": "inc", "start_byte": 23, "end_byte": 26, "is_html": False},
{"token": ".", "start_byte": 26, "end_byte": 27, "is_html": False},
{"token": "</h1>", "start_byte": 27, "end_byte": 32, "is_html": True},
{"token": "<p>", "start_byte": 32, "end_byte": 35, "is_html": True},
{"token": "Google", "start_byte": 35, "end_byte": 41, "is_html": False},
{"token": "was", "start_byte": 42, "end_byte": 45, "is_html": False},
{"token": "founded", "start_byte": 46, "end_byte": 53, "is_html": False},
{"token": "in", "start_byte": 54, "end_byte": 56, "is_html": False},
{"token": "1998", "start_byte": 57, "end_byte": 61, "is_html": False},
{"token": "by", "start_byte": 62, "end_byte": 64, "is_html": False},
{"token": ":", "start_byte": 64, "end_byte": 65, "is_html": False},
{"token": "<ul>", "start_byte": 65, "end_byte": 69, "is_html": True},
{"token": "<li>", "start_byte": 69, "end_byte": 73, "is_html": True},
{"token": "Larry", "start_byte": 73, "end_byte": 78, "is_html": False},
{"token": "</li>", "start_byte": 78, "end_byte": 83, "is_html": True},
{"token": "<li>", "start_byte": 83, "end_byte": 87, "is_html": True},
{"token": "Sergey", "start_byte": 87, "end_byte": 92, "is_html": False},
{"token": "</li>", "start_byte": 92, "end_byte": 97, "is_html": True},
{"token": "</ul>", "start_byte": 97, "end_byte": 102, "is_html": True},
{"token": "</p>", "start_byte": 102, "end_byte": 106, "is_html": True}
],
},
"question" :{
"text": "who founded google",
"tokens": ["who", "founded", "google"]
},
"long_answer_candidates": [
{"start_byte": 32, "end_byte": 106, "start_token": 5, "end_token": 22, "top_level": True},
{"start_byte": 65, "end_byte": 102, "start_token": 13, "end_token": 21, "top_level": False},
{"start_byte": 69, "end_byte": 83, "start_token": 14, "end_token": 17, "top_level": False},
{"start_byte": 83, "end_byte": 92, "start_token": 17, "end_token": 20 , "top_level": False}
],
"annotations": [{
"id": "6782080525527814293",
"long_answer": {"start_byte": 32, "end_byte": 106, "start_token": 5, "end_token": 22, "candidate_index": 0},
"short_answers": [
{"start_byte": 73, "end_byte": 78, "start_token": 15, "end_token": 16, "text": "Larry"},
{"start_byte": 87, "end_byte": 92, "start_token": 18, "end_token": 19, "text": "Sergey"}
],
"yes_no_answer": -1
}]
}
```
### Data Fields
The data fields are the same among all splits.
#### default
- `id`: a `string` feature.
- `document` a dictionary feature containing:
- `title`: a `string` feature.
- `url`: a `string` feature.
- `html`: a `string` feature.
- `tokens`: a dictionary feature containing:
- `token`: a `string` feature.
- `is_html`: a `bool` feature.
- `start_byte`: a `int64` feature.
- `end_byte`: a `int64` feature.
- `question`: a dictionary feature containing:
- `text`: a `string` feature.
- `tokens`: a `list` of `string` features.
- `long_answer_candidates`: a dictionary feature containing:
- `start_token`: a `int64` feature.
- `end_token`: a `int64` feature.
- `start_byte`: a `int64` feature.
- `end_byte`: a `int64` feature.
- `top_level`: a `bool` feature.
- `annotations`: a dictionary feature containing:
- `id`: a `string` feature.
- `long_answers`: a dictionary feature containing:
- `start_token`: a `int64` feature.
- `end_token`: a `int64` feature.
- `start_byte`: a `int64` feature.
- `end_byte`: a `int64` feature.
- `candidate_index`: a `int64` feature.
- `short_answers`: a dictionary feature containing:
- `start_token`: a `int64` feature.
- `end_token`: a `int64` feature.
- `start_byte`: a `int64` feature.
- `end_byte`: a `int64` feature.
- `text`: a `string` feature.
- `yes_no_answer`: a classification label, with possible values including `NO` (0), `YES` (1).
### Data Splits
| name | train | validation |
|---------|-------:|-----------:|
| default | 307373 | 7830 |
| dev | N/A | 7830 |
## Dataset Creation
### Curation Rationale
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the source language producers?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Annotations
#### Annotation process
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the annotators?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Personal and Sensitive Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
[Creative Commons Attribution-ShareAlike 3.0 Unported](https://creativecommons.org/licenses/by-sa/3.0/).
### Citation Information
```
@article{47761,
title = {Natural Questions: a Benchmark for Question Answering Research},
author = {Tom Kwiatkowski and Jennimaria Palomaki and Olivia Redfield and Michael Collins and Ankur Parikh and Chris Alberti and Danielle Epstein and Illia Polosukhin and Matthew Kelcey and Jacob Devlin and Kenton Lee and Kristina N. Toutanova and Llion Jones and Ming-Wei Chang and Andrew Dai and Jakob Uszkoreit and Quoc Le and Slav Petrov},
year = {2019},
journal = {Transactions of the Association of Computational Linguistics}
}
```
### Contributions
Thanks to [@thomwolf](https://github.com/thomwolf), [@lhoestq](https://github.com/lhoestq) for adding this dataset. |
ikala/tmmluplus | ikala | "2024-06-12T07:06:00Z" | 3,523 | 107 | [
"task_categories:question-answering",
"language:zh",
"license:mit",
"size_categories:10K<n<100K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"traditional chinese",
"finance",
"medical",
"taiwan",
"benchmark",
"zh-tw",
"zh-hant"
] | [
"question-answering"
] | "2023-12-22T19:12:13Z" | ---
license: mit
license_name: mit
task_categories:
- question-answering
language:
- zh
tags:
- traditional chinese
- finance
- medical
- taiwan
- benchmark
- zh-tw
- zh-hant
pretty_name: tmmlu++
size_categories:
- 100K<n<1M
configs:
- config_name: engineering_math
data_files:
- split: train
path: "data/engineering_math_dev.csv"
- split: validation
path: "data/engineering_math_val.csv"
- split: test
path: "data/engineering_math_test.csv"
- config_name: dentistry
data_files:
- split: train
path: "data/dentistry_dev.csv"
- split: validation
path: "data/dentistry_val.csv"
- split: test
path: "data/dentistry_test.csv"
- config_name: traditional_chinese_medicine_clinical_medicine
data_files:
- split: train
path: "data/traditional_chinese_medicine_clinical_medicine_dev.csv"
- split: validation
path: "data/traditional_chinese_medicine_clinical_medicine_val.csv"
- split: test
path: "data/traditional_chinese_medicine_clinical_medicine_test.csv"
- config_name: clinical_psychology
data_files:
- split: train
path: "data/clinical_psychology_dev.csv"
- split: validation
path: "data/clinical_psychology_val.csv"
- split: test
path: "data/clinical_psychology_test.csv"
- config_name: technical
data_files:
- split: train
path: "data/technical_dev.csv"
- split: validation
path: "data/technical_val.csv"
- split: test
path: "data/technical_test.csv"
- config_name: culinary_skills
data_files:
- split: train
path: "data/culinary_skills_dev.csv"
- split: validation
path: "data/culinary_skills_val.csv"
- split: test
path: "data/culinary_skills_test.csv"
- config_name: mechanical
data_files:
- split: train
path: "data/mechanical_dev.csv"
- split: validation
path: "data/mechanical_val.csv"
- split: test
path: "data/mechanical_test.csv"
- config_name: logic_reasoning
data_files:
- split: train
path: "data/logic_reasoning_dev.csv"
- split: validation
path: "data/logic_reasoning_val.csv"
- split: test
path: "data/logic_reasoning_test.csv"
- config_name: real_estate
data_files:
- split: train
path: "data/real_estate_dev.csv"
- split: validation
path: "data/real_estate_val.csv"
- split: test
path: "data/real_estate_test.csv"
- config_name: general_principles_of_law
data_files:
- split: train
path: "data/general_principles_of_law_dev.csv"
- split: validation
path: "data/general_principles_of_law_val.csv"
- split: test
path: "data/general_principles_of_law_test.csv"
- config_name: finance_banking
data_files:
- split: train
path: "data/finance_banking_dev.csv"
- split: validation
path: "data/finance_banking_val.csv"
- split: test
path: "data/finance_banking_test.csv"
- config_name: anti_money_laundering
data_files:
- split: train
path: "data/anti_money_laundering_dev.csv"
- split: validation
path: "data/anti_money_laundering_val.csv"
- split: test
path: "data/anti_money_laundering_test.csv"
- config_name: ttqav2
data_files:
- split: train
path: "data/ttqav2_dev.csv"
- split: validation
path: "data/ttqav2_val.csv"
- split: test
path: "data/ttqav2_test.csv"
- config_name: marketing_management
data_files:
- split: train
path: "data/marketing_management_dev.csv"
- split: validation
path: "data/marketing_management_val.csv"
- split: test
path: "data/marketing_management_test.csv"
- config_name: business_management
data_files:
- split: train
path: "data/business_management_dev.csv"
- split: validation
path: "data/business_management_val.csv"
- split: test
path: "data/business_management_test.csv"
- config_name: organic_chemistry
data_files:
- split: train
path: "data/organic_chemistry_dev.csv"
- split: validation
path: "data/organic_chemistry_val.csv"
- split: test
path: "data/organic_chemistry_test.csv"
- config_name: advance_chemistry
data_files:
- split: train
path: "data/advance_chemistry_dev.csv"
- split: validation
path: "data/advance_chemistry_val.csv"
- split: test
path: "data/advance_chemistry_test.csv"
- config_name: physics
data_files:
- split: train
path: "data/physics_dev.csv"
- split: validation
path: "data/physics_val.csv"
- split: test
path: "data/physics_test.csv"
- config_name: secondary_physics
data_files:
- split: train
path: "data/secondary_physics_dev.csv"
- split: validation
path: "data/secondary_physics_val.csv"
- split: test
path: "data/secondary_physics_test.csv"
- config_name: human_behavior
data_files:
- split: train
path: "data/human_behavior_dev.csv"
- split: validation
path: "data/human_behavior_val.csv"
- split: test
path: "data/human_behavior_test.csv"
- config_name: national_protection
data_files:
- split: train
path: "data/national_protection_dev.csv"
- split: validation
path: "data/national_protection_val.csv"
- split: test
path: "data/national_protection_test.csv"
- config_name: jce_humanities
data_files:
- split: train
path: "data/jce_humanities_dev.csv"
- split: validation
path: "data/jce_humanities_val.csv"
- split: test
path: "data/jce_humanities_test.csv"
- config_name: politic_science
data_files:
- split: train
path: "data/politic_science_dev.csv"
- split: validation
path: "data/politic_science_val.csv"
- split: test
path: "data/politic_science_test.csv"
- config_name: agriculture
data_files:
- split: train
path: "data/agriculture_dev.csv"
- split: validation
path: "data/agriculture_val.csv"
- split: test
path: "data/agriculture_test.csv"
- config_name: official_document_management
data_files:
- split: train
path: "data/official_document_management_dev.csv"
- split: validation
path: "data/official_document_management_val.csv"
- split: test
path: "data/official_document_management_test.csv"
- config_name: financial_analysis
data_files:
- split: train
path: "data/financial_analysis_dev.csv"
- split: validation
path: "data/financial_analysis_val.csv"
- split: test
path: "data/financial_analysis_test.csv"
- config_name: pharmacy
data_files:
- split: train
path: "data/pharmacy_dev.csv"
- split: validation
path: "data/pharmacy_val.csv"
- split: test
path: "data/pharmacy_test.csv"
- config_name: educational_psychology
data_files:
- split: train
path: "data/educational_psychology_dev.csv"
- split: validation
path: "data/educational_psychology_val.csv"
- split: test
path: "data/educational_psychology_test.csv"
- config_name: statistics_and_machine_learning
data_files:
- split: train
path: "data/statistics_and_machine_learning_dev.csv"
- split: validation
path: "data/statistics_and_machine_learning_val.csv"
- split: test
path: "data/statistics_and_machine_learning_test.csv"
- config_name: management_accounting
data_files:
- split: train
path: "data/management_accounting_dev.csv"
- split: validation
path: "data/management_accounting_val.csv"
- split: test
path: "data/management_accounting_test.csv"
- config_name: introduction_to_law
data_files:
- split: train
path: "data/introduction_to_law_dev.csv"
- split: validation
path: "data/introduction_to_law_val.csv"
- split: test
path: "data/introduction_to_law_test.csv"
- config_name: computer_science
data_files:
- split: train
path: "data/computer_science_dev.csv"
- split: validation
path: "data/computer_science_val.csv"
- split: test
path: "data/computer_science_test.csv"
- config_name: veterinary_pathology
data_files:
- split: train
path: "data/veterinary_pathology_dev.csv"
- split: validation
path: "data/veterinary_pathology_val.csv"
- split: test
path: "data/veterinary_pathology_test.csv"
- config_name: accounting
data_files:
- split: train
path: "data/accounting_dev.csv"
- split: validation
path: "data/accounting_val.csv"
- split: test
path: "data/accounting_test.csv"
- config_name: fire_science
data_files:
- split: train
path: "data/fire_science_dev.csv"
- split: validation
path: "data/fire_science_val.csv"
- split: test
path: "data/fire_science_test.csv"
- config_name: optometry
data_files:
- split: train
path: "data/optometry_dev.csv"
- split: validation
path: "data/optometry_val.csv"
- split: test
path: "data/optometry_test.csv"
- config_name: insurance_studies
data_files:
- split: train
path: "data/insurance_studies_dev.csv"
- split: validation
path: "data/insurance_studies_val.csv"
- split: test
path: "data/insurance_studies_test.csv"
- config_name: pharmacology
data_files:
- split: train
path: "data/pharmacology_dev.csv"
- split: validation
path: "data/pharmacology_val.csv"
- split: test
path: "data/pharmacology_test.csv"
- config_name: taxation
data_files:
- split: train
path: "data/taxation_dev.csv"
- split: validation
path: "data/taxation_val.csv"
- split: test
path: "data/taxation_test.csv"
- config_name: trust_practice
data_files:
- split: train
path: "data/trust_practice_dev.csv"
- split: validation
path: "data/trust_practice_val.csv"
- split: test
path: "data/trust_practice_test.csv"
- config_name: geography_of_taiwan
data_files:
- split: train
path: "data/geography_of_taiwan_dev.csv"
- split: validation
path: "data/geography_of_taiwan_val.csv"
- split: test
path: "data/geography_of_taiwan_test.csv"
- config_name: physical_education
data_files:
- split: train
path: "data/physical_education_dev.csv"
- split: validation
path: "data/physical_education_val.csv"
- split: test
path: "data/physical_education_test.csv"
- config_name: auditing
data_files:
- split: train
path: "data/auditing_dev.csv"
- split: validation
path: "data/auditing_val.csv"
- split: test
path: "data/auditing_test.csv"
- config_name: administrative_law
data_files:
- split: train
path: "data/administrative_law_dev.csv"
- split: validation
path: "data/administrative_law_val.csv"
- split: test
path: "data/administrative_law_test.csv"
- config_name: education_(profession_level)
data_files:
- split: train
path: "data/education_(profession_level)_dev.csv"
- split: validation
path: "data/education_(profession_level)_val.csv"
- split: test
path: "data/education_(profession_level)_test.csv"
- config_name: economics
data_files:
- split: train
path: "data/economics_dev.csv"
- split: validation
path: "data/economics_val.csv"
- split: test
path: "data/economics_test.csv"
- config_name: veterinary_pharmacology
data_files:
- split: train
path: "data/veterinary_pharmacology_dev.csv"
- split: validation
path: "data/veterinary_pharmacology_val.csv"
- split: test
path: "data/veterinary_pharmacology_test.csv"
- config_name: nautical_science
data_files:
- split: train
path: "data/nautical_science_dev.csv"
- split: validation
path: "data/nautical_science_val.csv"
- split: test
path: "data/nautical_science_test.csv"
- config_name: occupational_therapy_for_psychological_disorders
data_files:
- split: train
path: "data/occupational_therapy_for_psychological_disorders_dev.csv"
- split: validation
path: "data/occupational_therapy_for_psychological_disorders_val.csv"
- split: test
path: "data/occupational_therapy_for_psychological_disorders_test.csv"
- config_name: basic_medical_science
data_files:
- split: train
path: "data/basic_medical_science_dev.csv"
- split: validation
path: "data/basic_medical_science_val.csv"
- split: test
path: "data/basic_medical_science_test.csv"
- config_name: macroeconomics
data_files:
- split: train
path: "data/macroeconomics_dev.csv"
- split: validation
path: "data/macroeconomics_val.csv"
- split: test
path: "data/macroeconomics_test.csv"
- config_name: trade
data_files:
- split: train
path: "data/trade_dev.csv"
- split: validation
path: "data/trade_val.csv"
- split: test
path: "data/trade_test.csv"
- config_name: chinese_language_and_literature
data_files:
- split: train
path: "data/chinese_language_and_literature_dev.csv"
- split: validation
path: "data/chinese_language_and_literature_val.csv"
- split: test
path: "data/chinese_language_and_literature_test.csv"
- config_name: tve_design
data_files:
- split: train
path: "data/tve_design_dev.csv"
- split: validation
path: "data/tve_design_val.csv"
- split: test
path: "data/tve_design_test.csv"
- config_name: junior_science_exam
data_files:
- split: train
path: "data/junior_science_exam_dev.csv"
- split: validation
path: "data/junior_science_exam_val.csv"
- split: test
path: "data/junior_science_exam_test.csv"
- config_name: junior_math_exam
data_files:
- split: train
path: "data/junior_math_exam_dev.csv"
- split: validation
path: "data/junior_math_exam_val.csv"
- split: test
path: "data/junior_math_exam_test.csv"
- config_name: junior_chinese_exam
data_files:
- split: train
path: "data/junior_chinese_exam_dev.csv"
- split: validation
path: "data/junior_chinese_exam_val.csv"
- split: test
path: "data/junior_chinese_exam_test.csv"
- config_name: junior_social_studies
data_files:
- split: train
path: "data/junior_social_studies_dev.csv"
- split: validation
path: "data/junior_social_studies_val.csv"
- split: test
path: "data/junior_social_studies_test.csv"
- config_name: tve_mathematics
data_files:
- split: train
path: "data/tve_mathematics_dev.csv"
- split: validation
path: "data/tve_mathematics_val.csv"
- split: test
path: "data/tve_mathematics_test.csv"
- config_name: tve_chinese_language
data_files:
- split: train
path: "data/tve_chinese_language_dev.csv"
- split: validation
path: "data/tve_chinese_language_val.csv"
- split: test
path: "data/tve_chinese_language_test.csv"
- config_name: tve_natural_sciences
data_files:
- split: train
path: "data/tve_natural_sciences_dev.csv"
- split: validation
path: "data/tve_natural_sciences_val.csv"
- split: test
path: "data/tve_natural_sciences_test.csv"
- config_name: junior_chemistry
data_files:
- split: train
path: "data/junior_chemistry_dev.csv"
- split: validation
path: "data/junior_chemistry_val.csv"
- split: test
path: "data/junior_chemistry_test.csv"
- config_name: music
data_files:
- split: train
path: "data/music_dev.csv"
- split: validation
path: "data/music_val.csv"
- split: test
path: "data/music_test.csv"
- config_name: education
data_files:
- split: train
path: "data/education_dev.csv"
- split: validation
path: "data/education_val.csv"
- split: test
path: "data/education_test.csv"
- config_name: three_principles_of_people
data_files:
- split: train
path: "data/three_principles_of_people_dev.csv"
- split: validation
path: "data/three_principles_of_people_val.csv"
- split: test
path: "data/three_principles_of_people_test.csv"
- config_name: taiwanese_hokkien
data_files:
- split: train
path: "data/taiwanese_hokkien_dev.csv"
- split: validation
path: "data/taiwanese_hokkien_val.csv"
- split: test
path: "data/taiwanese_hokkien_test.csv"
---
# TMMLU+ : Large scale traditional chinese massive multitask language understanding
<p align="center">
<img src="https://huggingface.co/datasets/ikala/tmmluplus/resolve/main/cover.png" alt="A close-up image of a neat paper note with a white background. The text 'TMMLU+' is written horizontally across the center of the note in bold, black. Join us to work in multimodal LLM : https://ikala.ai/recruit/" style="max-width: 400" width=400 />
</p>
We present TMMLU+, a traditional Chinese massive multitask language understanding dataset. TMMLU+ is a multiple-choice question-answering dataset featuring 66 subjects, ranging from elementary to professional level.
The TMMLU+ dataset is six times larger and contains more balanced subjects compared to its predecessor, [TMMLU](https://github.com/mtkresearch/MR-Models/tree/main/TC-Eval/data/TMMLU). We have included benchmark results in TMMLU+ from closed-source models and 20 open-weight Chinese large language models, with parameters ranging from 1.8B to 72B. The benchmark results show that Traditional Chinese variants still lag behind those trained on major Simplified Chinese models.
```python
from datasets import load_dataset
task_list = [
'engineering_math', 'dentistry', 'traditional_chinese_medicine_clinical_medicine', 'clinical_psychology', 'technical', 'culinary_skills', 'mechanical', 'logic_reasoning', 'real_estate',
'general_principles_of_law', 'finance_banking', 'anti_money_laundering', 'ttqav2', 'marketing_management', 'business_management', 'organic_chemistry', 'advance_chemistry',
'physics', 'secondary_physics', 'human_behavior', 'national_protection', 'jce_humanities', 'politic_science', 'agriculture', 'official_document_management',
'financial_analysis', 'pharmacy', 'educational_psychology', 'statistics_and_machine_learning', 'management_accounting', 'introduction_to_law', 'computer_science', 'veterinary_pathology',
'accounting', 'fire_science', 'optometry', 'insurance_studies', 'pharmacology', 'taxation', 'trust_practice', 'geography_of_taiwan', 'physical_education', 'auditing', 'administrative_law',
'education_(profession_level)', 'economics', 'veterinary_pharmacology', 'nautical_science', 'occupational_therapy_for_psychological_disorders',
'basic_medical_science', 'macroeconomics', 'trade', 'chinese_language_and_literature', 'tve_design', 'junior_science_exam', 'junior_math_exam', 'junior_chinese_exam',
'junior_social_studies', 'tve_mathematics', 'tve_chinese_language', 'tve_natural_sciences', 'junior_chemistry', 'music', 'education', 'three_principles_of_people',
'taiwanese_hokkien'
]
for task in task_list:
val = load_dataset('ikala/tmmluplus', task)['validation']
dev = load_dataset('ikala/tmmluplus', task)['train']
test = load_dataset('ikala/tmmluplus', task)['test']
```
For each dataset split
```python
for row in test:
print(row)
break
>> Dataset({
features: ['question', 'A', 'B', 'C', 'D', 'answer'],
num_rows: 11
})
```
Statistics on all four categories : STEM, Social Science, Humanities, Other
| Category | Test | Dev | Validation |
|----------------------------------|-------|------|------------|
| STEM | 3458 | 70 | 385 |
| Social Sciences | 5958 | 90 | 665 |
| Humanities | 1763 | 35 | 197 |
| Other (Business, Health, Misc.) | 8939 | 135 | 995 |
| **Total** | 20118 | 330 | 2242 |
## Benchmark on direct prompting
| model | STEM | Social Science | Humanities | Other | Average |
|------------|------------|------------|------------|------------|------------|
|Gemini-1.5-pro | 66.18|70.29|61.84|60.30|64.65|
| [Qwen/Qwen-72B](https://huggingface.co/Qwen/Qwen-72B) | 61.12 | 71.65 | 63.00 | 61.31 |64.27|
| gpt-4-0613 | 60.36 | 67.36 | 56.03 | 57.62 |60.34|
| Qwen-max | 59.92 | 66.95 | 57.43 | 56.48 |60.20|
| [Qwen/Qwen-72B-Chat](https://huggingface.co/Qwen/Qwen-72B-Chat) | 55.15 | 66.20 | 55.65 | 57.19 |58.55|
| [Qwen/Qwen-14B](https://huggingface.co/Qwen/Qwen-14B) | 46.94 | 56.69 | 49.43 | 48.81 |50.47|
| Gemini-pro | 45.38 | 57.29 | 48.80 | 48.21 |49.92|
| [01-ai/Yi-34B-Chat](https://huggingface.co/01-ai/Yi-34B-Chat) | 40.24 | 56.77 | 53.99 | 47.58 |49.64|
| Gemini-1.5-flash |53.47|53.42|42.99|46.56|49.11|
| [Reka Flash](https://www.reka.ai/)|45.26|52.91|46.31|43.76|47.06|
| [Qwen/Qwen-14B-Chat](https://huggingface.co/Qwen/Qwen-14B-Chat) | 43.86 | 53.29 | 44.78 | 45.13 |46.77|
| [Qwen/Qwen1.5-14B-Chat](https://huggingface.co/Qwen/Qwen1.5-14B-Chat)|39.65|52.76|43.90|44.95|45.31|
| [01-ai/Yi-6B-Chat](https://huggingface.co/01-ai/Yi-6B-Chat) | 39.62 | 50.24 | 44.44 | 44.26 |44.64|
| Claude-1.3 | 42.65 | 49.33 | 42.16 | 44.14 |44.57|
| [MediaTek-Research/Breeze-7B-Instruct-v0_1](https://huggingface.co/MediaTek-Research/Breeze-7B-Instruct-v0_1)| 36.46 | 48.38 |45.11 |40.75 | 42.67 |
| gpt-3.5-turbo-0613 | 41.56 | 46.72 | 36.73 | 42.03 |41.76|
| [CausalLM/14B](https://huggingface.co/CausalLM/14B) | 39.83 | 44.50 | 39.61 | 41.97 |41.48|
| [Skywork/Skywork-13B-base](https://huggingface.co/Skywork/Skywork-13B-base) | 36.93 | 47.27 | 41.04 | 40.10 |41.33|
| Claude-3-opus |42.95|45.49|35.79|40.24|41.12|
| [Qwen/Qwen-7B](https://huggingface.co/Qwen/Qwen-7B) | 37.53 | 45.48 | 38.09 | 38.96 |40.01|
| [meta-llama/Llama-3-70b-chat-hf](https://docs.together.ai/docs/inference-models) | 34.44 | 47.02 | 37.50 |39.51 | 39.62 |
| [Qwen/Qwen-7B-Chat](https://huggingface.co/Qwen/Qwen-7B-Chat) | 33.32 | 44.64 | 40.27 | 39.89 |39.53|
| [vivo-ai/BlueLM-7B-Base](https://huggingface.co/vivo-ai/BlueLM-7B-Base) | 33.94 | 41.52 | 37.38 | 38.74 |37.90|
| [baichuan-inc/Baichuan2-13B-Chat](https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat) | 29.64 | 43.73 | 37.36 | 39.88 |37.65|
| [Qwen/Qwen-1_8B](https://huggingface.co/Qwen/Qwen-1_8B) | 32.65 | 38.95 | 38.34 | 35.27 |36.30|
| Claude-2 | 39.65 | 39.09 | 28.59 | 37.47 |36.20|
| [THUDM/chatglm3-6b](https://huggingface.co/THUDM/chatglm3-6b) | 31.05 | 39.31 | 35.64 | 35.60 |35.40|
| [deepseek-ai/deepseek-llm-7b-chat](https://huggingface.co/deepseek-ai/deepseek-llm-7b-chat) | 29.82 | 42.29 | 34.24 | 34.31 |35.17|
| [CausalLM/7B](https://huggingface.co/CausalLM/7B) | 31.03 | 38.17 | 35.87 | 35.39 |35.11|
| [Azure99/blossom-v3_1-mistral-7b](https://huggingface.co/Azure99/blossom-v3_1-mistral-7b) | 32.80 | 36.91 | 32.36 | 34.53 |34.15|
| [google/gemma-7b-it](https://huggingface.co/google/gemma-7b-it) | 31.89 | 35.70 | 34.00 | 33.79 | 33.84 |
| [Reka Edge](https://www.reka.ai/)|30.02|39.40|31.84|32.36|33.41|
| [microsoft/Orca-2-13b](https://huggingface.co/microsoft/Orca-2-13b) | 24.69 | 39.18 | 33.60 | 31.99 |32.37|
| [Qwen/Qwen-1_8B-Chat](https://huggingface.co/Qwen/Qwen-1_8B-Chat) | 26.60 | 36.36 | 31.81 | 31.96 |31.68|
| [meta-llama/Llama-3-8b-chat-hf](https://docs.together.ai/docs/inference-models) | 31.52 | 34.19 | 28.91 | 31.79 | 31.60 |
| [TigerResearch/tigerbot-13b-chat-v3](https://huggingface.co/TigerResearch/tigerbot-13b-chat-v3) | 24.73 | 29.63 | 25.72 | 27.22 |26.82|
| [hongyin/mistral-7b-80k](https://huggingface.co/hongyin/mistral-7b-80k) | 24.26 | 23.76 | 22.56 | 24.57 |23.79|
| [deepseek-ai/deepseek-llm-67b-chat](https://huggingface.co/deepseek-ai/deepseek-llm-67b-chat) | 19.10 | 26.06 | 21.51 | 21.77 |22.11|
| [yentinglin/Taiwan-LLM-13B-v2.0-chat](https://huggingface.co/yentinglin/Taiwan-LLM-13B-v2.0-chat) | 18.53 | 27.65 | 17.77 | 21.49 |21.36|
| [GeneZC/MiniChat-3B](https://huggingface.co/GeneZC/MiniChat-3B) | 17.66 | 23.35 | 22.71 | 20.34 |21.02|
| [LinkSoul/Chinese-Llama-2-7b](https://huggingface.co/LinkSoul/Chinese-Llama-2-7b) | 16.55 | 18.39 | 12.97 | 16.13 |16.01|
| [yentinglin/Taiwan-LLM-7B-v2.1-chat](https://huggingface.co/yentinglin/Taiwan-LLM-7B-v2.1-chat) | 14.99 | 16.23 | 15.00 | 16.22 |15.61|
| Claude-instant-1 | 12.52 | 17.13 | 15.10 | 13.57 |14.58|
| [FlagAlpha/Atom-7B](https://huggingface.co/FlagAlpha/Atom-7B) | 5.60 | 13.57 | 7.71 | 11.84 |9.68|
Results via [ievals](https://github.com/iKala/ievals) ( settings : 0-shot direct answering )
# Citation
```
@article{ikala2024improved,
title={An Improved Traditional Chinese Evaluation Suite for Foundation Model},
author={Tam, Zhi-Rui and Pai, Ya-Ting and Lee, Yen-Wei and Cheng, Sega and Shuai, Hong-Han},
journal={arXiv preprint arXiv:2403.01858},
year={2024}
}
```
|
random123123/BrushData | random123123 | "2024-05-17T15:33:02Z" | 3,523 | 8 | [
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:webdataset",
"modality:text",
"library:datasets",
"library:webdataset",
"library:mlcroissant",
"region:us"
] | null | "2024-04-16T15:19:20Z" | ---
license: apache-2.0
---
|
NLPC-UOM/sentence_alignment_dataset-Sinhala-Tamil-English | NLPC-UOM | "2024-02-16T02:12:13Z" | 3,513 | 2 | [
"task_categories:sentence-similarity",
"task_categories:translation",
"language:si",
"language:ta",
"language:en",
"region:us"
] | [
"sentence-similarity",
"translation"
] | "2022-05-23T03:28:07Z" | ---
task_categories:
- sentence-similarity
- translation
language:
- si
- ta
- en
---
### **Dataset summary**
This is a gold-standard benchmark dataset for sentence alignment between the Sinhala, English and Tamil languages. Data was crawled from the following news websites. The aligned documents annotated in the dataset NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English had been considered to annotate the aligned sentences.
| News Source | url |
| ------------- |-----------------------------|
| Army | https://www.army.lk/ |
| Hiru | http://www.hirunews.lk |
| ITN       | https://www.itnnews.lk   |
| Newsfirst | https://www.newsfirst.lk |
The aligned sentences have been manually annotated.
### **Dataset**
The folder structure for each news source is as follows.
```python
si-en
|--army
|--Sinhala
|--English
|--army.si-en
|--hiru
|--Sinhala
|--English
|--hiru.si-en
|--itn
|--Sinhala
|--English
|--itn.si-en
|--newsfirst
|--Sinhala
|--English
|--newsfirst.si-en
ta-en
si-ta
```
Sinhala/English/Tamil - contain the aligned documents in the two languages with respect to the news source. (army/hiru/itn/newsfirst) Aligned documents contain the same ID.<br/>
army.si-en - golden aligned sentence alignment. Each sentence is referenced according to the languageprefix_fileid_sentenceId. <br/>
### **Citation Information**
@article{fernando2022exploiting,<br/>
title={Exploiting bilingual lexicons to improve multilingual embedding-based document and sentence alignment for low-resource languages},<br/>
author={Fernando, Aloka and Ranathunga, Surangika and Sachintha, Dilan and Piyarathna, Lakmali and Rajitha, Charith},<br/>
journal={Knowledge and Information Systems},<br/>
pages={1--42},<br/>
year={2022},<br/>
publisher={Springer}<br/>
} |
Trelis/tiny-shakespeare | Trelis | "2023-09-06T16:27:30Z" | 3,502 | 9 | [
"task_categories:text-generation",
"language:en",
"size_categories:n<1K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"fine-tuning",
"shakespeare"
] | [
"text-generation"
] | "2023-09-06T16:16:36Z" | ---
task_categories:
- text-generation
language:
- en
tags:
- fine-tuning
- shakespeare
size_categories:
- n<1K
---
# Data source
Downloaded via Andrej Karpathy's nanogpt repo from this [link](https://raw.githubusercontent.com/karpathy/char-rnn/master/data/tinyshakespeare/input.txt)
# Data Format
- The entire dataset is split into train (90%) and test (10%).
- All rows are at most 1024 tokens, using the Llama 2 tokenizer.
- All rows are split cleanly so that sentences are whole and unbroken. |
Dahoas/rm-static | Dahoas | "2023-03-06T00:13:07Z" | 3,501 | 113 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-12-22T16:50:14Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: response
dtype: string
- name: chosen
dtype: string
- name: rejected
dtype: string
splits:
- name: train
num_bytes: 113850006
num_examples: 76256
- name: test
num_bytes: 7649255
num_examples: 5103
download_size: 73006535
dataset_size: 121499261
---
# Dataset Card for "rm-static"
Split of [hh-static](https://huggingface.co/datasets/Dahoas/static-hh) used for training reward models after supervised fine-tuning. |
hackercupai/hackercup | hackercupai | "2024-12-14T06:10:28Z" | 3,498 | 22 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"code"
] | null | "2024-06-19T03:19:31Z" | ---
license: apache-2.0
pretty_name: Meta Hacker Cup 2011-2023
tags:
- code
configs:
- config_name: default
data_files:
- split: sample
path: data_preview_sample_10rows.parquet
- split: full
path: output_dataset.parquet
---
# Data Preview
The data available in this preview contains a 10 row dataset:
- **Sample Dataset ("sample")**: This is a subset of the full dataset, containing data from 2023.
To view full dataset, download `output_dataset.parquet`. This contains data from 2011 to 2023.
## Fields
The dataset include the following fields:
- `name` (string)
- `year` (string)
- `round` (string)
- `statement` (string)
- `input` (string)
- `solution` (string)
- `code` (string)
- `sample_input` (string)
- `sample_output` (string)
- `images` (array of base64 image strings)
This dataset contains every Facebook/Meta Hacker Cup problem from 2011 through 2023. For each problem, you'll find these files:
* `<problem_name>.md`: The problem statement formatted in Markdown
* `<problem_name>.in`: The full input file
* `<problem_name>.out`: The full output file
* Note that some problems accept multiple possible outputs, in which case the full output file is simply an example of an output that would be accepted
* `<problem_name>_sample_input.txt`: The sample input provided by the problem statement
* `<problem_name>_sample_output.txt`: The sample output provided by the problem statement
Note that for problems from 2011 through 2019, the problems were initially typeset in html. For those problems you can find:
* `<problem_name>.html`: The problem statement formatted in HTML
For these problems, the Markdown version of the statement (`<problem_name>.md`) was automatically generated from the HTML version and may contain errors.
For some problems, written solutions/analyses are available:
* `<problem_name>.sol.md`
For some problems, code solutions are available:
* `<problem_name>.(cpp|py|java)`
Some problems contains references to images that look like this:
* `{{PHOTO_ID:<photo_id>}}`, example: `{{PHOTO_ID:923060468192530}}`
In the same folder as the problem statement, you can find `<photo_id>.jpg` or `<photo_id>.gif`
## Starter Kits
Some quick start solutions for working with this data are available at [HackerCupAI Repo](https://github.com/HackerCupAI/starter-kits). Some frameworks include:
- [Langchain](https://www.langchain.com/)
- [AutoGen](https://github.com/microsoft/autogen)
The samples show basic steps for data ingest, generating solutions, and evaluation.
For an example of data ingest, check out [this example](https://github.com/HackerCupAI/starter-kits/blob/main/autogen/app/utils/utils.py)
## Notes
- Solutions prior to 2019 do not contain markdown solution files.
- The 2019 markdown solutions are not included in the dataset but can be found in `.cpp` files.
## Citation
If you use this dataset, please cite it as follows:
```bibtex
@misc{2024hackercupai,
title = {2024 Hacker Cup Dataset},
author = {May, Wesley and Harmeyer, David and Hoak, Amber and Li, Margaret and Dymchenko, Sergii and Yang, Weiwei and Saroufim, Mark},
}
``` |
longvideobench/LongVideoBench | longvideobench | "2024-10-14T05:43:04Z" | 3,484 | 17 | [
"task_categories:multiple-choice",
"task_categories:visual-question-answering",
"language:en",
"license:cc-by-nc-sa-4.0",
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2407.15754",
"region:us",
"long video understanding",
"long context",
"multimodal",
"neurips 2024"
] | [
"multiple-choice",
"visual-question-answering"
] | "2024-06-12T06:58:56Z" | ---
license: cc-by-nc-sa-4.0
extra_gated_prompt: >-
The LongVideoBench dataset contains links to web videos for data collection
purposes. LongVideoBench does not own the content linked within this dataset;
all rights and copyright belong to the respective channel owners. Ensuring
compliance with platform terms and conditions is the responsibility of these
source channels. By accessing this dataset, you acknowledge and agree to the
following terms:
extra_gated_fields:
I understand that LongVideoBench does not own the videos in this dataset: checkbox
I understand that LongVideoBench is not the creator of the videos in this dataset: checkbox
I understand that, LongVideoBench may modify/delete its contents subject to the requirements of the creators or source platforms: checkbox
I agree to use this dataset for non-commercial use ONLY: checkbox
I agree with the data license (CC-BY-NC-SA 4-0) for this dataset: checkbox
task_categories:
- multiple-choice
- visual-question-answering
language:
- en
tags:
- long video understanding
- long context
- multimodal
- neurips 2024
pretty_name: longvideobench
---
![](https://github.com/longvideobench/longvideobench.github.io/blob/main/logo.png?raw=true)
# Dataset Card for LongVideoBench
<!-- Provide a quick summary of the dataset. -->
Large multimodal models (LMMs) are handling increasingly longer and more complex inputs. However, few public benchmarks are available to assess these advancements. To address this, we introduce LongVideoBench, a question-answering benchmark with video-language interleaved inputs up to an hour long. It comprises 3,763 web-collected videos with subtitles across diverse themes, designed to evaluate LMMs on long-term multimodal understanding.
The main challenge that LongVideoBench targets is to accurately retrieve and reason over detailed information from lengthy inputs. We present a novel task called referring reasoning, where questions contain a referring query that references related video contexts, requiring the model to reason over these details.
LongVideoBench includes 6,678 human-annotated multiple-choice questions across 17 categories, making it one of the most comprehensive benchmarks for long-form video understanding. Evaluations show significant challenges even for advanced proprietary models (e.g., GPT-4o, Gemini-1.5-Pro, GPT-4-Turbo), with open-source models performing worse. Performance improves only when models process more frames, establishing LongVideoBench as a valuable benchmark for future long-context LMMs.
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** LongVideoBench Team
- **Language(s) (NLP):** English
- **License:** CC-BY-NC-SA 4.0
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [https://github.com/longvideobench/LongVideoBench](https://github.com/longvideobench/LongVideoBench)
- **Homepage:** [https://longvideobench.github.io](https://longvideobench.github.io)
- **Leaderboard:** [https://huggingface.co/spaces/longvideobench/LongVideoBench](https://huggingface.co/spaces/longvideobench/LongVideoBench)
## Leaderboard (until Oct. 14, 2024)
We rank models by Test Total Performance.
| Model | Test Total (5341) | Test 8s-15s | Test 15s-60s | Test 180s-600s | Test 900s-3600s | Val Total (1337) |
| --- | --- | --- | --- | --- | --- | --- |
| [GPT-4o (0513) (256)](https://platform.openai.com/docs/models/gpt-4o) | 66.7 | 71.6 | 76.8 | 66.7 | 61.6 | 66.7 |
| [Aria (256)](https://huggingface.co/rhymes-ai/Aria) | 65.0 | 69.4 | 76.6 | 64.6 | 60.1 | 64.2 |
| [LLaVA-Video-72B-Qwen2 (128)](https://huggingface.co/lmms-lab/LLaVA-Video-72B-Qwen2) | 64.9 | 72.4 | 77.4 | 63.9 | 59.3 | 63.9 |
| [Gemini-1.5-Pro (0514) (256)](https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/gemini-1.5-pro-001) | 64.4 | 70.2 | 75.3 | 65.0 | 59.1 | 64.0 |
| [LLaVA-OneVision-QWen2-72B-OV (32)](https://huggingface.co/lmms-lab/llava-onevision-qwen2-72b-ov) | 63.2 | 74.3 | 77.4 | 61.6 | 56.5 | 61.3 |
| [LLaVA-Video-7B-Qwen2 (128)](https://huggingface.co/lmms-lab/LLaVA-Video-7B-Qwen2) | 62.7 | 69.7 | 76.5 | 62.1 | 56.6 | 61.1 |
| [Gemini-1.5-Flash (0514) (256)](https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/gemini-1.5-flash-001) | 62.4 | 66.1 | 73.1 | 63.1 | 57.3 | 61.6 |
| [GPT-4-Turbo (0409) (256)](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4) | 60.7 | 66.4 | 71.1 | 61.7 | 54.5 | 59.1 |
| [InternVL2-40B (16)](https://huggingface.co/OpenGVLab/InternVL2-40B) | 60.6 | 71.4 | 76.6 | 57.5 | 54.4 | 59.3 |
| [GPT-4o-mini (250)](https://platform.openai.com/docs/models/gpt-4o-mini) | 58.8 | 66.6 | 73.4 | 56.9 | 53.4 | 56.5 |
| [MiniCPM-V-2.6 (64)](https://huggingface.co/openbmb/MiniCPM-V-2_6) | 57.7 | 62.5 | 69.1 | 54.9 | 49.8 | 54.9 |
| [Qwen2-VL-7B (256)](https://huggingface.co/openbmb/MiniCPM-V-2_6) | 56.8 | 60.1 | 67.6 | 56.7 | 52.5 | 55.6 |
| [Kangaroo (64)](https://huggingface.co/KangarooGroup/kangaroo) | 54.8 | 65.6 | 65.7 | 52.7 | 49.1 | 54.2 |
| [PLLaVA-34B (32)](https://github.com/magic-research/PLLaVA) | 53.5 | 60.1 | 66.8 | 50.8 | 49.1 | 53.2 |
| [InternVL-Chat-V1-5-26B (16)](https://huggingface.co/OpenGVLab/InternVL-Chat-V1-5) | 51.7 | 61.3 | 62.7 | 49.5 | 46.6 | 51.2 |
| [LLaVA-Next-Video-34B (32)](https://llava-vl.github.io/blog/2024-04-30-llava-next-video/) | 50.5 | 57.6 | 61.6 | 48.7 | 45.9 | 50.5 |
| [Phi-3-Vision-Instruct (16)](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct) | 49.9 | 58.3 | 59.6 | 48.4 | 45.1 | 49.6 |
| [Idefics2 (16)](https://huggingface.co/HuggingFaceM4/idefics2-8b) | 49.4 | 57.4 | 60.4 | 47.3 | 44.7 | 49.7 |
| [Mantis-Idefics2 (16)](https://huggingface.co/TIGER-Lab/Mantis-8B-Idefics2) | 47.6 | 56.1 | 61.4 | 44.6 | 42.5 | 47.0 |
| [LLaVA-Next-Mistral-7B (8)](https://huggingface.co/llava-hf/llava-v1.6-mistral-7b-hf) | 47.1 | 53.4 | 57.2 | 46.9 | 42.1 | 49.1 |
| [PLLaVA-13B (32)](https://github.com/magic-research/PLLaVA) | 45.1 | 52.9 | 54.3 | 42.9 | 41.2 | 45.6 |
| [InstructBLIP-T5-XXL (8)](https://github.com/salesforce/LAVIS/tree/main/projects/instructblip) | 43.8 | 48.1 | 50.1 | 44.5 | 40.0 | 43.3 |
| [Mantis-BakLLaVA (16)](https://huggingface.co/TIGER-Lab/Mantis-bakllava-7b) | 43.7 | 51.3 | 52.7 | 41.1 | 40.1 | 43.7 |
| [BLIP-2-T5-XXL (8)](https://github.com/salesforce/LAVIS/tree/main/projects/blip2) | 43.5 | 46.7 | 47.4 | 44.2 | 40.9 | 42.7 |
| [LLaVA-Next-Video-M7B (32)](https://llava-vl.github.io/blog/2024-04-30-llava-next-video/) | 43.5 | 50.9 | 53.1 | 42.6 | 38.9 | 43.5 |
| [LLaVA-1.5-13B (8)](https://huggingface.co/llava-hf/llava-1.5-13b-hf) | 43.1 | 49.0 | 51.1 | 41.8 | 39.6 | 43.4 |
| [ShareGPT4Video (16)](https://github.com/InternLM/InternLM-XComposer/tree/main/projects/ShareGPT4Video) | 41.8 | 46.9 | 50.1 | 40.0 | 38.7 | 39.7 |
| [VideoChat2 (Mistral-7B) (16)](https://github.com/OpenGVLab/Ask-Anything/tree/main/video_chat2) | 41.2 | 49.3 | 49.3 | 39.0 | 37.5 | 39.3 |
| [LLaVA-1.5-7B (8)](https://huggingface.co/llava-hf/llava-1.5-7b-hf) | 40.4 | 45.0 | 47.4 | 40.1 | 37.0 | 40.3 |
| [mPLUG-Owl2 (8)](https://github.com/X-PLUG/mPLUG-Owl/tree/main/mPLUG-Owl2) | 39.4 | 49.4 | 47.3 | 38.7 | 34.3 | 39.1 |
| [PLLaVA-7B (32)](https://github.com/magic-research/PLLaVA) | 39.2 | 45.3 | 47.3 | 38.5 | 35.2 | 40.2 |
| [VideoLLaVA (8)](https://github.com/PKU-YuanGroup/Video-LLaVA/) | 37.6 | 43.1 | 44.6 | 36.4 | 34.4 | 39.1 |
| [VideoChat2 (Vicuna 7B) (16)](https://github.com/OpenGVLab/Ask-Anything/tree/main/video_chat2) | 35.1 | 38.1 | 40.5 | 33.5 | 33.6 | 36.0 |
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
1. Download the dataset via Hugging Face Client:
```shell
huggingface-cli download longvideobench/LongVideoBench --repo-type dataset --local-dir LongVideoBench --local-dir-use-symlinks False
```
2. Extract from the `.tar` files:
```shell
cat videos.tar.part.* > videos.tar
tar -xvf videos.tar
tar -xvf subtitles.tar
```
3. Use the [LongVideoBench] dataloader to load the data from raw MP4 files and subtitles:
- (a) Install the dataloader:
```shell
git clone https://github.com/LongVideoBench/LongVideoBench.git
cd LongVideoBench
pip install -e .
```
- (b) Load the dataset in python scripts:
```python
from longvideobench import LongVideoBenchDataset
# validation
dataset = LongVideoBenchDataset(YOUR_DATA_PATH, "lvb_val.json", max_num_frames=64)
# test
dataset = LongVideoBenchDataset(YOUR_DATA_PATH, "lvb_test_wo_gt.json", max_num_frames=64)
print(dataset[0]["inputs"]) # A list consisting of PIL.Image and strings.
```
The "inputs" are interleaved video frames and text subtitles, followed by questions and option prompts. You can then convert them to the format that your LMMs can accept.
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
This dataset is meant to evaluate LMMs on video understanding and long-context understanding abilities.
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
We do not advise to use this dataset for training.
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
- `lvb_val.json`: Validation set annotations.
- `lvb_test_wo_gt.json`: Test set annotations. Correct choice is not provided.
- `videos.tar.*`: Links to Videos.
- `subtitles.tar`: Links to Subtitles.
## Dataset Card Contact
[email protected]
```
@misc{wu2024longvideobenchbenchmarklongcontextinterleaved,
title={LongVideoBench: A Benchmark for Long-context Interleaved Video-Language Understanding},
author={Haoning Wu and Dongxu Li and Bei Chen and Junnan Li},
year={2024},
eprint={2407.15754},
archivePrefix={arXiv},
primaryClass={cs.CV},
url={https://arxiv.org/abs/2407.15754},
}
``` |
code-search-net/code_search_net | code-search-net | "2024-01-18T09:19:12Z" | 3,478 | 278 | [
"task_categories:text-generation",
"task_categories:fill-mask",
"task_ids:language-modeling",
"task_ids:masked-language-modeling",
"annotations_creators:no-annotation",
"language_creators:machine-generated",
"multilinguality:multilingual",
"source_datasets:original",
"language:code",
"license:other",
"size_categories:100K<n<1M",
"arxiv:1909.09436",
"region:us"
] | [
"text-generation",
"fill-mask"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- no-annotation
language_creators:
- machine-generated
language:
- code
license:
- other
multilinguality:
- multilingual
size_categories:
- 100K<n<1M
- 10K<n<100K
- 1M<n<10M
source_datasets:
- original
task_categories:
- text-generation
- fill-mask
task_ids:
- language-modeling
- masked-language-modeling
paperswithcode_id: codesearchnet
pretty_name: CodeSearchNet
dataset_info:
- config_name: all
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 5850604083
num_examples: 1880853
- name: test
num_bytes: 308626333
num_examples: 100529
- name: validation
num_bytes: 274564382
num_examples: 89154
download_size: 5117370511
dataset_size: 6433794798
- config_name: java
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 1429272535
num_examples: 454451
- name: test
num_bytes: 82377246
num_examples: 26909
- name: validation
num_bytes: 42358315
num_examples: 15328
download_size: 1060569153
dataset_size: 1554008096
- config_name: go
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 738153234
num_examples: 317832
- name: test
num_bytes: 32286998
num_examples: 14291
- name: validation
num_bytes: 26888527
num_examples: 14242
download_size: 487525935
dataset_size: 797328759
- config_name: python
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 1559645310
num_examples: 412178
- name: test
num_bytes: 84342064
num_examples: 22176
- name: validation
num_bytes: 92154786
num_examples: 23107
download_size: 940909997
dataset_size: 1736142160
- config_name: javascript
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 480286523
num_examples: 123889
- name: test
num_bytes: 24056972
num_examples: 6483
- name: validation
num_bytes: 30168242
num_examples: 8253
download_size: 1664713350
dataset_size: 534511737
- config_name: ruby
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 110681715
num_examples: 48791
- name: test
num_bytes: 5359280
num_examples: 2279
- name: validation
num_bytes: 4830744
num_examples: 2209
download_size: 111758028
dataset_size: 120871739
- config_name: php
features:
- name: repository_name
dtype: string
- name: func_path_in_repository
dtype: string
- name: func_name
dtype: string
- name: whole_func_string
dtype: string
- name: language
dtype: string
- name: func_code_string
dtype: string
- name: func_code_tokens
sequence: string
- name: func_documentation_string
dtype: string
- name: func_documentation_tokens
sequence: string
- name: split_name
dtype: string
- name: func_code_url
dtype: string
splits:
- name: train
num_bytes: 1532564870
num_examples: 523712
- name: test
num_bytes: 80203877
num_examples: 28391
- name: validation
num_bytes: 78163924
num_examples: 26015
download_size: 851894048
dataset_size: 1690932671
config_names:
- all
- go
- java
- javascript
- php
- python
- ruby
---
# Dataset Card for CodeSearchNet corpus
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://wandb.ai/github/CodeSearchNet/benchmark
- **Repository:** https://github.com/github/CodeSearchNet
- **Paper:** https://arxiv.org/abs/1909.09436
- **Data:** https://doi.org/10.5281/zenodo.7908468
- **Leaderboard:** https://wandb.ai/github/CodeSearchNet/benchmark/leaderboard
### Dataset Summary
CodeSearchNet corpus is a dataset of 2 million (comment, code) pairs from open-source libraries hosted on GitHub. It contains code and documentation for several programming languages.
CodeSearchNet corpus was gathered to support the [CodeSearchNet challenge](https://wandb.ai/github/CodeSearchNet/benchmark), to explore the problem of code retrieval using natural language.
### Supported Tasks and Leaderboards
- `language-modeling`: The dataset can be used to train a model for modelling programming languages, which consists of building language models for programming languages.
### Languages
- Go **programming** language
- Java **programming** language
- Javascript **programming** language
- PHP **programming** language
- Python **programming** language
- Ruby **programming** language
## Dataset Structure
### Data Instances
A data point consists of function code along with its documentation. Each data point also contains metadata on the function, such as the repository it was extracted from.
```
{
'id': '0',
'repository_name': 'organisation/repository',
'func_path_in_repository': 'src/path/to/file.py',
'func_name': 'func',
'whole_func_string': 'def func(args):\n"""Docstring"""\n [...]',
'language': 'python',
'func_code_string': '[...]',
'func_code_tokens': ['def', 'func', '(', 'args', ')', ...],
'func_documentation_string': 'Docstring',
'func_documentation_string_tokens': ['Docstring'],
'split_name': 'train',
'func_code_url': 'https://github.com/<org>/<repo>/blob/<hash>/src/path/to/file.py#L111-L150'
}
```
### Data Fields
- `id`: Arbitrary number
- `repository_name`: name of the GitHub repository
- `func_path_in_repository`: path to the file which holds the function in the repository
- `func_name`: name of the function in the file
- `whole_func_string`: Code + documentation of the function
- `language`: Programming language in which the function is written
- `func_code_string`: Function code
- `func_code_tokens`: Tokens yielded by Treesitter
- `func_documentation_string`: Function documentation
- `func_documentation_string_tokens`: Tokens yielded by Treesitter
- `split_name`: Name of the split to which the example belongs (one of train, test or valid)
- `func_code_url`: URL to the function code on Github
### Data Splits
Three splits are available:
- train
- test
- valid
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
All information can be retrieved in the [original technical review](https://arxiv.org/pdf/1909.09436.pdf)
**Corpus collection**:
Corpus has been collected from publicly available open-source non-fork GitHub repositories, using libraries.io to identify all projects which are used by at least one other project, and sort them by “popularity” as indicated by the number of stars and forks.
Then, any projects that do not have a license or whose license does not explicitly permit the re-distribution of parts of the project were removed. Treesitter - GitHub's universal parser - was then used to tokenize all Go, Java, JavaScript, Python, PHP and Ruby functions (or methods) and, where available, to extract their respective documentation text using a heuristic regular expression.
**Corpus filtering**:
Functions without documentation are removed from the corpus. This yields a set of pairs ($c_i$, $d_i$) where $c_i$ is some function documented by $d_i$. Pairs ($c_i$, $d_i$) are passed through the following preprocessing tasks:
- Documentation $d_i$ is truncated to the first full paragraph to remove in-depth discussion of function arguments and return values
- Pairs in which $d_i$ is shorter than three tokens are removed
- Functions $c_i$ whose implementation is shorter than three lines are removed
- Functions whose name contains the substring “test” are removed
- Constructors and standard extension methods (e.g. `__str__` in Python or `toString` in Java) are removed
- Duplicates and near duplicates functions are removed, in order to keep only one version of the function
#### Who are the source language producers?
OpenSource contributors produced the code and documentations.
The dataset was gathered and preprocessed automatically.
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
Each example in the dataset is extracted from a GitHub repository, and each repository has its own license. Example-wise license information is not (yet) included in this dataset: you will need to find out yourself which license the code is using.
### Citation Information
@article{husain2019codesearchnet,
title={{CodeSearchNet} challenge: Evaluating the state of semantic code search},
author={Husain, Hamel and Wu, Ho-Hsiang and Gazit, Tiferet and Allamanis, Miltiadis and Brockschmidt, Marc},
journal={arXiv preprint arXiv:1909.09436},
year={2019}
}
### Contributions
Thanks to [@SBrandeis](https://github.com/SBrandeis) for adding this dataset.
|
microsoft/wiki_qa | microsoft | "2024-01-04T16:41:46Z" | 3,477 | 50 | [
"task_categories:question-answering",
"task_ids:open-domain-qa",
"annotations_creators:crowdsourced",
"language_creators:found",
"multilinguality:monolingual",
"source_datasets:original",
"language:en",
"license:other",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"question-answering"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- crowdsourced
language_creators:
- found
language:
- en
license:
- other
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- question-answering
task_ids:
- open-domain-qa
paperswithcode_id: wikiqa
pretty_name: WikiQA
dataset_info:
features:
- name: question_id
dtype: string
- name: question
dtype: string
- name: document_title
dtype: string
- name: answer
dtype: string
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
splits:
- name: test
num_bytes: 1333261
num_examples: 6165
- name: validation
num_bytes: 589765
num_examples: 2733
- name: train
num_bytes: 4453862
num_examples: 20360
download_size: 2861208
dataset_size: 6376888
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: validation
path: data/validation-*
- split: train
path: data/train-*
---
# Dataset Card for "wiki_qa"
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** [https://www.microsoft.com/en-us/download/details.aspx?id=52419](https://www.microsoft.com/en-us/download/details.aspx?id=52419)
- **Repository:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Paper:** [WikiQA: A Challenge Dataset for Open-Domain Question Answering](https://aclanthology.org/D15-1237/)
- **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
- **Size of downloaded dataset files:** 7.10 MB
- **Size of the generated dataset:** 6.40 MB
- **Total amount of disk used:** 13.50 MB
### Dataset Summary
Wiki Question Answering corpus from Microsoft.
The WikiQA corpus is a publicly available set of question and sentence pairs, collected and annotated for research on open-domain question answering.
### Supported Tasks and Leaderboards
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Languages
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Dataset Structure
### Data Instances
#### default
- **Size of downloaded dataset files:** 7.10 MB
- **Size of the generated dataset:** 6.40 MB
- **Total amount of disk used:** 13.50 MB
An example of 'train' looks as follows.
```
{
"answer": "Glacier caves are often called ice caves , but this term is properly used to describe bedrock caves that contain year-round ice.",
"document_title": "Glacier cave",
"label": 0,
"question": "how are glacier caves formed?",
"question_id": "Q1"
}
```
### Data Fields
The data fields are the same among all splits.
#### default
- `question_id`: a `string` feature.
- `question`: a `string` feature.
- `document_title`: a `string` feature.
- `answer`: a `string` feature.
- `label`: a classification label, with possible values including `0` (0), `1` (1).
### Data Splits
| name |train|validation|test|
|-------|----:|---------:|---:|
|default|20360| 2733|6165|
## Dataset Creation
### Curation Rationale
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the source language producers?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Annotations
#### Annotation process
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
#### Who are the annotators?
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Personal and Sensitive Information
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Discussion of Biases
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Other Known Limitations
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
## Additional Information
### Dataset Curators
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
### Licensing Information
MICROSOFT RESEARCH DATA LICENSE AGREEMENT
FOR
MICROSOFT RESEARCH WIKIQA CORPUS
These license terms are an agreement between Microsoft Corporation (or based on where you live, one of its
affiliates) and you. Please read them. They apply to the data associated with this license above, which includes
the media on which you received it, if any. The terms also apply to any Microsoft:
- updates,
- supplements,
- Internet-based services, and
- support services
for this data, unless other terms accompany those items. If so, those terms apply.
BY USING THE DATA, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE DATA.
If you comply with these license terms, you have the rights below.
1. SCOPE OF LICENSE.
a. You may use, copy, modify, create derivative works, and distribute the Dataset:
i. for research and technology development purposes only. Examples of research and technology
development uses are teaching, academic research, public demonstrations and experimentation ;
and
ii. to publish (or present papers or articles) on your results from using such Dataset.
b. The data is licensed, not sold. This agreement only gives you some rights to use the data. Microsoft reserves
all other rights. Unless applicable law gives you more rights despite this limitation, you may use the data only
as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the
data that only allow you to use it in certain ways.
You may not
- work around any technical limitations in the data;
- reverse engineer, decompile or disassemble the data, except and only to the extent that applicable law
expressly permits, despite this limitation;
- rent, lease or lend the data;
- transfer the data or this agreement to any third party; or
- use the data directly in a commercial product without Microsoft’s permission.
2. DISTRIBUTION REQUIREMENTS:
a. If you distribute the Dataset or any derivative works of the Dataset, you will distribute them under the
same terms and conditions as in this Agreement, and you will not grant other rights to the Dataset or
derivative works that are different from those provided by this Agreement.
b. If you have created derivative works of the Dataset, and distribute such derivative works, you will
cause the modified files to carry prominent notices so that recipients know that they are not receiving
the original Dataset. Such notices must state: (i) that you have changed the Dataset; and (ii) the date
of any changes.
3. DISTRIBUTION RESTRICTIONS. You may not: (a) alter any copyright, trademark or patent notice in the
Dataset; (b) use Microsoft’s trademarks in a way that suggests your derivative works or modifications come from
or are endorsed by Microsoft; (c) include the Dataset in malicious, deceptive or unlawful programs.
4. OWNERSHIP. Microsoft retains all right, title, and interest in and to any Dataset provided to you under this
Agreement. You acquire no interest in the Dataset you may receive under the terms of this Agreement.
5. LICENSE TO MICROSOFT. Microsoft is granted back, without any restrictions or limitations, a non-exclusive,
perpetual, irrevocable, royalty-free, assignable and sub-licensable license, to reproduce, publicly perform or
display, use, modify, post, distribute, make and have made, sell and transfer your modifications to and/or
derivative works of the Dataset, for any purpose.
6. FEEDBACK. If you give feedback about the Dataset to Microsoft, you give to Microsoft, without charge, the right
to use, share and commercialize your feedback in any way and for any purpose. You also give to third parties,
without charge, any patent rights needed for their products, technologies and services to use or interface with
any specific parts of a Microsoft dataset or service that includes the feedback. You will not give feedback that is
subject to a license that requires Microsoft to license its Dataset or documentation to third parties because we
include your feedback in them. These rights survive this Agreement.
7. EXPORT RESTRICTIONS. The Dataset is subject to United States export laws and regulations. You must
comply with all domestic and international export laws and regulations that apply to the Dataset. These laws
include restrictions on destinations, end users and end use. For additional information, see
www.microsoft.com/exporting.
8. ENTIRE AGREEMENT. This Agreement, and the terms for supplements, updates, Internet-based services and
support services that you use, are the entire agreement for the Dataset.
9. SUPPORT SERVICES. Because this data is “as is,” we may not provide support services for it.
10. APPLICABLE LAW.
a. United States. If you acquired the software in the United States, Washington state law governs the
interpretation of this agreement and applies to claims for breach of it, regardless of conflict of laws principles.
The laws of the state where you live govern all other claims, including claims under state consumer protection
laws, unfair competition laws, and in tort.
b. Outside the United States. If you acquired the software in any other country, the laws of that country
apply.
11. LEGAL EFFECT. This Agreement describes certain legal rights. You may have other rights under the laws of your
country. You may also have rights with respect to the party from whom you acquired the Dataset. This
Agreement does not change your rights under the laws of your country if the laws of your country do not permit
it to do so.
12. DISCLAIMER OF WARRANTY. The Dataset is licensed “as-is.” You bear the risk of using it. Microsoft gives no
express warranties, guarantees or conditions. You may have additional consumer rights or statutory guarantees
under your local laws which this agreement cannot change. To the extent permitted under your local laws,
Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-
infringement.
13. LIMITATION ON AND EXCLUSION OF REMEDIES AND DAMAGES. YOU CAN RECOVER FROM
MICROSOFT AND ITS SUPPLIERS ONLY DIRECT DAMAGES UP TO U.S. $5.00. YOU CANNOT RECOVER ANY
OTHER DAMAGES, INCLUDING CONSEQUENTIAL, LOST PROFITS, SPECIAL, INDIRECT OR INCIDENTAL
DAMAGES.
This limitation applies to
- anything related to the software, services, content (including code) on third party Internet sites, or third party
programs; and
- claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other
tort to the extent permitted by applicable law.
It also applies even if Microsoft knew or should have known about the possibility of the damages. The above
limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of
incidental, consequential or other damages.
### Citation Information
```
@inproceedings{yang-etal-2015-wikiqa,
title = "{W}iki{QA}: A Challenge Dataset for Open-Domain Question Answering",
author = "Yang, Yi and
Yih, Wen-tau and
Meek, Christopher",
booktitle = "Proceedings of the 2015 Conference on Empirical Methods in Natural Language Processing",
month = sep,
year = "2015",
address = "Lisbon, Portugal",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D15-1237",
doi = "10.18653/v1/D15-1237",
pages = "2013--2018",
}
```
### Contributions
Thanks to [@patrickvonplaten](https://github.com/patrickvonplaten), [@mariamabarham](https://github.com/mariamabarham), [@lewtun](https://github.com/lewtun), [@thomwolf](https://github.com/thomwolf) for adding this dataset. |
Gustavosta/Stable-Diffusion-Prompts | Gustavosta | "2022-09-18T22:38:59Z" | 3,474 | 459 | [
"annotations_creators:no-annotation",
"language_creators:found",
"source_datasets:original",
"language:en",
"license:unknown",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2022-09-18T12:13:15Z" | ---
license:
- unknown
annotations_creators:
- no-annotation
language_creators:
- found
language:
- en
source_datasets:
- original
---
# Stable Diffusion Dataset
This is a set of about 80,000 prompts filtered and extracted from the image finder for Stable Diffusion: "[Lexica.art](https://lexica.art/)". Extracting the data was somewhat difficult, since the search engine does not yet offer a public API that is not protected by Cloudflare.
If you want to test the model with a demo, you can go to: "[spaces/Gustavosta/MagicPrompt-Stable-Diffusion](https://huggingface.co/spaces/Gustavosta/MagicPrompt-Stable-Diffusion)".
If you want to see the model, go to: "[Gustavosta/MagicPrompt-Stable-Diffusion](https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion)". |
jp1924/AudioCaps | jp1924 | "2024-02-15T05:34:30Z" | 3,468 | 7 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-01-27T11:35:39Z" | ---
dataset_info:
features:
- name: audiocap_id
dtype: int32
- name: youtube_id
dtype: string
- name: start_time
dtype: int32
- name: audio
dtype:
audio:
sampling_rate: 48000
- name: caption
dtype: string
splits:
- name: train
num_bytes: 2012866216147.6
num_examples: 45087
- name: validation
num_bytes: 94570191869
num_examples: 2230
- name: test
num_bytes: 187871958256.0
num_examples: 4400
download_size: 431887334157
dataset_size: 282442150125.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
WenhaoWang/VidProM | WenhaoWang | "2024-09-26T13:55:35Z" | 3,468 | 60 | [
"task_categories:text-to-video",
"task_categories:text-to-image",
"source_datasets:original",
"language:en",
"license:cc-by-nc-4.0",
"size_categories:1M<n<10M",
"format:csv",
"modality:tabular",
"modality:text",
"modality:video",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2403.06098",
"region:us",
"prompts",
"text-to-video",
"text-to-image",
"Pika",
"VideoCraft2",
"Text2Video-Zero",
"ModelScope",
"Video Generative Model Evaluation",
"Text-to-Video Diffusion Model Development",
"Text-to-Video Prompt Engineering",
"Efficient Video Generation",
"Fake Video Detection",
"Video Copy Detection for Diffusion Models"
] | [
"text-to-video",
"text-to-image"
] | "2024-02-25T15:20:21Z" | ---
license: cc-by-nc-4.0
task_categories:
- text-to-video
- text-to-image
language:
- en
pretty_name: VidProM
size_categories:
- 1M<n<10M
source_datasets:
- original
tags:
- prompts
- text-to-video
- text-to-image
- Pika
- VideoCraft2
- Text2Video-Zero
- ModelScope
- Video Generative Model Evaluation
- Text-to-Video Diffusion Model Development
- Text-to-Video Prompt Engineering
- Efficient Video Generation
- Fake Video Detection
- Video Copy Detection for Diffusion Models
configs:
- config_name: VidProM_unique
data_files: VidProM_unique.csv
---
<p align="center">
<img src="https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/teasor.png" width="800">
</p>
# Summary
This is the dataset proposed in our paper [**VidProM: A Million-scale Real Prompt-Gallery Dataset for Text-to-Video Diffusion Models**](https://arxiv.org/abs/2403.06098) (NeurIPS 2024).
VidProM is the first dataset featuring 1.67 million unique text-to-video prompts and 6.69 million videos generated from 4 different state-of-the-art diffusion models.
It inspires many exciting new research areas, such as Text-to-Video Prompt Engineering, Efficient Video Generation, Fake Video Detection, and Video Copy Detection for Diffusion Models.
# Directory
```
*DATA_PATH
*VidProM_unique.csv
*VidProM_semantic_unique.csv
*VidProM_embed.hdf5
*original_files
*generate_1_ori.html
*generate_2_ori.html
...
*pika_videos
*pika_videos_1.tar
*pika_videos_2.tar
...
*vc2_videos
*vc2_videos_1.tar
*vc2_videos_2.tar
...
*t2vz_videos
*t2vz_videos_1.tar
*t2vz_videos_2.tar
...
*ms_videos
*ms_videos_1.tar
*ms_videos_2.tar
...
*example
```
# Download
### Automatical
Install the [datasets](https://huggingface.co/docs/datasets/v1.15.1/installation.html) library first, by:
```
pip install datasets
```
Then it can be downloaded automatically with
```python
import numpy as np
from datasets import load_dataset
dataset = load_dataset('WenhaoWang/VidProM')
```
### Manual
You can also download each file by ```wget```, for instance:
```
wget https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/VidProM_unique.csv
```
### Users from China
For users from China, we cooperate with [Wisemodel](https://wisemodel.cn/home), and you can download them faster from [here](https://wisemodel.cn/datasets/WenhaoWang/VidProM).
# Explanation
``VidProM_unique.csv`` contains the UUID, prompt, time, and 6 NSFW probabilities.
It can easily be read by
```python
import pandas
df = pd.read_csv("VidProM_unique.csv")
```
Below are three rows from ``VidProM_unique.csv``:
| uuid | prompt | time | toxicity | obscene | identity_attack | insult | threat | sexual_explicit |
|--------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------|----------|---------|-----------------|---------|---------|-----------------|
| 6a83eb92-faa0-572b-9e1f-67dec99b711d | Flying among clouds and stars, kitten Max discovered a world full of winged friends. Returning home, he shared his stories and everyone smiled as they imagined flying together in their dreams. | Sun Sep 3 12:27:44 2023 | 0.00129 | 0.00016 | 7e-05 | 0.00064 | 2e-05 | 2e-05 |
| 3ba1adf3-5254-59fb-a13e-57e6aa161626 | Use a clean and modern font for the text "Relate Reality 101." Add a small, stylized heart icon or a thought bubble above or beside the text to represent emotions and thoughts. Consider using a color scheme that includes warm, inviting colors like deep reds, soft blues, or soothing purples to evoke feelings of connection and intrigue. | Wed Sep 13 18:15:30 2023 | 0.00038 | 0.00013 | 8e-05 | 0.00018 | 3e-05 | 3e-05 |
| 62e5a2a0-4994-5c75-9976-2416420526f7 | zoomed out, sideview of an Grey Alien sitting at a computer desk | Tue Oct 24 20:24:21 2023 | 0.01777 | 0.00029 | 0.00336 | 0.00256 | 0.00017 | 5e-05 |
``VidProM_semantic_unique.csv`` is a semantically unique version of ``VidProM_unique.csv``.
``VidProM_embed.hdf5`` is the 3072-dim embeddings of our prompts. They are embedded by text-embedding-3-large, which is the latest text embedding model of OpenAI.
It can easily be read by
```python
import numpy as np
import h5py
def read_descriptors(filename):
hh = h5py.File(filename, "r")
descs = np.array(hh["embeddings"])
names = np.array(hh["uuid"][:], dtype=object).astype(str).tolist()
return names, descs
uuid, features = read_descriptors('VidProM_embed.hdf5')
```
``original_files`` are the HTML files from [official Pika Discord](https://discord.com/invite/pika) collected by [DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter). You can do whatever you want with it under [CC BY-NC 4.0 license](https://creativecommons.org/licenses/by-nc/4.0/deed.en).
``pika_videos``, ``vc2_videos``, ``t2vz_videos``, and ``ms_videos`` are the generated videos by 4 state-of-the-art text-to-video diffusion models. Each contains 30 tar files.
``example`` is a subfolder which contains 10,000 datapoints.
# Datapoint
<p align="center">
<img src="https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/datapoint.png" width="800">
</p>
# Comparison with DiffusionDB
<p align="center">
<img src="https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/compare_table.jpg" width="800">
</p>
<p align="center">
<img src="https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/compare_visual.png" width="800">
</p>
<p align="center">
<img src="https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/WizMap_V_D.jpg" width="800">
</p>
Click the [WizMap](https://poloclub.github.io/wizmap/?dataURL=https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/data_vidprom_diffusiondb.ndjson&gridURL=https://huggingface.co/datasets/WenhaoWang/VidProM/resolve/main/grid_vidprom_diffusiondb.json)
(and wait for 5 seconds) for an interactive visualization of our 1.67 million prompts. Above is a thumbnail.
Please check our paper for a detailed comparison.
# Curators
VidProM is created by [Wenhao Wang](https://wangwenhao0716.github.io/) and Professor [Yi Yang](https://scholar.google.com/citations?user=RMSuNFwAAAAJ&hl=zh-CN).
# License
The prompts and videos generated by [Pika](https://discord.com/invite/pika) in our VidProM are licensed under the [CC BY-NC 4.0 license](https://creativecommons.org/licenses/by-nc/4.0/deed.en). Additionally, similar to their original repositories, the videos from [VideoCraft2](https://github.com/AILab-CVC/VideoCrafter), [Text2Video-Zero](https://github.com/Picsart-AI-Research/Text2Video-Zero), and [ModelScope](https://huggingface.co/ali-vilab/modelscope-damo-text-to-video-synthesis) are released under the [Apache license](https://www.apache.org/licenses/LICENSE-2.0), the [CreativeML Open RAIL-M license](https://github.com/Picsart-AI-Research/Text2Video-Zero/blob/main/LICENSE), and the [CC BY-NC 4.0 license](https://creativecommons.org/licenses/by-nc/4.0/deed.en), respectively. Our code is released under the [CC BY-NC 4.0 license](https://creativecommons.org/licenses/by-nc/4.0/deed.en).
# Citation
```
@article{wang2024vidprom,
title={VidProM: A Million-scale Real Prompt-Gallery Dataset for Text-to-Video Diffusion Models},
author={Wang, Wenhao and Yang, Yi},
booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
year={2024},
url={https://openreview.net/forum?id=pYNl76onJL}
}
```
# Contact
If you have any questions, feel free to contact Wenhao Wang ([email protected]).
|
m-a-p/PIN-100M | m-a-p | "2025-01-21T20:16:47Z" | 3,465 | 2 | [
"language:en",
"language:zh",
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2406.13923",
"region:us",
"multimodal",
"interleaved"
] | null | "2024-05-25T04:58:09Z" | ---
license: apache-2.0
language:
- en
- zh
configs:
- config_name: pin
data_files:
- split: train
path:
- data/DocLayNet/DocLayNet.jsonl
tags:
- multimodal
- interleaved
size_categories:
- 100B<n<1T
pretty_name: pin-100m
---
# PIN-100M
The full version of the dataset, related to the paper "PIN: A Knowledge-Intensive Dataset for Paired and Interleaved Multimodal Documents"
Paper: https://arxiv.org/abs/2406.13923
This dataset contains 100M samples with PIN format.
**Please note that the required storage space exceeds 150TB!!**
🚀 News
[ 2024.12.20 ] !NEW! 🔥The currently available version is not the complete version; this project is still ongoing! (It has been released early because we reached the private storage limit on Hugging Face.)
<img src="assets/intro.png">
## 0 Usage
Download ALL files
```bash
huggingface-cli download m-a-p/PIN-100M --repo-type=dataset --resume-download --local-dir "your_local_path"
```
Download ONLY **Jsonl** files
```bash
huggingface-cli download m-a-p/PIN-100M --repo-type=dataset --resume-download --include "*.jsonl" --local-dir "your_local_path"
```
Decompression
```bash
cat data.tar.part* > data.tar
tar -xvf data.tar
```
## 1 Dataset statistics
**Working**
Storage space statistics may contain some errors, so these values are for reference only.
## 2 Data Structure
### 2.1 Subsets
We process 8 subsets, including PIN-PMC, DocLayNet, Linux-CN, chinese-markdown, OBELICS, MMC4, leetcode, and PG19.
<img src="assets/dataset-example.png">
Note: We do not release the PIN-arXiv subset in the preview version.
### 2.2 Folder Structure
The directory `content_image` holds the images mentioned within the markdown text, and `overall_image` displays the overall visual representation of the markdown files. Moreover, the `JSONL` files encapsulate the textual content along with associated data details.
An example subset:
```
example_dataset/
│
├── content_image/
├── overall_image/
└── example_dataset.jsonl
```
A subset with multiple parts:
```
example_dataset/
│
├── part00/
│ ├── content_image/
│ ├── overall_image/
│ └── part00.jsonl
│
├── part01/
│ ├── content_image/
│ ├── overall_image/
│ └── part01.jsonl
│
... - More similar parts
```
### 2.3 content_image Folder
This folder contains all the content images used in the markdown files.
Note: All images need to be converted to PNG format. The filename should be unique within the folder.
```
content_image/
│
├── 1.png
├── 2.png
...
```
### 2.4 overall_image Folder
This folder contains all the overall images for each sample.
Note: All images need to be converted to PNG format. The filename should be unique within the folder.
```
overall_image/
│
├── 1.png
├── 2.png
...
```
### 2.5 JSON Lines Format
We provide a detailed example of the annotations included with each data entry.
```
{
"id": 1919,
"meta": {
"language": "en",
"oi_exist": true,
"oi_source": "compiling",
"source_dataset": "example_source (e.g. OBELICS)",
    "ori_meta": {
      "document_url": "https://www.example.com/2022/02/21/example/",
      ...
    },
    "doc_id": 1997,
    "page_id": 0,
    "date_download": "2024-03-01"
  },
"license": "CC-BY-4.0",
"quality_signals": {
"doc_length": 100,
...
},
"content_image": [
"content_image/1997-0.png",
"content_image/1997-1.png"
],
"md": "<img src='content_image/1997-0.png'>\n\nThis is a fake sample data line, just for show.\n\nThis is a fake sample data line, just for show.\n\n<img src='content_image/1997-1.png'>\n\nThis is a fake sample data line, just for show.",
"overall_image": "overall_image/1997.png"
}
```
**Field Descriptions:**
- **id**: Unique identifier for each entry.
- **meta**: Metadata for each multimodal document entry.
- **language**: The document's language, such as Chinese (zh) or English (en).
- **source_dataset**: If the document is converted from another dataset, the original dataset name is noted here; otherwise, it is None.
- **doc_id**: A unique document identifier providing name and other details.
- **page_id**: A unique page identifier indicating the document's page number. If there is only one page, this is None. Page IDs are usually numbered starting from 1 in multi-page documents.
- **date_download**: The date the document was downloaded.
- **ori_meta**: Original metadata from the dataset, if available; otherwise, None.
- **oi_exist**: Indicates whether an overall image exists. True or False.
- **oi_source**: Source of the overall image; 'ori' for images taken from the original dataset and 'compiling' for images generated through code compilation. If this tag is missing, the image is likely compiled.
- ...
- **quality_signals**: Quality indicators inspired by the design of redpajama v2.
- **doc_length**: Length of the document.
- ...
- **content_image**: List of images mentioned in the document; None if no images are present.
- **overall_image**: Path to the corresponding overall image. (A list or a single path)
- **md**: Contains the markdown content.
- **license**: License information for the current sample.
## 3 Examples of jsonl files
We selected samples consisting of short markdown documents.
### 3.1 An example of DocLayNet
Notably, the dataset's overall images are converted from the original dataset's PDFs into PNG format.
```json
{
"id": 0,
"meta": {
"language": "en",
"oi_exist": true,
"oi_source": "ori",
"source_dataset": "DocLayNet",
"ori_meta": null,
"doc_id": "NYSE_F_2004.pdf",
"page_id": "0",
"date_download": "2024-3-24"
},
"quality_signals": null,
"license": "https://cdla.io/permissive-1-0/",
"content_image": [
"content_image/34102.jpg"
],
"overall_image": "overall_image/3562e47265520f7a72f3eac73aadfe19a78531698c3b50d7670b8ad9b214106b.png",
"md": "<img src='content_image/34102.jpg'>\n\n# Ford Motor Company / 2004 Annual Report \n\n# R W A R D F O R W A R D \n\n"
}
```
### 3.2 An example of OBELICS
```json
{
"id": 466502,
"meta": {
"language": "en",
"oi_exist": true,
"oi_source": "compiling",
"source_dataset": "OBELICS",
"ori_meta": {
"document_url": "https://www.donegaldaily.com/2022/02/21/watch-incredible-storm-surge-at-portsalon-golf-club/",
"unformatted_src": "https://www.donegaldaily.com/wp-content/uploads/2022/02/Screenshot-2022-02-21-at-17.54.30.jpg",
"src": "https://www.donegaldaily.com/wp-content/uploads/2022/02/Screenshot-2022-02-21-at-17.54.30.jpg",
"formatted_filename": "Screenshot at",
"rendered_width": 817,
"rendered_height": 419,
"original_width": 817,
"original_height": 419,
"format": "jpeg",
"general_meta": {
"url": "https://www.donegaldaily.com/2022/02/21/watch-incredible-storm-surge-at-portsalon-golf-club/",
"warc_filename": "crawl-data/CC-MAIN-2022-27/segments/1656103271864.14/warc/CC-MAIN-20220626192142-20220626222142-00308.warc.gz",
"warc_record_offset": 795020636,
"warc_record_length": 31271
}
},
"doc_id": 98496,
"page_id": 0,
"date_download": "2024-4-22"
},
"md": "<img src='content_image/98496-0.png'>\n\nThe golf course at Portsalon Golf Club took a battering today as a result of Storm Franklin.\n\nDonegal had been left battered and bruised overnight after Storm Franklin ripped across the county.\n\nThere were trees down on the approach roads to Donegal Town and in Gartan.\n\nThere were also trees down in Inishowen while there is also heavy water reported along the sides of roads with motorists asked to slow down and not put themselves in danger.\n\nDonegal’s coastline took a huge impact with massive waves reported along the coastline around the county.\n\nThe video, taken by Johnny Shields was taken from the tee box of the third hole.",
"license": "CC-BY-4.0",
"quality_signals": null,
"content_image": [
"content_image/98496-0.png"
],
"overall_image": "overall_image/98496-0.png"
}
```
### 3.3 An example of chinese-markdown
```json
{
"id": 7,
"meta": {
"language": "zh",
"oi_exist": true,
"oi_source": "compiling",
"source_dataset": "chinese-markdown",
"ori_meta": null,
"doc_id": 7,
"page_id": null,
"date_download": "2024-04-30"
},
"md": "---\ntitle: 常见问题 QA\ncategory: 其它\norder: 1\n---\n\n> 持续更新中...\n> 如有问题可以到 <https://github.com/alibaba/ice/issues/new> 反馈\n\n## ICE 的浏览器兼容策略是什么\n\n由于 ICE 优先使用 React 16+,其需要的最低 IE 版本为 11,如果您需要在以下的版本使用,您可能需要引入一些 polyfill 来支持 `Map`, `Set` 等特性。参考[React 官网说明](https://reactjs.org/blog/2017/09/26/react-v16.0.html#javascript-environment-requirements)。\n\n以下代码可以帮助你在低版本 IE 下自动跳转到我们提供的提示浏览器升级页面。当然您也可以使用自定义的浏览器升级页面。\n\n```\n<!--[if lt IE 11]>\n<script>location.href = \"//www.taobao.com/markets/tbhome/ali-page-updater\"; </script>\n<![endif]-->\n```\n\n添加如上代码后,如果使用 IE11 及以下浏览器访问页面,则会自动跳转到统一引导升级浏览器的页面。\n\n## WebStorm/IDEA 编辑器卡顿现象\n\n由于项目在安装依赖后,产生文件夹 `node_modules` 含有较多的碎小文件,编辑器在索引文件引起的卡顿。\nWebStorm 中尤为明显,可通过 exclude `node_modules` 目录,不需要检索该文件夹下的内容。\n\n## 如何设置网页在浏览器 Tab 上面的 Icon (favicon)\n\n细心的同学可能会看到页面在浏览器 Tab 上面会有自定义的 Icon:\n\n![](//img.alicdn.com/tfs/TB1ct6bPpXXXXXYXFXXXXXXXXXX-484-82.png)\n\n如果你想要在自己站点上面加上这个 Icon 可以按照如下步骤添加:\n\n1. 准备一个 Icon,文件格式可以为 `.png` 或者 `.ico`,正方形,分辨率可以是 32x32px 或者 64x64px 文件体积要求尽可能小。\n2. 上传 CDN 拿到一个 url 或者在自己服务器配置静态资源服务\n3. 
在 HTML 页面 `<head>` 标签里面添加如下代码:`<link rel=\"shortcut icon\" href=\"your-icon-url\">`\n ![](//img.alicdn.com/tfs/TB1IC53PpXXXXbmXVXXXXXXXXXX-1834-774.png)\n\n这样就添加成功啦!\n\n## 如何在页面显示原始的 HTML 内容\n\n出于安全方面的考虑,React 默认会将节点中 html 代码进行转义,比如:\n\n```jsx\nclass Demo extends Component {\n render() {\n const content = 'hello <span>world</span>';\n return <div>{content}</div>;\n }\n}\n\n// 输出 hello <span>world</span>\n```\n\n如上,`<span>` 标签并不会在页面上被解析,而是被当成字符串输出了。React 提供了 `dangerouslySetInnerHTML` 属性帮助我们进行类似 `innerHTML` 的操作:\n\n```jsx\nclass Demo extends Component {\n render() {\n const content = 'hello <span>world</span>';\n return <div dangerouslySetInnerHTML={{ __html: content }} />;\n }\n}\n\n// 输出 hello world\n```\n\n更多内容请参考 [Dangerously Set innerHTML](https://reactjs.org/docs/dom-elements.html#dangerouslysetinnerhtml)\n\n## 之前创建的项目,遇到如下报错怎么办\n\n![截图](content_image/7-0.png)\n\n这是由于 ES6 Modules 的标准在物料中不兼容导致的。您可以把 `src/navs.js` 中最后一行修改为:\n\n```js\nexport const headerNavs = transform([\n ...autoGenHeaderNavs,\n ...customHeaderNavs,\n]);\n\nexport const asideNavs = transform([...autoGenAsideNavs, ...customAsideNavs]);\n```",
"license": "MIT",
"quality_signals": null,
"content_image": [
"content_image/7-0.png"
],
"overall_image": "overall_image/7.png"
}
```
### 3.4 An example of leetcode
```json
{
"id": 1,
"meta": {
"language": "en",
"doc_id": 1,
"page_id": null,
"oi_exist": true,
"oi_source": "compiling",
"source_dataset": "leetcode",
"date_download": "2024-05-05",
"ori_meta": {
"slug": "two-sum",
"difficulty": "Easy"
}
},
"quality_signals": null,
"license": "MIT",
"content_image": null,
"md": "# Two Sum\n\n- slug: two-sum\n- difficulty: Easy\n\nGiven an array of integers `nums` and an integer `target`, return _indices of the two numbers such that they add up to `target`_.\n\nYou may assume that each input would have **_exactly_ one solution**, and you may not use the _same_ element twice.\n\nYou can return the answer in any order.\n\n**Example 1:**\n\n**Input:** nums = \\[2,7,11,15\\], target = 9\n**Output:** \\[0,1\\]\n**Explanation:** Because nums\\[0\\] + nums\\[1\\] == 9, we return \\[0, 1\\].\n\n**Example 2:**\n\n**Input:** nums = \\[3,2,4\\], target = 6\n**Output:** \\[1,2\\]\n\n**Example 3:**\n\n**Input:** nums = \\[3,3\\], target = 6\n**Output:** \\[0,1\\]\n\n**Constraints:**\n\n* `2 <= nums.length <= 104`\n* `-109 <= nums[i] <= 109`\n* `-109 <= target <= 109`\n* **Only one valid answer exists.**\n\n**Follow-up:** Can you come up with an algorithm that is less than `O(n2)` time complexity?\n\n## A solution in Java\n\n```java\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic int[] twoSum(int[] nums, int target) {\n Map<Integer, Integer> map = new HashMap<>();\n for (int i = 0; i < nums.length; i++) {\n int complement = target - nums[i];\n if (map.containsKey(complement)) {\n return new int[]{map.get(complement), i};\n }\n map.put(nums[i], i);\n }\n throw new IllegalArgumentException(\"No two sum solution\");\n}\n```\nThe algorithm leverages a hash map (unordered_map in C++, HashMap in Java, dictionary in Python, and Map in JavaScript). It iterates through the given 'nums' array and calculates the complementary value (target - current value). If the complementary value is already in the hash map, it means that we found a solution, and we return those indices. If the complement is not in the hash map, we store the current element in the hash map with its index. 
If the algorithm doesn't find the solution, it returns an empty array or throws an exception (in Java).\n\nThis approach has a time complexity of O(n) and a space complexity of O(n) as well.\n \n\n## A solution in C++\n\n```cpp\n#include <vector>\n#include <unordered_map>\n\nstd::vector<int> twoSum(std::vector<int>& nums, int target) {\n std::unordered_map<int, int> map;\n for (int i = 0; i < nums.size(); i++) {\n int complement = target - nums[i];\n if (map.find(complement) != map.end()) {\n return {map[complement], i};\n }\n map[nums[i]] = i;\n }\n return {};\n}\n```\nThe algorithm leverages a hash map (unordered_map in C++, HashMap in Java, dictionary in Python, and Map in JavaScript). It iterates through the given 'nums' array and calculates the complementary value (target - current value). If the complementary value is already in the hash map, it means that we found a solution, and we return those indices. If the complement is not in the hash map, we store the current element in the hash map with its index. If the algorithm doesn't find the solution, it returns an empty array or throws an exception (in Java).\n\nThis approach has a time complexity of O(n) and a space complexity of O(n) as well.\n \n\n## A solution in Python\n\n```python\ndef twoSum(nums, target):\n map = {}\n for i, num in enumerate(nums):\n complement = target - num\n if complement in map:\n return [map[complement], i]\n map[num] = i\n return []\n```\nThe algorithm leverages a hash map (unordered_map in C++, HashMap in Java, dictionary in Python, and Map in JavaScript). It iterates through the given 'nums' array and calculates the complementary value (target - current value). If the complementary value is already in the hash map, it means that we found a solution, and we return those indices. If the complement is not in the hash map, we store the current element in the hash map with its index. 
If the algorithm doesn't find the solution, it returns an empty array or throws an exception (in Java).\n\nThis approach has a time complexity of O(n) and a space complexity of O(n) as well.\n \n\n## A solution in Javascript\n\n```javascript\nfunction twoSum(nums, target) {\n const map = new Map();\n for (let i = 0; i < nums.length; i++) {\n const complement = target - nums[i];\n if (map.has(complement)) {\n return [map.get(complement), i];\n }\n map.set(nums[i], i);\n }\n return [];\n}\n```\nThe algorithm leverages a hash map (unordered_map in C++, HashMap in Java, dictionary in Python, and Map in JavaScript). It iterates through the given 'nums' array and calculates the complementary value (target - current value). If the complementary value is already in the hash map, it means that we found a solution, and we return those indices. If the complement is not in the hash map, we store the current element in the hash map with its index. If the algorithm doesn't find the solution, it returns an empty array or throws an exception (in Java).\n\nThis approach has a time complexity of O(n) and a space complexity of O(n) as well.\n \n",
"overall_image": "overall_image/1.png"
}
```
### 3.5 An example of linux-cn
```json
{
"id": 8,
"meta": {
"language": "zh",
"doc_id": 134,
"page_id": null,
"oi_exist": true,
"oi_source": "compiling",
"source_dataset": "linux-cn",
"date_download": "2024-05-06",
"ori_meta": {
"title": "Ubuntu 11.04正式发布!",
"author": "",
"fromurl": "",
"summary": "刚才接到的消息,Ubuntu 11.04已经正式发布!\r\n\r\n超快!易用!免费!\r\nUbuntu操作系统为世界上数以百万计的电脑、上网本和服务器提供了动力!\r\nUbuntu可以为你完成各种工作,管理你的文件、打印机、摄像头和MP3!并且它 ...",
"pic": "/data/attachment/album/201104/28/193933lnqqwwwn8l64wbn1.jpg.thumb.jpg",
"largepic": "/data/attachment/album/201104/28/193933lnqqwwwn8l64wbn1.jpg",
"titlepic": false,
"thumb": false,
"islctt": false,
"selector": "",
"translator": "",
"reviewer": "",
"editorchoice": false,
"tags": [
"Ubuntu 11.04",
"发布"
],
"category": "新闻",
"count": {
"commentnum": 0,
"favtimes": 0,
"likes": 0,
"sharetimes": 1,
"viewnum": 6165
},
"comments_data": [
],
"related": [
],
"excerpt": "刚才接到的消息,Ubuntu 11.04已经正式发布!\r\n\r\n超快!易用!免费!\r\nUbuntu操作系统为世界上数以百万计的电脑、上网本和服务器提供了动力!\r\nUbuntu可以为你完成各种工作,管理你的文件、打印机、摄像头和MP3!并且它 ...",
"date": "2011-05-09 13:24:00",
"updated": "2011-05-09 13:24:00",
"id": 134,
"permalink": "/article-134-1.html"
}
},
"quality_signals": null,
"license": "CC-BY-NC-4.0",
"content_image": [
"content_image/album_201104_28_193933lnqqwwwn8l64wbn1.jpg",
"content_image/album_201104_28_193935sy4l3bh4bh1ycbbc.jpg",
"content_image/album_201104_28_193936lyvc36fwv91l1359.jpg",
"content_image/album_201104_28_19393800rpr8pf0s8p8w0s.jpg"
],
"md": "# Ubuntu 11.04正式发布!\n\n刚才接到的消息,Ubuntu 11.04已经正式发布! \n \n 超快!易用!免费! \n Ubuntu操作系统为世界上数以百万计的电脑、上网本和服务器提供了动力! \n Ubuntu可以为你完成各种工作,管理你的文件、打印机、摄像头和MP3!并且它还带有数千个免费程序。 \n \n <img src=\"content_image/album_201104_28_193933lnqqwwwn8l64wbn1.jpg\" alt=\"\" title=\"\"> \n **数千个免费程序** \n \n <img src=\"content_image/album_201104_28_193935sy4l3bh4bh1ycbbc.jpg\" alt=\"\" title=\"\"> \n **终生免费升级** \n \n <img src=\"content_image/album_201104_28_193936lyvc36fwv91l1359.jpg\" alt=\"\" title=\"\"> \n **内建的病毒防护** \n \n <img src=\"content_image/album_201104_28_19393800rpr8pf0s8p8w0s.jpg\" alt=\"\" title=\"\"> \n **云中的音乐** \n \n 下载地址:\n\n\n\n\n> 列表: \n> <http://releases.ubuntu.com/11.04/> \n> 桌面版: \n> <http://www.ubuntu.com/download/ubuntu/download> \n> 服务器版: \n> <http://www.ubuntu.com/download/server/download>\n\n\n\n \n BT种子地址:\n\n\n\n\n> \n> * [ubuntu-11.04-alternate-amd64.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-alternate-amd64.iso.torrent)\n> * [ubuntu-11.04-alternate-i386.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-alternate-i386.iso.torrent)\n> * [ubuntu-11.04-desktop-amd64.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-desktop-amd64.iso.torrent)\n> * [ubuntu-11.04-desktop-i386.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-desktop-i386.iso.torrent)\n> * [ubuntu-11.04-netbook-i386.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-netbook-i386.iso.torrent)\n> * [ubuntu-11.04-server-amd64.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-server-amd64.iso.torrent)\n> * [ubuntu-11.04-server-i386.iso.torrent](http://releases.ubuntu.com/11.04/ubuntu-11.04-server-i386.iso.torrent)\n> \n> \n> \n\n\n\n \n 当前尚无DVD版本出现 \n \n \n \n 该贴已经同步到 [wxy的微博](http://api.t.sina.com.cn/1747813575/statuses/9786340397) \n \n \n \n\n\n \n\n\n*[本文内容由 wxy 提供](thread-7135-1-1.html)*\n \n\n\n\n 已同步至 [wxy的微博](http://api.t.sina.com.cn/1747813575/statuses/10347235925)",
"overall_image": "overall_image/134.png"
}
```
### 3.6 An example of mmc-core-ff
```json
{
"meta": {
"language": "en",
"oi_exist": true,
"oi_source": "compiling",
"doc_id": 11,
"page_id": 0,
"source_dataset": "mmc4-core-ff",
"source_jsonl": "mmc4-core-ff/docs_no_face_shard_10375_v3.jsonl",
"ori_meta": {
"url": "http://position-light.blogspot.com/2015/06/whats-up-with-reading-and-northern.html",
"text_list": [
"The Position Light: What's Up with the Reading and Northern?",
"The Reading and Northern has been a rare bright spot in the world of signaling.",
"A commitment to its Reading heritage has resulted in numerous signaling structures being preserved along with attempts to install \"classic\" signaling where new signaling is being installed on its mostly unsignaled territory.",
"The R&N also controls the former Conrail Lehigh Line and for one reason or another has decided not to touch the surviving LVRR signaling along that route.",
"Still, I am still not completely clear on the full extent of the R&N's signal preservation efforts as hinted at in a number of photos I have come across.",
"We begin near the town of Mach Chunk where the R&N runs a tourist operation in the Lehigh Gorge.",
"i have bicycles along the right of way a number of time and I never noticed this cantilever mast and its freshly painted (albeit turned) signals.",
"Is this a sign of a new interlocking or signaling project?",
"Pottsville is the location of some preserved Reading signal bridges and a tower.",
"Both have been out of service for decades, but then I find a photo showing what appears to be a lit Reading US&S three headed signal displaying a restricting indication.",
"Could be that the photographer is having some fun with Photoshoppe, or it could be another R&N instance of an \"island\" interlocking designed to eliminate the need for crews to hand throw switches.",
"Clearly I need to take another field trip to the area, but if anyone has any information (or photos) please let me know.",
"Yes, that dual Signal Cantilever was taken from Schuylkill Haven and refurbished and placed into service as part of the new CP COAL Interlocking aptly named for the nearby town of Coalport.",
"This new interlocking controls R&N connector feed track and switch from Nesquehoning Jct onto the NS Lehigh Line.",
"Be aware, that R&N is constructing a new Y connector bridge over the Lehigh River.",
"The switch at Nesquehoning Jct as well at the Y connecting point northwest along the old CNJ into Nesquehoning and the other apex connecting point at the old Lehigh Valley overpass will make up the new Y along with the new bridge.",
"Expect the R&N to make all 3 points new CP Interlockings as NS will also use the new route to get to Reading & Philadelphia directly off the Lehigh Line.",
"Coming attractions for 2016.",
"Also, R&N is talking about a new signaled controlled passing track siding midway between Port Clinton and Reading.",
"Believe they will leverage the siding that's already in place (don't know name of that area, but, between two grade crossings).",
"Could see even more new R&N signaling if Distants are added to the mix as well.",
"Thank you for the information!",
"I knew something was up with them.",
"Mike - Have updates with pics for R&N.",
"Can share them with you but not sure of best way via e-mail or blog address.",
"Can you provide and I can forward what I have?",
"You can drop a line to [email protected] Thanks!"
],
"image_info": [
{
"face_detections": null,
"image_id": "11-0.png",
"image_name": "338146395110.jpg",
"matched_sim": 0.2532651722,
"matched_text_index": 12,
"raw_url": "http://www.railpictures.net/images/d2/6/0/1/6601.1425352225.jpg"
},
{
"face_detections": null,
"image_id": "11-1.png",
"image_name": "75dca5908f72.jpg",
"matched_sim": 0.2665729225,
"matched_text_index": 18,
"raw_url": "http://www.railpictures.net/images/d2/0/3/5/5035.1411414707.jpg"
}
],
"similarity_matrix": [
[
0.2208167017,
0.2216126323,
0.2174896896,
0.2322429568,
0.1835552454,
0.1933521628,
0.1114124805,
0.1734878719,
0.1712893993,
0.1681747884,
0.2151062787,
0.1558438838,
0.2532651722,
0.2029514462,
0.1683746874,
0.1972030103,
0.2269551754,
0.1497862041,
0.2076308429,
0.1459720433,
0.1406365782,
0.1131924018,
0.0637710392,
0.1748069972,
0.1665924788,
0.1288469583,
0.1271829307
],
[
0.2275835425,
0.2447894663,
0.2326766551,
0.2530837059,
0.197981596,
0.1727618128,
0.1842465401,
0.2053450346,
0.2174785137,
0.2176187485,
0.216365099,
0.152155906,
0.2394197732,
0.2332755029,
0.2077463269,
0.2373518944,
0.2454088479,
0.1549753994,
0.2665729225,
0.2099550366,
0.163154155,
0.1208794788,
0.0917887241,
0.1707040668,
0.1544941813,
0.1439596266,
0.1319040358
]
],
"could_have_url_duplicate": 0
},
"date_download": "2024-05-11"
},
"md": "The Position Light: What's Up with the Reading and Northern? The Reading and Northern has been a rare bright spot in the world of signaling. A commitment to its Reading heritage has resulted in numerous signaling structures being preserved along with attempts to install \"classic\" signaling where new signaling is being installed on its mostly unsignaled territory. The R&N also controls the former Conrail Lehigh Line and for one reason or another has decided not to touch the surviving LVRR signaling along that route. Still, I am still not completely clear on the full extent of the R&N's signal preservation efforts as hinted at in a number of photos I have come across. We begin near the town of Mach Chunk where the R&N runs a tourist operation in the Lehigh Gorge. i have bicycles along the right of way a number of time and I never noticed this cantilever mast and its freshly painted (albeit turned) signals. Is this a sign of a new interlocking or signaling project? Pottsville is the location of some preserved Reading signal bridges and a tower. Both have been out of service for decades, but then I find a photo showing what appears to be a lit Reading US&S three headed signal displaying a restricting indication. Could be that the photographer is having some fun with Photoshoppe, or it could be another R&N instance of an \"island\" interlocking designed to eliminate the need for crews to hand throw switches. Clearly I need to take another field trip to the area, but if anyone has any information (or photos) please let me know. Yes, that dual Signal Cantilever was taken from Schuylkill Haven and refurbished and placed into service as part of the new CP COAL Interlocking aptly named for the nearby town of Coalport.\n\n\n\n<img src='content_image/11-0.png'>\n\nThis new interlocking controls R&N connector feed track and switch from Nesquehoning Jct onto the NS Lehigh Line. Be aware, that R&N is constructing a new Y connector bridge over the Lehigh River. 
The switch at Nesquehoning Jct as well at the Y connecting point northwest along the old CNJ into Nesquehoning and the other apex connecting point at the old Lehigh Valley overpass will make up the new Y along with the new bridge. Expect the R&N to make all 3 points new CP Interlockings as NS will also use the new route to get to Reading & Philadelphia directly off the Lehigh Line. Coming attractions for 2016. Also, R&N is talking about a new signaled controlled passing track siding midway between Port Clinton and Reading.\n\n\n\n<img src='content_image/11-1.png'>\n\nBelieve they will leverage the siding that's already in place (don't know name of that area, but, between two grade crossings). Could see even more new R&N signaling if Distants are added to the mix as well. Thank you for the information! I knew something was up with them. Mike - Have updates with pics for R&N. Can share them wi",
"license": "ODC-BY",
"quality_signals": null,
"content_image": [
"content_image/11-0.png",
"content_image/11-1.png"
],
"overall_image": "overall_image/11-0.png"
}
```
### 3.7 An example of PG19
```json
{
"meta": {
"language": "en",
"oi_exist": true,
"oi_source": "compiling",
"doc_id": 871,
"page_id": 0,
"source_dataset": "pg19",
"split": "train",
"ori_meta": {
"url": "http://www.gutenberg.org/ebooks/9304",
"short_book_title": "Initiation into Philosophy by Emile Faguet",
"publication_date": 1914
},
"date_download": "2024-05-10"
},
"md": "# Initiation into Philosophy by Emile Faguet \n\n Produced by Ted Garvin, Thomas Hutchinson and PG Distributed Proofreaders \n\n \n\n \n\n \n\n \n\n INITIATION INTO PHILOSOPHY \n\n \nBy Emile Faguet \n\n Of the French Academy \n\n \nAuthor of \"The Cult Of Incompetence,\" \"Initiation Into Literature,\" etc. \n\n \nTranslated from the French by Sir Homer Gordon, Bart. \n\n 1914 \n\n \n\n \nPREFACE \n\n This volume, as indicated by the title, is designed to show the way to the beginner, to satisfy and more espec ially to excite his initial curiosity. It affords an adequate idea of the march of facts and of ideas. The rea der is led, somewhat rapidly, from the remote origins to the most recent efforts of the human mind. \n\n It should be a convenient repertory to which the mind may revert in order to see broadly the general opinion o f an epoch--and what connected it with those that followed or preceded it. It aims above all at being _a frame _ in which can conveniently be inscribed, in the course of further studies, new conceptions more detailed and more thoroughly examined. \n\n It will have fulfilled its design should it incite to research and meditation, and if it prepares for them cor rectly. \n\n E. FAGUET. \n\n \n\n \nCONTENTS \n\n \nPART I ANTIQUITY \n\n \nCHAPTER I BEFORE SOCRATES \n\n Philosophical Interpreters of the Universe, of the Creation and Constitution of the World. \n\n \nCHAPTER II THE SOPHISTS \n\n Logicians and Professors of Logic, and of the Analysis of Ideas, and of Discussion. \n\n \nCHAPTER III SOCRATES \n\n Philosophy Entirely Reduced to Morality, and Morality Considered as the End of all Intellectual Activity. \n\n \nCHAPTER IV PLATO \n\n Plato, like Socrates, is Pre-eminently a Moralist, but he Reverts to General Consideration of the Universe, an d Deals with Politics and Legislation. \n\n \nCHAPTER V ARISTOTLE",
"license": "Apache 2.0",
"quality_signals": null,
"content_image": null,
"overall_image": "overall_image/871-0.png"
}
```
### 3.8 An example of PIN-PMC
```json
{
"meta": {
"language": "en",
"doc_id": "PMC3015258",
"oi_exist": true,
"oi_source": "ori",
"source_dataset": "PIN-PMC",
"ori_meta": null,
"page_id": null,
"date_download": "2024-05-28"
},
"md": "# A Simple Stereoscopic Endoscope\n\n## Abstract\n\nA very simple method is described for producing and viewing stereoscopic endoscopic images.\nThe addition of two simple prisms to the end of a conventional television-monitored endoscope with a simple viewing device produces a stereoscopic endoscope which appears to be suitable for surgical use......",
"license": [
"https://www.ncbi.nlm.nih.gov/pmc/tools/textmining/"
],
"quality_signals": {
"doc_length": 8269
},
"content_image": [
"content_image/PMC3015258/jsls-2-1-67-g03.jpg",
"content_image/PMC3015258/jsls-2-1-67-g04.jpg",
"content_image/PMC3015258/jsls-2-1-67-g01.jpg",
"content_image/PMC3015258/jsls-2-1-67-g02.jpg",
"content_image/PMC3015258/jsls-2-1-67-g05.jpg"
],
"overall_image": [
"overall_image/PMC3015258/jsls-2-1-67_3.png",
"overall_image/PMC3015258/jsls-2-1-67_0.png",
"overall_image/PMC3015258/jsls-2-1-67_1.png",
"overall_image/PMC3015258/jsls-2-1-67_2.png"
],
"id": 60827
}
```
## 4 License
For data generated or produced by us, please adhere to the Apache 2.0 License.
For data sourced from third parties, compliance with the respective third-party licenses is required.
## Citation
```
@misc{2406.13923,
Author = {Junjie Wang and Yin Zhang and Yatai Ji and Yuxiang Zhang and Chunyang Jiang and Yubo Wang and Kang Zhu and Zekun Wang and Tiezhen Wang and Wenhao Huang and Jie Fu and Bei Chen and Qunshu Lin and Minghao Liu and Ge Zhang and Wenhu Chen},
Title = {PIN: A Knowledge-Intensive Dataset for Paired and Interleaved Multimodal Documents},
Year = {2024},
Eprint = {arXiv:2406.13923},
}
``` |
lmms-lab/LMMs-Eval-Lite | lmms-lab | "2024-07-04T04:16:56Z" | 3,454 | 3 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"modality:timeseries",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-06-27T03:29:05Z" | ---
dataset_info:
- config_name: ai2d
features:
- name: question
dtype: string
- name: options
sequence: string
- name: answer
dtype: string
- name: image
dtype: image
splits:
- name: lite
num_bytes: 90543302.1658031
num_examples: 500
download_size: 81458737
dataset_size: 90543302.1658031
- config_name: chartqa
features:
- name: type
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: image
dtype: image
splits:
- name: lite
num_bytes: 23170424.2
num_examples: 500
download_size: 23219432
dataset_size: 23170424.2
- config_name: coco2017_cap_val
features:
- name: question_id
dtype: string
- name: image
dtype: image
- name: question
dtype: string
- name: answer
sequence: string
- name: id
dtype: int64
- name: license
dtype: int8
- name: file_name
dtype: string
- name: coco_url
dtype: string
- name: height
dtype: int32
- name: width
dtype: int32
- name: date_captured
dtype: string
splits:
- name: lite
num_bytes: 81724646.1
num_examples: 500
download_size: 81036195
dataset_size: 81724646.1
- config_name: docvqa_val
features:
- name: questionId
dtype: string
- name: question
dtype: string
- name: question_types
sequence: string
- name: image
dtype: image
- name: docId
dtype: int64
- name: ucsf_document_id
dtype: string
- name: ucsf_document_page_no
dtype: string
- name: answers
sequence: string
- name: data_split
dtype: string
splits:
- name: lite
num_bytes: 334538449.19872874
num_examples: 500
download_size: 249349131
dataset_size: 334538449.19872874
- config_name: flickr30k_test
features:
- name: image
dtype: image
- name: caption
sequence: string
- name: sentids
sequence: string
- name: img_id
dtype: string
- name: filename
dtype: string
splits:
- name: lite
num_bytes: 69689341.17644653
num_examples: 500
download_size: 66621555
dataset_size: 69689341.17644653
- config_name: gqa
features:
- name: id
dtype: string
- name: imageId
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: fullAnswer
dtype: string
- name: isBalanced
dtype: bool
- name: groups
struct:
- name: global
dtype: string
- name: local
dtype: string
- name: entailed
dtype: string
- name: equivalent
dtype: string
- name: types
struct:
- name: structural
dtype: string
- name: semantic
dtype: string
- name: detailed
dtype: string
- name: annotations
sequence:
- name: question
struct:
- name: objectId
dtype: string
- name: value
dtype: string
- name: answer
struct:
- name: objectId
dtype: string
- name: value
dtype: string
- name: fullAnswer
struct:
- name: objectId
dtype: string
- name: value
dtype: string
- name: semantic
list:
- name: operation
dtype: string
- name: argument
dtype: string
- name: dependencies
sequence: int32
- name: semanticStr
dtype: string
splits:
- name: lite
num_bytes: 243022.3008427413
num_examples: 500
download_size: 107530
dataset_size: 243022.3008427413
- config_name: infovqa_val
features:
- name: questionId
dtype: string
- name: question
dtype: string
- name: answers
sequence: string
- name: answer_type
sequence: string
- name: image
dtype: image
- name: image_url
dtype: string
- name: operation/reasoning
sequence: string
- name: ocr
dtype: string
- name: data_split
dtype: string
splits:
- name: lite
num_bytes: 304765105.6765441
num_examples: 500
download_size: 233689969
dataset_size: 304765105.6765441
- config_name: mmbench_cn_dev
features:
- name: index
dtype: int64
- name: question
dtype: string
- name: hint
dtype: string
- name: answer
dtype: string
- name: A
dtype: string
- name: B
dtype: string
- name: C
dtype: string
- name: D
dtype: string
- name: category
dtype: string
- name: image
dtype: image
- name: source
dtype: string
- name: L2-category
dtype: string
- name: comment
dtype: string
- name: split
dtype: string
splits:
- name: lite
num_bytes: 11861120.35112035
num_examples: 500
download_size: 12795903
dataset_size: 11861120.35112035
- config_name: mmbench_en_dev
features:
- name: index
dtype: int64
- name: question
dtype: string
- name: hint
dtype: string
- name: answer
dtype: string
- name: A
dtype: string
- name: B
dtype: string
- name: C
dtype: string
- name: D
dtype: string
- name: category
dtype: string
- name: image
dtype: image
- name: source
dtype: string
- name: L2-category
dtype: string
- name: comment
dtype: string
- name: split
dtype: string
splits:
- name: lite
num_bytes: 11871291.175791176
num_examples: 500
download_size: 12524588
dataset_size: 11871291.175791176
- config_name: nocaps_val
features:
- name: image
dtype: image
- name: image_coco_url
dtype: string
- name: image_date_captured
dtype: string
- name: image_file_name
dtype: string
- name: image_height
dtype: int32
- name: image_width
dtype: int32
- name: image_id
dtype: int32
- name: image_license
dtype: int8
- name: image_open_images_id
dtype: string
- name: annotations_ids
sequence: int32
- name: annotations_captions
sequence: string
splits:
- name: lite
num_bytes: 157984760.66666666
num_examples: 500
download_size: 155545761
dataset_size: 157984760.66666666
- config_name: ok_vqa_val2014
features:
- name: question_id
dtype: string
- name: image
dtype: image
- name: question
dtype: string
- name: answers
sequence: string
- name: question_type
dtype: string
- name: answer_type
dtype: string
splits:
- name: lite
num_bytes: 82607924.29647246
num_examples: 500
download_size: 80223931
dataset_size: 82607924.29647246
- config_name: refcoco_bbox_val
features:
- name: question_id
dtype: string
- name: image
dtype: image
- name: question
dtype: string
- name: answer
sequence: string
- name: segmentation
sequence: float32
- name: bbox
sequence: float32
- name: iscrowd
dtype: int8
- name: file_name
dtype: string
splits:
- name: lite
num_bytes: 87885477.24435365
num_examples: 500
download_size: 88424601
dataset_size: 87885477.24435365
- config_name: seedbench
features:
- name: answer
dtype: string
- name: choice_a
dtype: string
- name: choice_b
dtype: string
- name: choice_c
dtype: string
- name: choice_d
dtype: string
- name: data_id
dtype: string
- name: data_type
dtype: string
- name: question
dtype: string
- name: question_id
dtype: string
- name: question_type_id
dtype: int16
- name: image
sequence: image
- name: segment
sequence: int64
splits:
- name: lite
num_bytes: 755921749.3379655
num_examples: 500
download_size: 181839440
dataset_size: 755921749.3379655
- config_name: textcaps_val
features:
- name: question_id
dtype: string
- name: question
dtype: string
- name: image
dtype: image
- name: image_id
dtype: string
- name: image_classes
sequence: string
- name: flickr_original_url
dtype: string
- name: flickr_300k_url
dtype: string
- name: image_width
dtype: int64
- name: image_height
dtype: int64
- name: set_name
dtype: string
- name: image_name
dtype: string
- name: image_path
dtype: string
- name: caption_id
sequence: int64
- name: caption_str
sequence: string
- name: reference_strs
sequence: string
splits:
- name: lite
num_bytes: 145274544.53569174
num_examples: 500
download_size: 135721574
dataset_size: 145274544.53569174
- config_name: textvqa_val
features:
- name: image_id
dtype: string
- name: question_id
dtype: int32
- name: question
dtype: string
- name: question_tokens
sequence: string
- name: image
dtype: image
- name: image_width
dtype: int32
- name: image_height
dtype: int32
- name: flickr_original_url
dtype: string
- name: flickr_300k_url
dtype: string
- name: answers
sequence: string
- name: image_classes
sequence: string
- name: set_name
dtype: string
- name: ocr_tokens
sequence: string
splits:
- name: lite
num_bytes: 143485382.6
num_examples: 500
download_size: 139843809
dataset_size: 143485382.6
- config_name: vizwiz_vqa_val
features:
- name: question_id
dtype: string
- name: image
dtype: image
- name: question
dtype: string
- name: answers
sequence: string
- name: category
dtype: string
splits:
- name: lite
num_bytes: 242880108.01111367
num_examples: 500
download_size: 232689462
dataset_size: 242880108.01111367
- config_name: vqav2_val
features:
- name: question_type
dtype: string
- name: multiple_choice_answer
dtype: string
- name: answers
list:
- name: answer
dtype: string
- name: answer_confidence
dtype: string
- name: answer_id
dtype: int64
- name: image_id
dtype: int64
- name: answer_type
dtype: string
- name: question_id
dtype: int64
- name: question
dtype: string
- name: image
dtype: image
splits:
- name: lite
num_bytes: 79046522.98300941
num_examples: 500
download_size: 78981610
dataset_size: 79046522.98300941
configs:
- config_name: ai2d
data_files:
- split: lite
path: ai2d/lite-*
- config_name: chartqa
data_files:
- split: lite
path: chartqa/lite-*
- config_name: coco2017_cap_val
data_files:
- split: lite
path: coco2017_cap_val/lite-*
- config_name: docvqa_val
data_files:
- split: lite
path: docvqa_val/lite-*
- config_name: flickr30k_test
data_files:
- split: lite
path: flickr30k_test/lite-*
- config_name: gqa
data_files:
- split: lite
path: gqa/lite-*
- config_name: infovqa_val
data_files:
- split: lite
path: infovqa_val/lite-*
- config_name: mmbench_cn_dev
data_files:
- split: lite
path: mmbench_cn_dev/lite-*
- config_name: mmbench_en_dev
data_files:
- split: lite
path: mmbench_en_dev/lite-*
- config_name: nocaps_val
data_files:
- split: lite
path: nocaps_val/lite-*
- config_name: ok_vqa_val2014
data_files:
- split: lite
path: ok_vqa_val2014/lite-*
- config_name: refcoco_bbox_val
data_files:
- split: lite
path: refcoco_bbox_val/lite-*
- config_name: seedbench
data_files:
- split: lite
path: seedbench/lite-*
- config_name: textcaps_val
data_files:
- split: lite
path: textcaps_val/lite-*
- config_name: textvqa_val
data_files:
- split: lite
path: textvqa_val/lite-*
- config_name: vizwiz_vqa_val
data_files:
- split: lite
path: vizwiz_vqa_val/lite-*
- config_name: vqav2_val
data_files:
- split: lite
path: vqav2_val/lite-*
---
|
mhardalov/exams | mhardalov | "2024-02-06T07:20:12Z" | 3,443 | 31 | [
"task_categories:question-answering",
"task_ids:multiple-choice-qa",
"annotations_creators:found",
"language_creators:found",
"multilinguality:monolingual",
"multilinguality:multilingual",
"source_datasets:original",
"language:ar",
"language:bg",
"language:de",
"language:es",
"language:fr",
"language:hr",
"language:hu",
"language:it",
"language:lt",
"language:mk",
"language:pl",
"language:pt",
"language:sq",
"language:sr",
"language:tr",
"language:vi",
"license:cc-by-sa-4.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2011.03080",
"region:us"
] | [
"question-answering"
] | "2022-03-02T23:29:22Z" | ---
annotations_creators:
- found
language_creators:
- found
language:
- ar
- bg
- de
- es
- fr
- hr
- hu
- it
- lt
- mk
- pl
- pt
- sq
- sr
- tr
- vi
license:
- cc-by-sa-4.0
multilinguality:
- monolingual
- multilingual
size_categories:
- 10K<n<100K
- 1K<n<10K
- n<1K
source_datasets:
- original
task_categories:
- question-answering
task_ids:
- multiple-choice-qa
paperswithcode_id: exams
pretty_name: EXAMS
config_names:
- alignments
- crosslingual_bg
- crosslingual_hr
- crosslingual_hu
- crosslingual_it
- crosslingual_mk
- crosslingual_pl
- crosslingual_pt
- crosslingual_sq
- crosslingual_sr
- crosslingual_test
- crosslingual_tr
- crosslingual_vi
- crosslingual_with_para_bg
- crosslingual_with_para_hr
- crosslingual_with_para_hu
- crosslingual_with_para_it
- crosslingual_with_para_mk
- crosslingual_with_para_pl
- crosslingual_with_para_pt
- crosslingual_with_para_sq
- crosslingual_with_para_sr
- crosslingual_with_para_test
- crosslingual_with_para_tr
- crosslingual_with_para_vi
- multilingual
- multilingual_with_para
dataset_info:
- config_name: alignments
features:
- name: source_id
dtype: string
- name: target_id_list
sequence: string
splits:
- name: full
num_bytes: 1265256
num_examples: 10834
download_size: 184096
dataset_size: 1265256
- config_name: crosslingual_bg
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 1077329
num_examples: 2344
- name: validation
num_bytes: 281771
num_examples: 593
download_size: 514922
dataset_size: 1359100
- config_name: crosslingual_hr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 807104
num_examples: 2341
- name: validation
num_bytes: 176594
num_examples: 538
download_size: 450090
dataset_size: 983698
- config_name: crosslingual_hu
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 677535
num_examples: 1731
- name: validation
num_bytes: 202012
num_examples: 536
download_size: 401455
dataset_size: 879547
- config_name: crosslingual_it
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 399312
num_examples: 1010
- name: validation
num_bytes: 93175
num_examples: 246
download_size: 226376
dataset_size: 492487
- config_name: crosslingual_mk
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 825702
num_examples: 1665
- name: validation
num_bytes: 204318
num_examples: 410
download_size: 394548
dataset_size: 1030020
- config_name: crosslingual_pl
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 573410
num_examples: 1577
- name: validation
num_bytes: 141633
num_examples: 394
download_size: 341925
dataset_size: 715043
- config_name: crosslingual_pt
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 374798
num_examples: 740
- name: validation
num_bytes: 87714
num_examples: 184
download_size: 208021
dataset_size: 462512
- config_name: crosslingual_sq
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 423744
num_examples: 1194
- name: validation
num_bytes: 110093
num_examples: 311
download_size: 247052
dataset_size: 533837
- config_name: crosslingual_sr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 649560
num_examples: 1323
- name: validation
num_bytes: 145724
num_examples: 314
download_size: 327466
dataset_size: 795284
- config_name: crosslingual_test
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: test
num_bytes: 8402575
num_examples: 19736
download_size: 3438526
dataset_size: 8402575
- config_name: crosslingual_tr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 717599
num_examples: 1571
- name: validation
num_bytes: 182730
num_examples: 393
download_size: 440914
dataset_size: 900329
- config_name: crosslingual_vi
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 953167
num_examples: 1955
- name: validation
num_bytes: 231976
num_examples: 488
download_size: 462940
dataset_size: 1185143
- config_name: crosslingual_with_para_bg
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 47066808
num_examples: 2344
- name: validation
num_bytes: 11916026
num_examples: 593
download_size: 15794611
dataset_size: 58982834
- config_name: crosslingual_with_para_hr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 24889604
num_examples: 2341
- name: validation
num_bytes: 5695066
num_examples: 538
download_size: 9839452
dataset_size: 30584670
- config_name: crosslingual_with_para_hu
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 19035663
num_examples: 1731
- name: validation
num_bytes: 6043265
num_examples: 536
download_size: 9263625
dataset_size: 25078928
- config_name: crosslingual_with_para_it
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 16409235
num_examples: 1010
- name: validation
num_bytes: 4018329
num_examples: 246
download_size: 6907617
dataset_size: 20427564
- config_name: crosslingual_with_para_mk
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 38445894
num_examples: 1665
- name: validation
num_bytes: 9673574
num_examples: 410
download_size: 12878474
dataset_size: 48119468
- config_name: crosslingual_with_para_pl
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 16373781
num_examples: 1577
- name: validation
num_bytes: 4158832
num_examples: 394
download_size: 6539172
dataset_size: 20532613
- config_name: crosslingual_with_para_pt
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 12185383
num_examples: 740
- name: validation
num_bytes: 3093712
num_examples: 184
download_size: 4956969
dataset_size: 15279095
- config_name: crosslingual_with_para_sq
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 17341277
num_examples: 1194
- name: validation
num_bytes: 4449952
num_examples: 311
download_size: 7112236
dataset_size: 21791229
- config_name: crosslingual_with_para_sr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 24575845
num_examples: 1323
- name: validation
num_bytes: 5772509
num_examples: 314
download_size: 8035415
dataset_size: 30348354
- config_name: crosslingual_with_para_test
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: test
num_bytes: 207974374
num_examples: 13510
download_size: 62878029
dataset_size: 207974374
- config_name: crosslingual_with_para_tr
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 18597131
num_examples: 1571
- name: validation
num_bytes: 4763097
num_examples: 393
download_size: 7346658
dataset_size: 23360228
- config_name: crosslingual_with_para_vi
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 40882999
num_examples: 1955
- name: validation
num_bytes: 10260374
num_examples: 488
download_size: 13028078
dataset_size: 51143373
- config_name: multilingual
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 3381837
num_examples: 7961
- name: validation
num_bytes: 1141687
num_examples: 2672
- name: test
num_bytes: 5746781
num_examples: 13510
download_size: 4323915
dataset_size: 10270305
- config_name: multilingual_with_para
features:
- name: id
dtype: string
- name: question
struct:
- name: stem
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: para
dtype: string
- name: answerKey
dtype: string
- name: info
struct:
- name: grade
dtype: int32
- name: subject
dtype: string
- name: language
dtype: string
splits:
- name: train
num_bytes: 127294567
num_examples: 7961
- name: validation
num_bytes: 42711689
num_examples: 2672
- name: test
num_bytes: 207974374
num_examples: 13510
download_size: 112597818
dataset_size: 377980630
configs:
- config_name: alignments
data_files:
- split: full
path: alignments/full-*
- config_name: crosslingual_bg
data_files:
- split: train
path: crosslingual_bg/train-*
- split: validation
path: crosslingual_bg/validation-*
- config_name: crosslingual_hr
data_files:
- split: train
path: crosslingual_hr/train-*
- split: validation
path: crosslingual_hr/validation-*
- config_name: crosslingual_hu
data_files:
- split: train
path: crosslingual_hu/train-*
- split: validation
path: crosslingual_hu/validation-*
- config_name: crosslingual_it
data_files:
- split: train
path: crosslingual_it/train-*
- split: validation
path: crosslingual_it/validation-*
- config_name: crosslingual_mk
data_files:
- split: train
path: crosslingual_mk/train-*
- split: validation
path: crosslingual_mk/validation-*
- config_name: crosslingual_pl
data_files:
- split: train
path: crosslingual_pl/train-*
- split: validation
path: crosslingual_pl/validation-*
- config_name: crosslingual_pt
data_files:
- split: train
path: crosslingual_pt/train-*
- split: validation
path: crosslingual_pt/validation-*
- config_name: crosslingual_sq
data_files:
- split: train
path: crosslingual_sq/train-*
- split: validation
path: crosslingual_sq/validation-*
- config_name: crosslingual_sr
data_files:
- split: train
path: crosslingual_sr/train-*
- split: validation
path: crosslingual_sr/validation-*
- config_name: crosslingual_test
data_files:
- split: test
path: crosslingual_test/test-*
- config_name: crosslingual_tr
data_files:
- split: train
path: crosslingual_tr/train-*
- split: validation
path: crosslingual_tr/validation-*
- config_name: crosslingual_vi
data_files:
- split: train
path: crosslingual_vi/train-*
- split: validation
path: crosslingual_vi/validation-*
- config_name: crosslingual_with_para_bg
data_files:
- split: train
path: crosslingual_with_para_bg/train-*
- split: validation
path: crosslingual_with_para_bg/validation-*
- config_name: crosslingual_with_para_hr
data_files:
- split: train
path: crosslingual_with_para_hr/train-*
- split: validation
path: crosslingual_with_para_hr/validation-*
- config_name: crosslingual_with_para_hu
data_files:
- split: train
path: crosslingual_with_para_hu/train-*
- split: validation
path: crosslingual_with_para_hu/validation-*
- config_name: crosslingual_with_para_it
data_files:
- split: train
path: crosslingual_with_para_it/train-*
- split: validation
path: crosslingual_with_para_it/validation-*
- config_name: crosslingual_with_para_mk
data_files:
- split: train
path: crosslingual_with_para_mk/train-*
- split: validation
path: crosslingual_with_para_mk/validation-*
- config_name: crosslingual_with_para_pl
data_files:
- split: train
path: crosslingual_with_para_pl/train-*
- split: validation
path: crosslingual_with_para_pl/validation-*
- config_name: crosslingual_with_para_pt
data_files:
- split: train
path: crosslingual_with_para_pt/train-*
- split: validation
path: crosslingual_with_para_pt/validation-*
- config_name: crosslingual_with_para_sq
data_files:
- split: train
path: crosslingual_with_para_sq/train-*
- split: validation
path: crosslingual_with_para_sq/validation-*
- config_name: crosslingual_with_para_sr
data_files:
- split: train
path: crosslingual_with_para_sr/train-*
- split: validation
path: crosslingual_with_para_sr/validation-*
- config_name: crosslingual_with_para_test
data_files:
- split: test
path: crosslingual_with_para_test/test-*
- config_name: crosslingual_with_para_tr
data_files:
- split: train
path: crosslingual_with_para_tr/train-*
- split: validation
path: crosslingual_with_para_tr/validation-*
- config_name: crosslingual_with_para_vi
data_files:
- split: train
path: crosslingual_with_para_vi/train-*
- split: validation
path: crosslingual_with_para_vi/validation-*
- config_name: multilingual
data_files:
- split: train
path: multilingual/train-*
- split: validation
path: multilingual/validation-*
- split: test
path: multilingual/test-*
- config_name: multilingual_with_para
data_files:
- split: train
path: multilingual_with_para/train-*
- split: validation
path: multilingual_with_para/validation-*
- split: test
path: multilingual_with_para/test-*
default: true
---
# Dataset Card for EXAMS
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Repository:** https://github.com/mhardalov/exams-qa
- **Paper:** [EXAMS: A Multi-Subject High School Examinations Dataset for Cross-Lingual and Multilingual Question Answering](https://arxiv.org/abs/2011.03080)
- **Point of Contact:** [hardalov@fmi.uni-sofia.bg](mailto:hardalov@fmi.uni-sofia.bg)
### Dataset Summary
EXAMS is a benchmark dataset for multilingual and cross-lingual question answering from high school examinations. It consists of more than 24,000 high-quality high school exam questions in 16 languages, covering 8 language families and 24 school subjects from Natural Sciences and Social Sciences, among others.
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
The languages in the dataset are:
- ar
- bg
- de
- es
- fr
- hr
- hu
- it
- lt
- mk
- pl
- pt
- sq
- sr
- tr
- vi
## Dataset Structure
### Data Instances
An example of a data instance (with support paragraphs, in Bulgarian) is:
```
{'answerKey': 'C',
'id': '35dd6b52-7e71-11ea-9eb1-54bef70b159e',
'info': {'grade': 12, 'language': 'Bulgarian', 'subject': 'Biology'},
'question': {'choices': {'label': ['A', 'B', 'C', 'D'],
'para': ['Това води до наследствени изменения между организмите. Мирновременните вождове са наследствени. Черният, сивият и кафявият цвят на оцветяване на тялото се определя от пигмента меланин и възниква в резултат на наследствени изменения. Тези различия, според Монтескьо, не са наследствени. Те са и важни наследствени вещи в клана. Те са били наследствени архонти и управляват демократично. Реликвите са исторически, религиозни, семейни (наследствени) и технически. Общо са направени 800 изменения. Не всички наследствени аномалии на хемоглобина са вредни, т.е. Моногенните наследствени болести, които водят до мигрена, са редки. Няма наследствени владетели. Повечето от тях са наследствени и се предават на потомството. Всичките синове са ерцхерцози на всичките наследствени земи и претенденти. През 1509 г. Фраунбергите са издигнати на наследствени имперски графове. Фамилията Валдбург заради постиженията са номинирани на „наследствени имперски трушсеси“. Фамилията Валдбург заради постиженията са номинирани на „наследствени имперски трушсеси“. Описани са единични наследствени случаи, но по-често липсва фамилна обремененост. Позициите им са наследствени и се предават в рамките на клана. Внесени са изменения в конструкцията на веригите. и са направени изменения в ходовата част. На храма са правени лоши архитектурни изменения. Изменения са предприети и вътре в двореца. Имало двама наследствени вождове. Имало двама наследствени вождове. Годишният календар, „компасът“ и биологичния часовник са наследствени и при много бозайници.',
'Постепенно задълбочаващите се функционални изменения довеждат и до структурни изменения. Те се дължат както на растягането на кожата, така и на въздействието на хормоналните изменения върху кожната тъкан. тези изменения се долавят по-ясно. Впоследствие, той претърпява изменения. Ширината остава без изменения. След тяхното издаване се налагат изменения в първоначалния Кодекс, защото не е съобразен с направените в Дигестите изменения. Еволюционният преход се характеризира със следните изменения: Наблюдават се и сезонни изменения в теглото. Приемат се изменения и допълнения към Устава. Тук се размножават и предизвикват възпалителни изменения. Общо са направени 800 изменения. Бронирането не претърпява съществени изменения. При животните се откриват изменения при злокачествената форма. Срещат се и дегенеративни изменения в семенните каналчета. ТАВКР „Баку“ се строи по изменения проект 1143.4. Трансът се съпровожда с определени изменения на мозъчната дейност. На изменения е подложен и Светия Синод. Внесени са изменения в конструкцията на веригите. На храма са правени лоши архитектурни изменения. Оттогава стиховете претърпяват изменения няколко пъти. Настъпват съществени изменения в музикалната култура. По-късно той претърпява леки изменения. Настъпват съществени изменения в музикалната култура. Претърпява сериозни изменения само носовата надстройка. Хоризонталното брониране е оставено без изменения.',
'Модификациите са обратими. Тези реакции са обратими. В началните стадии тези натрупвания са обратими. Всички такива ефекти са временни и обратими. Много от реакциите са обратими и идентични с тези при гликолизата. Ако в обращение има книжни пари, те са обратими в злато при поискване . Общо са направени 800 изменения. Непоследователността е представена от принципа на "симетрия", при който взаимоотношенията са разглеждани като симетрични или обратими. Откакто формулите в клетките на електронната таблица не са обратими, тази техника е с ограничена стойност. Ефектът на Пелтие-Зеебек и ефектът Томсън са обратими (ефектът на Пелтие е обратен на ефекта на Зеебек). Плазмолизата протича в три етапа, в зависимост от силата и продължителността на въздействието:\n\nПървите два етапа са обратими. Внесени са изменения в конструкцията на веригите. и са направени изменения в ходовата част. На храма са правени лоши архитектурни изменения. Изменения са предприети и вътре в двореца. Оттогава насетне екипите не са претърпявали съществени изменения. Изменения са направени и в колесника на машината. Тези изменения са обявени през октомври 1878 година. Последните изменения са внесени през януари 2009 година. В процеса на последващото проектиране са внесени някои изменения. Сериозните изменения са в края на Втората световна война. Внесени са изменения в конструкцията на погребите и подемниците. Внесени са изменения в конструкцията на погребите и подемниците. Внесени са изменения в конструкцията на погребите и подемниците. Постепенно задълбочаващите се функционални изменения довеждат и до структурни изменения.',
'Ерозионни процеси от масов характер липсват. Обновлението в редиците на партията приема масов характер. Тя обаче няма масов характер поради спецификата на формата. Движението против десятъка придобива масов характер и в Балчишка околия. Понякога екзекутирането на „обсебените от Сатана“ взимало невероятно масов характер. Укриването на дължими като наряд продукти в селата придобива масов характер. Периодичните миграции са в повечето случаи с масов характер и са свързани със сезонните изменения в природата, а непериодичните са премествания на животни, които настъпват след пожари, замърсяване на средата, висока численост и др. Имат необратим характер. Именно по време на двувековните походи на западните рицари използването на гербовете придобива масов характер. След присъединяването на Южен Кавказ към Русия, изселването на азербайджанци от Грузия придобива масов характер. Те имат нормативен характер. Те имат установителен характер. Освобождаването на работна сила обикновено има масов характер, защото обхваща големи контингенти от носителите на труд. Валежите имат подчертано континентален характер. Имат най-често издънков характер. Приливите имат предимно полуденонощен характер. Някои от тях имат мистериален характер. Тези сведения имат случаен, епизодичен характер. Те имат сезонен или годишен характер. Временните обезпечителни мерки имат временен характер. Други имат пожелателен характер (Здравко, Слава). Ловът и събирачеството имат спомагателен характер. Фактически успяват само малко да усилят бронирането на артилерийските погреби, другите изменения носят само частен характер. Някои карикатури имат само развлекателен характер, докато други имат политически нюанси. Поемите на Хезиод имат по-приложен характер.'],
'text': ['дължат се на фенотипни изменения',
'имат масов характер',
'са наследствени',
'са обратими']},
'stem': 'Мутационите изменения:'}}
```
### Data Fields
A data instance contains the following fields:
- `id`: A question ID, unique across the dataset
- `question`: the question contains the following:
  - `stem`: the text of the question stem
- `choices`: a set of 3 to 5 candidate answers, which each have:
- `text`: the text of the answers
- `label`: a label in `['A', 'B', 'C', 'D', 'E']` used to match to the `answerKey`
- `para`: (optional) a supported paragraph from Wikipedia in the same language as the question and answer
- `answerKey`: the key corresponding to the right answer's `label`
- `info`: some additional information on the question including:
- `grade`: the school grade for the exam this question was taken from
- `subject`: a free text description of the academic subject
- `language`: the English name of the language for this question
### Data Splits
Depending on the configuration, the dataset has different splits:
- "alignments": a single "full" split
- "multilingual" and "multilingual_with_para": "train", "validation" and "test" splits
- "crosslingual_test" and "crosslingual_with_para_test": a single "test" split
- the rest of crosslingual configurations: "train" and "validation" splits
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
EXAMS was collected from official state exams prepared by the ministries of education of various countries. These exams are taken by students graduating from high school, and often require knowledge learned through the entire course.
The questions cover a large variety of subjects and material based on the country’s education system. They cover major school subjects such as Biology, Chemistry, Geography, History, and Physics, but also highly specialized ones such as Agriculture, Geology, and Informatics, as well as some applied and profiled studies.
Some countries allow students to take official examinations in several languages. This dataset provides 9,857 parallel question pairs spread across seven languages coming from Croatia (Croatian, Serbian, Italian, Hungarian), Hungary (Hungarian, German, French, Spanish, Croatian, Serbian, Italian), and North Macedonia (Macedonian, Albanian, Turkish).
For all languages in the dataset, the first step in the process of data collection was to download the PDF files per year, per subject, and per language (when parallel languages were available in the same source), convert the PDF files to text, and select those that were well formatted and followed the document structure.
Then, Regular Expressions (RegEx) were used to parse the questions, their corresponding choices and the correct answer choice. In order to ensure that all our questions are answerable using textual input only, questions that contained visual information were removed, as selected by using curated list of words such as map, table, picture, graph, etc., in the corresponding language.
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
The dataset, which contains paragraphs from Wikipedia, is licensed under CC-BY-SA 4.0. The code in this repository is licensed according the [LICENSE file](https://raw.githubusercontent.com/mhardalov/exams-qa/main/LICENSE).
### Citation Information
```
@inproceedings{hardalov-etal-2020-exams,
title = "{EXAMS}: A Multi-subject High School Examinations Dataset for Cross-lingual and Multilingual Question Answering",
author = "Hardalov, Momchil and
Mihaylov, Todor and
Zlatkova, Dimitrina and
Dinkov, Yoan and
Koychev, Ivan and
Nakov, Preslav",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.emnlp-main.438",
doi = "10.18653/v1/2020.emnlp-main.438",
pages = "5427--5444",
}
```
### Contributions
Thanks to [@yjernite](https://github.com/yjernite) for adding this dataset. |
open-web-math/open-web-math | open-web-math | "2023-10-17T20:14:00Z" | 3,425 | 281 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2310.06786",
"region:us"
] | null | "2023-09-06T00:25:12Z" | ---
dataset_info:
features:
- name: url
dtype: string
- name: text
dtype: string
- name: date
dtype: string
- name: metadata
dtype: string
splits:
- name: train
num_bytes: 56651995057
num_examples: 6315233
download_size: 16370689925
dataset_size: 56651995057
license: odc-by
task_categories:
- text-generation
language:
- en
pretty_name: OpenWebMath
size_categories:
- 10B<n<100B
---
<img src="imgs/OpenWebMath-left.png" width="300">
[Keiran Paster](https://keirp.com)\*, [Marco Dos Santos](https://marco-dossantos.github.io/)\*, [Zhangir Azerbayev](https://zhangir-azerbayev.github.io/), [Jimmy Ba](https://jimmylba.github.io/)
[GitHub ](https://github.com/keirp/OpenWebMath) | [ArXiv](https://arxiv.org/abs/2310.06786)
| [PDF](https://arxiv.org/pdf/2310.06786.pdf)
**OpenWebMath** is a dataset containing the majority of the high-quality, mathematical text from the internet. It is filtered and extracted from over 200B HTML files on Common Crawl down to a set of **6.3 million documents** containing a total of **14.7B tokens**. OpenWebMath is intended for use in _pretraining_ and _finetuning_ large language models.
You can download the dataset using Hugging Face:
```python
from datasets import load_dataset
ds = load_dataset("open-web-math/open-web-math")
```
# OpenWebMath Contents
The dataset is structured as follows:
```python
{
"text": ..., # document text.
"url": ..., # document url.
"date": ..., # date the page was crawled.
"metadata": ..., # JSON containing information from the extraction process.
}
```
OpenWebMath contains documents from over 130k different domains, including data from forums, educational pages, and blogs. The dataset contains documents covering mathematics, physics, statistics, computer science, and more. The following table shows the most common domains in OpenWebMath by character count.
| Domain | # Characters | % Characters |
| ----------------- | ------------- | ------------ |
| stackexchange.com | 4,655,132,784 | 9.55% |
| nature.com | 1,529,935,838 | 3.14% |
| wordpress.com | 1,294,166,938 | 2.66% |
| physicsforums.com | 1,160,137,919 | 2.38% |
| github.io | 725,689,722 | 1.49% |
| zbmath.org | 620,019,503 | 1.27% |
| wikipedia.org | 618,024,754 | 1.27% |
| groundai.com | 545,214,990 | 1.12% |
| blogspot.com | 520,392,333 | 1.07% |
| mathoverflow.net | 499,102,560 | 1.02% |
# OpenWebMath Pipeline
<img src="imgs/pipeline.png" alt="Overview of the OpenWebMath Pipeline">
OpenWebMath builds on the massive [Common Crawl](https://commoncrawl.org/) dataset, which contains over 200B HTML documents. We filtered the data to only include documents that are: (1) in English, (2) contain mathematical content, and (3) are of high quality. We also put a strong emphasis on extracting LaTeX content from the HTML documents as well as reducing boilerplate in comparison to other web datasets.
The OpenWebMath pipeline consists of five steps:
1. **Prefiltering HTML Documents**:
   - We apply a simple prefilter to all HTML documents in Common Crawl in order to skip documents without mathematical content and avoid unnecessary processing time.
2. **Text Extraction**:
- Extract text, including LaTeX content, from the HTML documents while removing boilerplate.
3. **Content Classification and Filtering**:
- Apply a [FastText language identification model](https://fasttext.cc/docs/en/language-identification.html) to keep only English documents.
- Filter high perplexity documents using a [KenLM](https://github.com/kpu/kenlm) model trained on [Proof-Pile](https://huggingface.co/datasets/hoskinson-center/proof-pile).
- Filter non-mathematical documents using our own _MathScore_ model.
4. **Deduplication**:
- Deduplicate the dataset using SimHash in [text-dedup](https://github.com/ChenghaoMou/text-dedup).
5. **Manual Inspection**:
- Inspect the documents gathered from previous steps and remove low quality pages.
For a detailed discussion on the processing pipeline, please refer to our paper.
# License
OpenWebMath is made available under an ODC-By 1.0 license; users should also abide by the CommonCrawl ToU: [https://commoncrawl.org/terms-of-use/](https://commoncrawl.org/terms-of-use/). We do not alter the license of any of the underlying data.
# Citation Information
```
@misc{paster2023openwebmath,
title={OpenWebMath: An Open Dataset of High-Quality Mathematical Web Text},
author={Keiran Paster and Marco Dos Santos and Zhangir Azerbayev and Jimmy Ba},
year={2023},
eprint={2310.06786},
archivePrefix={arXiv},
primaryClass={cs.AI}
}
```
|
Voxel51/Coursera_homework_dataset_train | Voxel51 | "2024-07-31T16:49:19Z" | 3,421 | 1 | [
"task_categories:object-detection",
"language:en",
"size_categories:10K<n<100K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"library:fiftyone",
"arxiv:1908.03195",
"region:us",
"fiftyone",
"image",
"object-detection"
] | [
"object-detection"
] | "2024-07-26T20:35:39Z" | ---
annotations_creators: []
language: en
size_categories:
- 10K<n<100K
task_categories:
- object-detection
task_ids: []
pretty_name: homework_dataset_train
tags:
- fiftyone
- image
- object-detection
dataset_summary: '
This is a [FiftyOne](https://github.com/voxel51/fiftyone) dataset with 18287 samples.
## Installation
If you haven''t already, install FiftyOne:
```bash
pip install -U fiftyone
```
## Usage
```python
import fiftyone as fo
import fiftyone.utils.huggingface as fouh
# Load the dataset
# Note: other available arguments include ''max_samples'', etc
dataset = fouh.load_from_hub("Voxel51/Coursera_homework_dataset_train")
# Launch the App
session = fo.launch_app(dataset)
```
'
---
# Dataset Card for Homework Training Set for Coursera MOOC - Hands Data Centric Visual AI
This dataset is the **training dataset for the homework assignments** of the Hands-on Data Centric AI Coursera course.
This is a [FiftyOne](https://github.com/voxel51/fiftyone) dataset with 18287 samples.
## Installation
If you haven't already, install FiftyOne:
```bash
pip install -U fiftyone
```
## Usage
```python
import fiftyone as fo
import fiftyone.utils.huggingface as fouh
# Load the dataset
# Note: other available arguments include 'max_samples', etc
dataset = fouh.load_from_hub("Voxel51/Coursera_homework_dataset_train")
# Launch the App
session = fo.launch_app(dataset)
```
## Dataset Details
### Dataset Description
This dataset is a modified subset of the [LVIS dataset](https://www.lvisdataset.org/).
The dataset here only contains detections, some of which have been artificially perturbed and altered to demonstrate data centric AI techniques and methodologies for the course.
This dataset has the following labels:
- 'bolt'
- 'knob'
- 'tag'
- 'button'
- 'bottle_cap'
- 'belt'
- 'strap'
- 'necktie'
- 'shirt'
- 'sweater'
- 'streetlight'
- 'pole'
- 'reflector'
- 'headlight'
- 'taillight'
- 'traffic_light'
- 'rearview_mirror'
### Dataset Sources
- **Repository:** https://www.lvisdataset.org/
- **Paper:** https://arxiv.org/abs/1908.03195
## Uses
The labels in this dataset have been perturbed to illustrate data centric AI techniques for the Hands-on Data Centric AI Coursera MOOC.
## Dataset Structure
Each image in the dataset comes with detailed annotations in FiftyOne detection format. A typical annotation looks like this:
```python
<Detection: {
'id': '66a2f24cce2f9d11d98d3a21',
'attributes': {},
'tags': [],
'label': 'shirt',
'bounding_box': [
0.25414,
0.35845238095238097,
0.041960000000000004,
0.051011904761904765,
],
'mask': None,
'confidence': None,
'index': None,
}>
```
## Dataset Creation
### Curation Rationale
These labels were selected because the corresponding objects can be confusing to a model, making them a great choice for demonstrating data centric AI techniques.
### Source Data
This is a subset of the [LVIS dataset.](https://www.lvisdataset.org/)
## Citation
**BibTeX:**
```bibtex
@inproceedings{gupta2019lvis,
title={{LVIS}: A Dataset for Large Vocabulary Instance Segmentation},
author={Gupta, Agrim and Dollar, Piotr and Girshick, Ross},
booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition},
year={2019}
}
```
|
bigstupidhats/openai_MMMLU_arb | bigstupidhats | "2024-11-29T09:32:15Z" | 3,410 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-29T08:25:58Z" | ---
dataset_info:
- config_name: abstract_algebra
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 11499
dataset_size: 72350.44153254523
- config_name: anatomy
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 97673.09606893605
num_examples: 135
download_size: 22742
dataset_size: 97673.09606893605
- config_name: astronomy
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 109972.67112946874
num_examples: 152
download_size: 33903
dataset_size: 109972.67112946874
- config_name: business_ethics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 24427
dataset_size: 72350.44153254523
- config_name: clinical_knowledge
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 191728.67006124483
num_examples: 265
download_size: 46922
dataset_size: 191728.67006124483
- config_name: college_biology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 104184.63580686512
num_examples: 144
download_size: 38338
dataset_size: 104184.63580686512
- config_name: college_chemistry
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 20462
dataset_size: 72350.44153254523
- config_name: college_computer_science
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 33391
dataset_size: 72350.44153254523
- config_name: college_mathematics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 18878
dataset_size: 72350.44153254523
- config_name: college_medicine
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 125166.26385130323
num_examples: 173
download_size: 54143
dataset_size: 125166.26385130323
- config_name: college_physics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 73797.45036319613
num_examples: 102
download_size: 20769
dataset_size: 73797.45036319613
- config_name: computer_security
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 24068
dataset_size: 72350.44153254523
- config_name: conceptual_physics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 170023.53760148128
num_examples: 235
download_size: 29074
dataset_size: 170023.53760148128
- config_name: econometrics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 82479.50334710155
num_examples: 114
download_size: 29766
dataset_size: 82479.50334710155
- config_name: electrical_engineering
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 104908.14022219057
num_examples: 145
download_size: 20777
dataset_size: 104908.14022219057
- config_name: elementary_mathematics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 273484.66899302095
num_examples: 378
download_size: 51448
dataset_size: 273484.66899302095
- config_name: formal_logic
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 91161.55633100698
num_examples: 126
download_size: 25601
dataset_size: 91161.55633100698
- config_name: global_facts
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 13796
dataset_size: 72350.44153254523
- config_name: high_school_biology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 224286.3687508902
num_examples: 310
download_size: 78285
dataset_size: 224286.3687508902
- config_name: high_school_chemistry
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 146871.3963110668
num_examples: 203
download_size: 40014
dataset_size: 146871.3963110668
- config_name: high_school_computer_science
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 32202
dataset_size: 72350.44153254523
- config_name: high_school_european_history
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 119378.22852869962
num_examples: 165
download_size: 182784
dataset_size: 119378.22852869962
- config_name: high_school_geography
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 143253.87423443954
num_examples: 198
download_size: 32130
dataset_size: 143253.87423443954
- config_name: high_school_government_and_politics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 139636.35215781227
num_examples: 193
download_size: 48784
dataset_size: 139636.35215781227
- config_name: high_school_macroeconomics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 282166.7219769264
num_examples: 390
download_size: 70007
dataset_size: 282166.7219769264
- config_name: high_school_mathematics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 195346.1921378721
num_examples: 270
download_size: 38633
dataset_size: 195346.1921378721
- config_name: high_school_microeconomics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 172194.05084745763
num_examples: 238
download_size: 47850
dataset_size: 172194.05084745763
- config_name: high_school_physics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 109249.16671414328
num_examples: 151
download_size: 38837
dataset_size: 109249.16671414328
- config_name: high_school_psychology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 394309.90635237144
num_examples: 545
download_size: 113613
dataset_size: 394309.90635237144
- config_name: high_school_statistics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 156276.95371029768
num_examples: 216
download_size: 76494
dataset_size: 156276.95371029768
- config_name: high_school_us_history
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 147594.90072639225
num_examples: 204
download_size: 205628
dataset_size: 147594.90072639225
- config_name: high_school_world_history
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 171470.54643213216
num_examples: 237
download_size: 254857
dataset_size: 171470.54643213216
- config_name: human_aging
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 161341.48461757586
num_examples: 223
download_size: 38982
dataset_size: 161341.48461757586
- config_name: human_sexuality
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 94779.07840763424
num_examples: 131
download_size: 26030
dataset_size: 94779.07840763424
- config_name: international_law
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 87544.03425437972
num_examples: 121
download_size: 35229
dataset_size: 87544.03425437972
- config_name: jurisprudence
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 78138.47685514884
num_examples: 108
download_size: 26611
dataset_size: 78138.47685514884
- config_name: logical_fallacies
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 117931.2196980487
num_examples: 163
download_size: 26662
dataset_size: 117931.2196980487
- config_name: machine_learning
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 81032.49451645065
num_examples: 112
download_size: 24490
dataset_size: 81032.49451645065
- config_name: management
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 74520.95477852158
num_examples: 103
download_size: 16412
dataset_size: 74520.95477852158
- config_name: marketing
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 169300.0331861558
num_examples: 234
download_size: 44445
dataset_size: 169300.0331861558
- config_name: medical_genetics
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 17991
dataset_size: 72350.44153254523
- config_name: miscellaneous
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 566503.957199829
num_examples: 783
download_size: 118897
dataset_size: 566503.957199829
- config_name: moral_disputes
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 250332.52770260646
num_examples: 346
download_size: 75135
dataset_size: 250332.52770260646
- config_name: moral_scenarios
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 647536.4517162797
num_examples: 895
download_size: 130719
dataset_size: 647536.4517162797
- config_name: nutrition
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 221392.35108958837
num_examples: 306
download_size: 66880
dataset_size: 221392.35108958837
- config_name: philosophy
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 225009.87316621564
num_examples: 311
download_size: 57405
dataset_size: 225009.87316621564
- config_name: prehistory
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 234415.43056544653
num_examples: 324
download_size: 67197
dataset_size: 234415.43056544653
- config_name: professional_accounting
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 204028.24512177752
num_examples: 282
download_size: 89142
dataset_size: 204028.24512177752
- config_name: professional_law
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 1109855.7731092437
num_examples: 1534
download_size: 1293223
dataset_size: 1109855.7731092437
- config_name: professional_medicine
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 196793.20096852302
num_examples: 272
download_size: 160070
dataset_size: 196793.20096852302
- config_name: professional_psychology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 442784.70217917673
num_examples: 612
download_size: 158926
dataset_size: 442784.70217917673
- config_name: public_relations
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 79585.48568579974
num_examples: 110
download_size: 23042
dataset_size: 79585.48568579974
- config_name: security_studies
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 177258.5817547358
num_examples: 245
download_size: 144246
dataset_size: 177258.5817547358
- config_name: sociology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 145424.3874804159
num_examples: 201
download_size: 51479
dataset_size: 145424.3874804159
- config_name: us_foreign_policy
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 72350.44153254523
num_examples: 100
download_size: 22041
dataset_size: 72350.44153254523
- config_name: virology
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 120101.73294402506
num_examples: 166
download_size: 32006
dataset_size: 120101.73294402506
- config_name: world_religions
features:
- name: question
dtype: string
- name: subject
dtype: string
- name: choices
sequence: string
- name: answer
dtype: int64
splits:
- name: test
num_bytes: 123719.25502065233
num_examples: 171
download_size: 20673
dataset_size: 123719.25502065233
configs:
- config_name: abstract_algebra
data_files:
- split: test
path: abstract_algebra/test-*
- config_name: anatomy
data_files:
- split: test
path: anatomy/test-*
- config_name: astronomy
data_files:
- split: test
path: astronomy/test-*
- config_name: business_ethics
data_files:
- split: test
path: business_ethics/test-*
- config_name: clinical_knowledge
data_files:
- split: test
path: clinical_knowledge/test-*
- config_name: college_biology
data_files:
- split: test
path: college_biology/test-*
- config_name: college_chemistry
data_files:
- split: test
path: college_chemistry/test-*
- config_name: college_computer_science
data_files:
- split: test
path: college_computer_science/test-*
- config_name: college_mathematics
data_files:
- split: test
path: college_mathematics/test-*
- config_name: college_medicine
data_files:
- split: test
path: college_medicine/test-*
- config_name: college_physics
data_files:
- split: test
path: college_physics/test-*
- config_name: computer_security
data_files:
- split: test
path: computer_security/test-*
- config_name: conceptual_physics
data_files:
- split: test
path: conceptual_physics/test-*
- config_name: econometrics
data_files:
- split: test
path: econometrics/test-*
- config_name: electrical_engineering
data_files:
- split: test
path: electrical_engineering/test-*
- config_name: elementary_mathematics
data_files:
- split: test
path: elementary_mathematics/test-*
- config_name: formal_logic
data_files:
- split: test
path: formal_logic/test-*
- config_name: global_facts
data_files:
- split: test
path: global_facts/test-*
- config_name: high_school_biology
data_files:
- split: test
path: high_school_biology/test-*
- config_name: high_school_chemistry
data_files:
- split: test
path: high_school_chemistry/test-*
- config_name: high_school_computer_science
data_files:
- split: test
path: high_school_computer_science/test-*
- config_name: high_school_european_history
data_files:
- split: test
path: high_school_european_history/test-*
- config_name: high_school_geography
data_files:
- split: test
path: high_school_geography/test-*
- config_name: high_school_government_and_politics
data_files:
- split: test
path: high_school_government_and_politics/test-*
- config_name: high_school_macroeconomics
data_files:
- split: test
path: high_school_macroeconomics/test-*
- config_name: high_school_mathematics
data_files:
- split: test
path: high_school_mathematics/test-*
- config_name: high_school_microeconomics
data_files:
- split: test
path: high_school_microeconomics/test-*
- config_name: high_school_physics
data_files:
- split: test
path: high_school_physics/test-*
- config_name: high_school_psychology
data_files:
- split: test
path: high_school_psychology/test-*
- config_name: high_school_statistics
data_files:
- split: test
path: high_school_statistics/test-*
- config_name: high_school_us_history
data_files:
- split: test
path: high_school_us_history/test-*
- config_name: high_school_world_history
data_files:
- split: test
path: high_school_world_history/test-*
- config_name: human_aging
data_files:
- split: test
path: human_aging/test-*
- config_name: human_sexuality
data_files:
- split: test
path: human_sexuality/test-*
- config_name: international_law
data_files:
- split: test
path: international_law/test-*
- config_name: jurisprudence
data_files:
- split: test
path: jurisprudence/test-*
- config_name: logical_fallacies
data_files:
- split: test
path: logical_fallacies/test-*
- config_name: machine_learning
data_files:
- split: test
path: machine_learning/test-*
- config_name: management
data_files:
- split: test
path: management/test-*
- config_name: marketing
data_files:
- split: test
path: marketing/test-*
- config_name: medical_genetics
data_files:
- split: test
path: medical_genetics/test-*
- config_name: miscellaneous
data_files:
- split: test
path: miscellaneous/test-*
- config_name: moral_disputes
data_files:
- split: test
path: moral_disputes/test-*
- config_name: moral_scenarios
data_files:
- split: test
path: moral_scenarios/test-*
- config_name: nutrition
data_files:
- split: test
path: nutrition/test-*
- config_name: philosophy
data_files:
- split: test
path: philosophy/test-*
- config_name: prehistory
data_files:
- split: test
path: prehistory/test-*
- config_name: professional_accounting
data_files:
- split: test
path: professional_accounting/test-*
- config_name: professional_law
data_files:
- split: test
path: professional_law/test-*
- config_name: professional_medicine
data_files:
- split: test
path: professional_medicine/test-*
- config_name: professional_psychology
data_files:
- split: test
path: professional_psychology/test-*
- config_name: public_relations
data_files:
- split: test
path: public_relations/test-*
- config_name: security_studies
data_files:
- split: test
path: security_studies/test-*
- config_name: sociology
data_files:
- split: test
path: sociology/test-*
- config_name: us_foreign_policy
data_files:
- split: test
path: us_foreign_policy/test-*
- config_name: virology
data_files:
- split: test
path: virology/test-*
- config_name: world_religions
data_files:
- split: test
path: world_religions/test-*
---
|
tau/scrolls | tau | "2024-01-12T09:30:24Z" | 3,407 | 27 | [
"task_categories:question-answering",
"task_categories:summarization",
"task_categories:text-generation",
"task_ids:multiple-choice-qa",
"task_ids:natural-language-inference",
"language:en",
"arxiv:2201.03533",
"arxiv:2104.02112",
"arxiv:2104.07091",
"arxiv:2104.05938",
"arxiv:1712.07040",
"arxiv:2105.03011",
"arxiv:2112.08608",
"arxiv:2110.01799",
"region:us",
"query-based-summarization",
"long-texts"
] | [
"question-answering",
"summarization",
"text-generation"
] | "2022-03-02T23:29:22Z" | ---
language:
- en
task_categories:
- question-answering
- summarization
- text-generation
task_ids:
- multiple-choice-qa
- natural-language-inference
paperswithcode_id: scrolls
configs:
- gov_report
- summ_screen_fd
- qmsum
- qasper
- narrative_qa
- quality
- contract_nli
tags:
- query-based-summarization
- long-texts
---
## Dataset Description
- **Homepage:** [SCROLLS](https://www.scrolls-benchmark.com/)
- **Repository:** [SCROLLS Github repository](https://github.com/tau-nlp/scrolls)
- **Paper:** [SCROLLS: Standardized CompaRison Over Long Language Sequences
](https://arxiv.org/pdf/2201.03533.pdf)
- **Leaderboard:** [Leaderboard](https://www.scrolls-benchmark.com/leaderboard)
- **Point of Contact:** [[email protected]]([email protected])
# Dataset Card for SCROLLS
## Overview
SCROLLS is a suite of datasets that require synthesizing information over long texts. The benchmark includes seven natural language tasks across multiple domains, including summarization, question answering, and natural language inference.
## Leaderboard
The SCROLLS benchmark leaderboard can be found [here](https://www.scrolls-benchmark.com/leaderboard).
## Tasks
SCROLLS comprises the following tasks:
#### GovReport ([Huang et al., 2021](https://arxiv.org/pdf/2104.02112.pdf))
GovReport is a summarization dataset of reports addressing various national policy issues published by the
Congressional Research Service and the U.S. Government Accountability Office, where each document is paired with a hand-written executive summary.
The reports and their summaries are longer than their equivalents in other popular long-document summarization datasets;
for example, GovReport's documents are approximately 1.5 and 2.5 times longer than the documents in Arxiv and PubMed, respectively.
#### SummScreenFD ([Chen et al., 2021](https://arxiv.org/pdf/2104.07091.pdf))
SummScreenFD is a summarization dataset in the domain of TV shows (e.g. Friends, Game of Thrones).
Given a transcript of a specific episode, the goal is to produce the episode's recap.
The original dataset is divided into two complementary subsets, based on the source of its community contributed transcripts.
For SCROLLS, we use the ForeverDreaming (FD) subset, as it incorporates 88 different shows,
making it a more diverse alternative to the TV MegaSite (TMS) subset, which has only 10 shows.
Community-authored recaps for the ForeverDreaming transcripts were collected from English Wikipedia and TVMaze.
#### QMSum ([Zhong et al., 2021](https://arxiv.org/pdf/2104.05938.pdf))
QMSum is a query-based summarization dataset, consisting of 232 meetings transcripts from multiple domains.
The corpus covers academic group meetings at the International Computer Science Institute and their summaries, industrial product meetings for designing a remote control,
and committee meetings of the Welsh and Canadian Parliaments, dealing with a variety of public policy issues.
Annotators were tasked with writing queries about the broad contents of the meetings, as well as specific questions about certain topics or decisions,
while ensuring that the relevant text for answering each query spans at least 200 words or 10 turns.
#### NarrativeQA ([Kočiský et al., 2018](https://arxiv.org/pdf/1712.07040.pdf))
NarrativeQA (Kočiský et al., 2018) is an established question answering dataset over entire books from Project Gutenberg and movie scripts from different websites.
Annotators were given summaries of the books and scripts obtained from Wikipedia, and asked to generate question-answer pairs,
resulting in about 30 questions and answers for each of the 1,567 books and scripts.
They were encouraged to use their own words rather than copying, and avoid asking yes/no questions or ones about the cast.
Each question was then answered by an additional annotator, providing each question with two reference answers (unless both answers are identical).
#### Qasper ([Dasigi et al., 2021](https://arxiv.org/pdf/2105.03011.pdf))
Qasper is a question answering dataset over NLP papers filtered from the Semantic Scholar Open Research Corpus (S2ORC).
Questions were written by NLP practitioners after reading only the title and abstract of the papers,
while another set of NLP practitioners annotated the answers given the entire document.
Qasper contains abstractive, extractive, and yes/no questions, as well as unanswerable ones.
#### QuALITY ([Pang et al., 2021](https://arxiv.org/pdf/2112.08608.pdf))
QuALITY is a multiple-choice question answering dataset over articles and stories sourced from Project Gutenberg,
the Open American National Corpus, and more.
Experienced writers wrote questions and distractors, and were incentivized to write answerable, unambiguous questions such that in order to correctly answer them,
human annotators must read large portions of the given document.
Reference answers were then calculated using the majority vote between of the annotators and writer's answers.
To measure the difficulty of their questions, Pang et al. conducted a speed validation process,
where another set of annotators were asked to answer questions given only a short period of time to skim through the document.
As a result, 50% of the questions in QuALITY are labeled as hard, i.e. the majority of the annotators in the speed validation setting chose the wrong answer.
#### ContractNLI ([Koreeda and Manning, 2021](https://arxiv.org/pdf/2110.01799.pdf))
Contract NLI is a natural language inference dataset in the legal domain.
Given a non-disclosure agreement (the premise), the task is to predict whether a particular legal statement (the hypothesis) is entailed, not entailed (neutral), or cannot be entailed (contradiction) from the contract.
The NDAs were manually picked after simple filtering from the Electronic Data Gathering, Analysis, and Retrieval system (EDGAR) and Google.
The dataset contains a total of 607 contracts and 17 unique hypotheses, which were combined to produce the dataset's 10,319 examples.
## Data Fields
All the datasets in the benchmark are in the same input-output format
- `input`: a `string` feature. The input document.
- `output`: a `string` feature. The target.
- `id`: a `string` feature. Unique per input.
- `pid`: a `string` feature. Unique per input-output pair (can differ from 'id' in NarrativeQA and Qasper, where there is more than one valid target).
## Citation
If you use the SCROLLS data, **please make sure to cite all of the original dataset papers.** [[bibtex](https://scrolls-tau.s3.us-east-2.amazonaws.com/scrolls_datasets.bib)]
```
@inproceedings{shaham-etal-2022-scrolls,
title = "{SCROLLS}: Standardized {C}ompa{R}ison Over Long Language Sequences",
author = "Shaham, Uri and
Segal, Elad and
Ivgi, Maor and
Efrat, Avia and
Yoran, Ori and
Haviv, Adi and
Gupta, Ankit and
Xiong, Wenhan and
Geva, Mor and
Berant, Jonathan and
Levy, Omer",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.emnlp-main.823",
pages = "12007--12021",
}
``` |
andstor/methods2test | andstor | "2023-12-23T03:01:51Z" | 3,397 | 0 | [
"task_categories:text-generation",
"language:en",
"license:mit",
"size_categories:10M<n<100M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"arxiv:2203.12776",
"region:us",
"unit test",
"java",
"code"
] | [
"text-generation"
] | "2023-12-07T13:37:44Z" | ---
language:
- en
license: mit
task_categories:
- text-generation
configs:
- config_name: fm
data_files:
- split: train
path: data/fm/train-*
- split: test
path: data/fm/test-*
- split: validation
path: data/fm/validation-*
- config_name: fm_indented
data_files:
- split: train
path: data/fm_indented/train-*
- split: test
path: data/fm_indented/test-*
- split: validation
path: data/fm_indented/validation-*
- config_name: fm+t
data_files:
- split: train
path: data/fm+t/train-*
- split: test
path: data/fm+t/test-*
- split: validation
path: data/fm+t/validation-*
- config_name: fm+fc
data_files:
- split: train
path: data/fm+fc/train-*
- split: test
path: data/fm+fc/test-*
- split: validation
path: data/fm+fc/validation-*
- config_name: fm+fc+t+tc
data_files:
- split: train
path: data/fm+fc+t+tc/train-*
- split: test
path: data/fm+fc+t+tc/test-*
- split: validation
path: data/fm+fc+t+tc/validation-*
- config_name: fm+fc+c
data_files:
- split: train
path: data/fm+fc+c/train-*
- split: test
path: data/fm+fc+c/test-*
- split: validation
path: data/fm+fc+c/validation-*
- config_name: fm+fc+c+t+tc
data_files:
- split: train
path: data/fm+fc+c+t+tc/train-*
- split: test
path: data/fm+fc+c+t+tc/test-*
- split: validation
path: data/fm+fc+c+t+tc/validation-*
- config_name: fm+fc+c+m
data_files:
- split: train
path: data/fm+fc+c+m/train-*
- split: test
path: data/fm+fc+c+m/test-*
- split: validation
path: data/fm+fc+c+m/validation-*
- config_name: fm+fc+c+m+t+tc
data_files:
- split: train
path: data/fm+fc+c+m+t+tc/train-*
- split: test
path: data/fm+fc+c+m+t+tc/test-*
- split: validation
path: data/fm+fc+c+m+t+tc/validation-*
- config_name: fm+fc+c+m+f
data_files:
- split: train
path: data/fm+fc+c+m+f/train-*
- split: test
path: data/fm+fc+c+m+f/test-*
- split: validation
path: data/fm+fc+c+m+f/validation-*
- config_name: fm+fc+c+m+f+t+tc
data_files:
- split: train
path: data/fm+fc+c+m+f+t+tc/train-*
- split: test
path: data/fm+fc+c+m+f+t+tc/test-*
- split: validation
path: data/fm+fc+c+m+f+t+tc/validation-*
- config_name: t
data_files:
- split: train
path: data/t/train-*
- split: test
path: data/t/test-*
- split: validation
path: data/t/validation-*
- config_name: t_indented
data_files:
- split: train
path: data/t_indented/train-*
- split: test
path: data/t_indented/test-*
- split: validation
path: data/t_indented/validation-*
- config_name: t+tc
data_files:
- split: train
path: data/t+tc/train-*
- split: test
path: data/t+tc/test-*
- split: validation
path: data/t+tc/validation-*
dataset_info:
- config_name: fm
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 440444124
num_examples: 624022
- name: test
num_bytes: 59407291
num_examples: 78388
- name: validation
num_bytes: 57170315
num_examples: 78534
download_size: 99172217
dataset_size: 557021730
- config_name: fm+fc
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 506130678
num_examples: 624022
- name: test
num_bytes: 68407490
num_examples: 78388
- name: validation
num_bytes: 65318956
num_examples: 78534
download_size: 109141139
dataset_size: 639857124
- config_name: fm+fc+c
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 569209100
num_examples: 624022
- name: test
num_bytes: 75552573
num_examples: 78388
- name: validation
num_bytes: 73101169
num_examples: 78534
download_size: 117996353
dataset_size: 717862842
- config_name: fm+fc+c+m
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 1354004338
num_examples: 624022
- name: test
num_bytes: 187724929
num_examples: 78388
- name: validation
num_bytes: 184349299
num_examples: 78534
download_size: 222922572
dataset_size: 1726078566
- config_name: fm+fc+c+m+f
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 1476073209
num_examples: 624022
- name: test
num_bytes: 201686811
num_examples: 78388
- name: validation
num_bytes: 201259950
num_examples: 78534
download_size: 240405885
dataset_size: 1879019970
- config_name: fm+fc+c+m+f+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 2019918359
num_examples: 624022
- name: test
num_bytes: 269021331
num_examples: 78388
- name: validation
num_bytes: 272958781
num_examples: 78534
download_size: 371500476
dataset_size: 2561898471
- config_name: fm+fc+c+m+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 1897682665
num_examples: 624022
- name: test
num_bytes: 255053799
num_examples: 78388
- name: validation
num_bytes: 256030595
num_examples: 78534
download_size: 360175965
dataset_size: 2408767059
- config_name: fm+fc+c+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 1109827485
num_examples: 624022
- name: test
num_bytes: 142558255
num_examples: 78388
- name: validation
num_bytes: 144523616
num_examples: 78534
download_size: 251861137
dataset_size: 1396909356
- config_name: fm+fc+t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 1046592848
num_examples: 624022
- name: test
num_bytes: 135403379
num_examples: 78388
- name: validation
num_bytes: 136729952
num_examples: 78534
download_size: 243052074
dataset_size: 1318726179
- config_name: fm+t
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 868034154
num_examples: 624022
- name: test
num_bytes: 114371187
num_examples: 78388
- name: validation
num_bytes: 112688219
num_examples: 78534
download_size: 217267853
dataset_size: 1095093560
- config_name: fm_indented
features:
- name: id
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 473170158
num_examples: 624022
- name: test
num_bytes: 64280367
num_examples: 78388
- name: validation
num_bytes: 61093848
num_examples: 78534
download_size: 103174190
dataset_size: 598544373
- config_name: t
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 380207303
num_examples: 624022
- name: test
num_bytes: 47993188
num_examples: 78388
- name: validation
num_bytes: 49808813
num_examples: 78534
download_size: 113820250
dataset_size: 478009304
- config_name: t+tc
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 550955294
num_examples: 624022
- name: test
num_bytes: 68323462
num_examples: 78388
- name: validation
num_bytes: 72740770
num_examples: 78534
download_size: 136767271
dataset_size: 692019526
- config_name: t_indented
features:
- name: id
dtype: string
- name: source
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 405853738
num_examples: 624022
- name: test
num_bytes: 51457514
num_examples: 78388
- name: validation
num_bytes: 52970428
num_examples: 78534
download_size: 117732776
dataset_size: 510281680
tags:
- unit test
- java
- code
---
## Dataset Description
Microsoft created the methods2test dataset, consisting of Java JUnit test cases with their corresponding focal methods.
It contains 780k pairs of JUnit test cases and focal methods which were extracted from a total of 91K
Java open source projects hosted on GitHub.
This is an assembled version of the methods2test dataset. It provides convenient access to the different context levels based on the raw source code (e.g. newlines are preserved). The test cases and associated classes are also made available.
The mapping between test cases and focal methods is based on heuristic rules and Java developers' best practices.
More information could be found here:
- [methods2test Github repo](https://github.com/microsoft/methods2test)
- [Methods2Test: A dataset of focal methods mapped to test cases](https://arxiv.org/pdf/2203.12776.pdf)
## Dataset Schema
```
t: <TEST_CASE>
t_tc: <TEST_CASE> <TEST_CLASS_NAME>
fm: <FOCAL_METHOD>
fm_fc: <FOCAL_CLASS_NAME> <FOCAL_METHOD>
fm_fc_c: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS>
fm_fc_c_m: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES>
fm_fc_c_m_f: <FOCAL_CLASS_NAME> <FOCAL_METHOD> <CONSTRUCTORS> <METHOD_SIGNATURES> <FIELDS>
```
## Focal Context
- fm: this representation incorporates exclusively the source
code of the focal method. Intuitively, this contains the most
important information for generating accurate test cases for
the given method.
- fm+fc: this representation adds the focal class name, which
can provide meaningful semantic information to the model.
- fm+fc+c: this representation adds the signatures of the constructor methods of the focal class. The idea behind this
augmentation is that the test case may require instantiating
an object of the focal class in order to properly test the focal
method.
- fm+fc+c+m: this representation adds the signatures of the
other public methods in the focal class. The rationale which
motivated this inclusion is that the test case may need to
invoke other auxiliary methods within the class (e.g., getters,
setters) to set up or tear down the testing environment.
- fm+fc+c+m+f : this representation adds the public fields of
the focal class. The motivation is that test cases may need to
inspect the status of the public fields to properly test a focal
method.
![image/png](https://huggingface.co/datasets/andstor/methods2test/resolve/main/figure-1-focal-context.png)
The different levels of focal contexts are the following:
```
T: test case
T_TC: test case + test class name
FM: focal method
FM_FC: focal method + focal class name
FM_FC_C: focal method + focal class name + constructor signatures
FM_FC_C_M: focal method + focal class name + constructor signatures + public method signatures
FM_FC_C_M_F: focal method + focal class name + constructor signatures + public method signatures + public fields
```
## Limitations
The original authors validated the heuristics by inspecting a
statistically significant sample (confidence level of 95% within 10%
margin of error) of 97 samples from the training set. Two authors
independently evaluated the sample, then met to discuss the disagreements. They found that 90.72% of the samples have a correct
link between the test case and the corresponding focal method.
## Contribution
All thanks to the original authors.
|
japanese-asr/whisper_transcriptions.mls.wer_10.0 | japanese-asr | "2024-09-14T07:57:24Z" | 3,388 | 1 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:audio",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-11T09:52:44Z" | ---
dataset_info:
- config_name: subset_0
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29741913577.241814
num_examples: 62101
download_size: 28406057868
dataset_size: 29741913577.241814
- config_name: subset_1
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29815585138.73427
num_examples: 62323
download_size: 28488972470
dataset_size: 29815585138.73427
- config_name: subset_10
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29831443458.675167
num_examples: 62172
download_size: 28490041949
dataset_size: 29831443458.675167
- config_name: subset_100
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29740102232.58974
num_examples: 62114
download_size: 28402573685
dataset_size: 29740102232.58974
- config_name: subset_101
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29804666990.485275
num_examples: 62225
download_size: 28477636147
dataset_size: 29804666990.485275
- config_name: subset_102
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29847859656.366245
num_examples: 62219
download_size: 28508104461
dataset_size: 29847859656.366245
- config_name: subset_103
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29776271336.13424
num_examples: 62248
download_size: 28453790146
dataset_size: 29776271336.13424
- config_name: subset_104
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29864954995.718533
num_examples: 62348
download_size: 28540369174
dataset_size: 29864954995.718533
- config_name: subset_105
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29845768222.852547
num_examples: 62287
download_size: 28508203679
dataset_size: 29845768222.852547
- config_name: subset_106
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29868915195.73696
num_examples: 62355
download_size: 28531446961
dataset_size: 29868915195.73696
- config_name: subset_107
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29833296511.762436
num_examples: 62252
download_size: 28502966117
dataset_size: 29833296511.762436
- config_name: subset_108
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29846554379.21017
num_examples: 62398
download_size: 28521313998
dataset_size: 29846554379.21017
- config_name: subset_109
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29808600165.9863
num_examples: 62240
download_size: 28473663596
dataset_size: 29808600165.9863
- config_name: subset_11
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29807895865.53131
num_examples: 62230
download_size: 28470625940
dataset_size: 29807895865.53131
- config_name: subset_110
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29829181073.93217
num_examples: 62281
download_size: 28508841100
dataset_size: 29829181073.93217
- config_name: subset_111
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29845682710.49548
num_examples: 62335
download_size: 28524753965
dataset_size: 29845682710.49548
- config_name: subset_112
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29826059756.774582
num_examples: 62252
download_size: 28493408051
dataset_size: 29826059756.774582
- config_name: subset_113
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29736425530.042995
num_examples: 62066
download_size: 28408328564
dataset_size: 29736425530.042995
- config_name: subset_114
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 4936296.0
num_examples: 11
download_size: 4709772
dataset_size: 4936296.0
- config_name: subset_115
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29775544304.801655
num_examples: 62159
download_size: 28447112935
dataset_size: 29775544304.801655
- config_name: subset_116
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29764092406.31982
num_examples: 62150
download_size: 28424856922
dataset_size: 29764092406.31982
- config_name: subset_117
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29734215090.831867
num_examples: 62098
download_size: 28401429108
dataset_size: 29734215090.831867
- config_name: subset_118
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29785403327.377136
num_examples: 62307
download_size: 28454761582
dataset_size: 29785403327.377136
- config_name: subset_119
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29887080358.46854
num_examples: 62437
download_size: 28560903814
dataset_size: 29887080358.46854
- config_name: subset_12
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29778963955.27637
num_examples: 62217
download_size: 28456064768
dataset_size: 29778963955.27637
- config_name: subset_120
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29795324063.32621
num_examples: 62213
download_size: 28459179628
dataset_size: 29795324063.32621
- config_name: subset_121
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29823497463.618946
num_examples: 62219
download_size: 28486036307
dataset_size: 29823497463.618946
- config_name: subset_122
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29825885978.784977
num_examples: 62198
download_size: 28495894587
dataset_size: 29825885978.784977
- config_name: subset_123
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29824518738.544853
num_examples: 62207
download_size: 28482461945
dataset_size: 29824518738.544853
- config_name: subset_124
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29734472830.244003
num_examples: 62044
download_size: 28397807256
dataset_size: 29734472830.244003
- config_name: subset_125
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29751692495.66535
num_examples: 62132
download_size: 28418245723
dataset_size: 29751692495.66535
- config_name: subset_126
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29860413580.83239
num_examples: 62262
download_size: 28531745153
dataset_size: 29860413580.83239
- config_name: subset_127
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29844407241.442238
num_examples: 62182
download_size: 28520446380
dataset_size: 29844407241.442238
- config_name: subset_128
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29802989154.327606
num_examples: 62225
download_size: 28463177779
dataset_size: 29802989154.327606
- config_name: subset_129
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29850861116.343075
num_examples: 62330
download_size: 28520805908
dataset_size: 29850861116.343075
- config_name: subset_13
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29796741055.90437
num_examples: 62202
download_size: 28466354764
dataset_size: 29796741055.90437
- config_name: subset_130
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 4263112.0
num_examples: 9
download_size: 4073797
dataset_size: 4263112.0
- config_name: subset_131
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29720943599.72362
num_examples: 61994
download_size: 28379216482
dataset_size: 29720943599.72362
- config_name: subset_132
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29797620915.980434
num_examples: 62210
download_size: 28461599359
dataset_size: 29797620915.980434
- config_name: subset_133
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29782749863.416126
num_examples: 62161
download_size: 28447689082
dataset_size: 29782749863.416126
- config_name: subset_134
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29775975351.80884
num_examples: 62252
download_size: 28445935648
dataset_size: 29775975351.80884
- config_name: subset_135
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29804785291.47995
num_examples: 62332
download_size: 28474094120
dataset_size: 29804785291.47995
- config_name: subset_136
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29764251087.81636
num_examples: 62135
download_size: 28435055519
dataset_size: 29764251087.81636
- config_name: subset_137
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29796171709.935783
num_examples: 62226
download_size: 28468528453
dataset_size: 29796171709.935783
- config_name: subset_138
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29818809747.066654
num_examples: 62253
download_size: 28486190334
dataset_size: 29818809747.066654
- config_name: subset_14
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29713149830.08086
num_examples: 62058
download_size: 28370992605
dataset_size: 29713149830.08086
- config_name: subset_15
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29857118469.690784
num_examples: 62295
download_size: 28520133081
dataset_size: 29857118469.690784
- config_name: subset_16
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44717472597.38111
num_examples: 93380
download_size: 42705151644
dataset_size: 44717472597.38111
- config_name: subset_17
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44567963372.985085
num_examples: 93081
download_size: 42557871062
dataset_size: 44567963372.985085
- config_name: subset_18
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 487488.0
num_examples: 1
download_size: 482536
dataset_size: 487488.0
- config_name: subset_19
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44578540518.278465
num_examples: 93092
download_size: 42574195823
dataset_size: 44578540518.278465
- config_name: subset_2
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29779834173.05709
num_examples: 62194
download_size: 28457283265
dataset_size: 29779834173.05709
- config_name: subset_20
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44612078576.6915
num_examples: 93192
download_size: 42608986260
dataset_size: 44612078576.6915
- config_name: subset_21
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44716611297.93694
num_examples: 93435
download_size: 42724070775
dataset_size: 44716611297.93694
- config_name: subset_22
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44642599292.25394
num_examples: 93272
download_size: 42638436011
dataset_size: 44642599292.25394
- config_name: subset_23
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44773238981.78718
num_examples: 93425
download_size: 42769260156
dataset_size: 44773238981.78718
- config_name: subset_24
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44643199115.213066
num_examples: 93280
download_size: 42643630676
dataset_size: 44643199115.213066
- config_name: subset_25
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44775786873.71317
num_examples: 93521
download_size: 42787596471
dataset_size: 44775786873.71317
- config_name: subset_26
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44724169162.378235
num_examples: 93381
download_size: 42734030121
dataset_size: 44724169162.378235
- config_name: subset_27
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44673408393.32555
num_examples: 93316
download_size: 42671093570
dataset_size: 44673408393.32555
- config_name: subset_28
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44647540966.199005
num_examples: 93158
download_size: 42647063249
dataset_size: 44647540966.199005
- config_name: subset_29
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 2062592.0
num_examples: 5
download_size: 1973372
dataset_size: 2062592.0
- config_name: subset_3
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29781556446.893677
num_examples: 62185
download_size: 28457664262
dataset_size: 29781556446.893677
- config_name: subset_30
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44619423613.42147
num_examples: 93230
download_size: 42616420647
dataset_size: 44619423613.42147
- config_name: subset_31
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44686832436.53614
num_examples: 93302
download_size: 42675035403
dataset_size: 44686832436.53614
- config_name: subset_32
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44667832285.4235
num_examples: 93252
download_size: 42664546252
dataset_size: 44667832285.4235
- config_name: subset_33
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44767218387.43479
num_examples: 93463
download_size: 42772690686
dataset_size: 44767218387.43479
- config_name: subset_34
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44759250508.174644
num_examples: 93435
download_size: 42765984681
dataset_size: 44759250508.174644
- config_name: subset_35
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44661416756.55649
num_examples: 93247
download_size: 42662115327
dataset_size: 44661416756.55649
- config_name: subset_36
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44697682741.818405
num_examples: 93316
download_size: 42706948136
dataset_size: 44697682741.818405
- config_name: subset_37
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44642072687.35433
num_examples: 93214
download_size: 42642766174
dataset_size: 44642072687.35433
- config_name: subset_38
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44703619421.43295
num_examples: 93385
download_size: 42705784293
dataset_size: 44703619421.43295
- config_name: subset_39
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44691234018.50057
num_examples: 93382
download_size: 42688816370
dataset_size: 44691234018.50057
- config_name: subset_4
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29804659756.840668
num_examples: 62191
download_size: 28469961087
dataset_size: 29804659756.840668
- config_name: subset_40
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 3749600.0
num_examples: 8
download_size: 3544062
dataset_size: 3749600.0
- config_name: subset_41
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44710781677.17146
num_examples: 93390
download_size: 42686048101
dataset_size: 44710781677.17146
- config_name: subset_42
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44595765733.45953
num_examples: 93072
download_size: 42586674927
dataset_size: 44595765733.45953
- config_name: subset_43
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44642287217.473366
num_examples: 93216
download_size: 42643283814
dataset_size: 44642287217.473366
- config_name: subset_44
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44680782381.93174
num_examples: 93330
download_size: 42679060966
dataset_size: 44680782381.93174
- config_name: subset_45
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44632730866.69406
num_examples: 93280
download_size: 42632582031
dataset_size: 44632730866.69406
- config_name: subset_46
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44777176265.9063
num_examples: 93367
download_size: 42774172043
dataset_size: 44777176265.9063
- config_name: subset_47
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44723520638.39092
num_examples: 93353
download_size: 42739592034
dataset_size: 44723520638.39092
- config_name: subset_48
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44699581610.83543
num_examples: 93264
download_size: 42691617545
dataset_size: 44699581610.83543
- config_name: subset_49
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29776883327.862305
num_examples: 62152
download_size: 28437717863
dataset_size: 29776883327.862305
- config_name: subset_5
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29770798634.794384
num_examples: 62238
download_size: 28425692361
dataset_size: 29770798634.794384
- config_name: subset_50
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29799476758.913025
num_examples: 62205
download_size: 28471561046
dataset_size: 29799476758.913025
- config_name: subset_51
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29840586701.87915
num_examples: 62210
download_size: 28512098634
dataset_size: 29840586701.87915
- config_name: subset_52
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29863217529.513218
num_examples: 62358
download_size: 28535498406
dataset_size: 29863217529.513218
- config_name: subset_53
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29800253005.76768
num_examples: 62172
download_size: 28473498615
dataset_size: 29800253005.76768
- config_name: subset_54
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29790807067.45104
num_examples: 62220
download_size: 28462782039
dataset_size: 29790807067.45104
- config_name: subset_55
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29756188946.40286
num_examples: 62087
download_size: 28428539117
dataset_size: 29756188946.40286
- config_name: subset_56
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 4052942.222222222
num_examples: 8
download_size: 3756055
dataset_size: 4052942.222222222
- config_name: subset_57
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29815135874.530243
num_examples: 62215
download_size: 28480808968
dataset_size: 29815135874.530243
- config_name: subset_58
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29768678268.566994
num_examples: 62182
download_size: 28432025537
dataset_size: 29768678268.566994
- config_name: subset_59
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29792832738.55261
num_examples: 62236
download_size: 28467550664
dataset_size: 29792832738.55261
- config_name: subset_6
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29760036816.438248
num_examples: 62119
download_size: 28431028357
dataset_size: 29760036816.438248
- config_name: subset_60
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29795475110.996426
num_examples: 62199
download_size: 28458203598
dataset_size: 29795475110.996426
- config_name: subset_61
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29810850600.259956
num_examples: 62218
download_size: 28472533876
dataset_size: 29810850600.259956
- config_name: subset_62
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29793053976.98263
num_examples: 62354
download_size: 28468189848
dataset_size: 29793053976.98263
- config_name: subset_63
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29830633313.527344
num_examples: 62245
download_size: 28488179308
dataset_size: 29830633313.527344
- config_name: subset_64
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29815723311.686073
num_examples: 62295
download_size: 28481396085
dataset_size: 29815723311.686073
- config_name: subset_65
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29816082510.257248
num_examples: 62317
download_size: 28483004258
dataset_size: 29816082510.257248
- config_name: subset_66
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29823849926.137985
num_examples: 62310
download_size: 28501727354
dataset_size: 29823849926.137985
- config_name: subset_67
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29779693486.822372
num_examples: 62204
download_size: 28440702784
dataset_size: 29779693486.822372
- config_name: subset_68
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29791077271.97951
num_examples: 62238
download_size: 28470796275
dataset_size: 29791077271.97951
- config_name: subset_69
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29804033246.804424
num_examples: 62214
download_size: 28469224555
dataset_size: 29804033246.804424
- config_name: subset_7
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 435616.0
num_examples: 1
download_size: 429318
dataset_size: 435616.0
- config_name: subset_70
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29769624892.128063
num_examples: 62139
download_size: 28427172030
dataset_size: 29769624892.128063
- config_name: subset_71
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29682503840.61426
num_examples: 61996
download_size: 28356389982
dataset_size: 29682503840.61426
- config_name: subset_72
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 1459624.0
num_examples: 3
download_size: 1398432
dataset_size: 1459624.0
- config_name: subset_73
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29818453655.1624
num_examples: 62294
download_size: 28492225008
dataset_size: 29818453655.1624
- config_name: subset_74
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29809226392.643494
num_examples: 62275
download_size: 28471896097
dataset_size: 29809226392.643494
- config_name: subset_75
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29813615006.46017
num_examples: 62247
download_size: 28481389086
dataset_size: 29813615006.46017
- config_name: subset_76
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29787940909.366352
num_examples: 62176
download_size: 28457880270
dataset_size: 29787940909.366352
- config_name: subset_77
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29786889343.17914
num_examples: 62093
download_size: 28459598814
dataset_size: 29786889343.17914
- config_name: subset_78
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29766008072.24979
num_examples: 62168
download_size: 28429784870
dataset_size: 29766008072.24979
- config_name: subset_79
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29771618615.300034
num_examples: 62183
download_size: 28430361717
dataset_size: 29771618615.300034
- config_name: subset_8
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29784846446.236767
num_examples: 62208
download_size: 28456467319
dataset_size: 29784846446.236767
- config_name: subset_80
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29825455286.15237
num_examples: 62242
download_size: 28494746372
dataset_size: 29825455286.15237
- config_name: subset_81
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44611282857.507706
num_examples: 93149
download_size: 42605499635
dataset_size: 44611282857.507706
- config_name: subset_82
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44633857773.640816
num_examples: 93209
download_size: 42617883549
dataset_size: 44633857773.640816
- config_name: subset_83
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44731114501.95697
num_examples: 93285
download_size: 42734681724
dataset_size: 44731114501.95697
- config_name: subset_84
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44652331899.52017
num_examples: 93224
download_size: 42640405452
dataset_size: 44652331899.52017
- config_name: subset_85
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44738443066.61914
num_examples: 93391
download_size: 42752260132
dataset_size: 44738443066.61914
- config_name: subset_86
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44658486276.88759
num_examples: 93256
download_size: 42653904477
dataset_size: 44658486276.88759
- config_name: subset_87
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44624811790.03121
num_examples: 93178
download_size: 42605954586
dataset_size: 44624811790.03121
- config_name: subset_88
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44661490498.22551
num_examples: 93239
download_size: 42652915154
dataset_size: 44661490498.22551
- config_name: subset_89
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44716406347.45679
num_examples: 93404
download_size: 42727072094
dataset_size: 44716406347.45679
- config_name: subset_9
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29792706171.83141
num_examples: 62109
download_size: 28450168341
dataset_size: 29792706171.83141
- config_name: subset_90
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 44676470960.951996
num_examples: 93187
download_size: 42678524371
dataset_size: 44676470960.951996
- config_name: subset_91
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29764280076.125225
num_examples: 62196
download_size: 28429282112
dataset_size: 29764280076.125225
- config_name: subset_92
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29850619337.3585
num_examples: 62351
download_size: 28512846915
dataset_size: 29850619337.3585
- config_name: subset_93
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29837744714.346436
num_examples: 62233
download_size: 28501403452
dataset_size: 29837744714.346436
- config_name: subset_94
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29839780119.8114
num_examples: 62323
download_size: 28513940315
dataset_size: 29839780119.8114
- config_name: subset_95
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29748188964.62823
num_examples: 62172
download_size: 28413924658
dataset_size: 29748188964.62823
- config_name: subset_96
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29800727262.69699
num_examples: 62260
download_size: 28475125160
dataset_size: 29800727262.69699
- config_name: subset_97
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29767717411.338116
num_examples: 62148
download_size: 28440311229
dataset_size: 29767717411.338116
- config_name: subset_98
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 4303888.0
num_examples: 9
download_size: 4144170
dataset_size: 4303888.0
- config_name: subset_99
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
sequence: int64
- name: transcription/ja_gpt3.5
sequence: int64
- name: whisper_transcription
sequence: int64
- name: whisper_transcription/ja_gpt3.5
sequence: int64
- name: input_length
dtype: int64
splits:
- name: train
num_bytes: 29787160440.723938
num_examples: 62157
download_size: 28445447346
dataset_size: 29787160440.723938
configs:
- config_name: subset_0
data_files:
- split: train
path: subset_0/train-*
- config_name: subset_1
data_files:
- split: train
path: subset_1/train-*
- config_name: subset_10
data_files:
- split: train
path: subset_10/train-*
- config_name: subset_100
data_files:
- split: train
path: subset_100/train-*
- config_name: subset_101
data_files:
- split: train
path: subset_101/train-*
- config_name: subset_102
data_files:
- split: train
path: subset_102/train-*
- config_name: subset_103
data_files:
- split: train
path: subset_103/train-*
- config_name: subset_104
data_files:
- split: train
path: subset_104/train-*
- config_name: subset_105
data_files:
- split: train
path: subset_105/train-*
- config_name: subset_106
data_files:
- split: train
path: subset_106/train-*
- config_name: subset_107
data_files:
- split: train
path: subset_107/train-*
- config_name: subset_108
data_files:
- split: train
path: subset_108/train-*
- config_name: subset_109
data_files:
- split: train
path: subset_109/train-*
- config_name: subset_11
data_files:
- split: train
path: subset_11/train-*
- config_name: subset_110
data_files:
- split: train
path: subset_110/train-*
- config_name: subset_111
data_files:
- split: train
path: subset_111/train-*
- config_name: subset_112
data_files:
- split: train
path: subset_112/train-*
- config_name: subset_113
data_files:
- split: train
path: subset_113/train-*
- config_name: subset_114
data_files:
- split: train
path: subset_114/train-*
- config_name: subset_115
data_files:
- split: train
path: subset_115/train-*
- config_name: subset_116
data_files:
- split: train
path: subset_116/train-*
- config_name: subset_117
data_files:
- split: train
path: subset_117/train-*
- config_name: subset_118
data_files:
- split: train
path: subset_118/train-*
- config_name: subset_119
data_files:
- split: train
path: subset_119/train-*
- config_name: subset_12
data_files:
- split: train
path: subset_12/train-*
- config_name: subset_120
data_files:
- split: train
path: subset_120/train-*
- config_name: subset_121
data_files:
- split: train
path: subset_121/train-*
- config_name: subset_122
data_files:
- split: train
path: subset_122/train-*
- config_name: subset_123
data_files:
- split: train
path: subset_123/train-*
- config_name: subset_124
data_files:
- split: train
path: subset_124/train-*
- config_name: subset_125
data_files:
- split: train
path: subset_125/train-*
- config_name: subset_126
data_files:
- split: train
path: subset_126/train-*
- config_name: subset_127
data_files:
- split: train
path: subset_127/train-*
- config_name: subset_128
data_files:
- split: train
path: subset_128/train-*
- config_name: subset_129
data_files:
- split: train
path: subset_129/train-*
- config_name: subset_13
data_files:
- split: train
path: subset_13/train-*
- config_name: subset_130
data_files:
- split: train
path: subset_130/train-*
- config_name: subset_131
data_files:
- split: train
path: subset_131/train-*
- config_name: subset_132
data_files:
- split: train
path: subset_132/train-*
- config_name: subset_133
data_files:
- split: train
path: subset_133/train-*
- config_name: subset_134
data_files:
- split: train
path: subset_134/train-*
- config_name: subset_135
data_files:
- split: train
path: subset_135/train-*
- config_name: subset_136
data_files:
- split: train
path: subset_136/train-*
- config_name: subset_137
data_files:
- split: train
path: subset_137/train-*
- config_name: subset_138
data_files:
- split: train
path: subset_138/train-*
- config_name: subset_14
data_files:
- split: train
path: subset_14/train-*
- config_name: subset_15
data_files:
- split: train
path: subset_15/train-*
- config_name: subset_16
data_files:
- split: train
path: subset_16/train-*
- config_name: subset_17
data_files:
- split: train
path: subset_17/train-*
- config_name: subset_18
data_files:
- split: train
path: subset_18/train-*
- config_name: subset_19
data_files:
- split: train
path: subset_19/train-*
- config_name: subset_2
data_files:
- split: train
path: subset_2/train-*
- config_name: subset_20
data_files:
- split: train
path: subset_20/train-*
- config_name: subset_21
data_files:
- split: train
path: subset_21/train-*
- config_name: subset_22
data_files:
- split: train
path: subset_22/train-*
- config_name: subset_23
data_files:
- split: train
path: subset_23/train-*
- config_name: subset_24
data_files:
- split: train
path: subset_24/train-*
- config_name: subset_25
data_files:
- split: train
path: subset_25/train-*
- config_name: subset_26
data_files:
- split: train
path: subset_26/train-*
- config_name: subset_27
data_files:
- split: train
path: subset_27/train-*
- config_name: subset_28
data_files:
- split: train
path: subset_28/train-*
- config_name: subset_29
data_files:
- split: train
path: subset_29/train-*
- config_name: subset_3
data_files:
- split: train
path: subset_3/train-*
- config_name: subset_30
data_files:
- split: train
path: subset_30/train-*
- config_name: subset_31
data_files:
- split: train
path: subset_31/train-*
- config_name: subset_32
data_files:
- split: train
path: subset_32/train-*
- config_name: subset_33
data_files:
- split: train
path: subset_33/train-*
- config_name: subset_34
data_files:
- split: train
path: subset_34/train-*
- config_name: subset_35
data_files:
- split: train
path: subset_35/train-*
- config_name: subset_36
data_files:
- split: train
path: subset_36/train-*
- config_name: subset_37
data_files:
- split: train
path: subset_37/train-*
- config_name: subset_38
data_files:
- split: train
path: subset_38/train-*
- config_name: subset_39
data_files:
- split: train
path: subset_39/train-*
- config_name: subset_4
data_files:
- split: train
path: subset_4/train-*
- config_name: subset_40
data_files:
- split: train
path: subset_40/train-*
- config_name: subset_41
data_files:
- split: train
path: subset_41/train-*
- config_name: subset_42
data_files:
- split: train
path: subset_42/train-*
- config_name: subset_43
data_files:
- split: train
path: subset_43/train-*
- config_name: subset_44
data_files:
- split: train
path: subset_44/train-*
- config_name: subset_45
data_files:
- split: train
path: subset_45/train-*
- config_name: subset_46
data_files:
- split: train
path: subset_46/train-*
- config_name: subset_47
data_files:
- split: train
path: subset_47/train-*
- config_name: subset_48
data_files:
- split: train
path: subset_48/train-*
- config_name: subset_49
data_files:
- split: train
path: subset_49/train-*
- config_name: subset_5
data_files:
- split: train
path: subset_5/train-*
- config_name: subset_50
data_files:
- split: train
path: subset_50/train-*
- config_name: subset_51
data_files:
- split: train
path: subset_51/train-*
- config_name: subset_52
data_files:
- split: train
path: subset_52/train-*
- config_name: subset_53
data_files:
- split: train
path: subset_53/train-*
- config_name: subset_54
data_files:
- split: train
path: subset_54/train-*
- config_name: subset_55
data_files:
- split: train
path: subset_55/train-*
- config_name: subset_56
data_files:
- split: train
path: subset_56/train-*
- config_name: subset_57
data_files:
- split: train
path: subset_57/train-*
- config_name: subset_58
data_files:
- split: train
path: subset_58/train-*
- config_name: subset_59
data_files:
- split: train
path: subset_59/train-*
- config_name: subset_6
data_files:
- split: train
path: subset_6/train-*
- config_name: subset_60
data_files:
- split: train
path: subset_60/train-*
- config_name: subset_61
data_files:
- split: train
path: subset_61/train-*
- config_name: subset_62
data_files:
- split: train
path: subset_62/train-*
- config_name: subset_63
data_files:
- split: train
path: subset_63/train-*
- config_name: subset_64
data_files:
- split: train
path: subset_64/train-*
- config_name: subset_65
data_files:
- split: train
path: subset_65/train-*
- config_name: subset_66
data_files:
- split: train
path: subset_66/train-*
- config_name: subset_67
data_files:
- split: train
path: subset_67/train-*
- config_name: subset_68
data_files:
- split: train
path: subset_68/train-*
- config_name: subset_69
data_files:
- split: train
path: subset_69/train-*
- config_name: subset_7
data_files:
- split: train
path: subset_7/train-*
- config_name: subset_70
data_files:
- split: train
path: subset_70/train-*
- config_name: subset_71
data_files:
- split: train
path: subset_71/train-*
- config_name: subset_72
data_files:
- split: train
path: subset_72/train-*
- config_name: subset_73
data_files:
- split: train
path: subset_73/train-*
- config_name: subset_74
data_files:
- split: train
path: subset_74/train-*
- config_name: subset_75
data_files:
- split: train
path: subset_75/train-*
- config_name: subset_76
data_files:
- split: train
path: subset_76/train-*
- config_name: subset_77
data_files:
- split: train
path: subset_77/train-*
- config_name: subset_78
data_files:
- split: train
path: subset_78/train-*
- config_name: subset_79
data_files:
- split: train
path: subset_79/train-*
- config_name: subset_8
data_files:
- split: train
path: subset_8/train-*
- config_name: subset_80
data_files:
- split: train
path: subset_80/train-*
- config_name: subset_81
data_files:
- split: train
path: subset_81/train-*
- config_name: subset_82
data_files:
- split: train
path: subset_82/train-*
- config_name: subset_83
data_files:
- split: train
path: subset_83/train-*
- config_name: subset_84
data_files:
- split: train
path: subset_84/train-*
- config_name: subset_85
data_files:
- split: train
path: subset_85/train-*
- config_name: subset_86
data_files:
- split: train
path: subset_86/train-*
- config_name: subset_87
data_files:
- split: train
path: subset_87/train-*
- config_name: subset_88
data_files:
- split: train
path: subset_88/train-*
- config_name: subset_89
data_files:
- split: train
path: subset_89/train-*
- config_name: subset_9
data_files:
- split: train
path: subset_9/train-*
- config_name: subset_90
data_files:
- split: train
path: subset_90/train-*
- config_name: subset_91
data_files:
- split: train
path: subset_91/train-*
- config_name: subset_92
data_files:
- split: train
path: subset_92/train-*
- config_name: subset_93
data_files:
- split: train
path: subset_93/train-*
- config_name: subset_94
data_files:
- split: train
path: subset_94/train-*
- config_name: subset_95
data_files:
- split: train
path: subset_95/train-*
- config_name: subset_96
data_files:
- split: train
path: subset_96/train-*
- config_name: subset_97
data_files:
- split: train
path: subset_97/train-*
- config_name: subset_98
data_files:
- split: train
path: subset_98/train-*
- config_name: subset_99
data_files:
- split: train
path: subset_99/train-*
---
|