File size: 3,088 Bytes
a29e26b d133ec5 e1c7fe6 77bcbc4 e1c7fe6 77bcbc4 e1c7fe6 ab58ece e1c7fe6 77bcbc4 e1c7fe6 a29e26b 5cd16c9 ab58ece 136d57e e1c7fe6 77bcbc4 e1c7fe6 77bcbc4 e1c7fe6 77bcbc4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 |
import os
import re
import datasets
logger = datasets.logging.get_logger(__name__)
_DESCRIPTION = """\
Common Crawl - Malayalam.
"""
_CITATION = """\
@article{qburst,
title={Common Crawl - Malayalam},
author={n.d},
year={2020},
journal={n.d},
}
"""
_URLs = {
"malayalam_wiki_2020": "https://huggingface.co/datasets/rajeshradhakrishnan/malayalam_2020_wiki/resolve/main/",
"checksum_url": "https://huggingface.co/datasets/rajeshradhakrishnan/malayalam_2020_wiki/resolve/main/ml_sha256.txt"
}
class MalayalamWikiConfig(datasets.BuilderConfig):
    """Configuration for the MalayalamWiki dataset builder.

    A thin pass-through config: every keyword argument is forwarded
    unchanged to ``datasets.BuilderConfig``.
    """

    def __init__(self, **kwargs):
        """Create a config; ``**kwargs`` go straight to the base class."""
        super().__init__(**kwargs)
class MalayalamWiki(datasets.GeneratorBasedBuilder):
    """Common Crawl - Malayalam: raw Malayalam web text, one example per line."""

    VERSION = datasets.Version("1.0.0")

    BUILDER_CONFIGS = [
        MalayalamWikiConfig(
            name="malayalam_wiki_2020", version=VERSION, description="Common Crawl - Malayalam."
        ),
    ]

    def remove_special_characters(self, txt):
        """Clean one line of text: drop punctuation, zero-width marks and Latin letters.

        Args:
            txt: raw text line.

        Returns:
            Cleaned text; a trailing space is appended by the last two
            substitutions (callers ``.strip()`` the final result).
        """
        # Raw string: the original non-raw literal relied on Python passing
        # invalid escapes (\, \? \- ...) through verbatim, which raises
        # SyntaxWarning on Python 3.12+. The raw form matches identically.
        # NOTE(review): the literal letters "Utrnle" in this class look
        # accidental, but they are harmless — all ASCII letters are removed
        # by english_ignore_regex below anyway.
        chars_to_ignore_regex = r'[\,\?\.\!\-\;\:\"\“\%\‘\”\�Utrnle\_]'
        # Zero-width non-joiner/joiner and LRM marks common in Malayalam text.
        unicode_ignore_regex = r'[\u200e\u200c\u200d]'
        english_ignore_regex = r'[a-zA-Z]'
        txt = txt.strip()
        txt = re.sub(chars_to_ignore_regex, '', txt)
        txt = re.sub(unicode_ignore_regex, '', txt) + " "
        txt = re.sub(english_ignore_regex, '', txt) + " "
        return txt

    def _info(self):
        """Dataset metadata: a single string feature named ``text``."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "text": datasets.Value("string")
                }
            ),
            supervised_keys=None,
            homepage="https://github.com/qburst/common-crawl-malayalam",
            citation=_CITATION
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators.

        Downloads the checksum file, reads the data file names from it,
        then downloads the data files and exposes them as a single TRAIN
        split.
        """
        checksum_url = _URLs["checksum_url"]
        checksum_file = dl_manager.download(checksum_url)
        with open(checksum_file, encoding="utf-8") as f:
            data_filenames = [line.strip() for line in f if line]
        # NOTE(review): the [1:2] slice deliberately downloads only the
        # second listed file — presumably to keep the dataset small; confirm
        # against the published ml_sha256.txt before widening the range.
        data_urls = [_URLs["malayalam_wiki_2020"] + data_filename for data_filename in data_filenames[1:2]]
        downloaded_files = dl_manager.download(data_urls)
        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepaths": downloaded_files}),
        ]

    def _generate_examples(self, filepaths):
        """This function returns the examples in the raw (text) form by iterating on all the files."""
        for file_id, filepath in enumerate(filepaths):
            logger.info("generating examples from = %s", filepath)
            with open(filepath, encoding="utf-8") as f:
                for row_id, row in enumerate(f):
                    # Key is "<file>_<row>" so ids stay unique across files.
                    yield f"{file_id}_{row_id}", {"text": self.remove_special_characters(row).strip()}
|