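"""Asynchronous image scraper for yande.re-style boorus.

Posts are fetched page by page from the site's /post.json endpoint; each image
is saved into IMAGE_DIR next to a .txt file containing its comma-separated tags,
and every download is handed to utils.submit_validation (a project-local helper)
for resizing and optional AVIF conversion.

Example invocation (assuming this file is saved as scrape.py):

    python scrape.py -W 512 -H 512 --min-tags 10 landscape
"""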
import argparse
import asyncio
import concurrent.futures
import os
import random
import sys
import urllib.parse

import aiofiles
import aiohttp

import utils
from constants import *  # expects at least IMAGE_DIR, IMAGE_EXT, MAX_RETRY and MAX_TASKS

# Total timeout (in seconds) for every HTTP request made through aiohttp.
TIMEOUT = 30

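# Illustrative sketch of the post object process_link() reads below. The field
# names come from this file; the values are hypothetical, not taken from the API:
#
#   {
#       "id": 123456,
#       "file_url": "https://example.test/image/original.jpg",
#       "sample_url": "https://example.test/sample/downscaled.jpg",
#       "tags": "long_hair smile nekomimi",
#       "rating": "s",
#   }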
async def process_link(thread_pool, session, image_object, existing_image_ids, width, height, convert_to_avif, use_low_quality, min_tags):
    image_id = str(image_object["id"])
    if image_id in existing_image_ids:
        return
    existing_image_ids.add(image_id)

    error = None
    for i in range(1, MAX_RETRY + 2):
        try:
            if utils.get_sigint_count() >= 1:
                break

            # Use the downscaled sample when low quality was requested, otherwise the original file.
            if not use_low_quality:
                image_url = image_object["file_url"]
            else:
                image_url = image_object["sample_url"]

            image_ext = os.path.splitext(image_url)[1].lower()
            if image_ext not in IMAGE_EXT:
                print(f"Image {image_id} is not an image, skipped.")
                return

            tags = image_object["tags"].split()
            tag_count = len(tags)
            if tag_count < min_tags:
                return

            # Turn the post rating into an extra tag, then shuffle the tag order.
            rating = image_object["rating"]
            if rating == "e": tags.append("nsfw")
            elif rating == "q": tags.append("qfw")
            else: tags.append("sfw")
            random.shuffle(tags)

            image_path = os.path.join(IMAGE_DIR, image_id + image_ext)
            tags_path = os.path.join(IMAGE_DIR, image_id + ".txt")
            tags_text = ", ".join(tag.replace("_", " ").replace("nekomimi", "cat girl") for tag in tags)

            async with session.get(image_url) as img_response:
                img_data = await img_response.read()

            os.makedirs(IMAGE_DIR, exist_ok=True)
            async with aiofiles.open(image_path, "wb") as f:
                await f.write(img_data)
            async with aiofiles.open(tags_path, "w", encoding="utf8") as f:
                await f.write(tags_text)

            # Validation (resizing / AVIF conversion) runs in the thread pool; on failure
            # the id is released so the image can be picked up again on a later run.
            if not await utils.submit_validation(thread_pool, image_path, tags_path, width, height, convert_to_avif):
                existing_image_ids.remove(image_id)
            return
        except Exception as e:
            error = e
            if i > MAX_RETRY:
                break

            await asyncio.sleep(0.1)

    existing_image_ids.remove(image_id)
    print(f"All retry attempts failed, image {image_id} skipped. Final error {error.__class__.__name__}: {error}")

def parse_args():
    parser = argparse.ArgumentParser(description="Scrape images from yande.re.")
    parser.add_argument("-s", "--site", default="https://yande.re", help="Domain to scrape from, defaults to https://yande.re")
    parser.add_argument("-W", "--width", type=int, help="Scale the width of the image to the specified value; width and height must be provided together or not at all")
    parser.add_argument("-H", "--height", type=int, help="Scale the height of the image to the specified value; width and height must be provided together or not at all")
    parser.add_argument("-a", "--avif", action="store_true", help="If set, convert the image to AVIF; requires pillow-avif-plugin to be installed")
    parser.add_argument("-l", "--low-quality", action="store_true", help="If set, download the sample instead of the original image")
    parser.add_argument("-t", "--min-tags", type=int, default=0, help="Filter out images with fewer than the specified number of tags, defaults to 0")
    parser.add_argument("tags_to_search", nargs=argparse.REMAINDER, help="List of tags to search for; when not specified, matches every image")
    args = parser.parse_args()

    if args.width is None or args.height is None:
        if args.width is not None or args.height is not None:
            print("You must either provide both width and height or neither!")
            sys.exit(1)
    else:
        if args.width < 1:
            print("Width must be greater than or equal to 1!")
            sys.exit(1)
        if args.height < 1:
            print("Height must be greater than or equal to 1!")
            sys.exit(1)

    if args.avif:
        try:
            import pillow_avif  # importing the plugin registers AVIF support with Pillow
        except ImportError:
            print("You need to pip install pillow-avif-plugin to use avif conversion!")
            sys.exit(1)

    if args.min_tags < 0:
        print("Min tags must be greater than or equal to 0!")
        sys.exit(1)

    # An empty search term matches every post on the site.
    if not args.tags_to_search:
        args.tags_to_search = [""]
    return args

async def main():
    args = parse_args()
    print("Starting...")
    page_number = 1
    # URL-encode each tag individually and join them with "+" for the search query.
    search_tags = "+".join(urllib.parse.quote(tag, safe="") for tag in args.tags_to_search)

    os.makedirs(IMAGE_DIR, exist_ok=True)
    existing_image_ids = utils.get_existing_image_id_set(IMAGE_DIR)
    utils.register_sigint_callback()

    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=TIMEOUT)) as session:
        thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
        tasks = []
        while True:
            try:
                if utils.get_sigint_count() >= 1:
                    break

                request_url = f"{args.site}/post.json?limit=1000&tags={search_tags}&page={page_number}"
                print(f"Going to {request_url}")
                async with session.get(request_url) as response:
                    image_objects = await response.json()
                image_count = len(image_objects)
                if image_count == 0:
                    print("Website returned 0 images.")
                    break

                print(f"Got {image_count} posts.")
                page_number += 1
                for image_object in image_objects:
                    if utils.get_sigint_count() >= 1:
                        break
                    # Cap the number of concurrent downloads, reaping finished tasks
                    # while waiting for a free slot.
                    while len(tasks) >= MAX_TASKS:
                        if utils.get_sigint_count() >= 1:
                            break
                        await asyncio.sleep(0.1)
                        for i in range(len(tasks) - 1, -1, -1):
                            task = tasks[i]
                            if task.done():
                                await task
                                del tasks[i]
                    tasks.append(asyncio.create_task(process_link(thread_pool, session, image_object, existing_image_ids, args.width, args.height, args.avif, args.low_quality, args.min_tags)))
            except Exception as e:
                print(f"An error occurred: {e}\nPausing for 0.1 seconds before retrying...")
                await asyncio.sleep(0.1)

        if utils.get_sigint_count() >= 1:
            print("Script interrupted by user, gracefully exiting...\nYou can interrupt again to exit semi-forcefully, but it will break image checks!")
        else:
            print("No more images to download, waiting for already submitted tasks to finish...")

        # Drain the remaining tasks unless a second interrupt arrives.
        while tasks and utils.get_sigint_count() <= 1:
            await asyncio.sleep(0.1)
            for i in range(len(tasks) - 1, -1, -1):
                task = tasks[i]
                if task.done():
                    await task
                    del tasks[i]

        if utils.get_sigint_count() >= 1:
            if utils.get_sigint_count() >= 2:
                print("Another interrupt received, exiting semi-forcefully...\nYou can interrupt again for a truly forceful exit, but it most likely will break a lot of things!")
            sys.exit(1)

if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nScript interrupted by user, exiting...")
        sys.exit(1)