v2ray committed
Commit b6b8783 · Parent: 35fbdd7

Added yande.re scraping script.
README.md CHANGED
@@ -8,4 +8,4 @@ size_categories:
  - 10K<n<100K
  ---
  # Anime Collection
- A repo containing scripts to scrape Gelbooru and images scraped from it.
+ A repo containing scripts to scrape booru sites and the images scraped from them.
compressed/chunk_0.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:ab6410db23cba6cd1c8c2fa8d6093d8dbe2cb14cd1aed59b705faa8ecf67c212
- size 1186099548

compressed/chunk_1.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7cf5fb155623e7bf3132edd76510415b75b5f4b268e0ee6a811d11372bc22cbc
- size 1486048602

compressed/chunk_2.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:be51cd34ade9334928c2a69f4dd56ea7861c3abbc4fb7a1ea0ad1e0068df1003
- size 1882298018

compressed/chunk_3.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:03c8b2c3b7930d1cde0a835ed340eeb6cecd746f595847b64c5b17b0091d1a10
- size 2012245804

compressed/chunk_4.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f3ffeff7a338386ae13ae3badce0a95a14f568a52e7323f075a6c4a0353d49a4
- size 1833546263

compressed/chunk_5.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:325903b06e284c4733f206ea714381ad9c3cc2d7ac8ef4796acfaa8edb8430bc
- size 2124046324

compressed/chunk_6.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:93bbfd7e91e0f390a140bd697086c639484582f307de907e82a17c8d6d440674
- size 2363517040

compressed/chunk_7.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3a59a298d71b79ebef62bb71f7839eda94b8c3d9995412eb14d2be7df014da88
- size 2403955847

compressed/chunk_8.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4ff1903e3890e0bf19dc380886e8cb24b03edf1d0d7656b6a275f38f2384ac16
- size 2239588950

compressed/chunk_9.tar.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9b7a27af56b467ec31b7208b960b0c27c93ad8b4bf97ef71d370477e1fbd0d8e
- size 2723277731
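
For reference, each file deleted above is a Git LFS pointer: three "key value" lines recording the spec version, the archive's sha256 oid, and its byte size. A minimal sketch, assuming a pointer file and its matching chunk archive are available locally, of checking one against the other (both function names are illustrative, not part of this repo):

import os
import hashlib

def read_lfs_pointer(pointer_path):
    # A pointer file is three "key value" lines: version, oid, size.
    with open(pointer_path, "r", encoding="utf8") as f:
        fields = dict(line.split(" ", 1) for line in f.read().strip().splitlines())
    return fields["oid"].split(":", 1)[1], int(fields["size"])

def verify_chunk(chunk_path, oid, size):
    # Compare the archive's actual sha256 digest and byte size against the pointer.
    digest = hashlib.sha256()
    with open(chunk_path, "rb") as f:
        for block in iter(lambda: f.read(1 << 20), b""):
            digest.update(block)
    return digest.hexdigest() == oid and os.path.getsize(chunk_path) == size
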
scrape.py → scrape_gel.py RENAMED
@@ -17,7 +17,7 @@ MAX_TASKS = 50
  MAX_RETRY = 3
  IMAGE_DIR = "images"
  IMAGE_EXT = {
-     ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
+     ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
      ".webp", ".heic", ".heif", ".avif", ".jxl",
  }

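The renamed Gelbooru scraper keeps the same IMAGE_EXT allow-list used below in scrape_yan.py; a minimal sketch of the filename check this set implies (is_image is a hypothetical helper, not in the repo):

import os

IMAGE_EXT = {
    ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
    ".webp", ".heic", ".heif", ".avif", ".jxl",
}

def is_image(filename):
    # True when the file's extension is in the allow-list.
    return os.path.splitext(filename)[1].lower() in IMAGE_EXT
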
scrape_yan.py ADDED
@@ -0,0 +1,179 @@
+ import os
+ import sys
+ import random
+ import urllib.parse
+ import signal
+ import asyncio
+ import aiohttp
+ import aiofiles
+ import argparse
+ import concurrent.futures
+ from PIL import Image
+
+ MAX_TASKS = 50
+ MAX_RETRY = 3
+ IMAGE_DIR = "images"
+ IMAGE_EXT = {
+     ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
+     ".webp", ".heic", ".heif", ".avif", ".jxl",
+ }
+
+ SIGINT_COUNTER = 0
+
+ def sigint_handler(signum, frame):
+     # Count Ctrl+C presses: 1 = graceful stop, 2 = semi-forceful, 3 = hard exit.
+     global SIGINT_COUNTER
+     SIGINT_COUNTER += 1
+     print()
+     if SIGINT_COUNTER >= 3:
+         print("Script force quit by user, exiting...")
+         sys.exit(1)
+
+ def validate_image(image_path, tags_path):
+     try:
+         with Image.open(image_path) as img:
+             img.verify()
+         return True
+     except Exception as e:
+         print(f"Error validating image {image_path}: {e}")
+         return False
+
+ def handle_validation_result(future, image_path, tags_path):
+     # Delete the image/tags pair if Pillow failed to verify the download.
+     if future.result():
+         return
+     try:
+         os.remove(image_path)
+     except Exception as e:
+         print(f"Error deleting image file: {e}")
+     try:
+         os.remove(tags_path)
+         print(f"Deleted invalid image and tags files: {image_path}, {tags_path}")
+     except Exception as e:
+         print(f"Error deleting tags file: {e}")
+
+ async def process_link(image_object, image_ids_to_ignore, session, thread_pool):
+     image_id = str(image_object["id"])
+     if image_id in image_ids_to_ignore:
+         # print(f"Image {image_id} already exists, skipped.")
+         return
+     for i in range(1, MAX_RETRY + 2):  # 1 indexed.
+         try:
+             if SIGINT_COUNTER >= 1:
+                 break
+             # print(f"Processing image {image_id}...")
+             image_ext = image_object["file_ext"]
+             if not image_ext:
+                 print(f"Image {image_id} has no file extension, skipped.")
+                 return
+             image_ext = "." + image_ext
+             if image_ext not in IMAGE_EXT:
+                 print(f"Image {image_id} is not an image, skipped.")
+                 return
+             tags = image_object["tags"].split()
+             tags.append("nsfw" if image_object["rating"] in {"e", "q"} else "sfw")
+             random.shuffle(tags)
+             image_path = os.path.join(IMAGE_DIR, image_id + image_ext)
+             tags_path = os.path.join(IMAGE_DIR, image_id + ".txt")
+             tags_text = ", ".join(tag.replace("_", " ").replace("nekomimi", "cat girl") for tag in tags)
+             async with session.get(image_object["file_url"]) as img_response:
+                 img_data = await img_response.read()
+             os.makedirs(IMAGE_DIR, exist_ok=True)
+             async with aiofiles.open(image_path, "wb") as f:
+                 await f.write(img_data)
+             async with aiofiles.open(tags_path, "w", encoding="utf8") as f:
+                 await f.write(tags_text)
+             # Verify the image on a worker thread so the event loop stays free.
+             future = thread_pool.submit(validate_image, image_path, tags_path)
+             future.add_done_callback(lambda x: handle_validation_result(x, image_path, tags_path))
+             return
+         except Exception as e:
+             if i > MAX_RETRY:
+                 break
+             # print(f"A {e.__class__.__name__} occurred with image {image_id}: {e}\nPausing for 0.1 second before retrying attempt {i}/{MAX_RETRY}...")
+             await asyncio.sleep(0.1)
+     print(f"All retry attempts failed, image {image_id} skipped.")
+
+ def parse_args():
+     parser = argparse.ArgumentParser(description="Scrape images from yande.re.")
+     parser.add_argument("-s", "--site", type=str, default="https://yande.re", help="Domain to scrape from, defaults to https://yande.re")
+     parser.add_argument("tags_to_search", nargs=argparse.REMAINDER, help="List of tags to search for; when not specified, matches every image")
+     args = parser.parse_args()
+     if not args.tags_to_search:
+         args.tags_to_search = [""]
+     return args
+
+ async def main():
+     args = parse_args()
+     print("Starting...")
+     page_number = 1
+     search_tags = "+".join(urllib.parse.quote(tag, safe="") for tag in args.tags_to_search)
+
+     # Skip any image that already has a tags file from a previous run.
+     image_ids_to_ignore = set()
+     if os.path.isdir(IMAGE_DIR):
+         for path in os.listdir(IMAGE_DIR):
+             image_id, ext = os.path.splitext(path)
+             if ext != ".txt":
+                 continue
+             image_ids_to_ignore.add(image_id)
+
+     signal.signal(signal.SIGINT, sigint_handler)
+
+     async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) as session:
+         thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
+         tasks = []
+         while True:
+             try:
+                 if SIGINT_COUNTER >= 1:
+                     break
+                 request_url = f"{args.site}/post.json?limit=1000&tags={search_tags}&page={page_number}"
+                 print(f"Going to {request_url}")
+                 async with session.get(request_url) as response:
+                     image_objects = await response.json()
+                 image_count = len(image_objects)
+                 if image_count == 0:
+                     print("Website returned 0 images.")
+                     break
+                 print(f"Got {image_count} posts.")
+                 page_number += 1
+                 for image_object in image_objects:
+                     if SIGINT_COUNTER >= 1:
+                         break
+                     # Cap the number of in-flight downloads at MAX_TASKS.
+                     while len(tasks) >= MAX_TASKS:
+                         if SIGINT_COUNTER >= 1:
+                             break
+                         await asyncio.sleep(0.1)
+                         for i in range(len(tasks) - 1, -1, -1):
+                             task = tasks[i]
+                             if task.done():
+                                 await task
+                                 del tasks[i]
+                     tasks.append(asyncio.create_task(process_link(image_object, image_ids_to_ignore, session, thread_pool)))
+             except Exception as e:
+                 print(f"An error occurred: {e}\nPausing for 0.1 second before retrying...")
+                 await asyncio.sleep(0.1)
+         if SIGINT_COUNTER >= 1:
+             print("Script interrupted by user, gracefully exiting...\nYou can interrupt again to exit semi-forcefully, but it will break image checks!")
+         else:
+             print("No more images to download, waiting for already submitted tasks to finish...")
+         while tasks and SIGINT_COUNTER <= 1:
+             await asyncio.sleep(0.1)
+             for i in range(len(tasks) - 1, -1, -1):
+                 task = tasks[i]
+                 if task.done():
+                     await task
+                     del tasks[i]
+         # Wait until the validation thread pool drains before exiting.
+         while True:
+             if SIGINT_COUNTER >= 2:
+                 print("Another interrupt received, exiting semi-forcefully...\nYou can interrupt again for truly forceful exit, but it most likely will break a lot of things!")
+                 thread_pool.shutdown(cancel_futures=True)
+                 break
+             await asyncio.sleep(0.1)
+             if not thread_pool._work_queue.qsize():
+                 break
+         if SIGINT_COUNTER >= 2:
+             sys.exit(1)
+
+ if __name__ == "__main__":
+     try:
+         asyncio.run(main())
+     except KeyboardInterrupt:
+         print("\nScript interrupted by user, exiting...")
+         sys.exit(1)
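
The script writes each post under IMAGE_DIR as an image file plus a same-named .txt of comma-separated tags. A sketch of reading one scraped pair back (load_pair and the ext default are illustrative assumptions, not part of the repo):

import os
from PIL import Image

IMAGE_DIR = "images"

def load_pair(image_id, ext=".jpg"):
    # Each scraped post is an image file plus a same-named .txt of comma-separated tags.
    with open(os.path.join(IMAGE_DIR, image_id + ".txt"), encoding="utf8") as f:
        tags = [t.strip() for t in f.read().split(",")]
    image = Image.open(os.path.join(IMAGE_DIR, image_id + ext))
    return image, tags

Per the argparse help above, running `python scrape_yan.py` with no arguments matches every post; pass tags to narrow the search.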