Hopefully yande.re images are better.
- compressed/chunk_0.tar.gz +3 -0
- compressed/chunk_1.tar.gz +3 -0
- compressed/chunk_2.tar.gz +3 -0
- compressed/chunk_3.tar.gz +3 -0
- compressed/chunk_4.tar.gz +3 -0
- compressed/chunk_5.tar.gz +3 -0
- compressed/chunk_6.tar.gz +3 -0
- compressed/chunk_7.tar.gz +3 -0
- compressed/chunk_8.tar.gz +3 -0
- compressed/chunk_9.tar.gz +3 -0
- scrape_gel.py +5 -2
- scrape_yan.py +8 -5
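
The ten compressed/chunk_*.tar.gz archives below are committed as Git LFS pointers, so only the pointer text lives in the repo. A minimal sketch for unpacking them, assuming a clone where "git lfs pull" has already materialized the real tarballs, and assuming a hypothetical extracted/ output directory:

import glob
import tarfile

# Unpack every chunk into one output directory; the archives are
# plain gzip-compressed tarballs once LFS has fetched them.
for path in sorted(glob.glob("compressed/chunk_*.tar.gz")):
    with tarfile.open(path, "r:gz") as tar:
        tar.extractall("extracted")  # hypothetical output directory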
compressed/chunk_0.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12f8aedef957fd0540ed276d1b795fdac7f4ff4b340cd4ba115513c73fe5e54d
+size 6504537662
compressed/chunk_1.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1281ba239492db6fc46d0c1c0414d6cbb64177e41e85d332aa08a4cbdfe3b566
+size 6187281166
compressed/chunk_2.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4480c0949a88655221b81b733259b8ad8fbecd0d4697d3fb002b3f8bdd7a1d6c
+size 6582511633
compressed/chunk_3.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ffe6e6cd465eda38596a329a964d90133f6e266a267ccc94ec328ddd6db246f
+size 7214485145
compressed/chunk_4.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f04415b1e4a8cacded40b17920330c4d0757b798129860fb0f8047a4030ca09
+size 6457710727
compressed/chunk_5.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:274b3422c434169f30a07f1424b8083895849546ed82fb7108ff8de6d7269b0b
+size 7044618031
compressed/chunk_6.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:81e37b08ccfde371666a2ee8ad5cabd75e32f03a446006227d1417c5fabdf3bb
+size 6876412339
compressed/chunk_7.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e204f4cd38b02fde1c0901394fb07860da78cb8ffada00827b86cd980a22489b
+size 6981906967
compressed/chunk_8.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:422ea225d1490139d04185edbd906307d56a3b83bc851b4d13cd2f066d160898
+size 3530250109
compressed/chunk_9.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:520e1c25dba7198c8cb3dbd8694b18b309879ec100036a0338cdbb4ff777fd8b
+size 3009101474
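
Each pointer above carries three fields from the Git LFS spec: the spec version, the SHA-256 of the real file, and its size in bytes. A sketch (not part of this commit) for verifying a downloaded chunk against its pointer, with hypothetical paths:

import hashlib

def parse_lfs_pointer(path):
    # LFS pointers are small "key value" text lines.
    with open(path) as f:
        fields = dict(line.split(" ", 1) for line in f.read().splitlines())
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

def verify_chunk(data_path, pointer_path):
    oid, size = parse_lfs_pointer(pointer_path)
    digest, total = hashlib.sha256(), 0
    with open(data_path, "rb") as f:
        for block in iter(lambda: f.read(1 << 20), b""):  # hash 1 MiB at a time
            digest.update(block)
            total += len(block)
    return digest.hexdigest() == oid and total == size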
scrape_gel.py CHANGED
@@ -15,6 +15,7 @@ from bs4 import BeautifulSoup
 
 MAX_TASKS = 50
 MAX_RETRY = 3
+TIMEOUT = 10
 IMAGE_DIR = "images"
 IMAGE_EXT = {
     ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
@@ -58,6 +59,7 @@ async def process_link(image_url, image_ids_to_ignore, session, thread_pool):
     if image_id in image_ids_to_ignore:
         # print(f"Image {image_id} already exists, skipped.")
         return
+    error = None
     for i in range(1, MAX_RETRY + 2): # 1 indexed.
         try:
             if SIGINT_COUNTER >= 1:
@@ -95,11 +97,12 @@ async def process_link(image_url, image_ids_to_ignore, session, thread_pool):
             future.add_done_callback(lambda x: handle_validation_result(x, image_path, tags_path))
             return
         except Exception as e:
+            error = e
             if i > MAX_RETRY:
                 break
             # print(f"A {e.__class__.__name__} occurred with image {image_id}: {e}\nPausing for 0.1 second before retrying attempt {i}/{MAX_RETRY}...")
             await asyncio.sleep(0.1)
-    print(f"All retry attempts failed, image {image_id} skipped.")
+    print(f"All retry attempts failed, image {image_id} skipped. Final error {error.__class__.__name__}: {error}")
 
 def parse_args():
     parser = argparse.ArgumentParser(description="Scrape images from Gelbooru.")
@@ -126,7 +129,7 @@ async def main():
 
     signal.signal(signal.SIGINT, sigint_handler)
 
-    async with aiohttp.ClientSession(cookies={"fringeBenefits": "yup"}, timeout=aiohttp.ClientTimeout(total=
+    async with aiohttp.ClientSession(cookies={"fringeBenefits": "yup"}, timeout=aiohttp.ClientTimeout(total=TIMEOUT)) as session:
         thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
         tasks = []
         while True:
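
Both scripts now pin the session timeout to a named TIMEOUT constant and remember the last exception so the give-up message can name it. A stripped-down sketch of that retry shape, with a hypothetical fetch coroutine standing in for the scripts' real download body:

import asyncio

MAX_RETRY = 3

async def fetch_with_retry(fetch, image_id):
    error = None
    for i in range(1, MAX_RETRY + 2):  # 1 indexed, like the scripts
        try:
            return await fetch(image_id)
        except Exception as e:
            error = e  # keep the most recent failure for the final report
            if i > MAX_RETRY:
                break
            await asyncio.sleep(0.1)  # brief pause before retrying
    print(f"All retry attempts failed, image {image_id} skipped. "
          f"Final error {error.__class__.__name__}: {error}")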
scrape_yan.py CHANGED
@@ -12,6 +12,7 @@ from PIL import Image
 
 MAX_TASKS = 50
 MAX_RETRY = 3
+TIMEOUT = 30
 IMAGE_DIR = "images"
 IMAGE_EXT = {
     ".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif",
@@ -55,16 +56,17 @@ async def process_link(image_object, image_ids_to_ignore, session, thread_pool):
     if image_id in image_ids_to_ignore:
         # print(f"Image {image_id} already exists, skipped.")
         return
+    error = None
     for i in range(1, MAX_RETRY + 2): # 1 indexed.
         try:
             if SIGINT_COUNTER >= 1:
                 break
             # print(f"Processing image {image_id}...")
-
+            image_url = image_object["file_url"] # sample_url
+            image_ext = os.path.splitext(image_url)[1].lower()
             if not image_ext:
                 print(f"Image {image_id} has no file extension, skipped.")
                 return
-            image_ext = "." + image_ext
             if image_ext not in IMAGE_EXT:
                 print(f"Image {image_id} is not an image, skipped.")
                 return
@@ -74,7 +76,7 @@ async def process_link(image_object, image_ids_to_ignore, session, thread_pool):
             image_path = os.path.join(IMAGE_DIR, image_id + image_ext)
             tags_path = os.path.join(IMAGE_DIR, image_id + ".txt")
             tags_text = ", ".join(tag.replace("_", " ").replace("nekomimi", "cat girl") for tag in tags)
-            async with session.get(
+            async with session.get(image_url) as img_response:
                 img_data = await img_response.read()
             os.makedirs(IMAGE_DIR, exist_ok=True)
             async with aiofiles.open(image_path, "wb") as f:
@@ -85,11 +87,12 @@ async def process_link(image_object, image_ids_to_ignore, session, thread_pool):
             future.add_done_callback(lambda x: handle_validation_result(x, image_path, tags_path))
             return
         except Exception as e:
+            error = e
             if i > MAX_RETRY:
                 break
             # print(f"A {e.__class__.__name__} occurred with image {image_id}: {e}\nPausing for 0.1 second before retrying attempt {i}/{MAX_RETRY}...")
             await asyncio.sleep(0.1)
-    print(f"All retry attempts failed, image {image_id} skipped.")
+    print(f"All retry attempts failed, image {image_id} skipped. Final error {error.__class__.__name__}: {error}")
 
 def parse_args():
     parser = argparse.ArgumentParser(description="Scrape images from yande.re.")
@@ -116,7 +119,7 @@ async def main():
 
     signal.signal(signal.SIGINT, sigint_handler)
 
-    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=
+    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=TIMEOUT)) as session:
         thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
         tasks = []
        while True:
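
The yande.re script now downloads the post's full-size file_url (the inline "# sample_url" comment suggests the smaller sample URL was used before, which fits the commit message) and derives the extension with os.path.splitext, which already returns the leading dot; that is why the old '"." + image_ext' line disappears. A minimal sketch of the derivation on a hypothetical URL:

import os

# Subset of extensions visible in the diff; the real set is longer.
IMAGE_EXT = {".png", ".jpg", ".jpeg", ".bmp", ".tiff", ".tif"}

image_url = "https://files.yande.re/image/abc123/example.JPG"  # hypothetical URL
image_ext = os.path.splitext(image_url)[1].lower()  # ".jpg", dot included
print(image_ext in IMAGE_EXT)  # True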