Sub-tasks: multi-class-classification
Languages: English
Size: 1K<n<10K
Tags: natural-language-understanding, ideology classification, text classification, natural language processing
License:
File size: 2,906 Bytes
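If the scraped data is published in this repository in a format the datasets library can read (an assumption), it can be loaded along these lines; the repository id below is a placeholder, not the actual dataset name:

from datasets import load_dataset

# "<namespace>/<dataset-name>" is a placeholder repository id; substitute the real one.
dataset = load_dataset("<namespace>/<dataset-name>")
print(dataset)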
"""Collect posts from the desired subreddit(s) via Reddit's public JSON listings."""

import random
import time

import requests

subreddit_list = [
    "theredpillrebooted",
    "RedPillWomen",
    "Feminism",
    "marriedredpill",
    "TheBluePill",
    "PurplePillDebate",
    "RedPillWives",
]

# Appending ".json" to a subreddit URL returns the listing as JSON; the second
# placeholder carries the pagination parameter.
url_template = "https://www.reddit.com/r/{}/.json?t=all{}"

# Reddit rejects requests sent with the default library User-Agent, so use a custom one.
headers = {"User-Agent": "Testing Bot Gundam Wing"}

post_list = []

for subreddit in subreddit_list:
    # Reset the page budget and pagination cursor for each subreddit.
    counter = 10
    params = ""
    while counter > 0:
        print(f"Getting posts with params: {params}\n")
        url = url_template.format(subreddit, params)
        response = requests.get(url, headers=headers)
        if response.ok:
            data = response.json()
            # Optionally dump the raw response for debugging:
            # with open(f"reddit_{subreddit}_{counter}.json", "w") as f:
            #     f.write(response.text)
            posts = data["data"]["children"]
            print(f"Got {len(posts)} posts")
            for post in posts:
                pdata = post["data"]
                post_id = pdata["id"]
                title = pdata["title"]
                text = pdata.get("selftext")
                score = pdata["score"]
                author = pdata["author"]
                date = pdata["created_utc"]
                post_url = pdata.get("url_overridden_by_dest")
                print(f"{post_id}: {title} - {post_url}")
                post_list.append(
                    [subreddit, post_id, title, text, post_url, score, author, date, pdata]
                )
            # Reddit paginates with an "after" token; None means the listing is exhausted.
            after = data["data"].get("after")
            if after is None:
                print("No more posts, broke on", subreddit, "with counter at", counter)
                break
            params = f"&after={after}"
            counter -= 1
            # Random delay between requests to stay well clear of rate limits.
            time.sleep(random.randint(1, 45))
        else:
            # Stop paginating this subreddit on an HTTP error (e.g. 429)
            # instead of retrying indefinitely.
            print(f"Error: {response.status_code}")
            break