Created
June 28, 2023 06:08
-
-
Save xavdid/f0999e3ea08cc8cdaafce27618e092fd to your computer and use it in GitHub Desktop.
Get the ordering of your saved items
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import json | |
import sys | |
from urllib.parse import parse_qs, urlparse | |
import requests | |
# find your feed url on https://www.reddit.com/prefs/feeds/
# EDIT THIS!!
# NOTE: the feed= token is a private credential — keep this URL secret.
FEED_URL = "https://www.reddit.com/user/USERNAME/saved.json?feed=abc123&user=USERNAME"
# ---
# max page size is 100
PAGE_SIZE = 100
def _raise_reddit_error(response): | |
if "error" in response: | |
raise ValueError( | |
f'Received API error from Reddit (code {response["error"]}): {response["message"]}' | |
) | |
def _call_reddit_api(url: str, params=None):
    """GET a Reddit JSON endpoint and return the parsed payload.

    Always sends `limit=PAGE_SIZE` and `raw_json=1`; any caller-supplied
    `params` are merged on top and may override the defaults. Raises
    ValueError (via _raise_reddit_error) if the payload reports an error.
    """
    query = {"limit": PAGE_SIZE, "raw_json": 1}
    query.update(params or {})
    payload = requests.get(
        url,
        query,
        headers={"user-agent": "reddit-to-sqlite-script"},
    ).json()
    _raise_reddit_error(payload)
    return payload
def _load_paged_resource(username, feed_id):
    """Fetch all saved items for `username` via their private feed token.

    Pages through the saved.json listing, PAGE_SIZE items at a time, up to
    10 pages (Reddit listings cap out around 1000 items). Returns the raw
    `data` dict of each child, newest first (Reddit's listing order).
    """
    result = []
    after = None
    # max number of pages we can fetch
    for i in range(10):
        print(f"loading page {i}", file=sys.stderr)
        response = _call_reddit_api(
            f"https://www.reddit.com/user/{username}/saved.json",
            params={"after": after, "feed": feed_id, "user": username},
        )
        children = response["data"]["children"]
        result += [c["data"] for c in children]
        after = response["data"]["after"]
        # Stop on a short page OR when Reddit signals there is no next page.
        # Without the `after is None` check, a final page of exactly
        # PAGE_SIZE items would loop again with after=None, restarting
        # pagination from the top and duplicating results.
        if after is None or len(children) < PAGE_SIZE:
            break
    return result
def build_mapping(results, kind):
    """Map each item's short `id` to its 1-based position among items of `kind`.

    `kind` is a Reddit fullname prefix ("t3" for posts, "t1" for comments);
    only items whose `name` starts with that prefix are counted, and the
    numbering reflects their order within `results`.
    """
    matching = (item for item in results if item["name"].startswith(kind))
    mapping = {}
    for position, item in enumerate(matching, start=1):
        mapping[item["id"]] = position
    return mapping
if __name__ == "__main__": | |
if "feed=abc123" in FEED_URL or "user=USERNAME" in FEED_URL: | |
raise ValueError( | |
"Make sure to edit the top of the script to inclde your specific feed url." | |
) | |
data = parse_qs(urlparse(FEED_URL).query) | |
feed = data["feed"][0] | |
user = data["user"][0] | |
results = list(reversed(_load_paged_resource(user, feed))) | |
print( | |
json.dumps( | |
{ | |
"posts": build_mapping(results, "t3"), | |
"comments": build_mapping(results, "t1"), | |
}, | |
indent=2, | |
) | |
) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment