Merge pull request #713 from OMEGARAZER/development

Author: Serene
Committed by: GitHub
Date: 2022-12-16 10:59:35 +10:00

7 changed files with 158 additions and 47 deletions
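The diffs below all make the same change to each `download()` loop: the iteration over a PRAW listing generator gains an outer `try`/`except prawcore.PrawcoreException`, so an exception raised while the generator fetches the next submission (rather than while a single submission is processed) is logged and followed by a 60-second pause instead of aborting the run. A minimal standalone sketch of that pattern follows; `drain`, `process`, and `reddit_lists` here are illustrative stand-ins, not names taken from the repository.

import logging
from time import sleep

import prawcore

logger = logging.getLogger(__name__)


def drain(reddit_lists, process):
    # Illustrative only: `reddit_lists` is assumed to be an iterable of PRAW
    # listing generators and `process` a per-submission callable.
    for generator in reddit_lists:
        try:  # guards the generator itself; fetching the next page can raise
            for submission in generator:
                try:  # guards work on a single submission
                    process(submission)
                except prawcore.PrawcoreException as e:
                    logger.error(f"Submission {submission.id} failed: {e}")
        except prawcore.PrawcoreException as e:
            # The listing request itself failed; back off once, then move on
            # to the next generator rather than retrying the one that failed.
            logger.error(f"Listing failed partway through: {e}")
            logger.debug("Waiting 60 seconds to continue")
            sleep(60)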

bdfr/archiver.py

@@ -4,6 +4,7 @@
 import json
 import logging
 import re
+from time import sleep
 from typing import Iterator, Union

 import dict2xml
@@ -28,23 +29,28 @@ class Archiver(RedditConnector):
     def download(self):
         for generator in self.reddit_lists:
-            for submission in generator:
-                try:
-                    if (submission.author and submission.author.name in self.args.ignore_user) or (
-                        submission.author is None and "DELETED" in self.args.ignore_user
-                    ):
-                        logger.debug(
-                            f"Submission {submission.id} in {submission.subreddit.display_name} skipped"
-                            f" due to {submission.author.name if submission.author else 'DELETED'} being an ignored user"
-                        )
-                        continue
-                    if submission.id in self.excluded_submission_ids:
-                        logger.debug(f"Object {submission.id} in exclusion list, skipping")
-                        continue
-                    logger.debug(f"Attempting to archive submission {submission.id}")
-                    self.write_entry(submission)
-                except prawcore.PrawcoreException as e:
-                    logger.error(f"Submission {submission.id} failed to be archived due to a PRAW exception: {e}")
+            try:
+                for submission in generator:
+                    try:
+                        if (submission.author and submission.author.name in self.args.ignore_user) or (
+                            submission.author is None and "DELETED" in self.args.ignore_user
+                        ):
+                            logger.debug(
+                                f"Submission {submission.id} in {submission.subreddit.display_name} skipped due to"
+                                f" {submission.author.name if submission.author else 'DELETED'} being an ignored user"
+                            )
+                            continue
+                        if submission.id in self.excluded_submission_ids:
+                            logger.debug(f"Object {submission.id} in exclusion list, skipping")
+                            continue
+                        logger.debug(f"Attempting to archive submission {submission.id}")
+                        self.write_entry(submission)
+                    except prawcore.PrawcoreException as e:
+                        logger.error(f"Submission {submission.id} failed to be archived due to a PRAW exception: {e}")
+            except prawcore.PrawcoreException as e:
+                logger.error(f"The submission after {submission.id} failed to download due to a PRAW exception: {e}")
+                logger.debug("Waiting 60 seconds to continue")
+                sleep(60)

     def get_submissions_from_link(self) -> list[list[praw.models.Submission]]:
         supplied_submissions = []

bdfr/cloner.py

@@ -2,6 +2,7 @@
 # coding=utf-8

 import logging
+from time import sleep

 import prawcore
@@ -18,9 +19,14 @@ class RedditCloner(RedditDownloader, Archiver):
     def download(self):
         for generator in self.reddit_lists:
-            for submission in generator:
-                try:
-                    self._download_submission(submission)
-                    self.write_entry(submission)
-                except prawcore.PrawcoreException as e:
-                    logger.error(f"Submission {submission.id} failed to be cloned due to a PRAW exception: {e}")
+            try:
+                for submission in generator:
+                    try:
+                        self._download_submission(submission)
+                        self.write_entry(submission)
+                    except prawcore.PrawcoreException as e:
+                        logger.error(f"Submission {submission.id} failed to be cloned due to a PRAW exception: {e}")
+            except prawcore.PrawcoreException as e:
+                logger.error(f"The submission after {submission.id} failed to download due to a PRAW exception: {e}")
+                logger.debug("Waiting 60 seconds to continue")
+                sleep(60)

bdfr/connector.py

@@ -13,6 +13,7 @@ from abc import ABCMeta, abstractmethod
 from datetime import datetime
 from enum import Enum, auto
 from pathlib import Path
+from time import sleep
 from typing import Callable, Iterator

 import appdirs
@@ -353,26 +354,31 @@ class RedditConnector(metaclass=ABCMeta):
             generators = []
             for user in self.args.user:
-                try:
-                    self.check_user_existence(user)
-                except errors.BulkDownloaderException as e:
-                    logger.error(e)
-                    continue
-                if self.args.submitted:
-                    logger.debug(f"Retrieving submitted posts of user {self.args.user}")
-                    generators.append(
-                        self.create_filtered_listing_generator(
-                            self.reddit_instance.redditor(user).submissions,
-                        )
-                    )
-                if not self.authenticated and any((self.args.upvoted, self.args.saved)):
-                    logger.warning("Accessing user lists requires authentication")
-                else:
-                    if self.args.upvoted:
-                        logger.debug(f"Retrieving upvoted posts of user {self.args.user}")
-                        generators.append(self.reddit_instance.redditor(user).upvoted(limit=self.args.limit))
-                    if self.args.saved:
-                        logger.debug(f"Retrieving saved posts of user {self.args.user}")
-                        generators.append(self.reddit_instance.redditor(user).saved(limit=self.args.limit))
+                try:
+                    try:
+                        self.check_user_existence(user)
+                    except errors.BulkDownloaderException as e:
+                        logger.error(e)
+                        continue
+                    if self.args.submitted:
+                        logger.debug(f"Retrieving submitted posts of user {user}")
+                        generators.append(
+                            self.create_filtered_listing_generator(
+                                self.reddit_instance.redditor(user).submissions,
+                            )
+                        )
+                    if not self.authenticated and any((self.args.upvoted, self.args.saved)):
+                        logger.warning("Accessing user lists requires authentication")
+                    else:
+                        if self.args.upvoted:
+                            logger.debug(f"Retrieving upvoted posts of user {user}")
+                            generators.append(self.reddit_instance.redditor(user).upvoted(limit=self.args.limit))
+                        if self.args.saved:
+                            logger.debug(f"Retrieving saved posts of user {user}")
+                            generators.append(self.reddit_instance.redditor(user).saved(limit=self.args.limit))
+                except prawcore.PrawcoreException as e:
+                    logger.error(f"User {user} failed to be retrieved due to a PRAW exception: {e}")
+                    logger.debug("Waiting 60 seconds to continue")
+                    sleep(60)
             return generators
         else:
             return []
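Besides wrapping the per-user lookups in the same outer `try`/`except`, this hunk also corrects the debug messages to interpolate the current `user` instead of the full `self.args.user` list. The effect is that one failing account is logged and skipped after a 60-second pause while generators for the remaining users are still collected. A standalone sketch of that per-user isolation, with `collect` and `lookup` as illustrative stand-ins rather than repository names:

import logging
from time import sleep

import prawcore

logger = logging.getLogger(__name__)


def collect(users, lookup):
    # Illustrative only: `lookup` stands in for the PRAW calls made per user.
    results = []
    for user in users:
        try:
            results.append(lookup(user))
        except prawcore.PrawcoreException as e:
            # Log the failure, back off once, then continue with the next user.
            logger.error(f"User {user} failed to be retrieved due to a PRAW exception: {e}")
            logger.debug("Waiting 60 seconds to continue")
            sleep(60)
    return results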

bdfr/downloader.py

@@ -8,6 +8,7 @@ import time
 from datetime import datetime
 from multiprocessing import Pool
 from pathlib import Path
+from time import sleep

 import praw
 import praw.exceptions
@@ -42,11 +43,16 @@ class RedditDownloader(RedditConnector):
     def download(self):
         for generator in self.reddit_lists:
-            for submission in generator:
-                try:
-                    self._download_submission(submission)
-                except prawcore.PrawcoreException as e:
-                    logger.error(f"Submission {submission.id} failed to download due to a PRAW exception: {e}")
+            try:
+                for submission in generator:
+                    try:
+                        self._download_submission(submission)
+                    except prawcore.PrawcoreException as e:
+                        logger.error(f"Submission {submission.id} failed to download due to a PRAW exception: {e}")
+            except prawcore.PrawcoreException as e:
+                logger.error(f"The submission after {submission.id} failed to download due to a PRAW exception: {e}")
+                logger.debug("Waiting 60 seconds to continue")
+                sleep(60)

     def _download_submission(self, submission: praw.models.Submission):
         if submission.id in self.excluded_submission_ids: