Add defensive programming to site downloaders

Author: Serene-Arc
Date: 2021-04-06 11:04:08 +10:00
Committed by: Ali Parlakci
parent 9cb4dd4cf3
commit a291104144
6 changed files with 47 additions and 20 deletions
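
For context, a minimal standalone sketch of the defensive-programming pattern this commit applies: validate a parser's output and raise SiteDownloaderError when nothing is found, so callers get a catchable error instead of an empty result. Only the exception name comes from the diff; the helper names here are illustrative.

    # Standalone illustration of the pattern in the hunks below; fetch_links and
    # download are placeholders, not part of bulkredditdownloader.
    class SiteDownloaderError(Exception):
        """Raised when a site downloader cannot produce any downloadable links."""


    def fetch_links(url: str) -> list[str]:
        # Stand-in for a real parser such as Erome._get_links().
        return []


    def download(url: str) -> list[str]:
        links = fetch_links(url)
        # Defensive check: fail loudly with a downloader-level error rather than
        # returning an empty list the caller might silently ignore.
        if not links:
            raise SiteDownloaderError(f'Parser could not find any links for {url}')
        return links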


@@ -7,7 +7,7 @@ from typing import Optional
 import bs4
 from praw.models import Submission
-from bulkredditdownloader.exceptions import NotADownloadableLinkError
+from bulkredditdownloader.exceptions import SiteDownloaderError
 from bulkredditdownloader.resource import Resource
 from bulkredditdownloader.site_authenticator import SiteAuthenticator
 from bulkredditdownloader.site_downloaders.base_downloader import BaseDownloader
@@ -21,8 +21,9 @@ class Erome(BaseDownloader):
     def find_resources(self, authenticator: Optional[SiteAuthenticator] = None) -> list[Resource]:
         links = self._get_links(self.post.url)
         if not links:
-            raise NotADownloadableLinkError('Erome parser could not find any links')
+            raise SiteDownloaderError('Erome parser could not find any links')
         out = []
         for link in links: