Redgifs fixed?

If this doesn't work then I give up...
SoulSuck24
2022-09-16 20:41:17 -04:00
parent 0a9ecac410
commit 95749584ec
2 changed files with 6 additions and 3 deletions


@@ -27,6 +27,8 @@ class DownloadFactory:
         sanitised_url = DownloadFactory.sanitise_url(url)
         if re.match(r'(i\.)?imgur.*\.gif.+$', sanitised_url):
             return Imgur
+        elif re.match(r'(i\.)?(redgifs|gifdeliverynetwork)', sanitised_url):
+            return Redgifs
         elif re.match(r'.*/.*\.\w{3,4}(\?[\w;&=]*)?$', sanitised_url) and \
                 not DownloadFactory.is_web_resource(sanitised_url):
             return Direct
@@ -40,8 +42,6 @@ class DownloadFactory:
             return Gfycat
         elif re.match(r'(m\.)?imgur.*', sanitised_url):
             return Imgur
-        elif re.match(r'(redgifs|gifdeliverynetwork)', sanitised_url):
-            return Redgifs
         elif re.match(r'reddit\.com/r/', sanitised_url):
             return SelfPost
         elif re.match(r'(m\.)?youtu\.?be', sanitised_url):
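Why the reordering matters: the generic Direct pattern shown above already matches any sanitised URL that ends in a short file extension, so a direct Redgifs link to a .gif would previously have been claimed by Direct before the Redgifs branch was ever reached. A minimal sketch, assuming sanitise_url strips the scheme; the URL below is hypothetical:

import re

url = 'i.redgifs.com/i/example.gif'  # hypothetical sanitised URL

# The Direct pattern from this factory matches the bare file extension...
assert re.match(r'.*/.*\.\w{3,4}(\?[\w;&=]*)?$', url)

# ...so the new Redgifs check has to run first to claim the URL, and the
# added (i\.)? prefix lets it match the i.redgifs.com subdomain as well.
assert re.match(r'(i\.)?(redgifs|gifdeliverynetwork)', url)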


@@ -24,7 +24,7 @@ class Redgifs(BaseDownloader):
     @staticmethod
     def _get_link(url: str) -> set[str]:
         try:
-            redgif_id = re.match(r'.*/(.*?)/?$', url).group(1)
+            redgif_id = re.match(r'.*/(.*?)(\..{3,})?$', url).group(1)
         except AttributeError:
             raise SiteDownloaderError(f'Could not extract Redgifs ID from {url}')
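The loosened pattern treats a trailing file extension as optional instead of capturing it as part of the ID, so direct-file URLs still yield a usable Redgifs ID. A quick sketch with a hypothetical URL:

import re

url = 'https://i.redgifs.com/i/example.gif'  # hypothetical direct-file URL

re.match(r'.*/(.*?)/?$', url).group(1)          # old pattern -> 'example.gif'
re.match(r'.*/(.*?)(\..{3,})?$', url).group(1)  # new pattern -> 'example'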
@@ -55,4 +55,7 @@ class Redgifs(BaseDownloader):
         except (KeyError, AttributeError):
             raise SiteDownloaderError('Failed to find JSON data in page')
+        # Update subdomain if old one is returned
+        out = {re.sub('thumbs2', 'thumbs3', link) for link in out}
+        out = {re.sub('thumbs3', 'thumbs4', link) for link in out}
         return out
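The two chained substitutions rewrite any link returned on the older thumbs2 or thumbs3 subdomain onto thumbs4. A small illustration with hypothetical links:

import re

out = {'https://thumbs2.redgifs.com/Example.mp4',
       'https://thumbs3.redgifs.com/Other.mp4'}
out = {re.sub('thumbs2', 'thumbs3', link) for link in out}
out = {re.sub('thumbs3', 'thumbs4', link) for link in out}
# both links now point at thumbs4.redgifs.com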