Rename function
@@ -26,7 +26,7 @@ class BaseDownloader(ABC):
         raise NotImplementedError
 
     @staticmethod
-    def get_link(url: str, cookies: dict = None, headers: dict = None) -> requests.Response:
+    def retrieve_url(url: str, cookies: dict = None, headers: dict = None) -> requests.Response:
         res = requests.get(url, cookies=cookies, headers=headers)
         if res.status_code != 200:
             raise ResourceNotFound(f'Server responded with {res.status_code} to {url}')

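For readers skimming the diff, here is a minimal sketch of the renamed helper as it would run in isolation. The trailing return statement, the ResourceNotFound class body, and the bare-bones class wrapper are assumptions; the hunk above only shows the first lines of the method.

    import requests


    class ResourceNotFound(Exception):
        """Stand-in for the project's exception; assumed, not shown in this hunk."""


    class BaseDownloader:
        @staticmethod
        def retrieve_url(url: str, cookies: dict = None, headers: dict = None) -> requests.Response:
            # Fetch the page and fail loudly on any non-200 status, as in the diff above.
            res = requests.get(url, cookies=cookies, headers=headers)
            if res.status_code != 200:
                raise ResourceNotFound(f'Server responded with {res.status_code} to {url}')
            return res  # assumed: the return falls outside the visible hunk

Because the helper stays a @staticmethod, every subclass in the hunks below can call it by class name (Erome.retrieve_url(...), Gfycat.retrieve_url(...), and so on) without holding an instance.
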
@@ -33,7 +33,7 @@ class Erome(BaseDownloader):
 
     @staticmethod
     def _get_links(url: str) -> set[str]:
-        page = Erome.get_link(url)
+        page = Erome.retrieve_url(url)
         soup = bs4.BeautifulSoup(page.text, 'html.parser')
         front_images = soup.find_all('img', attrs={'class': 'lasyload'})
         out = [im.get('data-src') for im in front_images]

@@ -32,7 +32,7 @@ class Gallery(BaseDownloader):
                           ' Chrome/67.0.3396.87 Safari/537.36 OPR/54.0.2952.64',
             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
         }
-        page = Gallery.get_link(url, headers=resource_headers)
+        page = Gallery.retrieve_url(url, headers=resource_headers)
         soup = bs4.BeautifulSoup(page.text, 'html.parser')
 
         links = soup.findAll('a', attrs={'target': '_blank', 'href': re.compile(r'https://preview\.redd\.it.*')})

@@ -24,7 +24,7 @@ class Gfycat(GifDeliveryNetwork):
         gfycat_id = re.match(r'.*/(.*?)/?$', url).group(1)
         url = 'https://gfycat.com/' + gfycat_id
 
-        response = Gfycat.get_link(url)
+        response = Gfycat.retrieve_url(url)
         if 'gifdeliverynetwork' in response.url:
             return GifDeliveryNetwork._get_link(url)
 

@@ -21,7 +21,7 @@ class GifDeliveryNetwork(BaseDownloader):
 
     @staticmethod
     def _get_link(url: str) -> str:
-        page = GifDeliveryNetwork.get_link(url)
+        page = GifDeliveryNetwork.retrieve_url(url)
 
         soup = BeautifulSoup(page.text, 'html.parser')
         content = soup.find('source', attrs={'id': 'mp4Source', 'type': 'video/mp4'})

@@ -41,7 +41,7 @@ class Imgur(BaseDownloader):
         link = link.replace('i.imgur', 'imgur')
         link = link.rstrip('.gifv')
 
-        res = Imgur.get_link(link, cookies={'over18': '1', 'postpagebeta': '0'})
+        res = Imgur.retrieve_url(link, cookies={'over18': '1', 'postpagebeta': '0'})
 
         soup = bs4.BeautifulSoup(res.text, 'html.parser')
         scripts = soup.find_all('script', attrs={'type': 'text/javascript'})

@@ -30,7 +30,7 @@ class Redgifs(GifDeliveryNetwork):
                       ' Chrome/67.0.3396.87 Safari/537.36 OPR/54.0.2952.64',
         }
 
-        page = Redgifs.get_link(url, headers=headers)
+        page = Redgifs.retrieve_url(url, headers=headers)
 
         soup = BeautifulSoup(page.text, 'html.parser')
         content = soup.find('script', attrs={'data-react-helmet': 'true', 'type': 'application/ld+json'})
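
The remaining hunks are mechanical call-site updates, since every site class inherits the static helper from BaseDownloader. A hedged usage sketch building on the snippet after the first hunk (ExampleDownloader and the target URL are hypothetical and not part of this commit):

    class ExampleDownloader(BaseDownloader):  # hypothetical subclass for illustration
        @staticmethod
        def _get_link(url: str) -> str:
            page = ExampleDownloader.retrieve_url(url)  # previously ExampleDownloader.get_link(url)
            return page.url


    try:
        print(ExampleDownloader._get_link('https://example.com'))
    except ResourceNotFound as err:
        print(f'Could not fetch resource: {err}')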