refactor: make update_cookies use primary_base_domain by default
NTFSvolume committed Jan 28, 2025
1 parent a384f3e commit b1f8fae
Showing 8 changed files with 14 additions and 9 deletions.
cyberdrop_dl/scraper/crawler.py (6 additions, 1 deletion)
@@ -280,7 +280,12 @@ def parse_url(self, link_str: str, relative_to: URL | None = None) -> URL:
         new_url = new_url.with_scheme(base.scheme or "https")
         return new_url
 
-    def update_cookies(self, cookies: dict, response_url: URL) -> None:
+    def update_cookies(self, cookies: dict, url: URL | None = None) -> None:
+        """Update cookies for the provided URL
+
+        If `url` is `None`, defaults to `self.primary_base_domain`
+        """
+        response_url = url or self.primary_base_domain
         self.client.client_manager.cookies.update_cookies(cookies, response_url)

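The change moves the `primary_base_domain` fallback into `update_cookies` itself, so call sites only pass a URL when it differs from the crawler's primary domain. A minimal self-contained sketch of the pattern (the `Crawler` class, cookie jar, and domains below are hypothetical stand-ins; only the `update_cookies` body mirrors the diff):

```python
from yarl import URL  # cyberdrop-dl's URL type comes from yarl


class Crawler:
    """Hypothetical stand-in; only update_cookies mirrors the real diff."""

    def __init__(self, primary_base_domain: URL) -> None:
        self.primary_base_domain = primary_base_domain
        # Stand-in for client.client_manager.cookies: maps host -> cookie dict
        self.cookie_jar: dict[str, dict] = {}

    def update_cookies(self, cookies: dict, url: URL | None = None) -> None:
        """Update cookies for the provided URL.

        If `url` is `None`, defaults to `self.primary_base_domain`.
        """
        response_url = url or self.primary_base_domain
        self.cookie_jar.setdefault(response_url.host, {}).update(cookies)


crawler = Crawler(URL("https://forum.example"))  # hypothetical domain
crawler.update_cookies({"xf_user": "abc123"})    # no URL: falls back to primary_base_domain
crawler.update_cookies({"tmp": "1"}, URL("https://cdn.example"))  # explicit URL still works
```

Every call-site change below is the mechanical consequence: `self.update_cookies(cookies, self.primary_base_domain)` collapses to `self.update_cookies(cookies)`.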
cyberdrop_dl/scraper/crawlers/coomer_crawler.py (2 additions, 2 deletions)
@@ -116,14 +116,14 @@ async def favorites(self, scrape_item: ScrapeItem) -> None:
             raise ScrapeError(401, msg, origin=scrape_item)
 
         cookies = {"session": self.manager.config_manager.authentication_data.coomer.session}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
 
         async with self.request_limiter:
             favourites_api_url = (self.api_url / "account/favorites").with_query({"type": "artist"})
             JSON_Resp = await self.client.get_json(self.domain, favourites_api_url, origin=scrape_item)
 
         cookies = {"session": ""}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
 
         for user in JSON_Resp:
             id = user["id"]
cyberdrop_dl/scraper/crawlers/gofile_crawler.py (1 addition, 1 deletion)
@@ -133,7 +133,7 @@ async def get_account_token(self, _) -> None:
         self.api_key = self.api_key or await self._get_new_api_key()
         self.headers["Authorization"] = f"Bearer {self.api_key}"
         cookies = {"accountToken": self.api_key}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
 
     async def _get_new_api_key(self) -> str:
         create_account_address = self.api / "accounts"
cyberdrop_dl/scraper/crawlers/imagebam_crawler.py (1 addition, 1 deletion)
@@ -117,4 +117,4 @@ def get_view_url(self, url: URL) -> URL:
     def set_cookies(self) -> None:
         """Set cookies to bypass confirmation."""
         cookies = {"nsfw_inter": "1"}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
cyberdrop_dl/scraper/crawlers/realbooru_crawler.py (1 addition, 1 deletion)
@@ -82,4 +82,4 @@ async def file(self, scrape_item: ScrapeItem) -> None:
     def set_cookies(self) -> None:
         """Sets the cookies for the client."""
         cookies = {"resize-original": "1"}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
cyberdrop_dl/scraper/crawlers/rule34xxx_crawler.py (1 addition, 1 deletion)
@@ -84,4 +84,4 @@ async def file(self, scrape_item: ScrapeItem) -> None:
     async def set_cookies(self) -> None:
         """Sets the cookies for the client."""
         cookies = {"resize-original": "1"}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
cyberdrop_dl/scraper/crawlers/xenforo_crawler.py (1 addition, 1 deletion)
@@ -390,7 +390,7 @@ async def forum_login(self, login_url: URL, session_cookie: str, username: str,
 
         cookies = {"xf_user": session_cookie}
         if session_cookie:
-            self.update_cookies(cookies, self.primary_base_domain)
+            self.update_cookies(cookies)
 
         text, logged_in = await self.check_login_with_request(login_url)
         if logged_in:
cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py (1 addition, 1 deletion)
@@ -223,4 +223,4 @@ async def check_session_cookie(self) -> None:
             return
 
         cookies = {"PHPSESSID": self.session_cookie}
-        self.update_cookies(cookies, self.primary_base_domain)
+        self.update_cookies(cookies)
