Skip to content
This repository has been archived by the owner on Jul 5, 2024. It is now read-only.

Commit

Permalink
Fix gofile not properly utilizing wrapper format.
Browse files Browse the repository at this point in the history
  • Loading branch information
Jules-WinnfieldX committed Dec 6, 2023
1 parent 649e31d commit a20607f
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion cyberdrop_dl/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "5.0.51"
__version__ = "5.0.52"
8 changes: 4 additions & 4 deletions cyberdrop_dl/scraper/crawlers/gofile_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ async def fetch(self, scrape_item: ScrapeItem) -> None:
task_id = await self.scraping_progress.add_task(scrape_item.url)

await self.get_token(self.client)
await self.get_website_token(self.client)
await self.get_website_token(self.js_address, self.client)

await self.album(scrape_item)

Expand All @@ -57,7 +57,7 @@ async def album(self, scrape_item: ScrapeItem) -> None:
if e.status == http.HTTPStatus.UNAUTHORIZED:
self.websiteToken = ""
self.manager.cache_manager.remove("gofile_website_token")
await self.get_website_token(self.client)
await self.get_website_token(self.js_address, self.client)
params["websiteToken"] = self.websiteToken
async with self.request_limiter:
JSON_Resp = await self.client.get_json(self.domain, self.api_address / "getContent", params)
Expand Down Expand Up @@ -110,7 +110,7 @@ async def get_token(self, session: ScraperClient) -> None:
raise ScrapeFailure(403, "Couldn't generate GoFile token")

@error_handling_wrapper
async def get_website_token(self, session: ScraperClient) -> None:
async def get_website_token(self, js_address: URL, session: ScraperClient) -> None:
"""Creates an anon gofile account to use."""
if self.websiteToken:
return
Expand All @@ -121,7 +121,7 @@ async def get_website_token(self, session: ScraperClient) -> None:
return

async with self.request_limiter:
text = await session.get_text(self.domain, self.js_address)
text = await session.get_text(self.domain, js_address)
text = str(text)
self.websiteToken = re.search(r'fetchData\.websiteToken\s*=\s*"(.*?)"', text).group(1)
if not self.websiteToken:
Expand Down

0 comments on commit a20607f

Please sign in to comment.