From d344104f278ad5ba7e74ccd58d9ea0caeb74d9b0 Mon Sep 17 00:00:00 2001
From: niazarak <14030993+niazarak@users.noreply.github.com>
Date: Fri, 29 Nov 2024 18:01:49 +0200
Subject: [PATCH] Use correct list typing for py 3.8 support

list[str] is not supported in Python 3.8
---
 apps/python-sdk/firecrawl/firecrawl.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py
index a973e2800..88fc72f6c 100644
--- a/apps/python-sdk/firecrawl/firecrawl.py
+++ b/apps/python-sdk/firecrawl/firecrawl.py
@@ -314,7 +314,7 @@ def map_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any:
         else:
             self._handle_error(response, 'map')
 
-    def batch_scrape_urls(self, urls: list[str],
+    def batch_scrape_urls(self, urls: List[str],
                   params: Optional[Dict[str, Any]] = None,
                   poll_interval: Optional[int] = 2,
                   idempotency_key: Optional[str] = None) -> Any:
@@ -322,7 +322,7 @@ def batch_scrape_urls(self, urls: list[str],
         Initiate a batch scrape job for the specified URLs using the Firecrawl API.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             poll_interval (Optional[int]): Time in seconds between status checks when waiting for job completion. Defaults to 2 seconds.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
@@ -354,12 +354,12 @@ def batch_scrape_urls(self, urls: list[str],
 
             self._handle_error(response, 'start batch scrape job')
 
-    def async_batch_scrape_urls(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
+    def async_batch_scrape_urls(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
         """
         Initiate a crawl job asynchronously.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
 
@@ -380,12 +380,12 @@ def async_batch_scrape_urls(self, urls: list[str], params: Optional[Dict[str, An
         else:
             self._handle_error(response, 'start batch scrape job')
 
-    def batch_scrape_urls_and_watch(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
+    def batch_scrape_urls_and_watch(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
         """
         Initiate a batch scrape job and return a CrawlWatcher to monitor the job via WebSocket.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
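
Reviewer note (not part of the patch): a minimal sketch of the incompatibility this change fixes. The `scrape_batch` helper below is hypothetical, purely illustrative, not SDK code; only the typing behavior matters. On Python 3.8, subscripting the built-in `list` raises at function-definition time, because PEP 585 generics such as `list[str]` only became valid in Python 3.9, whereas `typing.List[str]` works on 3.8 and later.

    # Hypothetical illustration, not SDK code (assumes Python 3.8).
    from typing import List

    # This definition fails on 3.8 with:
    #   TypeError: 'type' object is not subscriptable
    # because list[str] (PEP 585) requires Python 3.9+:
    #
    #   def scrape_batch(urls: list[str]) -> None: ...

    def scrape_batch(urls: List[str]) -> None:  # typing.List works on 3.8+
        for url in urls:
            print(url)

    scrape_batch(["https://example.com", "https://firecrawl.dev"])

An alternative would be adding `from __future__ import annotations`, which defers annotation evaluation so `list[str]` parses on 3.8, but swapping in `typing.List` is the smaller change and keeps the annotations evaluable at runtime on 3.8.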