From 74b59a8ada2a0b2949a932d716e6ae442101f29b Mon Sep 17 00:00:00 2001
From: AndrewKorzh <92707967+AndrewKorzh@users.noreply.github.com>
Date: Thu, 15 Aug 2024 15:44:16 +0300
Subject: [PATCH] Add form action (#35)

* #

* add-FormAction

* add documentation

* Update local_browser_manager.py

* add examples

* code formatting

* FormAction to FillForm

* Formatting and Unused imports

* form_action to fill_form

* Extra parameter

---------

Co-authored-by: matthew
---
 README.md                                          |   3 +-
 examples/settings.py                               |   2 +
 examples/spiders/fill_form.py                      |  38 ++++
 examples/spiders/har.py                            |  29 +++
 scrapypuppeteer/actions.py                         |  48 +++-
 scrapypuppeteer/browser_managers/__init__.py       |   5 +-
 .../browser_managers/local_browser_manager.py      | 209 +++++++++++-------
 .../service_browser_manager.py                     |  31 +--
 scrapypuppeteer/middleware.py                      |  29 ++-
 scrapypuppeteer/request.py                         |   5 +-
 scrapypuppeteer/response.py                        |   2 +-
 setup.py                                           |  13 +-
 tests/actions/test_actions.py                      |  10 +-
 13 files changed, 301 insertions(+), 123 deletions(-)
 create mode 100644 examples/spiders/fill_form.py
 create mode 100644 examples/spiders/har.py

diff --git a/README.md b/README.md
index cf7e42e..a4b1f56 100644
--- a/README.md
+++ b/README.md
@@ -84,6 +84,7 @@ Here is the list of available actions:
 - `Scroll(selector, wait_options)` - scroll page
 - `Screenshot(options)` - take screenshot
 - `Har()` - to get the HAR file, pass the `har_recording=True` argument to `PuppeteerRequest` at the start of execution.
+- `FillForm(input_mapping, submit_button)` - fill out and submit forms on a page
 - `RecaptchaSolver(solve_recaptcha)` - find or solve recaptcha on page
 - `CustomJsAction(js_function)` - evaluate JS function on page
@@ -174,4 +175,4 @@ In this case RecaptchaMiddleware will just skip the request.
 - [ ] headers and cookies management
 - [ ] proxy support for puppeteer
 - [x] error handling for requests
-- [ ] har support
+- [x] har support
diff --git a/examples/settings.py b/examples/settings.py
index bdfcff7..1ac96c6 100644
--- a/examples/settings.py
+++ b/examples/settings.py
@@ -10,3 +10,5 @@
 }
 
 PUPPETEER_SERVICE_URL = "http://localhost:3000"
+
+PUPPETEER_LOCAL = False
diff --git a/examples/spiders/fill_form.py b/examples/spiders/fill_form.py
new file mode 100644
index 0000000..91a2a18
--- /dev/null
+++ b/examples/spiders/fill_form.py
@@ -0,0 +1,38 @@
+import base64
+import scrapy
+from scrapypuppeteer import PuppeteerRequest, PuppeteerScreenshotResponse
+from scrapypuppeteer.actions import Screenshot, FillForm
+
+
+class FormActionSpider(scrapy.Spider):
+    name = "fill_form"
+    start_urls = ["https://www.roboform.com/filling-test-all-fields"]
+
+    def start_requests(self):
+        for url in self.start_urls:
+            yield PuppeteerRequest(url, callback=self.form_action, close_page=False)
+
+    def form_action(self, response):
+        input_mapping = {
+            'input[name="02frstname"]': {"value": "SomeName", "delay": 50},
+            'input[name="05_company"]': {"value": "SomeCompany", "delay": 100},
+            'input[name="06position"]': {"value": "SomePosition", "delay": 100},
+        }
+
+        yield response.follow(
+            FillForm(input_mapping), close_page=False, callback=self.screenshot
+        )
+
+    def screenshot(self, response):
+        action = Screenshot(
+            options={
+                "fullPage": True,
+            }
+        )
+        yield response.follow(action, callback=self.make_screenshot, close_page=False)
+
+    @staticmethod
+    def make_screenshot(response: PuppeteerScreenshotResponse, **kwargs):
+        data = response.screenshot
+        with open("screenshot.png", "wb") as fh:
+            fh.write(base64.b64decode(data))
diff --git a/examples/spiders/har.py b/examples/spiders/har.py
new file mode 100644
index 0000000..7638867
--- /dev/null
+++ b/examples/spiders/har.py
@@ -0,0 +1,29 @@
+import scrapy
+from scrapypuppeteer import PuppeteerRequest
+from scrapypuppeteer.actions import Har
+
+
+def write_to_file(file_path, content):
+    with open(file_path, "a", encoding="utf-8") as file:
+        file.write(content)
+
+
+class HarSpider(scrapy.Spider):
+    name = "har"
+    start_urls = ["https://github.com/pyppeteer/pyppeteer"]
+
+    def start_requests(self):
+        for url in self.start_urls:
+            yield PuppeteerRequest(
+                url, callback=self.har, close_page=False, har_recording=True
+            )
+
+    def har(self, response):
+        yield response.follow(
+            Har(),
+            close_page=False,
+            callback=self.save_har,
+        )
+
+    def save_har(self, response):
+        write_to_file("result.har", response.har)
diff --git a/scrapypuppeteer/actions.py b/scrapypuppeteer/actions.py
index 369039b..b0a9d70 100644
--- a/scrapypuppeteer/actions.py
+++ b/scrapypuppeteer/actions.py
@@ -58,7 +58,11 @@ class GoTo(PuppeteerServiceAction):
     endpoint = "goto"
 
     def __init__(
-        self, url: str, navigation_options: dict = None, wait_options: dict = None, har_recording: bool = False
+        self,
+        url: str,
+        navigation_options: dict = None,
+        wait_options: dict = None,
+        har_recording: bool = False,
     ):
         self.url = url
         self.navigation_options = navigation_options
@@ -223,15 +227,53 @@ def __init__(self, options: dict = None, **kwargs):
     def payload(self):
         return {"options": self.options}
-
-
+
+
 class Har(PuppeteerServiceAction):
+    """
+    The `Har` action is used to capture and retrieve the HTTP Archive (HAR) file,
+    which contains detailed information about network requests and responses
+    made during the session.
+
+    This action is called without any arguments. To generate the HAR file,
+    you must pass the `har_recording=True` argument to `PuppeteerRequest`
+    when initiating the request.
+    """
+
     endpoint = "har"
 
     def payload(self):
         return {}
 
 
+class FillForm(PuppeteerServiceAction):
+    """
+    Fill out and submit forms on a webpage.
+
+    Available options:
+
+    * ``input_mapping`` (dict): A dictionary where each key is a CSS selector and
+      each value is another dictionary containing details about the input for
+      that element. Each entry should follow this structure:
+
+      * ``selector`` (str): The CSS selector of the input element (used as the key).
+      * ``value`` (str): The text to be entered into the element.
+      * ``delay`` (int, optional): A delay (in milliseconds) between each keystroke
+        when entering the text. Defaults to 0 if not provided.
+
+    * ``submit_button`` (str, optional): The CSS selector of the form's submit button.
+      If provided, the button will be clicked after filling in the form.
+ """ + + endpoint = "fill_form" + + def __init__(self, input_mapping: dict, submit_button: str = None): + self.input_mapping = input_mapping + self.submit_button = submit_button + + def payload(self): + return {"inputMapping": self.input_mapping, "submitButton": self.submit_button} + class RecaptchaSolver(PuppeteerServiceAction): """ diff --git a/scrapypuppeteer/browser_managers/__init__.py b/scrapypuppeteer/browser_managers/__init__.py index cc5db82..c7f77b3 100644 --- a/scrapypuppeteer/browser_managers/__init__.py +++ b/scrapypuppeteer/browser_managers/__init__.py @@ -2,15 +2,16 @@ from abc import ABC, abstractmethod + class BrowserManager(ABC): @abstractmethod def process_request(self, request, spider): pass - + @abstractmethod def close_used_contexts(self): pass @abstractmethod def process_response(self, middleware, request, response, spider): - pass \ No newline at end of file + pass diff --git a/scrapypuppeteer/browser_managers/local_browser_manager.py b/scrapypuppeteer/browser_managers/local_browser_manager.py index 55d727f..212c34d 100644 --- a/scrapypuppeteer/browser_managers/local_browser_manager.py +++ b/scrapypuppeteer/browser_managers/local_browser_manager.py @@ -1,14 +1,15 @@ +import asyncio +import base64 +import uuid + +import syncer +from pyppeteer import launch + from scrapypuppeteer.response import ( PuppeteerHtmlResponse, PuppeteerScreenshotResponse, ) -from scrapypuppeteer.request import ActionRequest, PuppeteerRequest, CloseContextRequest - -import asyncio -from pyppeteer import launch -import syncer -import uuid -import base64 +from scrapypuppeteer.request import PuppeteerRequest, CloseContextRequest from scrapypuppeteer.browser_managers import BrowserManager @@ -20,7 +21,6 @@ def __init__(self): self.pages = {} self.context_page_map = {} - async def check_context_and_page(self, context_id, page_id): if not context_id or not page_id: context_id, page_id = await self.open_new_page() @@ -71,29 +71,30 @@ def __init__(self): "screenshot": self.screenshot, "action": self.action, "recaptcha_solver": self.recaptcha_solver, - "har": self.har + "har": self.har, + "fill_form": self.fill_form, } def process_request(self, request): - + if isinstance(request, PuppeteerRequest): endpoint = request.action.endpoint action_function = self.action_map.get(endpoint) if action_function: return action_function(request) - + if isinstance(request, CloseContextRequest): return self.close_contexts(request) - + def close_contexts(self, request: CloseContextRequest): self.context_manager.close_contexts(request) - + def close_used_contexts(self): self.context_manager.close_browser() def process_response(self, middleware, request, response, spider): return response - + async def wait_with_options(self, page, wait_options): timeout = wait_options.get("selectorOrTimeout", 1000) visible = wait_options.get("visible", False) @@ -102,14 +103,17 @@ async def wait_with_options(self, page, wait_options): if isinstance(timeout, (int, float)): await asyncio.sleep(timeout / 1000) else: - await page.waitFor(selector=timeout, options={ - 'visible': visible, - 'hidden': hidden, - 'timeout': 30000 - }) + await page.waitFor( + selector=timeout, + options={"visible": visible, "hidden": hidden, "timeout": 30000}, + ) def goto(self, request: PuppeteerRequest): - context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id)) + context_id, page_id = syncer.sync( + self.context_manager.check_context_and_page( + request.context_id, request.page_id + ) + ) page = 
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_goto():
@@ -120,17 +124,23 @@ async def async_goto():
             wait_options = request.action.payload().get("waitOptions", {}) or {}
             await self.wait_with_options(page, wait_options)
             response_html = await page.content()
-            return PuppeteerHtmlResponse(url,
-                                         request,
-                                         context_id = context_id,
-                                         page_id = page_id,
-                                         html = response_html,
-                                         cookies=cookies)
-
+            return PuppeteerHtmlResponse(
+                url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
+
         return syncer.sync(async_goto())
 
     def click(self, request: PuppeteerRequest):
-        context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id))
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_click():
@@ -138,22 +148,28 @@ async def async_click():
             selector = request.action.payload().get("selector")
             cookies = request.cookies
             click_options = request.action.click_options or {}
             navigation_options = request.action.navigation_options or {}
-            options = merged = {**click_options, **navigation_options}
+            options = {**click_options, **navigation_options}
             await page.click(selector, options)
             wait_options = request.action.payload().get("waitOptions", {}) or {}
             await self.wait_with_options(page, wait_options)
             response_html = await page.content()
-            return PuppeteerHtmlResponse(request.url,
-                                         request,
-                                         context_id = context_id,
-                                         page_id = page_id,
-                                         html = response_html,
-                                         cookies=cookies)
-
+            return PuppeteerHtmlResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
+
         return syncer.sync(async_click())
 
     def go_back(self, request: PuppeteerRequest):
-        context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id))
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_go_back():
@@ -163,18 +179,23 @@ async def async_go_back():
             wait_options = request.action.payload().get("waitOptions", {}) or {}
             await self.wait_with_options(page, wait_options)
             response_html = await page.content()
-            return PuppeteerHtmlResponse(request.url,
-                                         request,
-                                         context_id = context_id,
-                                         page_id = page_id,
-                                         html = response_html,
-                                         cookies=cookies)
+            return PuppeteerHtmlResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
 
         return syncer.sync(async_go_back())
 
-
     def go_forward(self, request: PuppeteerRequest):
-        context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id))
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_go_forward():
@@ -184,38 +205,47 @@ async def async_go_forward():
             wait_options = request.action.payload().get("waitOptions", {}) or {}
             await self.wait_with_options(page, wait_options)
             response_html = await page.content()
-            return PuppeteerHtmlResponse(request.url,
-                                         request,
-                                         context_id = context_id,
-                                         page_id = page_id,
-                                         html = response_html,
-                                         cookies=cookies)
+            return PuppeteerHtmlResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
 
         return syncer.sync(async_go_forward())
 
-
-
     def screenshot(self, request: PuppeteerRequest):
-        context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id))
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_screenshot():
             request_options = request.action.options or {}
-            screenshot_options = {'encoding': 'binary'}
+            screenshot_options = {"encoding": "binary"}
             screenshot_options.update(request_options)
             screenshot_bytes = await page.screenshot(screenshot_options)
-            screenshot_base64 = base64.b64encode(screenshot_bytes).decode('utf-8')
-            return PuppeteerScreenshotResponse(request.url,
-                                               request,
-                                               context_id = context_id,
-                                               page_id = page_id,
-                                               screenshot = screenshot_base64)
+            screenshot_base64 = base64.b64encode(screenshot_bytes).decode("utf-8")
+            return PuppeteerScreenshotResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                screenshot=screenshot_base64,
+            )
 
         return syncer.sync(async_screenshot())
 
-
     def scroll(self, request: PuppeteerRequest):
-        context_id, page_id = syncer.sync(self.context_manager.check_context_and_page(request.context_id, request.page_id))
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
         page = self.context_manager.get_page_by_id(context_id, page_id)
 
         async def async_scroll():
@@ -234,24 +264,55 @@ async def async_scroll():
             wait_options = request.action.payload().get("waitOptions", {}) or {}
             await self.wait_with_options(page, wait_options)
             response_html = await page.content()
-            return PuppeteerHtmlResponse(request.url,
-                                         request,
-                                         context_id = context_id,
-                                         page_id = page_id,
-                                         html = response_html,
-                                         cookies=cookies)
+            return PuppeteerHtmlResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
 
         return syncer.sync(async_scroll())
-
+
+    def fill_form(self, request: PuppeteerRequest):
+        context_id, page_id = syncer.sync(
+            self.context_manager.check_context_and_page(
+                request.context_id, request.page_id
+            )
+        )
+        page = self.context_manager.get_page_by_id(context_id, page_id)
+
+        async def async_fill_form():
+            input_mapping = request.action.payload().get("inputMapping")
+            submit_button = request.action.payload().get("submitButton", None)
+            cookies = request.cookies
+
+            for selector, params in input_mapping.items():
+                value = params.get("value", "no value was provided")
+                delay = params.get("delay", 0)
+                await page.type(selector, value, {"delay": delay})
+
+            if submit_button:
+                await page.click(submit_button)
+
+            response_html = await page.content()
+            return PuppeteerHtmlResponse(
+                request.url,
+                request,
+                context_id=context_id,
+                page_id=page_id,
+                html=response_html,
+                cookies=cookies,
+            )
+
+        return syncer.sync(async_fill_form())
 
     def action(self, request: PuppeteerRequest):
         raise ValueError("CustomJsAction is not available in local mode")
 
     def recaptcha_solver(self, request: PuppeteerRequest):
         raise ValueError("RecaptchaSolver is not available in local mode")
-
+
     def har(self, request: PuppeteerRequest):
         raise ValueError("Har is not available in local mode")
-
-
-
diff --git a/scrapypuppeteer/browser_managers/service_browser_manager.py b/scrapypuppeteer/browser_managers/service_browser_manager.py
index 2e7e488..7b67619 100644
--- a/scrapypuppeteer/browser_managers/service_browser_manager.py
+++ b/scrapypuppeteer/browser_managers/service_browser_manager.py
@@ -1,17 +1,12 @@
 import json
 import logging
 from collections import defaultdict
-from typing import List, Union
 from urllib.parse import urlencode, urljoin
-from abc import ABC, abstractmethod
 
-from scrapy import signals
-from scrapy.crawler import Crawler
-from scrapy.exceptions import IgnoreRequest, NotConfigured, DontCloseSpider
+from scrapy.exceptions import DontCloseSpider
 from scrapy.http import Headers, TextResponse, Response
 from scrapy.utils.log import failure_to_exc_info
 from twisted.python.failure import Failure
-import time
 
 from scrapypuppeteer.actions import (
     Click,
@@ -21,11 +16,10 @@
     RecaptchaSolver,
     Screenshot,
     Scroll,
-    CustomJsAction,
     Har,
+    FillForm,
 )
 from scrapypuppeteer.response import (
-    PuppeteerResponse,
     PuppeteerHtmlResponse,
     PuppeteerScreenshotResponse,
     PuppeteerHarResponse,
@@ -33,7 +27,6 @@
     PuppeteerJsonResponse,
 )
 from scrapypuppeteer.request import ActionRequest, PuppeteerRequest, CloseContextRequest
-
 from scrapypuppeteer.browser_managers import BrowserManager
 
@@ -47,8 +40,7 @@ def __init__(self, service_base_url, include_meta, include_headers, crawler):
         self.crawler = crawler
 
         if self.service_base_url is None:
-            raise ValueError("Puppeteer service URL must be provided")
-
+            raise ValueError("Puppeteer service URL must be provided")
 
     def process_request(self, request):
@@ -57,14 +49,12 @@ def process_request(self, request):
         if isinstance(request, PuppeteerRequest):
             return self.process_puppeteer_request(request)
 
-
     def process_close_context_request(self, request: CloseContextRequest):
         if not request.is_valid_url:
             return request.replace(
                 url=urljoin(self.service_base_url, "/close_context"),
             )
 
-
     def process_puppeteer_request(self, request: PuppeteerRequest):
         action = request.action
@@ -79,7 +69,7 @@ def process_puppeteer_request(self, request: PuppeteerRequest):
         }
         if self.include_meta:
             meta = {**request.meta, **meta}
-        action_request = ActionRequest(
+        action_request = ActionRequest(
             url=service_url,
             action=action,
             method="POST",
@@ -94,7 +84,7 @@ def process_puppeteer_request(self, request: PuppeteerRequest):
             meta=meta,
         )
         return action_request
-
+
     @staticmethod
     def _encode_service_params(request):
         service_params = {}
@@ -105,7 +95,7 @@ def _encode_service_params(request):
         if request.close_page:
             service_params["closePage"] = 1
         return urlencode(service_params)
-
+
     def _serialize_body(self, action, request):
         payload = action.payload()
         if action.content_type == "application/json":
@@ -129,7 +119,7 @@ def _serialize_body(self, action, request):
                 payload["headers"] = headers
             return json.dumps(payload)
         return str(payload)
-
+
     def close_used_contexts(self, spider):
         contexts = list(self.used_contexts.pop(id(spider), set()))
         if contexts:
@@ -137,6 +127,7 @@ def close_used_contexts(self, spider):
                 contexts,
                 meta={"proxy": None},
             )
+
             def handle_close_contexts_result(result):
                 if isinstance(result, Response):
                     if result.status == 200:
@@ -153,11 +144,11 @@ def close_used_contexts(self, spider):
                         f"Could not close contexts: {result.value}",
                         exc_info=failure_to_exc_info(result),
                     )
+
             dfd = self.crawler.engine.download(request)
             dfd.addBoth(handle_close_contexts_result)
             raise DontCloseSpider()
 
-
     def process_response(self, middleware, request, response, spider):
         if not isinstance(response, TextResponse):
@@ -210,7 +201,9 @@
     @staticmethod
     def _get_response_class(request_action):
-        if isinstance(request_action, (GoTo, GoForward, GoBack, Click, Scroll)):
+        if isinstance(
+            request_action, (GoTo, GoForward, GoBack, Click, Scroll, FillForm)
+        ):
             return PuppeteerHtmlResponse
         if isinstance(request_action, Screenshot):
             return PuppeteerScreenshotResponse
diff --git a/scrapypuppeteer/middleware.py b/scrapypuppeteer/middleware.py
index 88587ff..78dc1fb 100644
--- a/scrapypuppeteer/middleware.py
+++ b/scrapypuppeteer/middleware.py
@@ -21,7 +21,7 @@
     Screenshot,
     Scroll,
     CustomJsAction,
-    Har
+    Har,
 )
 from scrapypuppeteer.response import (
     PuppeteerResponse,
@@ -32,11 +32,16 @@
 )
 from scrapypuppeteer.request import ActionRequest, PuppeteerRequest, CloseContextRequest
 
-from scrapypuppeteer.browser_managers.local_browser_manager import LocalBrowserManager
-from scrapypuppeteer.browser_managers.service_browser_manager import ServiceBrowserManager
+from scrapypuppeteer.browser_managers.local_browser_manager import (
+    LocalBrowserManager,
+)
+from scrapypuppeteer.browser_managers.service_browser_manager import (
+    ServiceBrowserManager,
+)
 
 from scrapypuppeteer.browser_managers import BrowserManager
 
+
 class PuppeteerServiceDownloaderMiddleware:
     """
     This downloader middleware converts PuppeteerRequest instances to
@@ -80,7 +85,7 @@ def __init__(
         service_url: str,
         include_headers: Union[bool, List[str]],
         include_meta: bool,
-        browser_manager: BrowserManager
+        browser_manager: BrowserManager,
     ):
         self.service_base_url = service_url
         self.include_headers = include_headers
@@ -105,23 +110,25 @@ def from_crawler(cls, crawler):
         if local_mode:
             browser_manager = LocalBrowserManager()
         else:
-            browser_manager = ServiceBrowserManager(service_url, include_meta, include_headers, crawler)
+            browser_manager = ServiceBrowserManager(
+                service_url, include_meta, include_headers, crawler
+            )
 
-        middleware = cls(crawler, service_url, include_headers, include_meta, browser_manager)
+        middleware = cls(
+            crawler, service_url, include_headers, include_meta, browser_manager
+        )
         crawler.signals.connect(
             middleware.browser_manager.close_used_contexts, signal=signals.spider_idle
         )
         return middleware
-
+
     def process_request(self, request, spider):
         return self.browser_manager.process_request(request)
-
+
     def process_response(self, request, response, spider):
         return self.browser_manager.process_response(self, request, response, spider)
 
-
-
 class PuppeteerRecaptchaDownloaderMiddleware:
     """
     This middleware is supposed to solve recaptcha on the page automatically.
@@ -313,4 +320,4 @@ def __is_closing(self, response, remove_request: bool = True) -> bool:
         close_page = main_request in self._page_closing
         if close_page and remove_request:
             self._page_closing.remove(main_request)
-        return close_page
\ No newline at end of file
+        return close_page
diff --git a/scrapypuppeteer/request.py b/scrapypuppeteer/request.py
index 8a69d7c..c00e41f 100644
--- a/scrapypuppeteer/request.py
+++ b/scrapypuppeteer/request.py
@@ -81,7 +81,10 @@ def __init__(
             navigation_options = kwargs.pop("navigation_options", None)
             wait_options = kwargs.pop("wait_options", None)
             action = GoTo(
-                url, navigation_options=navigation_options, wait_options=wait_options, har_recording = har_recording
+                url,
+                navigation_options=navigation_options,
+                wait_options=wait_options,
+                har_recording=har_recording,
             )
         elif isinstance(action, GoTo):
             url = action.url
diff --git a/scrapypuppeteer/response.py b/scrapypuppeteer/response.py
index 8a1a2d8..043d49c 100644
--- a/scrapypuppeteer/response.py
+++ b/scrapypuppeteer/response.py
@@ -107,8 +107,8 @@ def __init__(self, url, puppeteer_request, context_id, page_id, **kwargs):
         self.screenshot = kwargs.pop("screenshot")
         super().__init__(url, puppeteer_request, context_id, page_id, **kwargs)
 
-class PuppeteerHarResponse(PuppeteerResponse):
 
+class PuppeteerHarResponse(PuppeteerResponse):
     """
     Response for Har action.
 
     Har is available via self.har.
diff --git a/setup.py b/setup.py
index f0e383d..9b664ba 100644
--- a/setup.py
+++ b/setup.py
@@ -2,13 +2,15 @@
 from setuptools import setup, find_packages
 
+
 def read_long_description(file_path):
     with open(file_path, "r") as file:
         return file.read()
 
+
 setup(
     name="scrapy-puppeteer-client",
-    version="0.3.4",
+    version="0.3.5",
     description="A library to use Puppeteer-managed browser in Scrapy spiders",
     long_description=read_long_description("README.md"),
     long_description_content_type="text/markdown",
@@ -16,13 +18,8 @@ def read_long_description(file_path):
     author="MODIS @ ISP RAS",
     maintainer="Maksim Varlamov",
     maintainer_email="varlamov@ispras.ru",
-    packages=find_packages(),
-    install_requires=[
-        "scrapy>=2.6",
-        "pyppeteer",
-        "syncer",
-        "bs4"
-    ],
+    packages=find_packages(),
+    install_requires=["scrapy>=2.6", "pyppeteer", "syncer", "bs4"],
     python_requires=">=3.6",
     license="BSD",
     classifiers=[
diff --git a/tests/actions/test_actions.py b/tests/actions/test_actions.py
index 5dfa3f4..cd3f448 100644
--- a/tests/actions/test_actions.py
+++ b/tests/actions/test_actions.py
@@ -5,12 +5,14 @@
 
 
 def _gen_goto():
-    for url, nav_opt, wait_opt, har_recording in product(URLS, NAV_OPTS, WAIT_OPTS, HAR_RECORDING):
+    for url, nav_opt, wait_opt, har_recording in product(
+        URLS, NAV_OPTS, WAIT_OPTS, HAR_RECORDING
+    ):
         expected = {
             "url": url,
             "navigationOptions": nav_opt,
             "waitOptions": wait_opt,
-            "harRecording": har_recording
+            "harRecording": har_recording,
        }
         yield url, nav_opt, wait_opt, har_recording, expected
 
@@ -43,7 +45,9 @@ def _gen_scroll():
         yield selector, wait_opt, expected
 
 
-@mark.parametrize("url, navigation_options, wait_options, har_recording, expected", _gen_goto())
+@mark.parametrize(
+    "url, navigation_options, wait_options, har_recording, expected", _gen_goto()
+)
 def test_goto(url, navigation_options, wait_options, har_recording, expected):
     action = GoTo(url, navigation_options, wait_options, har_recording)
     assert action.payload() == expected
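
Usage sketch (illustrative, not part of the patch): the snippet below shows how the new `FillForm` action is constructed and the payload it produces for the `fill_form` endpoint, matching the docstring and `payload()` added in scrapypuppeteer/actions.py. The selectors here are hypothetical; `delay` defaults to 0 when omitted, and `submit_button` may be left as None to fill the form without submitting it.

    from scrapypuppeteer.actions import FillForm

    # Hypothetical selectors, for illustration only.
    input_mapping = {
        'input[name="login"]': {"value": "user", "delay": 50},
        'input[name="email"]': {"value": "user@example.com"},  # delay defaults to 0
    }
    action = FillForm(input_mapping, submit_button='button[type="submit"]')

    # Payload sent to the service (or consumed by the local browser manager):
    assert action.payload() == {
        "inputMapping": input_mapping,
        "submitButton": 'button[type="submit"]',
    }

    # In a spider callback the action is dispatched like any other, e.g.:
    #     yield response.follow(action, callback=self.after_submit, close_page=False)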